hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
2a12dd912fe3b4ddd8f8b7fe49af04027be5fa5b
1,908
py
Python
model_lstm/model_lstm/utils/preprocessors.py
ofbennett/sentiment-analysis-app
94362ae3e638daeec29e09065549fd4078af8a1a
[ "MIT" ]
2
2020-10-04T16:58:54.000Z
2021-10-04T13:51:10.000Z
model_lstm/model_lstm/utils/preprocessors.py
ofbennett/sentiment-analysis-app
94362ae3e638daeec29e09065549fd4078af8a1a
[ "MIT" ]
null
null
null
model_lstm/model_lstm/utils/preprocessors.py
ofbennett/sentiment-analysis-app
94362ae3e638daeec29e09065549fd4078af8a1a
[ "MIT" ]
null
null
null
from sklearn.base import BaseEstimator, TransformerMixin import re from nltk.stem import SnowballStemmer class TextCleaning(BaseEstimator, TransformerMixin): def __init__(self, regex): self.regex = regex def fit(self, X, y = None): return self def transform(self, X): X = X.copy() X["text"] = X["text"].apply(lambda x: re.sub(self.regex, " ", x).strip()) return X class TextStemming(BaseEstimator, TransformerMixin): def __init__(self, skip=False): self.skip = skip def fit(self, X, y = None): return self def transform(self, X): def _stem(x): stemmer = SnowballStemmer('english') final_tokens = [] for token in x.split(): token = stemmer.stem(token) final_tokens.append(token) x_stemmed = " ".join(final_tokens) return x_stemmed if self.skip: return X else: X = X.copy() X["text"] = X["text"].apply(_stem) return X class TextStopWordRemoval(BaseEstimator, TransformerMixin): def __init__(self, skip=False, stop_words=[]): self.skip = skip self.stop_words = stop_words def fit(self, X, y = None): return self def transform(self, X): def _removeStopWords(x): final_tokens = [] for token in x.split(): if token.lower() in self.stop_words: continue final_tokens.append(token) x_sw_removed = " ".join(final_tokens) return x_sw_removed if self.skip: return X else: X = X.copy() X["text"] = X["text"].apply(_removeStopWords) return X def convert_labels(label): convert_dict = {4: 1, 0: 0} new_label = convert_dict[label] return new_label
27.257143
81
0.55608
222
1,908
4.612613
0.27027
0.047852
0.09375
0.105469
0.509766
0.382813
0.382813
0.234375
0.213867
0.213867
0
0.00317
0.338574
1,908
70
82
27.257143
0.808241
0
0
0.508772
0
0
0.01781
0
0
0
0
0
0
1
0.210526
false
0
0.052632
0.052632
0.508772
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
2a270fb938d4444ba6c9ab7a54f96ad2510ba16f
523
py
Python
tests/a2/models.py
movermeyer/nicedjango
c38ada1e50efb5ef0874ef063074b621579c2954
[ "MIT" ]
1
2016-10-18T18:40:45.000Z
2016-10-18T18:40:45.000Z
tests/a2/models.py
movermeyer/nicedjango
c38ada1e50efb5ef0874ef063074b621579c2954
[ "MIT" ]
null
null
null
tests/a2/models.py
movermeyer/nicedjango
c38ada1e50efb5ef0874ef063074b621579c2954
[ "MIT" ]
1
2018-03-05T01:21:23.000Z
2018-03-05T01:21:23.000Z
""" Multiple inheritance sample 1 from docs. Note: not more than one review per book looks like a wrong example. """ from django.db import models class Model(models.Model): class Meta: app_label = 'a2' abstract = True class Article(Model): article_id = models.AutoField(primary_key=True) headline = models.CharField(max_length=10) class Book(Model): book_id = models.AutoField(primary_key=True) title = models.CharField(max_length=10) class BookReview(Book, Article): pass
18.678571
67
0.705545
72
523
5.027778
0.611111
0.044199
0.093923
0.132597
0.342541
0.342541
0
0
0
0
0
0.014354
0.200765
523
27
68
19.37037
0.851675
0.208413
0
0
0
0
0.004926
0
0
0
0
0
0
1
0
false
0.076923
0.076923
0
0.769231
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
2a392d234d6418ac71e824ad727b1bc6e9642f0f
418
py
Python
tests/test_default.py
kenneyhe-zingbox/ansible-letsencrypt
fe42cf118bb5dd60019b504ce0eee282cf1230c1
[ "MIT" ]
null
null
null
tests/test_default.py
kenneyhe-zingbox/ansible-letsencrypt
fe42cf118bb5dd60019b504ce0eee282cf1230c1
[ "MIT" ]
3
2017-12-21T18:12:10.000Z
2018-01-10T23:08:55.000Z
tests/test_default.py
kenneyhe-zingbox/ansible-letsencrypt
fe42cf118bb5dd60019b504ce0eee282cf1230c1
[ "MIT" ]
1
2021-06-17T14:28:41.000Z
2021-06-17T14:28:41.000Z
from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_letsencrypt_folder(File): # letsencrypt = host.file("/etc/letsencrypt") letsencrypt = File("/var/log/certbot") assert letsencrypt.is_directory assert letsencrypt.user == "root" assert letsencrypt.group == "root" assert letsencrypt.mode == 0o755
32.153846
79
0.748804
47
418
6.510638
0.638298
0.222222
0.137255
0
0
0
0
0
0
0
0
0.01108
0.136364
418
12
80
34.833333
0.836565
0.102871
0
0
0
0
0.144772
0.072386
0
0
0
0
0.5
1
0.125
false
0
0.125
0
0.25
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
2
2a43ee2f4b2f1e1da2bc07144f2d3edda65c20f2
902
py
Python
tests/tasks.py
microhuang/Flask-Celery-Tools
61f9050d123815dd4fb1e5494fdf9bf2d7cfbeae
[ "MIT" ]
4
2020-09-17T06:21:39.000Z
2021-10-13T14:18:37.000Z
tests/tasks.py
microhuang/Flask-Celery-Tools
61f9050d123815dd4fb1e5494fdf9bf2d7cfbeae
[ "MIT" ]
2
2020-10-20T16:50:24.000Z
2020-12-11T15:05:02.000Z
tests/tasks.py
microhuang/Flask-Celery-Tools
61f9050d123815dd4fb1e5494fdf9bf2d7cfbeae
[ "MIT" ]
2
2018-05-30T05:57:35.000Z
2018-06-11T12:04:18.000Z
"""Tasks for tests.""" from celery import shared_task from flask_celery import single_instance @shared_task(bind=True) @single_instance def add(self, x, y): """Celery task: add numbers.""" return x + y @shared_task(bind=True) @single_instance(include_args=True, lock_timeout=20) def mul(self, x, y): """Celery task: multiply numbers.""" return x * y @shared_task(bind=True) @single_instance() def sub(self, x, y): """Celery task: subtract numbers.""" return x - y @shared_task(bind=True, time_limit=70) @single_instance def add2(self, x, y): """Celery task: add numbers.""" return x + y @shared_task(bind=True, soft_time_limit=80) @single_instance def add3(self, x, y): """Celery task: add numbers.""" return x + y @shared_task(name='celery.ping') def ping(): # type: () -> str """Simple task that just returns 'pong'.""" return 'pong'
18.791667
52
0.660754
134
902
4.30597
0.335821
0.034662
0.121317
0.155979
0.540728
0.485269
0.485269
0.419411
0.362218
0.362218
0
0.01087
0.184035
902
47
53
19.191489
0.773098
0.233925
0
0.36
0
0
0.022901
0
0
0
0
0
0
1
0.24
false
0
0.08
0
0.56
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
2a43fdf09e6cc99387b75899e8ec3f22d6b55ff2
1,414
py
Python
setup.py
ikirudennis/alternativefacts
c34f119679bcc7b6ecbb227e17aa7b2022f04108
[ "MIT" ]
1
2017-01-24T02:27:26.000Z
2017-01-24T02:27:26.000Z
setup.py
ikirudennis/alternativefacts
c34f119679bcc7b6ecbb227e17aa7b2022f04108
[ "MIT" ]
null
null
null
setup.py
ikirudennis/alternativefacts
c34f119679bcc7b6ecbb227e17aa7b2022f04108
[ "MIT" ]
null
null
null
from setuptools import setup, find_packages import os def get_version(): basedir = os.path.dirname(__file__) with open(os.path.join(basedir, 'alternativefacts/version.py')) as f: variables = {} exec(f.read(), variables) return variables.get('VERSION') raise RuntimeError('No version info found.') setup( name='alternativefacts', version=get_version(), author='Dennis Burke', author_email='ikirudennis@gmail.com', description='An alternativefacts package for python', url='http://github.com/ikirudennis/alternativefacts', packages=find_packages(), classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], keywords='alternative facts', include_package_data=True, zip_safe=False, )
34.487805
73
0.635078
144
1,414
6.152778
0.541667
0.214447
0.282167
0.146727
0.060948
0
0
0
0
0
0
0.013812
0.231966
1,414
40
74
35.35
0.802026
0
0
0.054054
0
0
0.5138
0.03397
0
0
0
0
0
1
0.027027
false
0
0.054054
0
0.108108
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2a52742f1628620c11ff02307e95d32101a66cf5
565
py
Python
packages/reporting-server/parsers/test_doors_state_parser.py
baviera08/romi-dashboard
ac3a15014ad3c3bdac523a6550934a06653cfba1
[ "Apache-2.0" ]
23
2021-04-13T23:01:12.000Z
2022-03-21T02:15:24.000Z
packages/reporting-server/parsers/test_doors_state_parser.py
baviera08/romi-dashboard
ac3a15014ad3c3bdac523a6550934a06653cfba1
[ "Apache-2.0" ]
326
2021-03-10T17:32:17.000Z
2022-03-30T04:42:14.000Z
packages/reporting-server/parsers/test_doors_state_parser.py
baviera08/romi-dashboard
ac3a15014ad3c3bdac523a6550934a06653cfba1
[ "Apache-2.0" ]
13
2021-04-10T10:33:36.000Z
2022-02-22T15:39:58.000Z
import unittest from models.tortoise_models.door_state import DoorState from rest_server.__mocks__.parsed_data import mock_door_state from .doors_state_parser import doors_state_parser class TestCaseDoorsState(unittest.IsolatedAsyncioTestCase): def setUp(self): self.data = mock_door_state async def test_parse_and_get_values(self): parsed_values = await doors_state_parser(self.data) self.assertEqual(parsed_values["state"], DoorState.service.get_state_name(0)) self.assertEqual(parsed_values["name"], "hardware_door")
33.235294
85
0.787611
74
565
5.635135
0.459459
0.064748
0.115108
0.129496
0
0
0
0
0
0
0
0.002049
0.136283
565
16
86
35.3125
0.852459
0
0
0
0
0
0.038938
0
0
0
0
0
0.181818
1
0.090909
false
0
0.363636
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2a59b7f24394db2c7747d7b2f4070826d66fc30b
639
py
Python
Spread/standarddeviation.py
InnovAnon-Inc/sceadar
56ab87a5e813cbf5129f0bc8bb518c25331041dc
[ "Unlicense" ]
null
null
null
Spread/standarddeviation.py
InnovAnon-Inc/sceadar
56ab87a5e813cbf5129f0bc8bb518c25331041dc
[ "Unlicense" ]
null
null
null
Spread/standarddeviation.py
InnovAnon-Inc/sceadar
56ab87a5e813cbf5129f0bc8bb518c25331041dc
[ "Unlicense" ]
null
null
null
from Spread.stddevct import StdDevCT from Operations.differencepower import DifferencePower from Spread.generalizedvariance import GeneralizedVariance class StandardDeviation (GeneralizedVariance): def __init__ (self, length, min_value, max_value, arithmetic_mean): GeneralizedVariance.__init__ (self, length, min_value, max_value, StdDevCT, DifferencePower, arithmetic_mean) #def update (self, elem): #def finish (self): # GeneralizedVariance.finish (self) # self.value = sqrt (self.ct.value) #self.ct.finish () #self.value = sqrt (self.ct.value) #Spread.finish (self) #def validate (self): #def cupdates (self, elem):
35.5
68
0.771518
75
639
6.386667
0.333333
0.083507
0.058455
0.070981
0.22547
0.22547
0.125261
0
0
0
0
0
0.123631
639
18
69
35.5
0.855357
0.353678
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2a748d0f475ebffede2fa602544de658685cf4a0
206
py
Python
pyramm/config.py
nzta-captif/pyramm
ac895841011bf54cb79b55982240632148dc51b1
[ "MIT" ]
null
null
null
pyramm/config.py
nzta-captif/pyramm
ac895841011bf54cb79b55982240632148dc51b1
[ "MIT" ]
7
2020-10-22T16:45:40.000Z
2022-03-23T03:23:47.000Z
pyramm/config.py
nzta-captif/pyramm
ac895841011bf54cb79b55982240632148dc51b1
[ "MIT" ]
2
2020-10-22T16:18:45.000Z
2021-03-11T22:13:23.000Z
from pathlib import Path from configparser import ConfigParser CONFIG_FILE = Path.home().joinpath(".pyramm.ini") def config(): parser = ConfigParser() parser.read(CONFIG_FILE) return parser
17.166667
49
0.73301
25
206
5.96
0.6
0.134228
0
0
0
0
0
0
0
0
0
0
0.169903
206
11
50
18.727273
0.871345
0
0
0
0
0
0.053398
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
2a8770b6658f0fad86fcfb94fd38177506430743
263
py
Python
anadroid/_version.py
greensoftwarelab/PyAnaDroid
c37a85a2c8e0206895f2e96d28c2baa045f6a211
[ "MIT" ]
null
null
null
anadroid/_version.py
greensoftwarelab/PyAnaDroid
c37a85a2c8e0206895f2e96d28c2baa045f6a211
[ "MIT" ]
2
2022-03-31T17:33:48.000Z
2022-03-31T17:38:22.000Z
anadroid/_version.py
greensoftwarelab/PyAnaDroid
c37a85a2c8e0206895f2e96d28c2baa045f6a211
[ "MIT" ]
null
null
null
""" Provides anadroid version information. """ # This file is auto-generated! Do not edit! # Use `python -m incremental.update anadroid` to change this file. from incremental import Version __version__ = Version("anadroid", 0, 5, 26) __all__ = ["__version__"]
21.916667
66
0.73384
34
263
5.323529
0.735294
0.088398
0
0
0
0
0
0
0
0
0
0.017937
0.152091
263
11
67
23.909091
0.793722
0.555133
0
0
1
0
0.175926
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2a87da7190ca02342b1d1a7ce3fd3cb4a1a5a035
1,835
py
Python
django_react_drf_same_origin/backend/api/views.py
devidw/django-spa-cookie-auth
91d514d3f4dff3c88e618c3307d7d529804f21bb
[ "MIT" ]
66
2020-12-18T13:06:27.000Z
2022-03-12T23:19:22.000Z
django_react_drf_same_origin/backend/api/views.py
mostafijur-rahman299/django-spa-cookie-auth
1ee1495613fe78e5951f9484613aa1299e66ba94
[ "MIT" ]
1
2021-02-01T13:37:45.000Z
2021-02-06T21:27:13.000Z
django_react_drf_same_origin/backend/api/views.py
mostafijur-rahman299/django-spa-cookie-auth
1ee1495613fe78e5951f9484613aa1299e66ba94
[ "MIT" ]
23
2020-11-24T13:43:12.000Z
2022-03-17T10:42:27.000Z
import json from django.contrib.auth import authenticate, login, logout from django.http import JsonResponse from django.middleware.csrf import get_token from django.views.decorators.http import require_POST from rest_framework.authentication import SessionAuthentication, BasicAuthentication from rest_framework.permissions import IsAuthenticated from rest_framework.views import APIView def get_csrf(request): response = JsonResponse({'detail': 'CSRF cookie set'}) response['X-CSRFToken'] = get_token(request) return response @require_POST def login_view(request): data = json.loads(request.body) username = data.get('username') password = data.get('password') if username is None or password is None: return JsonResponse({'detail': 'Please provide username and password.'}, status=400) user = authenticate(username=username, password=password) if user is None: return JsonResponse({'detail': 'Invalid credentials.'}, status=400) login(request, user) return JsonResponse({'detail': 'Successfully logged in.'}) def logout_view(request): if not request.user.is_authenticated: return JsonResponse({'detail': 'You\'re not logged in.'}, status=400) logout(request) return JsonResponse({'detail': 'Successfully logged out.'}) class SessionView(APIView): authentication_classes = [SessionAuthentication, BasicAuthentication] permission_classes = [IsAuthenticated] @staticmethod def get(request, format=None): return JsonResponse({'isAuthenticated': True}) class WhoAmIView(APIView): authentication_classes = [SessionAuthentication, BasicAuthentication] permission_classes = [IsAuthenticated] @staticmethod def get(request, format=None): return JsonResponse({'username': request.user.username})
30.583333
92
0.743324
200
1,835
6.745
0.35
0.093403
0.088955
0.035582
0.33358
0.226835
0.226835
0.226835
0.226835
0.226835
0
0.005825
0.158038
1,835
59
93
31.101695
0.867314
0
0
0.195122
0
0
0.113896
0
0
0
0
0
0
1
0.121951
false
0.097561
0.195122
0.04878
0.658537
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
2a8b8feb872585fe2dca7f2c17be84fc1e0231f6
276
py
Python
lecciones/05/funciones.py
ImAlexisSaez/curso-python-desde-0
c4a84dae0804adefe4ee6024b411d8ed288da759
[ "MIT" ]
2
2020-08-31T02:17:36.000Z
2022-01-29T15:25:27.000Z
lecciones/05/funciones.py
ImAlexisSaez/curso-python-desde-0
c4a84dae0804adefe4ee6024b411d8ed288da759
[ "MIT" ]
null
null
null
lecciones/05/funciones.py
ImAlexisSaez/curso-python-desde-0
c4a84dae0804adefe4ee6024b411d8ed288da759
[ "MIT" ]
null
null
null
def mensaje(): # Declaración de la función print("Estamos aprendiendo Python.") print("Estamos aprendiendo instrucciones básicas.") print("Poco a poco iremos avanzando.") mensaje() # Llamada a la función print("Ejecutando código fuera de función") mensaje()
23
55
0.721014
33
276
6.030303
0.606061
0.090452
0.140704
0
0
0
0
0
0
0
0
0
0.177536
276
11
56
25.090909
0.876652
0.166667
0
0.285714
0
0
0.581498
0
0
0
0
0
0
1
0.142857
true
0
0
0
0.142857
0.571429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
2
aa541043061e3a6e568f68931746230c76adfc8e
883
py
Python
api/views/quiz.py
evan-rusin/fly-project
8afc697f2a9fb63317cca2763ed0ed76f9ef2ead
[ "BSD-2-Clause" ]
15
2016-11-17T08:34:52.000Z
2021-11-12T07:08:58.000Z
api/views/quiz.py
evan-rusin/fly-project
8afc697f2a9fb63317cca2763ed0ed76f9ef2ead
[ "BSD-2-Clause" ]
137
2015-12-07T19:48:03.000Z
2016-10-11T20:19:33.000Z
api/views/quiz.py
evan-rusin/fly-project
8afc697f2a9fb63317cca2763ed0ed76f9ef2ead
[ "BSD-2-Clause" ]
11
2016-10-21T22:43:54.000Z
2021-08-28T14:41:02.000Z
import django_filters from django.contrib.auth.models import User, Group from rest_framework import viewsets, mixins from rest_framework.response import Response from rest_framework.authentication import TokenAuthentication from rest_framework import filters from api.pagination import LargeResultsSetPagination from api.permissions import IsAdminUserOrReadOnly from api.serializers import QuizSerializer from api.models import Quiz class QuizFilter(django_filters.FilterSet): class Meta: model = Quiz fields = ['id', 'created', 'course', 'title', 'description',] class QuizViewSet(viewsets.ModelViewSet): queryset = Quiz.objects.all() serializer_class = QuizSerializer pagination_class = LargeResultsSetPagination authentication_classes = (TokenAuthentication,) permission_classes = (IsAdminUserOrReadOnly,) filter_class = QuizFilter
33.961538
69
0.799547
92
883
7.554348
0.478261
0.046043
0.097842
0.066187
0
0
0
0
0
0
0
0
0.138165
883
25
70
35.32
0.913272
0
0
0
0
0
0.035108
0
0
0
0
0
0
1
0
false
0
0.47619
0
0.904762
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
aa6316bcb4faf828d06c87393e40f312b542878e
226
py
Python
testing/example.py
gndctrl2mjrtm/dkeras
97bcf176dbc96bd52cc6e19283159a6e73f0a4dc
[ "MIT" ]
195
2019-09-05T08:41:53.000Z
2022-03-09T14:04:07.000Z
testing/example.py
gndctrl2mjrtm/dkeras
97bcf176dbc96bd52cc6e19283159a6e73f0a4dc
[ "MIT" ]
4
2019-11-27T00:45:31.000Z
2020-03-11T14:12:44.000Z
testing/example.py
gndctrl2mjrtm/dkeras
97bcf176dbc96bd52cc6e19283159a6e73f0a4dc
[ "MIT" ]
14
2019-11-27T17:32:36.000Z
2022-02-12T03:55:15.000Z
# Ideas """ server = dkeras.DataServer() model1.link(model3) model1.postprocess = lambda z: np.float16(z) server = model1 + model2 + model3 server.add(camera1, dest=('m1', 'm2')) server.add_address('192.168.1.42') """
12.555556
44
0.668142
31
226
4.83871
0.741935
0.12
0
0
0
0
0
0
0
0
0
0.103093
0.141593
226
17
45
13.294118
0.670103
0.942478
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
aa6700aa443cee4ea5ff2c63f4018e63ec72bd19
377
py
Python
tests/test_operations.py
LadaOndris/text_classification
3f9e60f0a40f2817f081adf53cda696a8b0f51b2
[ "MIT" ]
null
null
null
tests/test_operations.py
LadaOndris/text_classification
3f9e60f0a40f2817f081adf53cda696a8b0f51b2
[ "MIT" ]
null
null
null
tests/test_operations.py
LadaOndris/text_classification
3f9e60f0a40f2817f081adf53cda696a8b0f51b2
[ "MIT" ]
null
null
null
from unittest import TestCase import torch from src.transformer.operations import softmax class Test(TestCase): def test_softmax(self): data = torch.ones(size=[16, 8, 4]) expected_output = torch.full_like(data, fill_value=0.25, dtype=torch.float32) output = softmax(data, dim=-1) torch.testing.assert_allclose(output, expected_output)
23.5625
85
0.708223
51
377
5.117647
0.666667
0.10728
0
0
0
0
0
0
0
0
0
0.032787
0.190981
377
15
86
25.133333
0.822951
0
0
0
0
0
0
0
0
0
0
0
0.111111
1
0.111111
false
0
0.333333
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
aa72a99c59272a2127c464ed1720defc3e659e92
242
py
Python
setup.py
tulcod/PyBRML
55357ef60e394d7146af037dbfcab97f7b29d44b
[ "CNRI-Python" ]
1
2015-03-20T04:05:50.000Z
2015-03-20T04:05:50.000Z
setup.py
tulcod/PyBRML
55357ef60e394d7146af037dbfcab97f7b29d44b
[ "CNRI-Python" ]
null
null
null
setup.py
tulcod/PyBRML
55357ef60e394d7146af037dbfcab97f7b29d44b
[ "CNRI-Python" ]
null
null
null
#!/usr/bin/env python from distutils.core import setup setup(name='brml', version='0', description='Python version of Bayesian Reasoning and Machine Learning library', packages=['brml'], install_requires=['numpy'] )
22
86
0.681818
29
242
5.655172
0.862069
0
0
0
0
0
0
0
0
0
0
0.005102
0.190083
242
10
87
24.2
0.831633
0.082645
0
0
0
0
0.357466
0
0
0
0
0
0
1
0
true
0
0.142857
0
0.142857
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
aa7980500283639d4b4935edd9b6a3a29a849d13
1,847
py
Python
spug_api/apps/schedule/models.py
zysam/spug
77f3427e2b817dad3a4a37f8e45eba0dd667a858
[ "MIT" ]
3
2019-06-07T03:26:33.000Z
2021-12-28T01:51:30.000Z
spug_api/apps/schedule/models.py
markexin/spug
3dce00995ebb4196a0c75bef0f6e50612a98e610
[ "MIT" ]
10
2021-03-19T10:59:00.000Z
2022-02-27T05:10:31.000Z
spug_api/apps/schedule/models.py
lgq9220/spug
83098fbd1d5780f827a4aca41eb87321f3d2ab21
[ "MIT" ]
1
2020-04-22T14:37:44.000Z
2020-04-22T14:37:44.000Z
# Copyright: (c) OpenSpug Organization. https://github.com/openspug/spug # Copyright: (c) <spug.dev@gmail.com> # Released under the MIT License. from django.db import models from libs import ModelMixin, human_datetime from apps.account.models import User import json class Task(models.Model, ModelMixin): TRIGGERS = ( ('date', '一次性'), ('calendarinterval', '日历间隔'), ('cron', 'UNIX cron'), ('interval', '普通间隔') ) STATUS = ( (0, '成功'), (1, '异常'), (2, '失败'), ) name = models.CharField(max_length=50) type = models.CharField(max_length=50) command = models.TextField() targets = models.TextField() trigger = models.CharField(max_length=20, choices=TRIGGERS) trigger_args = models.CharField(max_length=255) is_active = models.BooleanField(default=False) desc = models.CharField(max_length=255, null=True) latest_status = models.SmallIntegerField(choices=STATUS, null=True) latest_run_time = models.CharField(max_length=20, null=True) latest_output = models.TextField(null=True) created_at = models.CharField(max_length=20, default=human_datetime) created_by = models.ForeignKey(User, models.PROTECT, related_name='+') updated_at = models.CharField(max_length=20, null=True) updated_by = models.ForeignKey(User, models.PROTECT, related_name='+', null=True) def to_dict(self, *args, **kwargs): tmp = super().to_dict(*args, **kwargs) tmp['targets'] = json.loads(self.targets) tmp['latest_status_alias'] = self.get_latest_status_display() if self.trigger == 'cron': tmp['trigger_args'] = json.loads(self.trigger_args) return tmp def __repr__(self): return '<Task %r>' % self.name class Meta: db_table = 'tasks' ordering = ('-id',)
34.849057
85
0.657282
227
1,847
5.189427
0.444934
0.101868
0.122241
0.162988
0.273345
0.16129
0.135823
0.078098
0
0
0
0.014286
0.204115
1,847
52
86
35.519231
0.787075
0.074716
0
0
0
0
0.069795
0
0
0
0
0
0
1
0.046512
false
0
0.093023
0.023256
0.627907
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
aa7c95f1c516c6ad68c0ad6ba4317bc5689246fb
2,741
py
Python
netbox/ipam/api/nested_serializers.py
BrnoPCmaniak/netbox
7b517abdb68a6324950dfd0375861163c7bfff00
[ "Apache-2.0" ]
6
2017-12-01T05:13:39.000Z
2020-01-23T13:04:43.000Z
netbox/ipam/api/nested_serializers.py
BrnoPCmaniak/netbox
7b517abdb68a6324950dfd0375861163c7bfff00
[ "Apache-2.0" ]
8
2021-04-16T01:38:00.000Z
2022-01-04T21:27:27.000Z
netbox/ipam/api/nested_serializers.py
BrnoPCmaniak/netbox
7b517abdb68a6324950dfd0375861163c7bfff00
[ "Apache-2.0" ]
3
2017-11-18T01:28:22.000Z
2018-05-17T14:04:43.000Z
from rest_framework import serializers

from ipam.models import Aggregate, IPAddress, Prefix, RIR, Role, VLAN, VLANGroup, VRF
from utilities.api import WritableNestedSerializer

# Public API of this module: one nested serializer per IPAM model.
__all__ = [
    'NestedAggregateSerializer',
    'NestedIPAddressSerializer',
    'NestedPrefixSerializer',
    'NestedRIRSerializer',
    'NestedRoleSerializer',
    'NestedVLANGroupSerializer',
    'NestedVLANSerializer',
    'NestedVRFSerializer',
]


#
# VRFs
#

class NestedVRFSerializer(WritableNestedSerializer):
    """Compact VRF representation with its related prefix count."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vrf-detail')
    prefix_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = VRF
        fields = ['id', 'url', 'name', 'rd', 'prefix_count']


#
# RIRs/aggregates
#

class NestedRIRSerializer(WritableNestedSerializer):
    """Compact RIR representation with its related aggregate count."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:rir-detail')
    aggregate_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = RIR
        fields = ['id', 'url', 'name', 'slug', 'aggregate_count']


class NestedAggregateSerializer(WritableNestedSerializer):
    """Compact aggregate representation."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:aggregate-detail')

    class Meta:
        model = Aggregate
        fields = ['id', 'url', 'family', 'prefix']


#
# VLANs
#

class NestedRoleSerializer(WritableNestedSerializer):
    """Compact role representation with related prefix/VLAN counts."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:role-detail')
    prefix_count = serializers.IntegerField(read_only=True)
    vlan_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = Role
        fields = ['id', 'url', 'name', 'slug', 'prefix_count', 'vlan_count']


class NestedVLANGroupSerializer(WritableNestedSerializer):
    """Compact VLAN group representation with its related VLAN count."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlangroup-detail')
    vlan_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = VLANGroup
        fields = ['id', 'url', 'name', 'slug', 'vlan_count']


class NestedVLANSerializer(WritableNestedSerializer):
    """Compact VLAN representation."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:vlan-detail')

    class Meta:
        model = VLAN
        fields = ['id', 'url', 'vid', 'name', 'display_name']


#
# Prefixes
#

class NestedPrefixSerializer(WritableNestedSerializer):
    """Compact prefix representation."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:prefix-detail')

    class Meta:
        model = Prefix
        fields = ['id', 'url', 'family', 'prefix']


#
# IP addresses
#

class NestedIPAddressSerializer(WritableNestedSerializer):
    """Compact IP address representation."""
    url = serializers.HyperlinkedIdentityField(view_name='ipam-api:ipaddress-detail')

    class Meta:
        model = IPAddress
        fields = ['id', 'url', 'family', 'address']
25.858491
85
0.706676
253
2,741
7.541502
0.217391
0.113208
0.159329
0.259958
0.527778
0.473795
0.473795
0.473795
0.1174
0.060797
0
0
0.174024
2,741
105
86
26.104762
0.842756
0.017512
0
0.241379
0
0
0.199851
0.072469
0
0
0
0
0
1
0
false
0
0.051724
0
0.551724
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
aa7d4c1de367f2e8733b853fe165cd1bd3dbe7de
25,764
py
Python
test/test_androiddriver/test_adb.py
beijixing0202/QT4a
03ec513d5820afc37c32158c0d4a723b1e32876a
[ "BSD-3-Clause" ]
null
null
null
test/test_androiddriver/test_adb.py
beijixing0202/QT4a
03ec513d5820afc37c32158c0d4a723b1e32876a
[ "BSD-3-Clause" ]
null
null
null
test/test_androiddriver/test_adb.py
beijixing0202/QT4a
03ec513d5820afc37c32158c0d4a723b1e32876a
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: UTF-8 -*- # # Tencent is pleased to support the open source community by making QTA available. # Copyright (C) 2016THL A29 Limited, a Tencent company. All rights reserved. # Licensed under the BSD 3-Clause License (the "License"); you may not use this # file except in compliance with the License. You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS # OF ANY KIND, either express or implied. See the License for the specific language # governing permissions and limitations under the License. # '''adb模块单元测试 ''' try: from unittest import mock except: import mock import shlex import unittest from qt4a.androiddriver.adb import ADB, LocalADBBackend def mock_run_shell_cmd(cmd_line, root=False, **kwds): args = shlex.split(cmd_line) if args[0] == 'ls': path = args[-1] if '-l' in args: if path == '/data/data': if not root: return '''opendir failed, Permission denied''' else: return '''drwxr-x--x u0_a16 u0_a16 2017-06-20 09:44 com.android.apps.tag drwxr-x--x u0_a1 u0_a1 2017-06-20 09:44 com.android.backupconfirm drwxr-x--x bluetooth bluetooth 2017-06-20 11:07 com.android.bluetooth drwxr-x--x u0_a22 u0_a22 2017-07-21 08:11 com.android.browser drwxr-x--x u0_a25 u0_a25 2017-06-20 09:44 com.android.calculator2 drwxr-x--x u0_a26 u0_a26 2017-06-20 09:45 com.android.calendar drwxr-x--x u0_a27 u0_a27 2017-06-20 09:45 com.android.camera2 drwxr-x--x u0_a28 u0_a28 2017-06-20 09:44 com.android.captiveportallogin drwxr-x--x u0_a4 u0_a4 2017-06-20 09:45 com.android.cellbroadcastreceiver drwxr-x--x u0_a29 u0_a29 2017-06-20 09:44 com.android.certinstaller drwxr-x--x u0_a5 u0_a5 2017-06-20 09:44 com.android.contacts drwxr-x--x u0_a6 u0_a6 2017-06-20 10:02 com.android.defcontainer drwxr-x--x u0_a30 u0_a30 2017-06-20 09:45 com.android.deskclock drwxr-x--x system system 
2017-06-20 09:44 com.android.development drwxr-x--x u0_a7 u0_a7 2017-06-20 09:45 com.android.dialer drwxr-x--x u0_a31 u0_a31 2017-06-20 10:50 com.android.documentsui drwxr-x--x u0_a21 u0_a21 2017-06-20 09:44 com.android.dreams.basic drwxr-x--x u0_a48 u0_a48 2017-06-20 09:44 com.android.dreams.phototable drwxr-x--x u0_a33 u0_a33 2017-06-20 09:45 com.android.email drwxr-x--x u0_a34 u0_a34 2017-06-20 09:45 com.android.exchange drwxr-x--x u0_a9 u0_a9 2017-06-20 10:50 com.android.externalstorage drwxr-x--x u0_a35 u0_a35 2017-06-20 09:44 com.android.galaxy4 drwxr-x--x u0_a36 u0_a36 2017-06-20 09:45 com.android.gallery3d drwxr-x--x u0_a37 u0_a37 2017-06-20 09:44 com.android.htmlviewer drwxr-x--x u0_a7 u0_a7 2017-06-20 09:45 com.android.incallui drwxr-x--x system system 2017-06-20 09:44 com.android.inputdevices drwxr-x--x u0_a39 u0_a39 2017-06-22 08:06 com.android.inputmethod.latin drwxr-x--x system system 2017-06-20 10:50 com.android.keychain drwxr-x--x system system 2017-06-20 09:44 com.android.location.fused drwxr-x--x u0_a10 u0_a10 2017-06-20 09:45 com.android.managedprovisioning drwxr-x--x u0_a11 u0_a11 2017-06-20 09:45 com.android.mms drwxr-x--x radio radio 2017-06-20 09:44 com.android.mms.service drwxr-x--x u0_a55 u0_a55 2017-06-20 09:44 com.android.musicvis drwxr-x--x nfc nfc 2017-06-20 09:45 com.android.nfc drwxr-x--x u0_a43 u0_a43 2017-06-20 09:44 com.android.noisefield drwxr-x--x u0_a12 u0_a12 2017-06-20 09:45 com.android.onetimeinitializer drwxr-x--x u0_a45 u0_a45 2017-06-20 09:45 com.android.packageinstaller drwxr-x--x u0_a44 u0_a44 2017-06-20 09:44 com.android.pacprocessor drwxr-x--x u0_a46 u0_a46 2017-06-20 09:44 com.android.phasebeam drwxr-x--x radio radio 2017-06-20 09:45 com.android.phone drwxr-x--x u0_a50 u0_a50 2017-06-20 09:45 com.android.printspooler drwxr-x--x u0_a3 u0_a3 2017-06-20 09:45 com.android.providers.calendar drwxr-x--x u0_a5 u0_a5 2017-06-20 09:45 com.android.providers.contacts drwxr-x--x u0_a8 u0_a8 2017-06-20 12:16 
com.android.providers.downloads drwxr-x--x u0_a8 u0_a8 2017-06-20 09:44 com.android.providers.downloads.ui drwxr-x--x u0_a8 u0_a8 2017-06-20 09:45 com.android.providers.media drwxr-x--x system system 2017-06-20 09:44 com.android.providers.settings drwxr-x--x radio radio 2017-06-20 09:49 com.android.providers.telephony drwxr-x--x u0_a5 u0_a5 2017-06-20 09:49 com.android.providers.userdictionary drwxr-x--x u0_a52 u0_a52 2017-06-20 09:46 com.android.provision drwxr-x--x u0_a13 u0_a13 2017-06-20 09:44 com.android.proxyhandler drwxr-x--x system system 2017-06-20 09:45 com.android.server.telecom drwxr-x--x system system 2017-07-13 16:36 com.android.settings drwxr-x--x u0_a14 u0_a14 2017-06-20 09:44 com.android.sharedstoragebackup drwxr-x--x shell shell 2017-06-20 09:44 com.android.shell drwxr-x--x u0_a56 u0_a56 2017-06-20 09:45 com.android.smspush drwxr-x--x u0_a53 u0_a53 2017-06-20 09:44 com.android.soundrecorder drwxr-x--x radio radio 2017-06-20 09:44 com.android.stk drwxr-x--x u0_a15 u0_a15 2017-07-04 06:49 com.android.systemui drwxr-x--x u0_a54 u0_a54 2017-06-20 09:44 com.android.terminal drwxr-x--x u0_a19 u0_a19 2017-06-20 09:44 com.android.vpndialogs drwxr-x--x u0_a40 u0_a40 2017-06-20 09:44 com.android.wallpaper drwxr-x--x u0_a38 u0_a38 2017-06-20 09:44 com.android.wallpaper.holospiral drwxr-x--x u0_a41 u0_a41 2017-06-20 09:44 com.android.wallpaper.livepicker drwxr-x--x u0_a20 u0_a20 2017-06-20 09:44 com.android.wallpapercropper drwxr-x--x u0_a57 u0_a57 2017-06-20 09:44 com.android.webview drwxr-x--x u0_a32 u0_a32 2017-06-20 09:45 com.cyanogenmod.eleven drwxr-x--x u0_a23 u0_a23 2017-06-20 09:44 com.cyanogenmod.filemanager drwxr-x--x u0_a42 u0_a42 2017-06-20 09:45 com.cyanogenmod.lockclock drwxr-x--x radio radio 2017-06-20 09:44 com.cyanogenmod.samsungservicemode drwxr-x--x system system 2017-06-20 09:45 com.cyanogenmod.setupwizard drwxr-x--x u0_a18 u0_a18 2017-06-20 09:46 com.cyanogenmod.trebuchet drwxr-x--x u0_a2 u0_a2 2017-06-20 09:45 com.cyanogenmod.updater 
drwxr-x--x u0_a24 u0_a24 2017-06-20 09:44 com.cyanogenmod.wallpapers drwxr-x--x system system 2017-06-20 09:44 com.dsi.ant.server drwxr-x--x u0_a49 u0_a49 2017-06-20 09:45 com.svox.pico drwxr-x--x u0_a595 u0_a595 2018-10-17 12:59 com.tencent.liveassistant drwxr-x--x u0_a1309 u0_a1309 2018-10-17 16:32 com.tencent.mobileqq drwxr-x--x u0_a1308 u0_a1308 2018-10-17 17:32 com.tencent.nijigen drwxr-x--x u0_a234 u0_a234 2017-08-23 21:14 com.test.androidspy drwxr-x--x system system 2017-06-20 09:44 cyanogenmod.platform drwxr-x--x u0_a58 u0_a58 2017-06-20 10:01 eu.chainfire.supersu drwxr-x--x u0_a0 u0_a0 2017-06-20 09:45 org.cyanogenmod.audiofx drwxr-x--x shell shell 2017-06-20 09:45 org.cyanogenmod.bugreport drwxr-x--x system system 2017-06-20 09:45 org.cyanogenmod.cmsettings drwxr-x--x u0_a51 u0_a51 2017-06-20 09:45 org.cyanogenmod.profiles drwxr-x--x u0_a17 u0_a17 2018-10-17 17:03 org.cyanogenmod.theme.chooser drwxr-x--x u0_a17 u0_a17 2017-06-20 09:45 org.cyanogenmod.themes.provider drwxr-x--x u0_a47 u0_a47 2017-06-20 09:44 org.cyanogenmod.wallpapers.photophase''' elif path == '/data/local/tmp/1.txt': return '-rw-rw-rw- root root 4096000 2018-03-23 06:12 1.txt\n' else: raise NotImplementedError(path) elif args[0] == 'getprop': if args[1] == 'ro.build.version.sdk': return '21' elif args[1] == 'ro.build.version.release': return '5.0.2' elif args[1] == 'ro.product.cpu.abi': return 'armeabi-v7a' elif args[1] == 'ro.product.model': return 'MI 4C' elif args[1] == 'ro.product.brand': return 'Xiaomi' elif args[1] == 'ro.sf.lcd_density': return '320' elif args[1] == 'ro.kernel.android.qemud': return '0' elif args[1] == 'ro.secure': return '1' elif args[1] == 'ro.debuggable': return '0' else: raise NotImplementedError('Not supported property: %s' % args[1]) elif args[0] == 'id': return 'uid=2000(shell) gid=2000(shell) groups=1004(input),1007(log),1011(adb),1015(sdcard_rw),1028(sdcard_r),3001(net_bt_admin),3002(net_bt),3003(inet),3006(net_bw_stats) context=u:r:shell:s0' elif args[0] 
== 'pm': if args[1] == 'path': if len(args) >= 3: return 'package:/data/app/%s-1/base.apk\n' % args[2] else: return 'Error: no package specified\n' elif args[0] == 'run-as': if args[2] == 'id': return 'uid=10059(u0_a59) gid=10059(u0_a59) groups=1003(graphics),1004(input),1007(log),1011(adb),1015(sdcard_rw),1028(sdcard_r),3001(net_bt_admin),3002(net_bt),3003(inet),3006(net_bw_stats)\n' else: raise NotImplementedError(args[2]) elif args[0] == 'dumpsys': if args[1] == 'iphonesubinfo': return ''' Phone Subscriber Info: Phone Type = CDMA Device ID = 99000567737777 ''' elif args[0] == 'getenforce': return 'Disabled' elif args[0] == 'ps': return ''' USER PID PPID VSIZE RSS WCHAN PC NAME root 1 0 8908 736 ffffffff 00000000 S /init root 2 0 0 0 ffffffff 00000000 S kthreadd root 3 2 0 0 ffffffff 00000000 S ksoftirqd/0 root 6 2 0 0 ffffffff 00000000 S migration/0 root 7 2 0 0 ffffffff 00000000 S watchdog/0 root 20 2 0 0 ffffffff 00000000 S khelper root 480 2 0 0 ffffffff 00000000 S sync_supers root 482 2 0 0 ffffffff 00000000 S bdi-default root 484 2 0 0 ffffffff 00000000 S kblockd root 499 2 0 0 ffffffff 00000000 S spi2 root 502 2 0 0 ffffffff 00000000 S spi3 root 510 2 0 0 ffffffff 00000000 S khubd root 528 2 0 0 ffffffff 00000000 S irq/524-max7780 root 555 2 0 0 ffffffff 00000000 S irq/519-sec-pmi root 640 2 0 0 ffffffff 00000000 S cfg80211 root 743 2 0 0 ffffffff 00000000 S khungtaskd root 744 2 0 0 ffffffff 00000000 S kswapd0 root 745 2 0 0 ffffffff 00000000 S ksmd root 793 2 0 0 ffffffff 00000000 S fsnotify_mark root 815 2 0 0 ffffffff 00000000 S ecryptfs-kthrea root 822 2 0 0 ffffffff 00000000 S crypto root 926 2 0 0 ffffffff 00000000 S pvr_timer root 1086 2 0 0 ffffffff 00000000 S irq/533-arizona root 1110 2 0 0 ffffffff 00000000 S drd_switch root 1147 2 0 0 ffffffff 00000000 S f_mtp root 1153 2 0 0 ffffffff 00000000 S file-storage root 1170 2 0 0 ffffffff 00000000 S f54_status_work root 1171 2 0 0 ffffffff 00000000 S irq/526-synapti root 1208 2 0 0 ffffffff 00000000 
S gsc0_irq_wq_nam root 1212 2 0 0 ffffffff 00000000 S gsc1_irq_wq_nam root 1216 2 0 0 ffffffff 00000000 S gsc2_irq_wq_nam root 1220 2 0 0 ffffffff 00000000 S gsc3_irq_wq_nam root 1227 2 0 0 ffffffff 00000000 S kfimg2dd root 1230 2 0 0 ffffffff 00000000 S irq/128-s5p-mfc root 1235 2 0 0 ffffffff 00000000 S s5p_mfc/watchdo root 1236 2 0 0 ffffffff 00000000 S s5p_mfc/sched root 1241 2 0 0 ffffffff 00000000 S khdcpd root 1244 2 0 0 ffffffff 00000000 S hdmi-mixer root 1268 2 0 0 ffffffff 00000000 S sii8240-cmdwq root 1269 2 0 0 ffffffff 00000000 S sii8240-aviwq root 1270 2 0 0 ffffffff 00000000 S irq/559-sii8240 root 1292 2 0 0 ffffffff 00000000 S dw-mci-card root 1294 2 0 0 ffffffff 00000000 S dw-mci-card root 1296 2 0 0 ffffffff 00000000 S dw-mci-card root 1298 2 0 0 ffffffff 00000000 S irq/532-dw_mmc. root 1373 2 0 0 ffffffff 00000000 S binder root 1386 2 0 0 ffffffff 00000000 S irq/525-fuelgau root 1391 2 0 0 ffffffff 00000000 S mmcqd/0 root 1406 2 0 0 ffffffff 00000000 S max77803-charge root 1417 2 0 0 ffffffff 00000000 S irq/531-wpc-int root 1419 2 0 0 ffffffff 00000000 S hap_work root 1443 2 0 0 ffffffff 00000000 S ssp_debug_wq root 1444 2 0 0 ffffffff 00000000 S irq/539-SSP_Int root 1458 2 0 0 ffffffff 00000000 S ssp_sensorhub_t root 1460 2 0 0 ffffffff 00000000 S mc_fastcall root 1514 2 0 0 ffffffff 00000000 S kswitcher_0 root 1515 2 0 0 ffffffff 00000000 S kswitcher_3 root 1516 2 0 0 ffffffff 00000000 S kswitcher_2 root 1517 2 0 0 ffffffff 00000000 S kswitcher_1 root 1519 2 0 0 ffffffff 00000000 S usb_tx_wq root 1520 2 0 0 ffffffff 00000000 S usb_rx_wq root 1522 2 0 0 ffffffff 00000000 S linkpmd root 1557 2 0 0 ffffffff 00000000 S s3c-fb root 1579 2 0 0 ffffffff 00000000 S s3c-fb-vsync root 1583 2 0 0 ffffffff 00000000 S vsync_workqueue root 1584 2 0 0 ffffffff 00000000 S deferwq root 1587 2 0 0 ffffffff 00000000 S irq/513-flip_co root 1593 2 0 0 ffffffff 00000000 S irq/547-sec_tou root 1595 2 0 0 ffffffff 00000000 S ondemand_wq root 1602 2 0 0 ffffffff 
00000000 S devfreq_wq root 1607 2 0 0 ffffffff 00000000 S sec-battery root 1620 2 0 0 ffffffff 00000000 S barcode_init root 1635 2 0 0 ffffffff 00000000 S wl_event_handle root 1642 2 0 0 ffffffff 00000000 S dhd_watchdog_th root 1643 2 0 0 ffffffff 00000000 S dhd_dpc root 1644 2 0 0 ffffffff 00000000 S dhd_rxf root 1650 1 8904 516 ffffffff 00000000 S /sbin/ueventd u0_a17 1903 2598 1574760 86756 ffffffff 00000000 S org.cyanogenmod.theme.chooser root 2527 2 0 0 ffffffff 00000000 S jbd2/mmcblk0p20 root 2528 2 0 0 ffffffff 00000000 S ext4-dio-unwrit root 2532 2 0 0 ffffffff 00000000 S flush-179:0 root 2542 2 0 0 ffffffff 00000000 S jbd2/mmcblk0p19 root 2543 2 0 0 ffffffff 00000000 S ext4-dio-unwrit root 2547 2 0 0 ffffffff 00000000 S jbd2/mmcblk0p21 root 2548 2 0 0 ffffffff 00000000 S ext4-dio-unwrit root 2552 2 0 0 ffffffff 00000000 S jbd2/mmcblk0p3- root 2553 2 0 0 ffffffff 00000000 S ext4-dio-unwrit logd 2559 1 18228 3512 ffffffff 00000000 S /system/bin/logd root 2560 1 9844 356 ffffffff 00000000 S /sbin/healthd root 2561 1 10556 1192 ffffffff 00000000 S /system/bin/lmkd system 2562 1 9516 728 ffffffff 00000000 S /system/bin/servicemanager root 2563 1 17948 1820 ffffffff 00000000 S /system/bin/vold system 2564 1 145896 20344 ffffffff 00000000 S /system/bin/surfaceflinger root 2565 1 8900 264 ffffffff 00000000 S /sbin/watchdogd root 2571 2 0 0 ffffffff 00000000 S pvr_workqueue root 2576 2 0 0 ffffffff 00000000 S kauditd shell 2577 1 9356 752 c026a6bc b6f6db90 S /system/bin/sh audit 2579 1 9264 612 ffffffff 00000000 S /system/bin/auditd root 2580 1 22804 1592 ffffffff 00000000 S /system/bin/netd root 2581 1 10104 1300 ffffffff 00000000 S /system/bin/debuggerd radio 2583 1 28560 7228 ffffffff 00000000 S /system/bin/rild drm 2584 1 25916 4144 ffffffff 00000000 S /system/bin/drmserver media 2585 1 170652 16392 ffffffff 00000000 S /system/bin/mediaserver install 2586 1 9424 756 ffffffff 00000000 S /system/bin/installd keystore 2588 1 12476 1820 ffffffff 00000000 S 
/system/bin/keystore radio 2590 1 15216 156 ffffffff 00000000 S /system/bin/cbd drmrpc 2591 1 17680 916 ffffffff 00000000 S /system/bin/mcDriverDaemon root 2598 1 1486896 48780 ffffffff 00000000 S zygote media_rw 2601 1 15412 2036 ffffffff 00000000 S /system/bin/sdcard shell 2606 1 18008 616 ffffffff 00000000 S /sbin/adbd root 2611 2 0 0 ffffffff 00000000 S mc_log root 2828 1 9352 476 ffffffff 00000000 S daemonsu:mount:master root 2856 1 15496 3288 ffffffff 00000000 S daemonsu:master system 3021 2598 1707492 134532 ffffffff 00000000 S system_server u0_a15 3136 2598 1620628 110372 ffffffff 00000000 S com.android.systemui u0_a8 3157 2598 1520468 48044 ffffffff 00000000 S android.process.media nfc 3193 2598 1531524 46308 ffffffff 00000000 S com.android.nfc u0_a234 3463 2598 1535956 61340 ffffffff 00000000 S com.test.androidspy u0_a42 3496 2598 1504436 41000 ffffffff 00000000 S com.cyanogenmod.lockclock dhcp 3602 1 9356 760 ffffffff 00000000 S /system/bin/dhcpcd u0_a0 3613 2598 1498792 38644 ffffffff 00000000 S org.cyanogenmod.audiofx u0_a7 3634 2598 1495288 35708 ffffffff 00000000 S com.android.incallui radio 3654 2598 1526784 58864 ffffffff 00000000 S com.android.phone u0_a18 3677 2598 1578640 90756 ffffffff 00000000 S com.cyanogenmod.trebuchet u0_a234 3780 2598 1505824 47708 ffffffff 00000000 S com.test.androidspy:acc_service u0_a56 3796 2598 1495256 34884 ffffffff 00000000 S com.android.smspush root 3970 2856 15496 3460 ffffffff 00000000 S daemonsu:0 gps 4358 1 29864 5092 ffffffff 00000000 S /system/bin/gpsd bluetooth 4511 2598 1539260 49928 ffffffff 00000000 S com.android.bluetooth root 4803 2856 16520 664 ffffffff 00000000 S daemonsu:10058 u0_a33 5075 2598 1516548 40680 ffffffff 00000000 S com.android.email u0_a595 5480 2598 1574788 59188 ffffffff 00000000 S com.tencent.liveassistant:wns u0_a2 7572 2598 1502488 43776 ffffffff 00000000 S com.cyanogenmod.updater u0_a30 7848 2598 1499776 38628 ffffffff 00000000 S com.android.deskclock root 15093 2 0 0 ffffffff 
00000000 S kworker/u:1 u0_a58 16041 2598 1499012 37692 ffffffff 00000000 S eu.chainfire.supersu system 16094 2598 1526292 46296 ffffffff 00000000 S com.android.settings root 16247 2 0 0 ffffffff 00000000 S kworker/u:0 shell 16511 2606 9356 772 c026a6bc b6f61b90 S /system/bin/sh root 16547 2 0 0 ffffffff 00000000 S kworker/u:2 root 17402 2 0 0 ffffffff 00000000 S kworker/0:2 u0_a3 18045 2598 1497536 41128 ffffffff 00000000 S com.android.providers.calendar u0_a26 18080 2598 1504928 41824 ffffffff 00000000 S com.android.calendar u0_a595 18162 2598 1565540 63360 ffffffff 00000000 S com.tencent.liveassistant:web wifi 20194 1 12504 2652 ffffffff 00000000 S /system/bin/wpa_supplicant root 21966 2 0 0 ffffffff 00000000 S kworker/u:3 root 22006 2 0 0 ffffffff 00000000 S kworker/0:1 shell 22889 2598 1495600 34984 ffffffff b6e6f290 S org.cyanogenmod.bugreport root 23590 2 0 0 ffffffff 00000000 S kworker/0:0 root 23706 2 0 0 ffffffff 00000000 S migration/1 root 23709 2 0 0 ffffffff 00000000 S kworker/1:0 root 23710 2 0 0 ffffffff 00000000 S ksoftirqd/1 root 23711 2 0 0 ffffffff 00000000 S watchdog/1 root 23712 2 0 0 ffffffff 00000000 S migration/2 root 23713 2 0 0 ffffffff 00000000 S kworker/2:0 root 23714 2 0 0 ffffffff 00000000 S ksoftirqd/2 root 23715 2 0 0 ffffffff 00000000 S watchdog/2 root 23716 2 0 0 ffffffff 00000000 S migration/3 root 23718 2 0 0 ffffffff 00000000 S kworker/3:0 root 23719 2 0 0 ffffffff 00000000 S ksoftirqd/3 root 23720 2 0 0 ffffffff 00000000 S watchdog/3 root 23721 2 0 0 ffffffff 00000000 S kworker/3:1 root 23722 2 0 0 ffffffff 00000000 S kworker/1:1 root 23730 2 0 0 ffffffff 00000000 S kworker/2:1 shell 23740 2606 10640 968 00000000 b6f42b90 R ps u0_a1308 23911 2598 2173820 273648 ffffffff 00000000 S com.tencent.nijigen root 23940 1 1373476 35712 ffffffff 00000000 S com.test.androidspy:service u0_a1308 23957 2598 1608592 78264 ffffffff 00000000 S com.tencent.nijigen:wns u0_a1308 24145 2598 1786384 151352 ffffffff 00000000 S 
com.tencent.nijigen:QALSERVICE u0_a1308 24517 2598 1588632 72300 ffffffff 00000000 S com.tencent.nijigen:xg_service_v3 u0_a1308 24558 2598 1634068 86392 ffffffff 00000000 S com.tencent.nijigen:picker u0_a1308 24887 1 9272 448 ffffffff 00000000 S /data/data/com.tencent.nijigen/lib/libxguardian.so ''' else: raise NotImplementedError('Not supported command: %s' % cmd_line) class TestADB(unittest.TestCase): '''ADB类测试用例 ''' def test_get_cpu_abi(self): for arch in ['armeabi-v7a', 'x86']: ADB.run_shell_cmd = mock.Mock(return_value=arch) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_cpu_abi(), arch) def test_get_sdk_version(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_sdk_version(), 21) def test_list_process(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) result = adb.list_process() self.assertEqual(len(result), 172) self.assertEqual(result[0]['ppid'], 0) self.assertEqual(result[0]['pid'], 1) self.assertEqual(result[0]['proc_name'], '/init') def test_get_pid(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_pid('android.process.media'), 3157) def test_get_device_imei(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) ADB.is_rooted = mock.Mock(return_value=False) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_device_imei(), '99000567737777') def test_get_device_model(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) ADB.is_rooted = mock.Mock(return_value=False) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_device_model(), 'Xiaomi MI 4C') def test_get_system_version(self): 
ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) ADB.is_rooted = mock.Mock(return_value=False) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_system_version(), '5.0.2') def test_get_uid(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) self.assertEqual(adb.get_uid('com.tencent.mobileqq'), 'u0_a1309') def test_list_dir(self): ADB.run_shell_cmd = mock.Mock(side_effect=mock_run_shell_cmd) ADB.is_rooted = mock.Mock(return_value=True) adb_backend = LocalADBBackend('127.0.0.1', '') adb = ADB(adb_backend) dir_list, file_list = adb.list_dir('/data/data') self.assertEqual(len(dir_list), 89) self.assertEqual(len(file_list), 0) self.assertEqual(dir_list[0]['name'], 'com.android.apps.tag') self.assertEqual(dir_list[0]['attr'], 'rwxr-x--x') if __name__ == '__main__': unittest.main()
60.196262
205
0.63581
4,057
25,764
3.946019
0.222332
0.167906
0.1784
0.122556
0.4891
0.414579
0.334499
0.194828
0.145668
0.136861
0
0.284077
0.292113
25,764
427
206
60.337237
0.593705
0.027907
0
0.104859
0
0.063939
0.820144
0.108114
0
0
0
0
0.038363
1
0.025575
false
0
0.012788
0
0.089514
0.002558
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
aa9279905e721169dbb3a05ff58bde8698b71dc2
253
py
Python
students/k3343/laboratory_works/Andreeva_Ekaterina/laboratory_work_1/hotels/urls.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
10
2020-03-20T09:06:12.000Z
2021-07-27T13:06:02.000Z
students/k3343/laboratory_works/Andreeva_Ekaterina/laboratory_work_1/hotels/urls.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
134
2020-03-23T09:47:48.000Z
2022-03-12T01:05:19.000Z
students/k3343/laboratory_works/Andreeva_Ekaterina/laboratory_work_1/hotels/urls.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
71
2020-03-20T12:45:56.000Z
2021-10-31T19:22:25.000Z
from django.urls import path
from django.contrib.auth import views as view
from . import views

# URL routes for the hotels app.
urlpatterns = [
    # Landing page: list all hotels.
    path('', views.show_hotels, name='all_hotels'),
    # Detail page for one hotel, addressed by primary key.
    path('single_hotel/<int:pk>', views.show_hotel_single, name="single_hotel"),
]
31.625
81
0.715415
36
253
4.861111
0.527778
0.114286
0
0
0
0
0
0
0
0
0
0
0.15415
253
8
82
31.625
0.817757
0
0
0
0
0
0.174089
0.08502
0
0
0
0
0
1
0
false
0
0.428571
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
aa92e5eaa42bad8cb0c78e20095787f73c918ef0
2,090
py
Python
theme2html.py
persquare/NorthernLights
53a53e181822c907a7fea7ff618cf10a412ede81
[ "MIT" ]
null
null
null
theme2html.py
persquare/NorthernLights
53a53e181822c907a7fea7ff618cf10a412ede81
[ "MIT" ]
null
null
null
theme2html.py
persquare/NorthernLights
53a53e181822c907a7fea7ff618cf10a412ede81
[ "MIT" ]
null
null
null
#!/usr/bin/env python


class ThemePage(object):
    """Render a parsed colour-theme description (.tmcsv) as an HTML preview page.

    The input is a line-oriented, comma-separated format where the first
    character of each line selects the record type:
      H - header record (currently ignored)
      M - main/global theme settings (background, foreground, caret, ...)
      S - one scope definition (name, colours, style, scope selector)
    """

    def __init__(self):
        super(ThemePage, self).__init__()
        self.scopes = []          # one dict per parsed 'S' record
        self.sort_key = 'scope'   # NOTE(review): unused here — presumably for future sorting; confirm

    def gen_header(self, line):
        """Parse an 'H' record. Currently a no-op placeholder."""
        pass

    def parse_file(self, file):
        """Read *file* and dispatch each record line to its handler.

        Bug fix: the handlers were previously invoked on the module-level
        ``page`` global instead of ``self``, which broke any instance other
        than the script-level one.
        """
        # Context manager closes the handle (the original left it open).
        with open(file) as fh:
            lines = [line.strip() for line in fh]
        for line in lines:
            if line[0] == 'H':
                self.gen_header(line)
            if line[0] == 'M':
                self.gen_main(line)
            if line[0] == 'S':
                self.gen_scope(line)

    def gen_main(self, line):
        """Parse an 'M' record into the global theme properties dict ``self.props``."""
        values = [x.lstrip() for x in line.split(',')]
        self.props = dict(zip(['bgcolor', 'fgcolor', 'caret', 'selection',
                               'invisibles', 'linehl'], values[1:]))

    def gen_scope(self, line):
        """Parse an 'S' record and append its dict to ``self.scopes``."""
        values = [x.lstrip() for x in line.split(',')]
        scope = dict(zip(['name', 'bgcolor', 'fgcolor', 'style', 'scope'], values[1:]))
        self.scopes.append(scope)

    def scope_to_css(self, scope):
        """Return an inline ``style ="..."`` attribute for *scope*, or '' if no colours.

        Colours look like 8-digit hex (#RRGGBBAA); only the opaque #RRGGBB
        part is emitted — alpha handling remains a FIXME.
        """
        res = ""
        if scope['fgcolor'][0] == '#':
            # FIXME: transparency in scope['fgcolor'][7:9]
            res = "color:%s;" % (scope['fgcolor'][0:7])
        if scope['bgcolor'][0] == '#':
            # FIXME: transparency in scope['bgcolor'][7:9]
            res = res + "background-color:%s;" % (scope['bgcolor'][0:7])
        if res != "":
            return 'style ="%s"' % res
        else:
            return ''

    def render(self):
        """Print the complete HTML preview page to stdout."""
        # Single-argument print(...) behaves identically as a Python 2
        # print statement and a Python 3 function call.
        self.preamble()
        print("<p>default</p>")
        for scope in self.scopes:
            print('<p %s>%s</p>' % (self.scope_to_css(scope), scope['scope']))
        self.postamble()

    def preamble(self):
        """Print the document head and opening <body> with the global colours."""
        print('''
<!DOCTYPE html>
<html>
<style>
body {color:%s;}
</style>
<body style="background-color:%s;">
''' % (self.props['fgcolor'], self.props['bgcolor']))

    def postamble(self):
        """Print the closing tags."""
        print('''
</body>
</html>
''')


if __name__ == '__main__':
    # Guarded so importing this module no longer reads the theme file or prints.
    page = ThemePage()
    page.parse_file('NorthernLights.tmcsv')
    page.render()
28.243243
113
0.505263
243
2,090
4.259259
0.304527
0.023188
0.02029
0.021256
0.117874
0.069565
0.069565
0.069565
0.069565
0.069565
0
0.010431
0.311962
2,090
73
114
28.630137
0.709319
0.061244
0
0.071429
0
0
0.182478
0.015034
0
0
0
0.013699
0
0
null
null
0.017857
0
null
null
0.071429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
2
aa9d694c302395d28e118ef790ac8af16b2cd010
142
py
Python
Mundos/Mundo 1/Aulas/Aula 7/ex012.py
NicolasdeLimaAlves/Curso-de-Python
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
[ "MIT" ]
null
null
null
Mundos/Mundo 1/Aulas/Aula 7/ex012.py
NicolasdeLimaAlves/Curso-de-Python
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
[ "MIT" ]
null
null
null
Mundos/Mundo 1/Aulas/Aula 7/ex012.py
NicolasdeLimaAlves/Curso-de-Python
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
[ "MIT" ]
null
null
null
# Read a product price and show it with a flat 5% discount applied.
preco = float(input('Preço do produto: R$'))
# Keep the exact original arithmetic (price minus 5% of price) so the
# rounded output is bit-for-bit identical.
desconto = preco - (preco * 5 / 100)
print('O produto que custava {:.2f} vai custar {:.2f}'.format(preco, desconto))
47.333333
71
0.626761
24
142
3.708333
0.75
0
0
0
0
0
0
0
0
0
0
0.05
0.15493
142
3
71
47.333333
0.691667
0
0
0
0
0
0.461538
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
aaa2953459ea19d3ffcaec38eab5d956ae6ce6b7
1,784
py
Python
bluebird/utils/timeutils.py
rkm/bluebird
2325ebb151724d4444c092c095a040d7365dda79
[ "MIT" ]
8
2019-01-29T15:19:39.000Z
2020-07-16T03:55:36.000Z
bluebird/utils/timeutils.py
rkm/bluebird
2325ebb151724d4444c092c095a040d7365dda79
[ "MIT" ]
46
2019-02-08T14:23:11.000Z
2021-04-06T13:45:10.000Z
bluebird/utils/timeutils.py
rkm/bluebird
2325ebb151724d4444c092c095a040d7365dda79
[ "MIT" ]
3
2019-05-06T14:18:07.000Z
2021-06-17T10:39:59.000Z
""" Contains utility functions for dates and times """ import datetime import logging import time from bluebird.settings import Settings _LOGGER = logging.getLogger(__name__) DEFAULT_LIFETIME = datetime.timedelta(seconds=10) def before(date_time): """ Check if the given time has passed (i.e. before now()) :param date_time: The datetime to test :return: If the given datetime is in the past """ return now() < date_time def now(): """ Returns the current datetime in UTC :return: DateTime """ return datetime.datetime.utcnow() def wait_until(condition, *args, interval=0.1, timeout=1): """ Sleeps until the given condition is met :param condition: The method to check the condition on :param args: Any arguments to pass to the method :param interval: The rate in seconds at which the condition is checked :param timeout: The maximum amount of time in seconds to wait for the condition to be met """ start = time.time() while not condition(*args) and time.time() - start < timeout: time.sleep(interval) def log_rate(sim_speed): """ Calculate the log rate for a given sim speed :param sim_speed: :return: """ return round(Settings.SIM_LOG_RATE * sim_speed, 2) def timeit(prefix): """ Decorator which logs the execution time of the wrapped method :param prefix: :return: """ def wrap(func): def wrapped_func(*args, **kwargs): start = time.time() res = func(*args, **kwargs) _LOGGER.debug( f"Method {prefix}.{func.__name__} took {time.time()-start:.2f}s to " "execute" ) return res return wrapped_func return wrap
22.3
86
0.63565
236
1,784
4.707627
0.402542
0.028803
0.018002
0.027003
0
0
0
0
0
0
0
0.005385
0.2713
1,784
79
87
22.582278
0.849231
0.394058
0
0.071429
0
0
0.075789
0.050526
0
0
0
0
0
1
0.25
false
0
0.142857
0
0.607143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
aaba5fe05760820c044c48d4954dc54b0987c3fb
896
py
Python
semantic-conventions/src/tests/conftest.py
ekmixon/build-tools
ea3b32830bcf270c8cede6a320c200988501f92a
[ "Apache-2.0" ]
17
2020-07-29T17:27:04.000Z
2021-11-28T14:11:38.000Z
semantic-conventions/src/tests/conftest.py
ekmixon/build-tools
ea3b32830bcf270c8cede6a320c200988501f92a
[ "Apache-2.0" ]
58
2020-07-29T18:11:59.000Z
2022-03-30T09:39:25.000Z
semantic-conventions/src/tests/conftest.py
ekmixon/build-tools
ea3b32830bcf270c8cede6a320c200988501f92a
[ "Apache-2.0" ]
23
2020-07-29T17:27:07.000Z
2022-02-23T18:14:50.000Z
import os

import pytest
from ruamel.yaml import YAML

_TEST_DIR = os.path.dirname(__file__)

# Fixtures in pytest work with reused outer names, so shut up pylint here.
# pylint:disable=redefined-outer-name


@pytest.fixture
def test_file_path():
    """Fixture: callable that builds a path under the tests' data directory."""

    def _build(*path):
        return os.path.join(_TEST_DIR, "data", *path)

    return _build


@pytest.fixture
def open_test_file(test_file_path):
    """Fixture: callable that opens a data file for reading as UTF-8 text."""

    def _open(*path):
        return open(test_file_path(*path), "r", encoding="utf-8")

    return _open


@pytest.fixture
def load_yaml(open_test_file):
    """Fixture: callable that parses a YAML file from the data/yaml folder."""

    def _parse(filename):
        with open_test_file(os.path.join("yaml", filename)) as yaml_file:
            return YAML().load(yaml_file)

    return _parse


@pytest.fixture
def read_test_file(open_test_file):
    """Fixture: callable that returns a data file's full text content."""

    def _read(*path):
        with open_test_file(*path) as test_file:
            return test_file.read()

    return _read
20.363636
74
0.696429
132
896
4.492424
0.318182
0.148398
0.121417
0.126476
0.246206
0.104553
0.104553
0
0
0
0
0.001395
0.199777
896
43
75
20.837209
0.825662
0.120536
0
0.346154
0
0
0.017834
0
0
0
0
0
0
1
0.307692
false
0
0.115385
0.076923
0.730769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
aabc49eaf524285405a06ba58d7418ed819ae534
475
py
Python
MotunrayoKoyejo/Phase 1/Python Basic 1/Day9/Q73.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
6
2020-05-23T19:53:25.000Z
2021-05-08T20:21:30.000Z
MotunrayoKoyejo/Phase 1/Python Basic 1/Day9/Q73.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
8
2020-05-14T18:53:12.000Z
2020-07-03T00:06:20.000Z
MotunrayoKoyejo/Phase 1/Python Basic 1/Day9/Q73.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
39
2020-05-10T20:55:02.000Z
2020-09-12T17:40:59.000Z
# Interactive script: read two endpoints of a line segment and print the
# midpoint (the coordinate-wise average of the endpoints).
print('\nCalculate the midpoint of a line :')

x1 = float(input('The value of x (the first endpoint) '))
y1 = float(input('The value of y (the first endpoint) '))
# BUG FIX: the next two prompts previously said "first endpoint" although
# they read the SECOND endpoint, which misled the user.
x2 = float(input('The value of x (the second endpoint) '))
y2 = float(input('The value of y (the second endpoint) '))

# Midpoint formula: ((x1+x2)/2, (y1+y2)/2).
x_m_point = (x1 + x2)/2
y_m_point = (y1 + y2)/2

print()
print("The midpoint of line is :")
print( "The midpoint's x value is: ",x_m_point)
print( "The midpoint's y value is: ",y_m_point)
print()
31.666667
57
0.671579
87
475
3.574713
0.264368
0.141479
0.167203
0.231511
0.475884
0.475884
0.475884
0.475884
0.475884
0
0
0.025381
0.170526
475
15
58
31.666667
0.763959
0
0
0.166667
0
0
0.544118
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
aac42150e9f32d794b188a862397b5770c11468b
2,615
py
Python
pcml/lib/GlobalOperationPrimitives.py
KevinWhalen/pcml
b6282b98b69e0289790e347aa0ec6fc021359323
[ "BSD-3-Clause" ]
1
2018-03-07T20:35:15.000Z
2018-03-07T20:35:15.000Z
pcml/lib/GlobalOperationPrimitives.py
Jindam/HPCGISLab
54ec030cd87b3f6f46ea68cdf007b21344517515
[ "BSD-3-Clause" ]
null
null
null
pcml/lib/GlobalOperationPrimitives.py
Jindam/HPCGISLab
54ec030cd87b3f6f46ea68cdf007b21344517515
[ "BSD-3-Clause" ]
null
null
null
""" Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved. Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. Authors and contributors: Eric Shook (eshook@kent.edu); Zhengliang Feng (odayfans@gmail.com, zfeng2@kent.edu) """ from ..core.Operation import * from ..core.Scheduler import * from ..util.OperationBuilder import * import numpy as np import types import math @globaloperation def GlobalMinMHDistance(self, locations, subdomains): pointlist=subdomains[1].get_pointlist() locind=locations[0] loc=subdomains[0].get_yxloc(locind) # Convert from array coordinates (r,c) to (y,x) coordinates mindst=999999999.0 mindstindex=-1 for index in xrange(len(pointlist)): point=pointlist[index] dst=abs(loc['y'] - point['y']) + abs(loc['x'] - point['x']) if dst<mindst: mindst=dst mindstindex=index return mindst @globaloperation def GlobalMinMHDistanceIndex(self, locations, subdomains): pointlist=subdomains[1].get_pointlist() locind=locations[0] loc=subdomains[0].get_yxloc(locind) # Convert from array coordinates (r,c) to (y,x) coordinates mindst=999999999.0 mindstindex=-1 for index in xrange(len(pointlist)): point=pointlist[index] dst=abs(loc['y'] - point['y']) + abs(loc['x'] - point['x']) if dst<mindst: mindst=dst mindstindex=index return mindstindex @globaloperation def GlobalMinDistanceIndex(self, locations, subdomains): pointlist=subdomains[1].get_pointlist() locind=locations[0] loc=subdomains[0].get_yxloc(locind) # Convert from array coordinates (r,c) to (y,x) coordinates #print "loc",loc mindst=999999999.0 mindstindex=-1 for index in xrange(len(pointlist)): point=pointlist[index] #print "point",point dst=math.sqrt((loc['y'] - point['y']) ** 2 + (loc['x'] - point['x']) ** 2) if dst<mindst: mindst=dst mindstindex=index #print "mindst",mindst return mindstindex @globaloperation def GlobalMinDistance(self, locations, subdomains): pointlist=subdomains[1].get_pointlist() locind=locations[0] 
loc=subdomains[0].get_yxloc(locind) # Convert from array coordinates (r,c) to (y,x) coordinates mindst=999999999.0 for point in pointlist: #print "point",point dst=math.sqrt((loc['y'] - point['y']) ** 2 + (loc['x'] - point['x']) ** 2) if dst<mindst: mindst=dst #print "mindst",mindst return mindst
31.506024
109
0.662333
335
2,615
5.146269
0.283582
0.041763
0.053364
0.074246
0.642111
0.642111
0.642111
0.632831
0.632831
0.632831
0
0.030948
0.209178
2,615
82
110
31.890244
0.802708
0.2413
0
0.813559
0
0
0.008134
0
0
0
0
0
0
1
0.067797
false
0
0.101695
0
0.237288
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
aad1f197ea37b7ec0e06f5bdeb6531fa582aede4
2,494
py
Python
j_notebooks/core_constants.py
andrewt-cville/fb_recruiting
dc8997b2b4626114e4fc06e99a2e68d8e44b0b26
[ "Apache-2.0" ]
null
null
null
j_notebooks/core_constants.py
andrewt-cville/fb_recruiting
dc8997b2b4626114e4fc06e99a2e68d8e44b0b26
[ "Apache-2.0" ]
4
2022-01-21T22:27:35.000Z
2022-01-21T22:39:27.000Z
j_notebooks/core_constants.py
andrewt-cville/fb_recruiting
dc8997b2b4626114e4fc06e99a2e68d8e44b0b26
[ "Apache-2.0" ]
null
null
null
import requests  # NOTE(review): unused in this module; kept in case other tooling relies on it
import json


def get_defYears():
    """Return the recruiting class years covered by the scrapers."""
    return ['2002', '2003', '2004', '2005', '2006', '2007', '2008','2009','2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020']


def get_header():
    """Return the HTTP headers (browser user-agent) used for scraping requests."""
    return {'user-agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36'}


def get_schoolsList():
    """Load and parse the schools configuration file.

    BUG FIX: the original did ``json.loads(open(...).read())`` which never
    closed the file handle; a context manager now guarantees closure.
    """
    with open('..//config//schools.json', "r") as schools_file:
        return json.loads(schools_file.read())


def create_url247(level, school_id, year):
    """Build a 247sports commits URL for the given school and year.

    :param level: only 'team' is supported; any other value prints an error
        and (implicitly) returns None — callers must handle that.
    """
    if (level == 'team'):
        return 'https://247sports.com/college/{}/Season/{}-Football/Commits/'.format(school_id,year)
    else:
        print('ERROR: appropriate levels are team, prospect and recruit')


def get_htmlDir(source, conference, level):
    """Return the relative directory where scraped HTML is cached."""
    return "..//html//{}//{}//{}//".format(source, conference, level)


def get_availableConferences():
    """Return the slugs of conferences the scrapers know about."""
    return ['bigten', 'bigtwelve', 'acc', 'sec', 'pactwelve', 'american', 'independents', 'cusa', 'mac', 'mwc', 'sunbelt']


def save_files(filePath, filePersist):
    """Serialize *filePersist* to *filePath* as JSON."""
    with open(filePath, "w") as write_file:
        json.dump(filePersist, write_file)


def save_html(filePath, reqText):
    """Write raw response text (HTML) to *filePath*."""
    with open(filePath, "w") as write_file:
        write_file.write(reqText)


def save_records(folder, filename, listPersist):
    """Serialize *listPersist* as JSON to ../<folder>/<filename>.json."""
    with open("..//{}//{}.json".format(folder, filename), "w") as write_file:
        json.dump(listPersist, write_file)


# --- fuzzy-matching configuration -------------------------------------------
databaseName = 'fb_recruiting.db'

# Field sets used for fuzzy record matching, per data source.
sports247FuzzyFields = ['ID', 'IDYR', 'College', 'Year', 'PlayerName', 'HighSchool', 'City', 'State', 'Position']
rivalsFuzzyFields = ['IDYR', 'College', 'Year', 'PlayerName', 'HighSchool', 'City', 'State', 'Position']
nflFuzzyFields = ['ID', 'College', 'Year', 'PlayerName', 'Position']
allconfFuzzyFields = ['ID', 'College', 'PlayerName']
ncaaFuzzyFields = ['ID', 'College', 'PlayerName', 'Position']
allamericanFuzzyFields = ['ID', 'College', 'Year', 'PlayerName']

# Blocking keys (records are only compared when these fields match), per source.
sports247Blockers = ['College', 'Year']
rivalsBlockers = ['College', 'Year']
nflBlockers = ['College']
allconfBlockers = ['College']
ncaaBlockers = ['College']
allamericanBlockers = ['College']

# Source name -> field/blocker configuration lookups.
fuzzyFields = {'Rivals': rivalsFuzzyFields, 'NFL': nflFuzzyFields, 'AllConference': allconfFuzzyFields, 'NCAA': ncaaFuzzyFields, 'AllAmerican': allamericanFuzzyFields, 'Sports247': sports247FuzzyFields}
blockers = {'Rivals': rivalsBlockers, 'NFL': nflBlockers, 'AllConference': allconfBlockers, 'NCAA': ncaaBlockers, 'AllAmerican': allamericanBlockers, 'Sports247': sports247Blockers}
45.345455
202
0.684844
266
2,494
6.345865
0.537594
0.031991
0.049763
0.021327
0.111374
0.111374
0.094787
0.061611
0
0
0
0.058044
0.122694
2,494
55
203
45.345455
0.713437
0
0
0.04878
0
0.02439
0.32986
0.018437
0
0
0
0
0
1
0.219512
false
0
0.04878
0.121951
0.414634
0.02439
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
2aa92d3079dc909388bc1b9520eda56ea436cdba
548
py
Python
__init__.py
Innovation-DOOEL-Skopje/ViewHRV
25c57ec3822daaca8335c6a70b5d2107c6655b48
[ "MIT" ]
null
null
null
__init__.py
Innovation-DOOEL-Skopje/ViewHRV
25c57ec3822daaca8335c6a70b5d2107c6655b48
[ "MIT" ]
null
null
null
__init__.py
Innovation-DOOEL-Skopje/ViewHRV
25c57ec3822daaca8335c6a70b5d2107c6655b48
[ "MIT" ]
null
null
null
# # -*- coding: utf-8 -*- # """ # Notes # ----- # Author # ------ # .. Ervin Shaqiri , Marjan Gushev # Last Update # ----------- # 13.11.2019 # :copyright: # :license: # """ # # Local imports # # from package.__version__ import __version__ # import Package # # Metadata # __author__ = "Ervin Shaqiri, Marjan Gushev" # # __email__ = "@gmail.com" # __maintainer__ = "Ervin Shaqiri" # __status__ = "Development" # # __license__ = "" # name = "Package" # description = "Python toolbox for calculating and plotting Heart Rate Variability indices."
18.896552
93
0.633212
53
548
6.018868
0.773585
0.112853
0.112853
0.15047
0.188088
0
0
0
0
0
0
0.020089
0.182482
548
29
93
18.896552
0.691964
0.890511
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
2ac84d2f2751b2936ef2c7ceda2e752ae5ba5eac
232
py
Python
src/main.py
cld-santos/fifi
5576543afc7a9c1a8f919e7019bb5eab8cba073e
[ "Apache-2.0" ]
null
null
null
src/main.py
cld-santos/fifi
5576543afc7a9c1a8f919e7019bb5eab8cba073e
[ "Apache-2.0" ]
null
null
null
src/main.py
cld-santos/fifi
5576543afc7a9c1a8f919e7019bb5eab8cba073e
[ "Apache-2.0" ]
null
null
null
import sys

from fifi import investigate

if __name__ == '__main__':
    # Usage: main.py <subject> <url>
    # investigate('febre amarela', 'http://www.brasil.gov.br/home-1/ultimas-noticias')
    subject, url = sys.argv[1], sys.argv[2]
    investigate(subject, url)
23.2
86
0.689655
32
232
4.75
0.71875
0.092105
0
0
0
0
0
0
0
0
0
0.015464
0.163793
232
9
87
25.777778
0.768041
0.344828
0
0
0
0
0.053333
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2ac88c5d0161a99e852a85615708c76851d21a25
2,161
py
Python
hchztests/tests/test_container_webdav.py
codedsk/hubcheck-hubzero-tests
89dd7164fed9161a5bf80e0a5635cec3da5be31d
[ "MIT" ]
1
2016-01-02T01:36:14.000Z
2016-01-02T01:36:14.000Z
hchztests/tests/test_container_webdav.py
codedsk/hubcheck-hubzero-tests
89dd7164fed9161a5bf80e0a5635cec3da5be31d
[ "MIT" ]
null
null
null
hchztests/tests/test_container_webdav.py
codedsk/hubcheck-hubzero-tests
89dd7164fed9161a5bf80e0a5635cec3da5be31d
[ "MIT" ]
null
null
null
# NOTE(review): Python 2 code (uses the legacy "except Class, name" syntax).
import unittest
import pytest
import sys
import time

import hubcheck
from webdav import WebdavClient
from webdav.Connection import WebdavError,AuthorizationError

# Marks applied to every test in this module.
pytestmark = [ pytest.mark.container, pytest.mark.webdav, pytest.mark.nightly, pytest.mark.reboot ]

# sleep for 15 minutes to avoid fail2ban related errors
SLEEPTIME=60*15


@pytest.mark.registereduser
class container_webdav(hubcheck.testcase.TestCase):
    """Login tests against the hub's WebDAV endpoint."""

    def setUp(self):
        # get user account info
        self.username,self.userpass = self.testdata.find_account_for(
            'registeredworkspace')
        webdav_base = self.testdata.find_url_for('webdav')
        self.webdav_url = 'https://%s/webdav' % webdav_base
        # When True, tearDown sleeps to avoid tripping fail2ban after a
        # failed-authentication test.
        self.do_sleep = True

    @pytest.mark.webdav_login
    def test_valid_user_valid_password_login(self):
        """
        try webdav login with valid user and valid password
        """
        c = WebdavClient.CollectionStorer(self.webdav_url)
        c.connection.addBasicAuthorization(self.username,self.userpass)
        try:
            c.validate()
            # successful login does not require sleep
            self.do_sleep = False
        except AuthorizationError, e:
            self.fail("webdav login to %s as %s failed: %s"
                % (self.webdav_url,self.username,e))

    def test_invalid_user_login(self):
        """
        try webdav login with an invalid user
        """
        c = WebdavClient.CollectionStorer(self.webdav_url)
        c.connection.addBasicAuthorization('invaliduser','invalidpass')
        # An unknown user must be rejected by the server.
        with self.assertRaises(WebdavError) as cm:
            c.validate()

    def test_valid_user_invalid_passwordlogin(self):
        """
        try webdav login with a valid user and invalid password
        """
        c = WebdavClient.CollectionStorer(self.webdav_url)
        c.connection.addBasicAuthorization(self.username,'invalidpass')
        # A wrong password must raise an authorization failure.
        with self.assertRaises(AuthorizationError) as cm:
            c.validate()

    def tearDown(self):
        # Back off after failed logins so the test host is not banned.
        if self.do_sleep:
            time.sleep(SLEEPTIME)
26.036145
71
0.640444
236
2,161
5.745763
0.338983
0.044248
0.047935
0.039823
0.272861
0.233038
0.193215
0.193215
0.193215
0.138643
0
0.004487
0.278112
2,161
82
72
26.353659
0.864744
0.053216
0
0.136364
0
0
0.060307
0
0
0
0
0
0.045455
0
null
null
0.136364
0.159091
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
2ac93bfc784a35467809b491d9544789fcf7d9a4
384
py
Python
qutip/tests/test_hardware_info.py
tonybruguier/qutip
52d01da181a21b810c3407812c670f35fdc647e8
[ "BSD-3-Clause" ]
null
null
null
qutip/tests/test_hardware_info.py
tonybruguier/qutip
52d01da181a21b810c3407812c670f35fdc647e8
[ "BSD-3-Clause" ]
1
2022-03-12T12:27:00.000Z
2022-03-12T12:27:00.000Z
qutip/tests/test_hardware_info.py
tonybruguier/qutip
52d01da181a21b810c3407812c670f35fdc647e8
[ "BSD-3-Clause" ]
null
null
null
""" Tests for qutip.hardware_info. """ from qutip.hardware_info import hardware_info def test_hardware_info(): info = hardware_info() assert info["os"] in ("Windows", "FreeBSD", "Linux", "Mac OSX") assert info["cpus"] > 0 assert isinstance(info["cpus"], int) cpu_freq = info.get("cpu_freq", 0.0) assert cpu_freq >= 0. assert isinstance(cpu_freq, float)
24
67
0.664063
54
384
4.537037
0.462963
0.244898
0.138776
0
0
0
0
0
0
0
0
0.01278
0.184896
384
15
68
25.6
0.769968
0.078125
0
0
0
0
0.127168
0
0
0
0
0
0.555556
1
0.111111
false
0
0.111111
0
0.222222
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
2
2ad41031f0927398dc626ce0c48e111edcd41c16
1,388
py
Python
gui/wellplot/settings/layout/widgets/ui_tracklayouttreewidget.py
adriangrepo/qreservoir
20fba1b1fd1a42add223d9e8af2d267665bec493
[ "MIT" ]
2
2019-10-04T13:54:51.000Z
2021-05-21T19:36:15.000Z
gui/wellplot/settings/layout/widgets/ui_tracklayouttreewidget.py
adriangrepo/qreservoir
20fba1b1fd1a42add223d9e8af2d267665bec493
[ "MIT" ]
3
2019-11-19T17:06:09.000Z
2020-01-18T20:39:54.000Z
gui/wellplot/settings/layout/widgets/ui_tracklayouttreewidget.py
adriangrepo/qreservoir
20fba1b1fd1a42add223d9e8af2d267665bec493
[ "MIT" ]
2
2020-07-02T13:20:48.000Z
2020-11-11T00:18:51.000Z
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'tracklayouttreewidget.ui' # # Created: Tue Jun 23 21:01:09 2015 # by: PyQt4 UI code generator 4.10.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_trackLayouttreWidget(object): def setupUi(self, trackLayouttreWidget): trackLayouttreWidget.setObjectName(_fromUtf8("trackLayouttreWidget")) trackLayouttreWidget.resize(371, 362) trackLayouttreWidget.setAcceptDrops(True) trackLayouttreWidget.setAnimated(False) trackLayouttreWidget.setAllColumnsShowFocus(True) self.verticalLayout = QtGui.QVBoxLayout(trackLayouttreWidget) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.retranslateUi(trackLayouttreWidget) QtCore.QMetaObject.connectSlotsByName(trackLayouttreWidget) def retranslateUi(self, trackLayouttreWidget): pass
33.047619
79
0.741354
134
1,388
7.61194
0.537313
0.062745
0.078431
0.109804
0.160784
0.160784
0.160784
0.160784
0.160784
0.160784
0
0.027122
0.176513
1,388
41
80
33.853659
0.865267
0.162824
0
0.230769
1
0
0.029488
0
0
0
0
0
0
1
0.192308
false
0.038462
0.038462
0.115385
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
2
2adb59302dfc2ec27651cc389b9c66c966443aa6
815
py
Python
test/test_stmts.py
rocky/xpython
ce4ed4329cee2af0aab94254276f5a5687dd25f9
[ "MIT" ]
1
2020-04-28T13:18:13.000Z
2020-04-28T13:18:13.000Z
test/test_stmts.py
rocky/xbyterun
fde8f8a31ffd3e3c4545d76b4b1edf4b7e0191d9
[ "MIT" ]
null
null
null
test/test_stmts.py
rocky/xbyterun
fde8f8a31ffd3e3c4545d76b4b1edf4b7e0191d9
[ "MIT" ]
null
null
null
"""Test Python statements.""" from xdis.version_info import PYTHON_VERSION_TRIPLE, version_tuple_to_str try: import vmtest except ImportError: from . import vmtest if PYTHON_VERSION_TRIPLE[:2] in ((3, 10),): print("Test not gone over yet for %s" % version_tuple_to_str()) class TestStmts(vmtest.VmTestCase): def test_for_loop(self): pass else: class TestStmts(vmtest.VmTestCase): def test_for_loop(self): self.self_checking() def test_while(self): self.self_checking() def test_global(self): self.self_checking() def test_exec(self): self.self_checking() if __name__ == "__main__": # import unittest # unittest.main() t = TestStmts("test_for_loop") t.test_for_loop()
20.897436
73
0.63681
103
815
4.708738
0.427184
0.131959
0.090722
0.164948
0.356701
0.356701
0.197938
0.197938
0.197938
0
0
0.006634
0.260123
815
38
74
21.447368
0.797678
0.068712
0
0.347826
0
0
0.066489
0
0
0
0
0
0
1
0.217391
false
0.043478
0.173913
0
0.478261
0.043478
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
2add779f4df7a51f2c5178e9bbfd9de6fdf7d323
279
py
Python
app.py
juliuskrahn-com/backend
819c4204be410aa8e0049f14a94646f0754ae821
[ "MIT" ]
null
null
null
app.py
juliuskrahn-com/backend
819c4204be410aa8e0049f14a94646f0754ae821
[ "MIT" ]
1
2021-03-24T10:38:41.000Z
2021-03-24T10:38:41.000Z
app.py
juliuskrahn/blog-backend
819c4204be410aa8e0049f14a94646f0754ae821
[ "MIT" ]
null
null
null
"""CDK entry point: synthesizes the Production and Testing stacks."""
from aws_cdk import core

import stacks

app = core.App()

# Both stacks target the same account/region; define the environment once
# instead of duplicating the literals (previously repeated twice).
_ENV = core.Environment(account="473883619336", region="eu-central-1")

stacks.Production(app, "Production", env=_ENV)
stacks.Testing(app, "Testing", env=_ENV)

app.synth()
23.25
105
0.749104
38
279
5.473684
0.473684
0.067308
0.173077
0.240385
0.509615
0.509615
0.509615
0.509615
0.509615
0
0
0.101563
0.082437
279
11
106
25.363636
0.710938
0
0
0
0
0
0.232975
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2af708bfd795871225f7f60fff7193b99390bc1b
942
py
Python
src/wowapi/helper/file_cacher.py
MislavJaksic/World-of-Warcraft-API
f414cf967ed0f54bb8aef74c48ed3b761613db6c
[ "MIT" ]
null
null
null
src/wowapi/helper/file_cacher.py
MislavJaksic/World-of-Warcraft-API
f414cf967ed0f54bb8aef74c48ed3b761613db6c
[ "MIT" ]
2
2021-04-21T17:38:08.000Z
2021-04-21T17:38:35.000Z
src/wowapi/helper/file_cacher.py
MislavJaksic/World-of-Warcraft-API
f414cf967ed0f54bb8aef74c48ed3b761613db6c
[ "MIT" ]
null
null
null
"""File-system cache that persists arbitrary Python objects via pickle."""
from pathlib import Path
import pickle
import logging


class FileCacher(object):
    """Stores and retrieves pickled objects inside a directory.

    SECURITY NOTE(review): pickle deserialization executes arbitrary code;
    cache files must come only from trusted sources.
    """

    def __init__(self, dir_path: Path):
        # Directory that holds all cached files; assumed to already exist.
        self.dir_path = dir_path

    def __str__(self) -> str:
        # e.g. "FileCacher[/abs/path/to/dir]"
        return f"FileCacher[{self.dir_path.resolve()}]"

    def object_to_file(self, object, filename):
        """Pickle *object* into *filename* inside the cache directory."""
        # FIX: dropped the pointless f-prefix (no placeholders in the message).
        logging.info("Stored in cache. Id: (unknown)")
        with open(self.get_file_path(filename), 'wb') as file:
            pickle.dump(object, file, pickle.HIGHEST_PROTOCOL)

    def from_file(self, filename):
        """Unpickle and return the object stored in *filename*."""
        logging.info("Cache hit. Id: (unknown)")
        with open(self.get_file_path(filename), 'rb') as file:
            return pickle.load(file)

    def is_file_exists(self, filename) -> bool:
        """Return True if *filename* exists in the cache directory."""
        return self.get_file_path(filename).exists()

    def get_file_path(self, filename) -> Path:
        """Return the absolute path of *filename* within the cache directory."""
        filename_path = self.dir_path / filename
        return filename_path.resolve()
29.4375
62
0.64862
123
942
4.731707
0.317073
0.060137
0.075601
0.07732
0.180412
0.140893
0.140893
0.140893
0.140893
0
0
0
0.240977
942
31
63
30.387097
0.813986
0
0
0
0
0
0.076433
0
0
0
0
0
0
1
0.25
false
0
0.125
0.041667
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
2afbdf11a26cac4bc40bece60d23100b0a64d445
128
py
Python
Algo_practice/CountDiv.py
harsh52/Assignments-competitive_coding
bec05217ca3d67f71d5209498dc5e6bfae9d4a93
[ "Apache-2.0" ]
8
2018-09-28T19:15:50.000Z
2020-02-22T11:17:24.000Z
Algo_practice/CountDiv.py
harsh52/Assignments-competitive_coding
bec05217ca3d67f71d5209498dc5e6bfae9d4a93
[ "Apache-2.0" ]
null
null
null
Algo_practice/CountDiv.py
harsh52/Assignments-competitive_coding
bec05217ca3d67f71d5209498dc5e6bfae9d4a93
[ "Apache-2.0" ]
2
2019-09-14T21:33:52.000Z
2020-02-13T11:06:08.000Z
def solution(A, B, K):
    """Count the integers i in the half-open range [A, B) with i % K == 0.

    Replaces the original O(B-A) loop with O(1) arithmetic while preserving
    the exact half-open semantics (B itself is excluded, as with range(A, B)).

    :param A: start of the range (inclusive)
    :param B: end of the range (exclusive)
    :param K: divisor (must be non-zero)
    :return: number of multiples of K in [A, B)
    """
    if A >= B:
        # Empty range, matching range(A, B).
        return 0
    # Multiples of K up to B-1, minus multiples of K up to A-1.
    # Python's floor division makes this correct for negative bounds too.
    return (B - 1) // K - (A - 1) // K


solution(6, 11, 2)
14.222222
21
0.601563
27
128
2.851852
0.666667
0.051948
0
0
0
0
0
0
0
0
0
0.068627
0.203125
128
9
22
14.222222
0.686275
0.09375
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
63104d29f5038f04906c20016b184fe47c390bc0
475
py
Python
custom_components/mila/milacloud/resource.py
sanghviharshit/ha-mila
e5e3bc0c380daedde04fc7a3a19ff872d6809bd2
[ "MIT" ]
2
2022-03-25T23:39:15.000Z
2022-03-25T23:49:22.000Z
custom_components/mila/milacloud/resource.py
sanghviharshit/ha-mila
e5e3bc0c380daedde04fc7a3a19ff872d6809bd2
[ "MIT" ]
1
2022-03-26T09:40:59.000Z
2022-03-28T09:26:24.000Z
custom_components/mila/milacloud/resource.py
sanghviharshit/ha-mila
e5e3bc0c380daedde04fc7a3a19ff872d6809bd2
[ "MIT" ]
1
2022-03-26T09:36:37.000Z
2022-03-26T09:36:37.000Z
"""Milacares API""" from .const import URL_ACCOUNT class Resource(object): def __init__(self, api, device, data): self.api = api self.device = device self.data = data @property def id(self): return self.device["id"] if self.is_device else None @property def is_account(self): return self.__class__.__name__ == "Account" @property def is_device(self): return self.__class__.__name__ == "Device"
21.590909
60
0.625263
59
475
4.627119
0.389831
0.120879
0.153846
0.139194
0.168498
0
0
0
0
0
0
0
0.265263
475
21
61
22.619048
0.782235
0.027368
0
0.2
0
0
0.032895
0
0
0
0
0
0
1
0.266667
false
0
0.066667
0.2
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
6325c6258163a26c43d4609e97bd03ece406baa0
751
py
Python
Foresite/processed_data/models.py
khoamb/Foresite
97b155452d92fe1c487e7cbeffbc867604a1e726
[ "MIT" ]
null
null
null
Foresite/processed_data/models.py
khoamb/Foresite
97b155452d92fe1c487e7cbeffbc867604a1e726
[ "MIT" ]
6
2018-11-29T23:25:16.000Z
2018-11-30T01:17:33.000Z
Foresite/processed_data/models.py
PricelessAntonio/Foresite
4eec1ab5bf588b1ef6ec176a612bc62e8d55b424
[ "MIT" ]
3
2018-09-05T18:57:03.000Z
2020-03-22T02:19:58.000Z
from django.db import models
from upload_csv.models import CsvUpload


class ProcessedData(models.Model):
    """Per-upload processing results: seven daily values plus a chart image."""

    # One row of processed data per uploaded CSV; the upload is the PK, so
    # deleting the upload cascades to its processed data.
    upload_csv_processed = models.OneToOneField(
        CsvUpload, on_delete=models.CASCADE, primary_key=True)
    # Daily values for a 7-day window; None means "not computed".
    day_1 = models.FloatField(null=True, blank=True, default=None)
    day_2 = models.FloatField(null=True, blank=True, default=None)
    day_3 = models.FloatField(null=True, blank=True, default=None)
    day_4 = models.FloatField(null=True, blank=True, default=None)
    day_5 = models.FloatField(null=True, blank=True, default=None)
    day_6 = models.FloatField(null=True, blank=True, default=None)
    day_7 = models.FloatField(null=True, blank=True, default=None)
    # Path to the rendered chart image (relative; max 100 chars).
    image_path = models.CharField(max_length=100, default=None)
46.9375
66
0.75233
107
751
5.149533
0.35514
0.15971
0.254083
0.3049
0.591652
0.591652
0.591652
0.591652
0.511797
0
0
0.015385
0.134487
751
15
67
50.066667
0.832308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.153846
0
0.923077
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
63271846667581d908047078deca17eed1b4f83c
137
py
Python
ex046.py
bruceewmesmo/python-mundo-02
d37d6271f3870f16521391bd7884b57aaae3484a
[ "MIT" ]
null
null
null
ex046.py
bruceewmesmo/python-mundo-02
d37d6271f3870f16521391bd7884b57aaae3484a
[ "MIT" ]
null
null
null
ex046.py
bruceewmesmo/python-mundo-02
d37d6271f3870f16521391bd7884b57aaae3484a
[ "MIT" ]
null
null
null
# Exercise 046 - countdown (counts 10 down to 0, one second per step)
from time import sleep

remaining = 10
while remaining >= 0:
    print(remaining)
    sleep(1)
    remaining -= 1
print('BOOM!')
13.7
37
0.664234
21
137
4.333333
0.761905
0.131868
0
0
0
0
0
0
0
0
0
0.073395
0.20438
137
9
38
15.222222
0.761468
0.255474
0
0
0
0
0.050505
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0.4
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
632dd9f6e12bbbd1b4acf6b14acf7925e0d8ee52
434
py
Python
scripts/get-db-raw-snapshot-mysql.py
TakesxiSximada/dumpcar
5046c36ab62c38aeee511a6593a0ad176eb9d941
[ "Apache-2.0" ]
null
null
null
scripts/get-db-raw-snapshot-mysql.py
TakesxiSximada/dumpcar
5046c36ab62c38aeee511a6593a0ad176eb9d941
[ "Apache-2.0" ]
null
null
null
scripts/get-db-raw-snapshot-mysql.py
TakesxiSximada/dumpcar
5046c36ab62c38aeee511a6593a0ad176eb9d941
[ "Apache-2.0" ]
null
null
null
"""Dump a MySQL database twice using mysqldump (host/user/db from argv)."""
import getpass  # NOTE(review): imported but unused in the visible file; kept intentionally
import argparse
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('user')
parser.add_argument('db')
args = parser.parse_args()

# SECURITY FIX: pass the command as an argument list with the default
# shell=False so host/user/db values cannot be interpreted by a shell.
command = ['mysqldump', '-h', args.host, '-u', args.user, args.db]

# BUG FIX: subprocess.run() blocks until the child exits and returns a
# CompletedProcess, which has no .wait() — the original's child.wait()
# raised AttributeError after the dump finished.
child = subprocess.run(command)

# NOTE(review): the original script runs the identical dump a second time;
# that behaviour is preserved here, but it looks like copy/paste duplication.
child = subprocess.run(command)
24.111111
57
0.702765
59
434
5.101695
0.372881
0.089701
0.169435
0.179402
0.498339
0.498339
0.498339
0.498339
0.498339
0.498339
0
0
0.119816
434
17
58
25.529412
0.787958
0
0
0.428571
0
0
0.133641
0
0
0
0
0
0
1
0
false
0.071429
0.214286
0
0.214286
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
632fdaeecd11b2730275a527955c48e365afd77e
5,689
py
Python
ecomcat_api/ztest.py
andrewraharjo/CAN-Bus-Hack_Prius_Focus
b2a73dfb4a3762d8439acd64d2e1b5d729988b2e
[ "MIT" ]
34
2016-08-05T02:49:04.000Z
2022-03-12T07:44:44.000Z
ecomcat_api/ztest.py
andrewraharjo/CAN-Bus-Hack_Prius_Focus
b2a73dfb4a3762d8439acd64d2e1b5d729988b2e
[ "MIT" ]
null
null
null
ecomcat_api/ztest.py
andrewraharjo/CAN-Bus-Hack_Prius_Focus
b2a73dfb4a3762d8439acd64d2e1b5d729988b2e
[ "MIT" ]
15
2016-12-03T03:19:13.000Z
2021-10-17T04:49:14.000Z
from PyEcom import * from config import * import time, struct, sys, binascii def str_to_hexarr(val): payload = [] for x in val: payload.append(ord(x)) return payload def nbo_int_to_bytearr(dword): arr = [] arr.append(dword & 0xFF) arr.append((dword >> 8) & 0xFF) arr.append((dword >> 16) & 0xFF) arr.append((dword >> 24) & 0xFF) return arr class EcuPart: def __init__(self, address, write_address, length): self.address = address self.write_address = write_address self.length = length if __name__ == "__main__": ecom = PyEcom('Debug\\ecomcat_api') ecom.open_device(1,35916) ECU = 0x750 #SmartKey 0x750 [0xB5] seems to return 34 when ret[2] - 0xAB for i in range(0, 1000): ret = ecom.send_iso_tp_data(0x750, [0x27, 0x01], 0x40) #key = (ret[2] - 0xAB) & 0xFF #key = (~ret[2] + 1) & 0xFF key = i & 0xFF ret = ecom.send_iso_tp_data(0x750, [0x27, 0x02, key], 0x40) if ret[2] != 0x35: print "New Error: %d %d" % (key, i) break ret = ecom.request_upload_14229(ECU, 0x01, 0x44, 0x0000F000, 0x00000001, 0x40) ret = ecom.request_upload_14229(ECU, 0x01, 0x33, 0x0000F000, 0x00000001, 0x40) ret = ecom.request_upload_14229(ECU, 0x01, 0x24, 0x0000F000, 0x00000001, 0x40) ret = ecom.request_upload_14229(ECU, 0x01, 0x22, 0x0000F000, 0x00000001, 0x40) ret = ecom.request_upload_14229(ECU, 0x01, 0x12, 0x0000F000, 0x00000001, 0x40) #Potential values for 34715300 #val = ecom.toyota_dword_to_targetdata(0xD2363456), #val = ecom.toyota_dword_to_targetdata(0x0E5E5B29) #val = ecom.toyota_dword_to_targetdata(0x6F8C9954) #val = ecom.toyota_dword_to_targetdata(0x423659A8) #val = ecom.toyota_targetdata_to_dword("42353B3C3A4A4948") #print "34715100 %08X" % (val) #T-0008-08.cuw ## val = ecom.toyota_targetdata_to_dword("443637373B3B384A") ## print "34702000 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("443345463B3C484B") ## print "34702100 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("443A45453B3D4839") ## print "34702200 %08X" % (val) ## ## val = 
ecom.toyota_targetdata_to_dword("443A33493B3D4B4D") ## print "34705000 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("443246363B463B49") ## print "34705100 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("444632463B473D4B") ## print "34705200 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("4231333A3A384B3E") ## print "34705300 %08X" % (val) ## ## #T-009-08.cuw ## val = ecom.toyota_targetdata_to_dword("4437483B3B483F3D") ## print "34709000 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("424539363A363749") ## print "34710000 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("423145393A38484C") ## print "34710100 %08X" % (val) ## ## #T-0052-11.cuw ## val = ecom.toyota_targetdata_to_dword("423438493A3E3E4D") ## print "34715000 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("42353B3C3A4A4948") ## print "34715100 %08X" % (val) ## ## val = ecom.toyota_targetdata_to_dword("424433493A4B4B4D") ## print "34715200 %08X" % (val) ## print "CRC32: %08X" % (binascii.crc32("34715200") & 0xFFFFFFFF) ## ## #T-0053-11.cuw ## val = ecom.toyota_targetdata_to_dword("3042384539373E39") ## print "34728000 %08X" % (val) ## ## #T-0146-10 ## val = ecom.toyota_targetdata_to_dword("3638393449353A37") ## print "F152647127 %08X" % (val) ## #print "CRC32: %08X" % (binascii.crc32("F152647127") & 0xFFFFFFFF) ## ## val = ecom.toyota_targetdata_to_dword("3638463749353839") ## print "F152647126 %08X" % (val) ## #print "CRC32: %08X" % (binascii.crc32("F152647126") & 0xFFFFFFFF) ## ## val = ecom.toyota_targetdata_to_dword("363846394935383C") ## print "F152647125 %08X" % (val) ## #print "CRC32: %08X" % (binascii.crc32("F152647125") & 0xFFFFFFFF) ## ## f = open("toyota_ecu.bin", "rb") ## ## num = 1 ## total_blocks = [] ## ## chunk = f.read(0x400) ## if chunk: ## hex_arr = str_to_hexarr(chunk) ## total_blocks += hex_arr ## ## #datalen = len(hex_arr) ## datalen = 0x400 ## ## ## print "%04X" % (datalen) ## ## datalen = datalen & 0x0FFF ## 
data_bytes = (0x01000 | datalen) & 0x0FFFF ## byteone = (data_bytes >> 8) ## bytetwo = data_bytes & 0xFF ## ## print "%02X %02X" % (byteone, bytetwo) ## ## #print "[%d] -> Len: %d" % (num, len(hex_arr)) ## #print hex_arr ## num += 1 ## ## print "Total: %X" % (len(total_blocks)) ## ## vindex = 0 ## cnt = 0 ## chunks = len(total_blocks) / 0x100 ## ## ## for i in range(0, chunks): ## print "Count: %d" % (cnt) ## ## tmp = total_blocks[vindex:vindex+0x100] ## vindex += 0x100 ## cnt += 1 ## for asdf in tmp: ## sys.stdout.write("%02X " % (asdf)) ## ## ecu1 = EcuPart(0x00000000, 0xFF000000, 0x1000) ## ecu2 = EcuPart(0xF7000100, 0xFF001000, None) ## ## ## addrs = [0x00000000, 0xF7000100] ## write_addrs = [0xFF000000, 0xFF001000] ## ## lens = {} ## lens[addrs[0]] = 0x1000 ## ## addr_arr = nbo_int_to_bytearr(write_addrs[0]) ## print hex(addr_arr[0]) ## print hex(addr_arr[1]) ## print hex(addr_arr[2]) ## print hex(addr_arr[3]) ## ## f.close()
30.918478
83
0.587449
651
5,689
4.933948
0.281106
0.047945
0.089041
0.128892
0.411582
0.394147
0.347447
0.272727
0.110212
0.110212
0
0.212321
0.252417
5,689
183
84
31.087432
0.542911
0.647214
0
0
0
0
0.026837
0
0
0
0.142492
0
0
0
null
null
0
0.083333
null
null
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
63340edda44bb9c3df306e15359004e548f7b446
467
py
Python
4.py
gor-dimm/lr8
a49265d185a86ac31992f3e70e51ec17f857bf49
[ "MIT" ]
null
null
null
4.py
gor-dimm/lr8
a49265d185a86ac31992f3e70e51ec17f857bf49
[ "MIT" ]
null
null
null
4.py
gor-dimm/lr8
a49265d185a86ac31992f3e70e51ec17f857bf49
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- def getInput(): return input() def testInput(a): try: val = int(a) except ValueError: return False return True def strToInt(a): return int(a) def printInt(a): print(a) if __name__ == '__main__': a = getInput() isIntable = testInput(a) if isIntable: printInt(strToInt(a)) else: print("Значение не может быть преобразовано к целому числу.")
15.064516
69
0.588865
58
467
4.603448
0.62069
0.074906
0
0
0
0
0
0
0
0
0
0.005988
0.284797
467
30
70
15.566667
0.793413
0.092077
0
0
0
0
0.14218
0
0
0
0
0
0
1
0.210526
false
0
0
0.105263
0.421053
0.210526
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
2d4a475000ced27dbdca8e97b33cbf5f2dde8ea6
2,225
py
Python
my_funcs.py
IgorReshetnyak/Spectra
223f8fd40cfbd80d0845ff463b4dde1b5d32f7bf
[ "MIT" ]
null
null
null
my_funcs.py
IgorReshetnyak/Spectra
223f8fd40cfbd80d0845ff463b4dde1b5d32f7bf
[ "MIT" ]
null
null
null
my_funcs.py
IgorReshetnyak/Spectra
223f8fd40cfbd80d0845ff463b4dde1b5d32f7bf
[ "MIT" ]
null
null
null
#COPYRIGHT Igor RESHETNYAK 2016-2020 import math import numpy import time def broad_gauss(data, energies, broad=0.1): if len(data)!=len(energies): exit("Error data and energies length differ") data1=numpy.zeros(len(energies)); ne=len(energies) for i in range(ne): norm=0.0 for i1 in range(ne): if (abs(energies[i1]-energies[i])<5*broad): data1[i]+=data[i1]*gauss(energies[i1],energies[i],broad) norm+=gauss(energies[i1],energies[i],broad) data1[i]=data1[i]/norm return data1 def broad_lorentz(data, energies, broad=0.1): if len(data)!=len(energies): exit("Error data and energies length differ") data1=numpy.zeros(len(energies)); ne=len(energies) for i in range(ne): norm=0.0 for i1 in range(ne): if (abs(energies[i1]-energies[i])<5*broad): data1[i]+=data[i1]*lorentz(energies[i1],energies[i],broad) norm+=lorentz(energies[i1],energies[i],broad) data1[i]=data1[i]/norm return data1 def broad_dynamic(data,energies,broad): if len(data)!=len(energies) or len(broad)!=len(energies): exit("Error data and energies length differ") data1=numpy.zeros(len(energies)); ne=len(energies) for i in range(ne): norm=0.0 for i1 in range(ne): if (abs(energies[i1]-energies[i])<5*broad[i]): data1[i]+=data[i1]*gauss(energies[i1],energies[i],broad[i]) norm+=gauss(energies[i1],energies[i],broad[i]) data1[i]=data1[i]/norm return data1 def gauss(x, mu, sigma): return (sigma * math.sqrt(2*math.pi))*math.exp(-1.0 / (2 * sigma * sigma) * (x - mu)*(x - mu)) def lorentz(x, mu, sigma): return (sigma /math.pi/2)*(1/((x-mu)*(x-mu)+sigma*sigma/4)) def printStage(txt): print txt print time.ctime() def eelsToEps(spectra): n=len(spectra) if n<1: exit() if n%2==1: n=n-1 im=spectra re=spectra re=numpy.fft.fft(re,n) re=-2.*re.imag/n re[0:n/2]=-re[0:n/2] re=numpy.fft.fft(re,n) re=re.real re=re[1:n] im=im[1:n] e1 = re / (re ** 2 + im ** 2) e2 = im / (re ** 2 + im ** 2) return e2
27.134146
96
0.580674
356
2,225
3.620787
0.168539
0.085337
0.125679
0.132661
0.731575
0.708301
0.640031
0.577967
0.554694
0.554694
0
0.045265
0.245393
2,225
81
97
27.469136
0.722454
0.01573
0
0.461538
0
0
0.052682
0
0
0
0
0
0
0
null
null
0
0.046154
null
null
0.046154
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
2d556a815d9c760c24f13a15f5cb2f4c91a81feb
590
py
Python
app.py
choochootrain/choochoodash
fcc5f9e1af4c9ebe4a168372f8f16306951d2ddd
[ "MIT" ]
null
null
null
app.py
choochootrain/choochoodash
fcc5f9e1af4c9ebe4a168372f8f16306951d2ddd
[ "MIT" ]
null
null
null
app.py
choochootrain/choochoodash
fcc5f9e1af4c9ebe4a168372f8f16306951d2ddd
[ "MIT" ]
null
null
null
import os import requests from flask import Flask, render_template app = Flask(__name__) app.config['DEBUG'] = True @app.route('/') def index(): return render_template('index.html') #API @app.route('/api/ping', methods=['GET']) def api_ping(): return "pong" @app.route('/api/weather/<int:city_id>', methods=['GET']) def api_weather(city_id): resp = requests.get('http://api.openweathermap.org/data/2.5/weather?id=%d&units=imperial' % city_id) return resp.text, resp.status_code if __name__ == '__main__': app.run(host='0.0.0.0', port=os.environ.get('PORT', 8000))
23.6
104
0.686441
91
590
4.230769
0.516484
0.062338
0.057143
0.083117
0
0
0
0
0
0
0
0.019305
0.122034
590
24
105
24.583333
0.723938
0.005085
0
0
0
0.058824
0.250853
0.044369
0
0
0
0
0
1
0.176471
false
0
0.176471
0.117647
0.529412
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
2d650c07ad9573d840d1742e4877c9000e5ab436
334
py
Python
basics/print_distance_readings.py
justinbooms/imu-positioning
26f33b4021ba824a44bc329ab4dc0c0c2dbfbbd0
[ "MIT" ]
null
null
null
basics/print_distance_readings.py
justinbooms/imu-positioning
26f33b4021ba824a44bc329ab4dc0c0c2dbfbbd0
[ "MIT" ]
null
null
null
basics/print_distance_readings.py
justinbooms/imu-positioning
26f33b4021ba824a44bc329ab4dc0c0c2dbfbbd0
[ "MIT" ]
null
null
null
import easygopigo3 as easy gpg = easy.EasyGoPiGo3() import di_sensors import time my_distance_sensor = gpg.init_distance_sensor() print("Distance Sensor Reading (mm): " + str(my_distance_sensor.read_mm())) ##my_imu = gpg.init_motion_sensor() ##my_imu.read() ## ##for i in range(100): ## print(my_imu.read()) ##time.sleep(.1)
22.266667
75
0.718563
51
334
4.45098
0.509804
0.246696
0.140969
0
0
0
0
0
0
0
0
0.020548
0.125749
334
14
76
23.857143
0.756849
0.302395
0
0
0
0
0.135747
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0.166667
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2d6f1cd0f83c98647a3594353c8a2361c8365a14
221
py
Python
sample.py
rokups/snek
8c64a19ebaa74ac739c64f2fb1460b06b2b6d55c
[ "MIT" ]
4
2019-02-06T23:07:42.000Z
2020-06-08T19:36:31.000Z
sample.py
rokups/snek
8c64a19ebaa74ac739c64f2fb1460b06b2b6d55c
[ "MIT" ]
null
null
null
sample.py
rokups/snek
8c64a19ebaa74ac739c64f2fb1460b06b2b6d55c
[ "MIT" ]
null
null
null
import sys import platform import ctypes text = f'Arguments: {sys.argv}\n' +\ f'Interpreter: {sys.version}\n' +\ f'OS: {platform.win32_ver()}' ctypes.windll.user32.MessageBoxW(0, text, 'snek', 1)
22.1
53
0.628959
30
221
4.6
0.666667
0.028986
0
0
0
0
0
0
0
0
0
0.034091
0.20362
221
9
54
24.555556
0.75
0
0
0
0
0
0.382075
0.103774
0
0
0
0
0
1
0
false
0
0.428571
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2d739716df9efe27729bb8bc3d94a36f3fe01b0b
12,306
py
Python
mmtbx/geometry/tests/tst_topology.py
dperl-sol/cctbx_project
b9e390221a2bc4fd00b9122e97c3b79c632c6664
[ "BSD-3-Clause-LBNL" ]
155
2016-11-23T12:52:16.000Z
2022-03-31T15:35:44.000Z
mmtbx/geometry/tests/tst_topology.py
dperl-sol/cctbx_project
b9e390221a2bc4fd00b9122e97c3b79c632c6664
[ "BSD-3-Clause-LBNL" ]
590
2016-12-10T11:31:18.000Z
2022-03-30T23:10:09.000Z
mmtbx/geometry/tests/tst_topology.py
dperl-sol/cctbx_project
b9e390221a2bc4fd00b9122e97c3b79c632c6664
[ "BSD-3-Clause-LBNL" ]
115
2016-11-15T08:17:28.000Z
2022-02-09T15:30:14.000Z
from __future__ import absolute_import, division, print_function from mmtbx.geometry import topology import unittest class TestAtom(unittest.TestCase): def test_1(self): foo = object() bar = object() a = topology.Atom( foo = foo, bar = bar ) self.assertEqual( a.foo, foo ) self.assertEqual( a.bar, bar ) class TestMolecule(unittest.TestCase): def test_0(self): m = topology.Molecule() self.assertEqual( m.size(), 0 ) self.assertEqual( m.atoms, [] ) self.assertEqual( m.atom_for, {} ) self.assertEqual( m.descriptor_for, {} ) self.assertEqual( list( m.graph.vertices() ), [] ) self.assertEqual( list( m.graph.edges() ), [] ) def test_1(self): m = topology.Molecule() a = topology.Atom() m.add( atom = a, xyz = ( 0, 0, 0 ) ) self.assertEqual( m.size(), 1 ) self.assertEqual( m.atoms, [ a ] ) self.assertEqual( len( m.atom_for ), 1 ) self.assertTrue( a in m.atom_for.values() ) self.assertEqual( len( m.descriptor_for ), 1 ) self.assertTrue( a in m.descriptor_for ) self.assertEqual( len( list( m.graph.vertices() ) ), 1 ) self.assertEqual( list( m.graph.edges() ), [] ) def test_2(self): m = topology.Molecule() a1 = topology.Atom() a2 = topology.Atom() m.add( atom = a1, xyz = ( 0, 0, 0 ) ) m.add( atom = a2, xyz = ( 1, 1, 1 ) ) self.assertEqual( m.size(), 2 ) self.assertEqual( set( m.atoms ), set( [ a1, a2 ] ) ) self.assertEqual( len( m.atom_for ), 2 ) self.assertTrue( a1 in m.atom_for.values() ) self.assertTrue( a2 in m.atom_for.values() ) self.assertEqual( len( m.descriptor_for ), 2 ) self.assertTrue( a1 in m.descriptor_for ) self.assertTrue( a2 in m.descriptor_for ) self.assertEqual( len( list( m.graph.vertices() ) ), 2 ) self.assertEqual( len( list( m.graph.edges() ) ), 1 ) edge = next(m.graph.edges()) self.assertAlmostEqual( m.graph.edge_weight( edge = edge ), 1.73205, 5 ) class TestCompound(unittest.TestCase): def test_0(self): m = topology.Compound.create() self.assertEqual( m.atoms, [] ) self.assertEqual( m.atom_for, {} ) self.assertEqual( m.descriptor_for, {} ) 
self.assertEqual( list( m.graph.vertices() ), [] ) self.assertEqual( list( m.graph.edges() ), [] ) def test_1(self): m = topology.Compound.create() a = topology.Atom() m.add_atom( atom = a ) self.assertEqual( m.atoms, [ a ] ) self.assertEqual( len( m.atom_for ), 1 ) self.assertTrue( a in m.atom_for.values() ) self.assertEqual( len( m.descriptor_for ), 1 ) self.assertTrue( a in m.descriptor_for ) self.assertEqual( len( list( m.graph.vertices() ) ), 1 ) self.assertEqual( list( m.graph.edges() ), [] ) self.assertEqual( m.distances_from( atom = a ), { a: 0 } ) self.assertEqual( m.connected_segments(), [ [ a ] ] ) def test_2(self): m = topology.Compound.create() a1 = topology.Atom() a2 = topology.Atom() m.add_atom( atom = a1 ) m.add_atom( atom = a2 ) self.assertEqual( set( m.atoms ), set( [ a1, a2 ] ) ) self.assertEqual( len( m.atom_for ), 2 ) self.assertTrue( a1 in m.atom_for.values() ) self.assertTrue( a2 in m.atom_for.values() ) self.assertEqual( len( m.descriptor_for ), 2 ) self.assertTrue( a1 in m.descriptor_for ) self.assertTrue( a2 in m.descriptor_for ) self.assertEqual( len( list( m.graph.vertices() ) ), 2 ) self.assertEqual( len( list( m.graph.edges() ) ), 0 ) self.assertEqual( m.distances_from( atom = a1 ), { a1: 0, a2: None } ) self.assertEqual( m.distances_from( atom = a2 ), { a2: 0, a1: None } ) self.assertEqual( set( frozenset( s ) for s in m.connected_segments() ), set( [ frozenset( [ a1 ] ), frozenset( [ a2 ] ) ] ), ) m.add_bond( left = a1, right = a2 ) self.assertEqual( len( list( m.graph.vertices() ) ), 2 ) self.assertEqual( len( list( m.graph.edges() ) ), 1 ) self.assertEqual( m.distances_from( atom = a1 ), { a1: 0, a2: 1 } ) self.assertEqual( m.distances_from( atom = a2 ), { a2: 0, a1: 1 } ) self.assertEqual( set( frozenset( s ) for s in m.connected_segments() ), set( [ frozenset( [ a1, a2 ] ) ] ), ) ss1 = m.subset( atoms = [ a1 ] ) self.assertEqual( len( ss1.atom_for ), 1 ) self.assertTrue( a1 in ss1.atom_for.values() ) self.assertEqual( len( 
ss1.descriptor_for ), 1 ) self.assertTrue( a1 in ss1.descriptor_for ) self.assertEqual( len( list( ss1.graph.vertices() ) ), 1 ) self.assertEqual( len( list( ss1.graph.edges() ) ), 0 ) ss2 = m.subset( atoms = [ a2 ] ) self.assertEqual( len( ss2.atom_for ), 1 ) self.assertTrue( a2 in ss2.atom_for.values() ) self.assertEqual( len( ss2.descriptor_for ), 1 ) self.assertTrue( a2 in ss2.descriptor_for ) self.assertEqual( len( list( ss2.graph.vertices() ) ), 1 ) self.assertEqual( len( list( ss2.graph.edges() ) ), 0 ) def test_3(self): atoms = [ topology.Atom( name = "N", element = "N", xyz = ( 11.498, 10.510, 10.231 ) ), topology.Atom( name = "CA", element = "C", xyz = ( 12.730, 11.073, 10.769 ) ), topology.Atom( name = "C", element = "C", xyz = ( 13.674, 9.966, 11.221 ) ), topology.Atom( name = "O", element = "O", xyz = ( 13.739, 8.902, 10.605 ) ), topology.Atom( name = "CB", element = "C", xyz = ( 12.421, 12.004, 11.944 ) ), topology.Atom( name = "CG", element = "C", xyz = ( 11.478, 13.179, 11.661 ) ), topology.Atom( name = "CD1", element = "C", xyz = ( 11.043, 13.834, 12.963 ) ), topology.Atom( name = "CD2", element = "C", xyz = ( 12.126, 14.201, 10.736 ) ), ] compound = topology.Compound.from_structure( atoms = atoms, tolerance = 0.1 ) self.assertEqual( set( frozenset( [ l.name, r.name ] ) for ( l, r ) in compound.bonds ), set( [ frozenset( [ "N", "CA" ] ), frozenset( [ "CA", "C" ] ), frozenset( [ "C", "O" ] ), frozenset( [ "CA", "CB" ] ), frozenset( [ "CB", "CG" ] ), frozenset( [ "CG", "CD1" ] ), frozenset( [ "CG", "CD2" ] ), ] ) ) class TestMcGregorMatch(unittest.TestCase): def test_asn_leu(self): l_ca = topology.Atom( label = "CA" ) l_cb = topology.Atom( label = "C" ) l_cg = topology.Atom( label = "C" ) l_cd1 = topology.Atom( label = "C" ) l_cd2 = topology.Atom( label = "C" ) leu = topology.Molecule() leu.add( atom = l_ca, xyz = ( -1.0085, -0.590773, 0.814318 ) ) leu.add( atom = l_cb, xyz = ( 0.0275, -0.557773, -0.314682 ) ) leu.add( atom = l_cg, xyz = ( 1.2335, 
0.374227, -0.138682 ) ) leu.add( atom = l_cd1, xyz = ( 2.3065, 0.046227, -1.16768 ) ) leu.add( atom = l_cd2, xyz = ( 0.8395, 1.84323, -0.230682 ) ) a_ca = topology.Atom( label = "CA" ) a_cb = topology.Atom( label = "C" ) a_cg = topology.Atom( label = "C" ) a_od1 = topology.Atom( label = "C" ) a_nd2 = topology.Atom( label = "C" ) asn = topology.Molecule() asn.add( atom = a_ca, xyz = ( -1.03327, -0.544348, 0.860946 ) ) asn.add( atom = a_cb, xyz = ( 0.10486, -0.548357, -0.164901 ) ) asn.add( atom = a_cg, xyz = ( 0.990984, 0.682823, -0.070521 ) ) asn.add( atom = a_od1, xyz = ( 1.39496, 1.24684, -1.08724 ) ) asn.add( atom = a_nd2, xyz = ( 1.29745, 1.10599, 1.15228 ) ) res = topology.McGregorMatch( molecule1 = leu, molecule2 = asn, is_valid = lambda match: any( m.label == "CA" for m in match ), vertex_equality = lambda l, r: l.label == r.label, edge_equality = lambda l, r: abs( l - r ) < 0.1 ) self.assertEqual( res.length(), 3 ) mapping = res.remapped() self.assertTrue( ( l_ca, a_ca ) in mapping ) self.assertTrue( ( l_cb, a_cb ) in mapping ) self.assertTrue( ( l_cg, a_cg ) in mapping ) self.assertTrue( ( l_cd1, a_od1 ) not in mapping ) class TestRascalMatch(unittest.TestCase): def test_asn_leu(self): l_ca = topology.Atom( label = "CA" ) l_cb = topology.Atom( label = "C" ) l_cg = topology.Atom( label = "C" ) l_cd1 = topology.Atom( label = "C" ) l_cd2 = topology.Atom( label = "C" ) leu = topology.Molecule() leu.add( atom = l_ca, xyz = ( -1.0085, -0.590773, 0.814318 ) ) leu.add( atom = l_cb, xyz = ( 0.0275, -0.557773, -0.314682 ) ) leu.add( atom = l_cg, xyz = ( 1.2335, 0.374227, -0.138682 ) ) leu.add( atom = l_cd1, xyz = ( 2.3065, 0.046227, -1.16768 ) ) leu.add( atom = l_cd2, xyz = ( 0.8395, 1.84323, -0.230682 ) ) a_ca = topology.Atom( label = "CA" ) a_cb = topology.Atom( label = "C" ) a_cg = topology.Atom( label = "C" ) a_od1 = topology.Atom( label = "C" ) a_nd2 = topology.Atom( label = "C" ) asn = topology.Molecule() asn.add( atom = a_ca, xyz = ( -1.03327, -0.544348, 
0.860946 ) ) asn.add( atom = a_cb, xyz = ( 0.10486, -0.548357, -0.164901 ) ) asn.add( atom = a_cg, xyz = ( 0.990984, 0.682823, -0.070521 ) ) asn.add( atom = a_od1, xyz = ( 1.39496, 1.24684, -1.08724 ) ) asn.add( atom = a_nd2, xyz = ( 1.29745, 1.10599, 1.15228 ) ) m = topology.RascalMatch( molecule1 = leu, molecule2 = asn, vertex_equality = lambda l, r: l.label == r.label, edge_equality = lambda l, r: abs( l - r ) <= 0.1, ) self.assertEqual( m.count(), 1 ) self.assertEqual( m.length(), 3 ) mapping = m.remapped()[0] self.assertEqual( len( mapping ), 3 ) self.assertTrue( ( l_ca, a_ca ) in mapping ) self.assertTrue( ( l_cb, a_cb ) in mapping ) self.assertTrue( ( l_cg, a_cg ) in mapping ) self.assertTrue( ( l_cd1, a_od1 ) not in mapping ) class TestGreedyMatch(unittest.TestCase): def test_asn_leu(self): l_ca = topology.Atom( label = "CA" ) l_cb = topology.Atom( label = "C" ) l_cg = topology.Atom( label = "C" ) l_cd1 = topology.Atom( label = "C" ) l_cd2 = topology.Atom( label = "C" ) leu = topology.Molecule() leu.add( atom = l_ca, xyz = ( -1.0085, -0.590773, 0.814318 ) ) leu.add( atom = l_cb, xyz = ( 0.0275, -0.557773, -0.314682 ) ) leu.add( atom = l_cg, xyz = ( 1.2335, 0.374227, -0.138682 ) ) leu.add( atom = l_cd1, xyz = ( 2.3065, 0.046227, -1.16768 ) ) leu.add( atom = l_cd2, xyz = ( 0.8395, 1.84323, -0.230682 ) ) a_ca = topology.Atom( label = "CA" ) a_cb = topology.Atom( label = "C" ) a_cg = topology.Atom( label = "C" ) a_od1 = topology.Atom( label = "C" ) a_nd2 = topology.Atom( label = "C" ) asn = topology.Molecule() asn.add( atom = a_ca, xyz = ( -1.03327, -0.544348, 0.860946 ) ) asn.add( atom = a_cb, xyz = ( 0.10486, -0.548357, -0.164901 ) ) asn.add( atom = a_cg, xyz = ( 0.990984, 0.682823, -0.070521 ) ) asn.add( atom = a_od1, xyz = ( 1.39496, 1.24684, -1.08724 ) ) asn.add( atom = a_nd2, xyz = ( 1.29745, 1.10599, 1.15228 ) ) m = topology.GreedyMatch( molecule1 = leu, molecule2 = asn, vertex_equality = lambda l, r: l.label == r.label, edge_equality = lambda l, r: abs( 
l - r ) <= 0.1, ) self.assertEqual( m.count(), 1 ) self.assertEqual( m.length(), 3 ) mapping = m.remapped()[0] self.assertEqual( len( mapping ), 3 ) self.assertTrue( ( l_ca, a_ca ) in mapping ) self.assertTrue( ( l_cb, a_cb ) in mapping ) self.assertTrue( ( l_cg, a_cg ) in mapping ) self.assertTrue( ( l_cd1, a_od1 ) not in mapping ) suite_atom = unittest.TestLoader().loadTestsFromTestCase( TestAtom ) suite_molecule = unittest.TestLoader().loadTestsFromTestCase( TestMolecule ) suite_compound = unittest.TestLoader().loadTestsFromTestCase( TestCompound ) suite_mcgregor_match = unittest.TestLoader().loadTestsFromTestCase( TestMcGregorMatch ) suite_rascal_match= unittest.TestLoader().loadTestsFromTestCase( TestRascalMatch ) suite_greedy_match= unittest.TestLoader().loadTestsFromTestCase( TestGreedyMatch ) alltests = unittest.TestSuite( [ suite_atom, suite_molecule, suite_compound, suite_mcgregor_match, suite_rascal_match, suite_greedy_match, ] ) def load_tests(loader, tests, pattern): return alltests if __name__ == "__main__": unittest.TextTestRunner( verbosity = 2 ).run( alltests )
35.982456
86
0.591013
1,731
12,306
4.095321
0.109185
0.129073
0.071942
0.060939
0.720976
0.707011
0.668642
0.644238
0.632811
0.622655
0
0.092549
0.243133
12,306
341
87
36.087977
0.668563
0
0
0.594306
0
0
0.00772
0
0
0
0
0
0.320285
1
0.042705
false
0
0.010676
0.003559
0.078292
0.003559
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2d73ac22b27169801ecab1b7ed3c0715a7bc8969
210
py
Python
decorators/decorator.py
UltiRequiem/professional-phython-platzi
0bf8f97b172d0799d6906193090ef69beb1c8b4b
[ "MIT" ]
4
2021-08-02T21:34:46.000Z
2021-09-24T03:26:35.000Z
decorators/decorator.py
UltiRequiem/professional-phython-platzi
0bf8f97b172d0799d6906193090ef69beb1c8b4b
[ "MIT" ]
null
null
null
decorators/decorator.py
UltiRequiem/professional-phython-platzi
0bf8f97b172d0799d6906193090ef69beb1c8b4b
[ "MIT" ]
4
2021-08-02T21:34:47.000Z
2021-08-11T03:21:37.000Z
def decorator(func): def wrapper(): print("Decoring") func() print("Done!") return wrapper @decorator def say_hi(): print("Hi!") if __name__ == "__main__": say_hi()
12.352941
26
0.542857
23
210
4.521739
0.565217
0.096154
0
0
0
0
0
0
0
0
0
0
0.3
210
16
27
13.125
0.707483
0
0
0
0
0
0.114286
0
0
0
0
0
0
1
0.272727
false
0
0
0
0.363636
0.272727
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
2d8af5bde08bccb8801400762209680c1690511d
151
py
Python
_solutions/pandas/series/pandas_series_slice_datetime.py
sages-pl/2022-01-pythonsqlalchemy-aptiv
1d6d856608e9dbe25b139e8968c48b7f46753b84
[ "MIT" ]
null
null
null
_solutions/pandas/series/pandas_series_slice_datetime.py
sages-pl/2022-01-pythonsqlalchemy-aptiv
1d6d856608e9dbe25b139e8968c48b7f46753b84
[ "MIT" ]
null
null
null
_solutions/pandas/series/pandas_series_slice_datetime.py
sages-pl/2022-01-pythonsqlalchemy-aptiv
1d6d856608e9dbe25b139e8968c48b7f46753b84
[ "MIT" ]
null
null
null
s = pd.Series( data=np.random.randn(NUMBER), index=pd.date_range('2000-01-01', freq='D', periods=NUMBER)) result = s['2000-02-14':'2000-02']
21.571429
64
0.635762
26
151
3.653846
0.730769
0.126316
0
0
0
0
0
0
0
0
0
0.167939
0.13245
151
6
65
25.166667
0.557252
0
0
0
0
0
0.186667
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2d919e1f16d23a647675a6db1589afef99098e27
1,226
py
Python
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/SGIX/shadow.py
MontyThibault/centre-of-mass-awareness
58778f148e65749e1dfc443043e9fc054ca3ff4d
[ "MIT" ]
null
null
null
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/SGIX/shadow.py
MontyThibault/centre-of-mass-awareness
58778f148e65749e1dfc443043e9fc054ca3ff4d
[ "MIT" ]
null
null
null
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/SGIX/shadow.py
MontyThibault/centre-of-mass-awareness
58778f148e65749e1dfc443043e9fc054ca3ff4d
[ "MIT" ]
null
null
null
'''OpenGL extension SGIX.shadow Overview (from the spec) This extension defines two new operations to be performed on texture values before they are passed on to the filtering subsystem. These operations perform either a <= or >= test on the value from texture memory and the iterated R value, and return 1.0 or 0.0 if the test passes or fails, respectively. The official definition of this extension is available here: http://oss.sgi.com/projects/ogl-sample/registry/SGIX/shadow.txt Automatically generated by the get_gl_extensions script, do not edit! ''' from OpenGL import platform, constants, constant, arrays from OpenGL import extensions from OpenGL.GL import glget import ctypes EXTENSION_NAME = 'GL_SGIX_shadow' GL_TEXTURE_COMPARE_SGIX = constant.Constant( 'GL_TEXTURE_COMPARE_SGIX', 0x819A ) GL_TEXTURE_COMPARE_OPERATOR_SGIX = constant.Constant( 'GL_TEXTURE_COMPARE_OPERATOR_SGIX', 0x819B ) GL_TEXTURE_LEQUAL_R_SGIX = constant.Constant( 'GL_TEXTURE_LEQUAL_R_SGIX', 0x819C ) GL_TEXTURE_GEQUAL_R_SGIX = constant.Constant( 'GL_TEXTURE_GEQUAL_R_SGIX', 0x819D ) def glInitShadowSGIX(): '''Return boolean indicating whether this extension is available''' return extensions.hasGLExtension( EXTENSION_NAME )
40.866667
98
0.80832
180
1,226
5.305556
0.5
0.075393
0.067016
0.092147
0.230366
0.13822
0
0
0
0
0
0.018657
0.125612
1,226
29
99
42.275862
0.872201
0.54894
0
0
0
0
0.198305
0.174576
0
0
0.040678
0
0
1
0.090909
false
0
0.363636
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2d9c8b84d1f3fe5ddf153b4767b87468c135b546
804
py
Python
csr/kernels/scipy.py
mdekstrand/csr
665ceefff882d7e42db41034246b6ddb1f93e372
[ "MIT" ]
11
2021-02-07T16:37:31.000Z
2022-03-19T15:19:16.000Z
csr/kernels/scipy.py
mdekstrand/csr
665ceefff882d7e42db41034246b6ddb1f93e372
[ "MIT" ]
25
2021-02-11T22:42:01.000Z
2022-01-27T21:04:31.000Z
csr/kernels/scipy.py
lenskit/csr
03fde2d8c3cb7eb330028f34765ff2a06f849631
[ "MIT" ]
2
2021-02-07T02:05:04.000Z
2021-06-01T15:23:09.000Z
""" SciPy "kernel". This kernel is not Numba-compatible, and will never be selected as the default. It primarily exists for ease in testing and benchmarking CSR operations. """ import numpy as np from scipy.sparse import csr_matrix from csr import CSR max_nnz = np.iinfo('i8').max def to_handle(csr: CSR): values = csr.values if values is None: values = np.ones(csr.nnz) return csr_matrix((values, csr.colinds, csr.rowptrs), (csr.nrows, csr.ncols)) def from_handle(h): m: csr_matrix = h.tocsr() nr, nc = m.shape return CSR(nr, nc, m.nnz, m.indptr, m.indices, m.data) def order_columns(h): h.sort_indices() def release_handle(h): pass def mult_ab(A, B): return A @ B def mult_abt(A, B): return A @ B.T def mult_vec(A, v): return A @ v
17.866667
81
0.664179
138
804
3.782609
0.5
0.015326
0.019157
0.034483
0.038314
0
0
0
0
0
0
0.001592
0.218905
804
44
82
18.272727
0.829618
0.211443
0
0
0
0
0.003195
0
0
0
0
0
0
1
0.304348
false
0.043478
0.130435
0.130435
0.652174
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
2d9dc598e3909d8971f2ae462dd996f91b6df60e
25,211
py
Python
riscvmodel/insn.py
bieganski/riscv-python-model
4441f5c44387dc7b005e0759b01f8e78b02a5f24
[ "MIT" ]
null
null
null
riscvmodel/insn.py
bieganski/riscv-python-model
4441f5c44387dc7b005e0759b01f8e78b02a5f24
[ "MIT" ]
null
null
null
riscvmodel/insn.py
bieganski/riscv-python-model
4441f5c44387dc7b005e0759b01f8e78b02a5f24
[ "MIT" ]
null
null
null
# Copyright Stefan Wallentowitz # Licensed under the MIT License, see LICENSE for details. # SPDX-License-Identifier: MIT """ Instructions """ from .isa import * from .variant import * from .model import State @isa("lui", RV32I, opcode=0b0110111) class InstructionLUI(InstructionUType): """ The Load Upper Immediate (LUI) instruction loads the given immediate (unsigned 20 bit) to the upper 20 bit of the destination register. The lower bits are set to zero in the destination register. This instruction can be used to efficiently form constants, as a sequence of LUI and ORI for example. """ def execute(self, model: Model): model.state.intreg[self.rd] = (self.imm << 12) @isa("auipc", RV32I, opcode=0b0010111) class InstructionAUIPC(InstructionUType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.pc + (self.imm << 12) @isa("jal", RV32I, opcode=0b1101111) class InstructionJAL(InstructionJType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.pc + 4 model.state.pc += self.imm @isa("jalr", RV32I, opcode=0b1100111, funct3=0b000) class InstructionJALR(InstructionIType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.pc + 4 model.state.pc = model.state.intreg[self.rs1] + self.imm @isa("beq", RV32I, opcode=0b1100011, funct3=0b000) class InstructionBEQ(InstructionBType): def execute(self, model: Model): # todo: problem with __cmp__ if model.state.intreg[self.rs1].value == model.state.intreg[self.rs2].value: model.state.pc = model.state.pc + self.imm @isa("bne", RV32I, opcode=0b1100011, funct3=0b001) class InstructionBNE(InstructionBType): def execute(self, model: Model): if model.state.intreg[self.rs1].value != model.state.intreg[self.rs2].value: model.state.pc = model.state.pc + self.imm @isa("blt", RV32I, opcode=0b1100011, funct3=0b100) class InstructionBLT(InstructionBType): def execute(self, model: Model): if model.state.intreg[self.rs1].value < model.state.intreg[self.rs2].value: 
model.state.pc = model.state.pc + self.imm @isa("bge", RV32I, opcode=0b1100011, funct3=0b101) class InstructionBGE(InstructionBType): def execute(self, model: Model): if model.state.intreg[self.rs1].value >= model.state.intreg[self.rs2].value: model.state.pc = model.state.pc + self.imm @isa("bltu", RV32I, opcode=0b1100011, funct3=0b110) class InstructionBLTU(InstructionBType): def execute(self, model: Model): if model.state.intreg[self.rs1].unsigned() < model.state.intreg[ self.rs2].unsigned(): model.state.pc = model.state.pc + self.imm @isa("bgeu", RV32I, opcode=0b1100011, funct3=0b111) class InstructionBGEU(InstructionBType): def execute(self, model: Model): if model.state.intreg[self.rs1].unsigned() >= model.state.intreg[ self.rs2].unsigned(): model.state.pc = model.state.pc + self.imm @isa("lb", RV32I, opcode=0b0000011, funct3=0b000) class InstructionLB(InstructionILType): def execute(self, model: Model): data = model.state.memory.lb((model.state.intreg[self.rs1] + self.imm).unsigned()) if (data >> 7) & 0x1: data |= 0xFFFFFF00 model.state.intreg[self.rd] = data @isa("lh", RV32I, opcode=0b0000011, funct3=0b001) class InstructionLH(InstructionILType): def execute(self, model: Model): data = model.state.memory.lh((model.state.intreg[self.rs1] + self.imm).unsigned()) if (data >> 15) & 0x1: data |= 0xFFFF0000 model.state.intreg[self.rd] = data @isa("lw", RV32I, opcode=0b0000011, funct3=0b010) class InstructionLW(InstructionILType): def execute(self, model: Model): data = model.state.memory.lw((model.state.intreg[self.rs1] + self.imm).unsigned()) model.state.intreg[self.rd] = data @isa("lbu", RV32I, opcode=0b0000011, funct3=0b100) class InstructionLBU(InstructionILType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.memory.lb( (model.state.intreg[self.rs1] + self.imm).unsigned()) @isa("lhu", RV32I, opcode=0b0000011, funct3=0b101) class InstructionLHU(InstructionILType): def execute(self, model: Model): model.state.intreg[self.rd] = 
model.state.memory.lh( (model.state.intreg[self.rs1] + self.imm).unsigned()) @isa("sb", RV32I, opcode=0b0100011, funct3=0b000) class InstructionSB(InstructionSType): def execute(self, model: Model): model.state.memory.sb((model.state.intreg[self.rs1] + self.imm).unsigned(), model.state.intreg[self.rs2]) @isa("sh", RV32I, opcode=0b0100011, funct3=0b001) class InstructionSH(InstructionSType): def execute(self, model: Model): model.state.memory.sh((model.state.intreg[self.rs1] + self.imm).unsigned(), model.state.intreg[self.rs2]) @isa("sw", RV32I, opcode=0b0100011, funct3=0b010) class InstructionSW(InstructionSType): def execute(self, model: Model): model.state.memory.sw((model.state.intreg[self.rs1] + self.imm).unsigned(), model.state.intreg[self.rs2]) @isa("addi", RV32I, opcode=0b0010011, funct3=0b000) class InstructionADDI(InstructionIType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] + self.imm @isa("slti", RV32I, opcode=0b0010011, funct3=0b010) class InstructionSLTI(InstructionIType): def execute(self, model: Model): if model.state.intreg[self.rs1] < self.imm: model.state.intreg[self.rd] = 1 else: model.state.intreg[self.rd] = 0 @isa("sltiu", RV32I, opcode=0b0010011, funct3=0b011) class InstructionSLTIU(InstructionIType): def execute(self, model: Model): if model.state.intreg[self.rs1].unsigned() < int(self.imm): model.state.intreg[self.rd] = 1 else: model.state.intreg[self.rd] = 0 @isa("xori", RV32I, opcode=0b0010011, funct3=0b100) class InstructionXORI(InstructionIType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] ^ self.imm @isa("ori", RV32I, opcode=0b0010011, funct3=0b110) class InstructionORI(InstructionIType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] | self.imm @isa("andi", RV32I, opcode=0b0010011, funct3=0b111) class InstructionANDI(InstructionIType): def execute(self, model: Model): model.state.intreg[self.rd] 
= model.state.intreg[self.rs1] & self.imm @isa("slli", RV32I, opcode=0b0010011, funct3=0b001, funct7=0b0000000) class InstructionSLLI(InstructionISType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] << self.shamt @isa("srli", RV32I, opcode=0b0010011, funct3=0b101, funct7=0b0000000) class InstructionSRLI(InstructionISType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1].unsigned() >> int( self.shamt) @isa("srai", RV32I, opcode=0b0010011, funct3=0b101, funct7=0b0100000) class InstructionSRAI(InstructionISType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] >> self.shamt @isa("add", RV32I, opcode=0b0110011, funct3=0b000, funct7=0b0000000) class InstructionADD(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] + model.state.intreg[self.rs2] @isa("sub", RV32I, opcode=0b0110011, funct3=0b000, funct7=0b0100000) class InstructionSUB(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] - model.state.intreg[self.rs2] @isa("sll", RV32I, opcode=0b0110011, funct3=0b001, funct7=0b0000000) class InstructionSLL(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] << ( model.state.intreg[self.rs2] & 0x1f) @isa("slt", RV32I, opcode=0b0110011, funct3=0b010, funct7=0b0000000) class InstructionSLT(InstructionRType): def execute(self, model: Model): if model.state.intreg[self.rs1] < model.state.intreg[self.rs2]: model.state.intreg[self.rd] = 1 else: model.state.intreg[self.rd] = 0 @isa("sltu", RV32I, opcode=0b0110011, funct3=0b011, funct7=0b0000000) class InstructionSLTU(InstructionRType): def execute(self, state: State): if state.intreg[self.rs1].unsigned() < state.intreg[ self.rs2].unsigned(): state.intreg[self.rd] = 1 else: state.intreg[self.rd] = 0 @isa("xor", RV32I, 
opcode=0b0110011, funct3=0b100, funct7=0b0000000) class InstructionXOR(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] ^ model.state.intreg[self.rs2] @isa("srl", RV32I, opcode=0b0110011, funct3=0b101, funct7=0b0000000) class InstructionSRL(InstructionRType): def execute(self, model: Model): src = model.state.intreg[self.rs1] shift = model.state.intreg[self.rs2] & 0x1f model.state.intreg[self.rd] = src >> shift @isa("sra", RV32I, opcode=0b0110011, funct3=0b101, funct7=0b0100000) class InstructionSRA(InstructionRType): def execute(self, model: Model): usrc = model.state.intreg[self.rs1].unsigned() shift = model.state.intreg[self.rs2].unsigned() & 0x1f if usrc >> 31: to_clear = 32 - shift sign_mask = (((1 << 32) - 1) >> to_clear) << to_clear else: sign_mask = 0 model.state.intreg[self.rd] = sign_mask | (usrc >> shift) @isa("or", RV32I, opcode=0b0110011, funct3=0b110, funct7=0b0000000) class InstructionOR(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] | model.state.intreg[self.rs2] @isa("and", RV32I, opcode=0b0110011, funct3=0b111, funct7=0b0000000) class InstructionAND(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] & model.state.intreg[self.rs2] @isa("fence", RV32I, opcode=0b0001111, funct3=0b000) class InstructionFENCE(InstructionIType): isa_format_id = "FENCE" def execute(self, model: Model): pass @isa("fence.i", RV32IZifencei, opcode=0b0001111, funct3=0b001) class InstructionFENCEI(InstructionIType): def execute(self, model: Model): pass # @isa("mret", RV32I, opcode=0b1110011, funct3=0b000, imm=0b001100000010, rs1=0b00000, rd=0b00000) @isa("ecall", RV32I, opcode=0b1110011, funct3=0b000, imm=0b000000000000, rd=0b00000, rs1=0b00000) class InstructionECALL(InstructionIType): def execute(self, model: Model): model.environment.call(model.state) def __str__(self): return "ecall" 
@isa("uret", RV32I, opcode=0b1110011, funct3=0b000, imm=0b000000000010, rs1=0b00000, rd=0b00000) class InstructionURET(InstructionIType): """ Machine level exception return """ def execute(self, model: Model): # TODO: implement pass @isa("sret", RV32I, opcode=0b1110011, funct3=0b000, imm=0b000100000010, rs1=0b00000, rd=0b00000) class InstructionSRET(InstructionIType): """ Machine level exception return """ def execute(self, model: Model): # TODO: implement pass @isa("hret", RV32I, opcode=0b1110011, funct3=0b000, imm=0b001000000010, rs1=0b00000, rd=0b00000) class InstructionHRET(InstructionIType): """ Machine level exception return """ def execute(self, model: Model): # TODO: implement pass @isa("mret", RV32I, opcode=0b1110011, funct3=0b000, imm=0b001100000010, rs1=0b00000, rd=0b00000) class InstructionMRET(InstructionIType): """ Machine level exception return """ def execute(self, model: Model): # TODO: implement pass @isa("wfi", RV32I, opcode=0b1110011, funct3=0b000, imm=0b000100000101, rs1=0b00000, rd=0b00000) class InstructionWFI(InstructionIType): def execute(self, model: Model): pass @isa("ebreak", RV32I, opcode=0b1110011, funct3=0b000, imm=0b000000000001) class InstructionEBREAK(InstructionIType): def execute(self, model: Model): pass def __str__(self): return "ebreak" @isa("csrrw", RV32IZicsr, opcode=0b1110011, funct3=0b001) class InstructionCSRRW(CSRxInstructionType): def execute(self, model: Model): pass @isa("csrrs", RV32IZicsr, opcode=0b1110011, funct3=0b010) class InstructionCSRRS(CSRxInstructionType): def execute(self, model: Model): pass @isa("csrrc", RV32IZicsr, opcode=0b1110011, funct3=0b011) class InstructionCSRRC(CSRxInstructionType): def execute(self, model: Model): pass #@isa("csrrwi", RV32IZicsr, opcode=0b1110011, funct3=0b101) #class InstructionCSRRWI(Instruction): # def execute(self, model: Model): # pass #@isa("csrrsi", RV32IZicsr, opcode=0b1110011, funct3=0b110) #class InstructionCSRRSI(Instruction): # def execute(self, model: Model): # 
pass #@isa("csrrci", RV32IZicsr, opcode=0b1110011, funct3=0b111) #class InstructionCSRRCI(Instruction): # def execute(self, model: Model): # pass @isa("lwu", RV64I, opcode=0b0000011, funct3=0b110) class InstructionLWU(InstructionIType): def execute(self, model: Model): pass @isa("ld", RV64I, opcode=0b0000011, funct3=0b011) class InstructionLD(InstructionIType): def execute(self, model: Model): pass @isa("sd", RV64I, opcode=0b0100011, funct3=0b011) class InstructionSD(InstructionISType): def execute(self, model: Model): pass @isa_pseudo() class InstructionNOP(InstructionADDI): def __init__(self): super().__init__(0, 0, 0) def __str__(self): return "nop" @isa("mul", RV32IM, opcode=0b0110011, funct3=0b000, funct7=0b0000001) class InstructionMUL(InstructionRType): def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs1] * model.state.intreg[self.rs2] @isa("mulh", RV32IM, opcode=0b0110011, funct3=0b001, funct7=0b0000001) class InstructionMULH(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("mulhsu", RV32IM, opcode=0b0110011, funct3=0b010, funct7=0b0000001) class InstructionMULHSU(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("mulhu", RV32IM, opcode=0b0110011, funct3=0b011, funct7=0b0000001) class InstructionMULHU(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("div", RV32IM, opcode=0b0110011, funct3=0b100, funct7=0b0000001) class InstructionDIV(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("divu", RV32IM, opcode=0b0110011, funct3=0b101, funct7=0b0000001) class InstructionDIVU(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("rem", RV32IM, opcode=0b0110011, funct3=0b110, funct7=0b0000001) class InstructionREM(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa("remu", RV32IM, opcode=0b0110011, funct3=0b111, funct7=0b0000001) class 
InstructionREMU(InstructionRType): def execute(self, model: Model): # TODO: implement pass @isa_c("c.addi", RV32IC, opcode=1, funct3=0b000) class InstructionCADDI(InstructionCIType): def expand(self): pass def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rd] + self.imm @isa_c("c.andi", RV32IC, opcode=1, funct3=0b100) class InstructionCANDI(InstructionCBType): def expand(self): pass def execute(self, model: Model): pass @isa_c("c.swsp", RV32IC, opcode=2, funct3=6) class InstructionCSWSP(InstructionCSSType): def expand(self): pass def decode(self, machinecode: int): self.rs = (machinecode >> 2) & 0x1f imm12to9 = (machinecode >> 9) & 0xf imm8to7 = (machinecode >> 7) & 0x3 self.imm.set_from_bits((imm8to7 << 4) | imm12to9) def execute(self, model: Model): pass @isa_c("c.li", RV32IC, opcode=1, funct3=2) class InstructionCLI(InstructionCIType): def expand(self): pass def execute(self, model: Model): model.state.intreg[self.rd] = self.imm @isa_c("c.mv", RV32IC, opcode=2, funct4=8) class InstructionCMV(InstructionCRType): def expand(self): pass def execute(self, model: Model): model.state.intreg[self.rd] = model.state.intreg[self.rs] @isa("lr", RV32A, opcode=0b0101111, funct5=0b00010, funct3=0b010) class InstructionLR(InstructionAMOType): """ Load reserved """ def execute(self, model: Model): # Perform a normal load data = model.state.memory.lw(model.state.intreg[self.rs1].unsigned()) model.state.intreg[self.rd] = data # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("sc", RV32A, opcode=0b0101111, funct5=0b00011, funct3=0b010) class InstructionSC(InstructionAMOType): """ Store conditional """ def execute(self, model: Model): # Check if this address is reserved if model.state.atomic_reserved(model.state.intreg[self.rs1]): model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), 
model.state.intreg[self.rs2] ) model.state.intreg[self.rd] = 0 else: model.state.intreg[self.rd] = 1 # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amoadd", RV32A, opcode=0b0101111, funct5=0b00000, funct3=0b010) class InstructionAMOADD(InstructionAMOType): """ Atomic add operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), (model.state.intreg[self.rs2] + model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amoxor", RV32A, opcode=0b0101111, funct5=0b00100, funct3=0b010) class InstructionAMOXOR(InstructionAMOType): """ Atomic XOR operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), (model.state.intreg[self.rs2] ^ model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amoor", RV32A, opcode=0b0101111, funct5=0b01000, funct3=0b010) class InstructionAMOOR(InstructionAMOType): """ Atomic OR operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), 
(model.state.intreg[self.rs2] | model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amoand", RV32A, opcode=0b0101111, funct5=0b01100, funct3=0b010) class InstructionAMOAND(InstructionAMOType): """ Atomic AND operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), (model.state.intreg[self.rs2] & model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amomin", RV32A, opcode=0b0101111, funct5=0b10000, funct3=0b010) class InstructionAMOMIN(InstructionAMOType): """ Atomic minimum operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), min(model.state.intreg[self.rs2], model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amomax", RV32A, opcode=0b0101111, funct5=0b10100, funct3=0b010) class InstructionAMOMAX(InstructionAMOType): """ Atomic maximum operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), 
max(model.state.intreg[self.rs2], model.state.intreg[self.rd]) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amominu", RV32A, opcode=0b0101111, funct5=0b11000, funct3=0b010) class InstructionAMOMINU(InstructionAMOType): """ Atomic unsigned minimum operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), min( model.state.intreg[self.rs2].unsigned(), model.state.intreg[self.rd].unsigned() ) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amomaxu", RV32A, opcode=0b0101111, funct5=0b11100, funct3=0b010) class InstructionAMOMAXU(InstructionAMOType): """ Atomic unsigned maximum operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), max( model.state.intreg[self.rs2].unsigned(), model.state.intreg[self.rd].unsigned() ) ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1]) @isa("amoswap", RV32A, opcode=0b0101111, funct5=0b00001, funct3=0b010) class InstructionAMOSWAP(InstructionAMOType): """ Atomic swap operation """ def execute(self, model: Model): # This models a single HART with 1 stage pipeline, so will always succeed model.state.intreg[self.rd] = model.state.memory.lw( model.state.intreg[self.rs1].unsigned() ) 
model.state.memory.sw( model.state.intreg[self.rs1].unsigned(), model.state.intreg[self.rs2] ) # Perform correct lock or release actions if self.rl: model.state.atomic_release(model.state.intreg[self.rs1]) elif self.aq: model.state.atomic_acquire(model.state.intreg[self.rs1])
35.309524
110
0.671175
3,104
25,211
5.431057
0.120168
0.137027
0.147704
0.192194
0.687092
0.637027
0.591529
0.576877
0.546743
0.53138
0
0.088296
0.195431
25,211
713
111
35.359046
0.742802
0.106858
0
0.449893
0
0
0.01432
0
0
0
0.002148
0.001403
0
1
0.183369
false
0.059701
0.006397
0.006397
0.362473
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
2dc1fce6ff36eced9643eb2405ebb15d6785acf4
264
py
Python
public/cantusdata/test/views/test_browse.py
jacobsanz97/cantus
37d139ae20972c36d4abb96a2a5ac5106b0c1b47
[ "MIT" ]
12
2015-01-08T14:34:55.000Z
2021-06-03T06:53:04.000Z
public/cantusdata/test/views/test_browse.py
jacobsanz97/cantus
37d139ae20972c36d4abb96a2a5ac5106b0c1b47
[ "MIT" ]
303
2015-01-14T17:10:32.000Z
2022-02-14T20:27:21.000Z
public/cantusdata/test/views/test_browse.py
jacobsanz97/cantus
37d139ae20972c36d4abb96a2a5ac5106b0c1b47
[ "MIT" ]
2
2019-10-07T21:21:27.000Z
2019-10-20T16:58:22.000Z
from rest_framework.test import APITestCase from rest_framework import status class BrowseViewTestCase(APITestCase): def test_get_browse(self): response = self.client.get("/browse/") self.assertEqual(response.status_code, status.HTTP_200_OK)
29.333333
66
0.768939
33
264
5.939394
0.606061
0.081633
0.173469
0
0
0
0
0
0
0
0
0.013333
0.147727
264
8
67
33
0.857778
0
0
0
0
0
0.030303
0
0
0
0
0
0.166667
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2dc858685507f59151c55418ca42c6818c4fab18
282
py
Python
model/resnet44.py
lorenz0890/pytorch-admm-pruning
85f15d86e6d9037fe4016ebcd435065ecba823b5
[ "BSD-3-Clause" ]
null
null
null
model/resnet44.py
lorenz0890/pytorch-admm-pruning
85f15d86e6d9037fe4016ebcd435065ecba823b5
[ "BSD-3-Clause" ]
null
null
null
model/resnet44.py
lorenz0890/pytorch-admm-pruning
85f15d86e6d9037fe4016ebcd435065ecba823b5
[ "BSD-3-Clause" ]
null
null
null
import torch import torchvision.models import torch.nn.functional as F import torch.nn as nn from .resnet import resnet44 def ResNet44(num_classes=10): model = nn.Sequential( resnet44(num_classes=num_classes), nn.LogSoftmax(1) ) return model
23.5
46
0.695035
38
282
5.078947
0.526316
0.170984
0.134715
0
0
0
0
0
0
0
0
0.041667
0.234043
282
12
47
23.5
0.851852
0
0
0
0
0
0
0
0
0
0
0
0
1
0.090909
false
0
0.454545
0
0.636364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2dcf8bc8a5a0492476ff8cc6f512e7c828b813d7
414
py
Python
index/models.py
brahici/WBO
8781512b2ae3e308e2fe9310935a0f5ce4e30a29
[ "BSD-3-Clause" ]
null
null
null
index/models.py
brahici/WBO
8781512b2ae3e308e2fe9310935a0f5ce4e30a29
[ "BSD-3-Clause" ]
null
null
null
index/models.py
brahici/WBO
8781512b2ae3e308e2fe9310935a0f5ce4e30a29
[ "BSD-3-Clause" ]
null
null
null
from django.db import models class Menu(models.Model): label = models.CharField(max_length=64) url = models.CharField(max_length=254) sequence = models.IntegerField() visible = models.BooleanField(default=False) def __unicode__(self): return self.label class Meta: ordering = ('sequence',) verbose_name = 'index menu' verbose_name_plural = 'index menus'
24.352941
48
0.671498
48
414
5.604167
0.666667
0.111524
0.133829
0.178439
0
0
0
0
0
0
0
0.015674
0.229469
414
16
49
25.875
0.827586
0
0
0
0
0
0.070218
0
0
0
0
0
0
1
0.083333
false
0
0.083333
0.083333
0.75
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
2dd26c6204a033b6f1e83b70cb2fbe90ad12917d
3,875
py
Python
examples/libtest/DatetimeModuleTest.py
takipsizad/pyjs
54db0ba6747aca744f9f3c3e985a17e913dfb951
[ "ECL-2.0", "Apache-2.0" ]
739
2015-01-01T02:05:11.000Z
2022-03-30T15:26:16.000Z
examples/libtest/DatetimeModuleTest.py
takipsizad/pyjs
54db0ba6747aca744f9f3c3e985a17e913dfb951
[ "ECL-2.0", "Apache-2.0" ]
33
2015-03-25T23:17:04.000Z
2021-08-19T08:25:22.000Z
examples/libtest/DatetimeModuleTest.py
takipsizad/pyjs
54db0ba6747aca744f9f3c3e985a17e913dfb951
[ "ECL-2.0", "Apache-2.0" ]
167
2015-01-01T22:27:47.000Z
2022-03-17T13:29:19.000Z
# Testing datetime module import sys import UnitTest import datetime class DatetimeModuleTest(UnitTest.UnitTest): def testDate(self): d = datetime.date(2010, 4, 9) self.assertEqual(d.year, 2010) self.assertEqual(d.month, 4) self.assertEqual(d.day, 9) self.assertEqual(d.weekday(), 4) def testTime(self): t = datetime.time(9, 45, 11, 95000) self.assertEqual(t.hour, 9) self.assertEqual(t.minute, 45) self.assertEqual(t.second, 11) self.assertEqual(t.microsecond, 95000) def testTimestamp(self): d = datetime.date.fromtimestamp(1270804609) self.assertEqual(str(d), '2010-04-09') dt = str(datetime.datetime.fromtimestamp(1270804609.95)) # CET: 2010-04-09 11:16:49.950000 self.assertEqual( (dt[:11], dt[16:]), ("2010-04-09 ", ":49.950000"), ) def testCtime(self): d = datetime.date(2010, 4, 9) self.assertEqual(d.ctime(), "Fri Apr 9 00:00:00 2010") dt = datetime.datetime(2010, 4, 9, 10, 57, 32) self.assertEqual(dt.ctime(), "Fri Apr 9 10:57:32 2010") def testIsoCalendar(self): d = datetime.date(2010, 4, 9) self.assertEqual(d.isocalendar(), (2010, 14, 5)) d1 = datetime.date(2007, 12, 31) self.assertEqual(d1.isocalendar(), (2008, 1, 1)) def testIsoFormat(self): d = datetime.date(2010, 4, 9) self.assertEqual(d.isoformat(), '2010-04-09') dt = datetime.datetime(2010, 4, 9, 10, 57, 32) self.assertEqual(dt.isoformat(), '2010-04-09T10:57:32') dt2 = datetime.datetime(2010, 4, 9, 10, 57, 32, 95000) self.assertEqual(dt2.isoformat(), '2010-04-09T10:57:32.095000') def testOrdinal(self): d = datetime.date.fromordinal(1) self.assertEqual(str(d), '0001-01-01') d1 = datetime.date.fromordinal(733871) self.assertEqual(str(d1), '2010-04-09') self.assertEqual(d1.toordinal(), 733871) def testReplace(self): d = datetime.date(2010, 4, 9).replace(month=6, day=13) self.assertEqual(str(d), '2010-06-13') t = datetime.time(23, 59, 59).replace(minute=45, microsecond=95000) self.assertEqual(str(t), '23:45:59.095000') dt = datetime.datetime(2010, 4, 9, 10, 57, 32).replace(month=6, day=13, hour=12, minute=0, 
second=0) self.assertEqual(str(dt), '2010-06-13 12:00:00') def testTimetuple(self): tm = datetime.date(2010, 4, 9).timetuple() self.assertEqual(tm.tm_year, 2010) self.assertEqual(tm.tm_mon, 4) self.assertEqual(tm.tm_mday, 9) self.assertEqual(tm.tm_hour, 0) self.assertEqual(tm.tm_min, 0) self.assertEqual(tm.tm_sec, 0) self.assertEqual(tm.tm_wday, 4) self.assertEqual(tm.tm_yday, 99) def testStrftime(self): d = datetime.date(2010, 4, 9) self.assertEqual(d.strftime("%d/%m/%y"), "09/04/10") def testStrptime(self): d = datetime.datetime.strptime("010100 1234", "%d%m%y %H%M") self.assertEqual(str(d), '2000-01-01 12:34:00') def testComparision(self): d1 = datetime.date(2010, 6, 8) d2 = datetime.date(2010, 6, 8) d3 = datetime.date(2010, 4, 9) self.assertTrue(d1 == d2, "d1 and d2 differ") self.assertTrue(d1 > d3, "d1 is not later than d3") self.assertTrue(d3 < d1, "d3 is not earlier than d1") def testOperations(self): d1 = datetime.date(2010, 4, 9) d2 = datetime.date(2010, 6, 13) diff = d2 - d1 self.assertEqual(diff.days, 65) self.assertEqual(str(d1 + diff), "2010-06-13") self.assertEqual(str(d1 - diff), "2010-02-03") if __name__ == '__main__': from RunTests import RunTests t = RunTests() t.add(DatetimeModuleTest) t.start_test()
34.598214
108
0.599226
542
3,875
4.252768
0.234317
0.234273
0.033839
0.066377
0.339696
0.215184
0.160521
0.150542
0.138395
0.12538
0
0.165068
0.246452
3,875
111
109
34.90991
0.624315
0.014194
0
0.078652
0
0
0.090933
0.006813
0
0
0
0
0.438202
1
0.146067
false
0
0.044944
0
0.202247
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
2
2dd503e2807b79a835cbe6e07372b54f279a4edd
3,828
py
Python
app/genre/datastore.py
datphan/moviecrab
e3bcff700b994388f1ded68d268a960b10d57a81
[ "BSD-3-Clause" ]
null
null
null
app/genre/datastore.py
datphan/moviecrab
e3bcff700b994388f1ded68d268a960b10d57a81
[ "BSD-3-Clause" ]
null
null
null
app/genre/datastore.py
datphan/moviecrab
e3bcff700b994388f1ded68d268a960b10d57a81
[ "BSD-3-Clause" ]
null
null
null
from abc import abstractmethod, ABCMeta from datetime import datetime from flask_security.utils import encrypt_password from ..datastore import SQLAlchemyDatastore from ..utils import fix_docs class GenreDatastore(object): """Abstract GenreDatastore class. .. versionadded:: 0.1.0 """ __metaclass__ = ABCMeta # genres @abstractmethod def find_genre_list(self, q=None, filters=None, sort=None, offset=None, limit=None, **kwargs): """Find all existing genre from the datastore by optional query and options. .. versionadded:: 0.1.0 :param q: the optional query as a string which is provided by the current genre, default is None. :param filters: the filters list of directory item with keys: (key, op, value) :param sort: sorting string (sort='+a,-b,c') :param offset: offset (integer positive) :param limit: limit (integer positive) :param kwargs: the additional keyword arguments containing filter dict {key:value,} :return the query """ pass @abstractmethod def create_genre(self, **kwargs): """Creates a new genre associated with the current genre then save it to the database. .. versionadded:: 0.1.0 :param kwargs: the optional kwargs :return the created genre """ pass @abstractmethod def read_genre(self, pid, **kwargs): """Reads an existing genre associated with the current genre by its primary id from the database. .. versionadded:: 0.1.0 :param pid: primary id of an genre. :param kwargs: the optional kwargs. :return the found genre """ pass @abstractmethod def update_genre(self, pid, **kwargs): """Updates an existing genre associated with the current genre by its primary id from the database. .. versionadded:: 0.1.0 :param pid: primary id of an genre. :param kwargs: the optional kwargs. :return the updated genre """ pass @abstractmethod def delete_genre(self, pid, **kwargs): """Deletes a existing genre associated with the current genre by its primary id from the database. .. versionadded:: 0.1.0 :param pid: primary id of an genre. 
:param kwargs: the optional kwargs """ pass @fix_docs class SQLAlchemyGenreDatastore(SQLAlchemyDatastore, GenreDatastore): """ Implementation for GenreDatastore with SQLAlchemy """ # User def find_genre_list(self, q=None, filters=None, **kwargs): accepted_filter_keys = ('email', 'active') kwargs.update({ 'q': q, 'filters': filters, 'accepted_filter_keys': accepted_filter_keys }) return self.find_by_model_name('genre', **kwargs) def create_genre(self, **kwargs): accepted_keys = ('email', 'password', 'active', 'confirmed_at') kwargs['password'] = encrypt_password(kwargs['password']) # TODO(hoatle): implement verification by signals kwargs['active'] = True kwargs['confirmed_at'] = datetime.utcnow() genre = self.create_by_model_name('genre', accepted_keys, **kwargs) genre.roles.append(self.find_roles(name='genre').first()) self.commit() return genre def read_genre(self, pid, **kwargs): return self.read_by_model_name('genre', pid, **kwargs) def update_genre(self, pid, **kwargs): return self.update_by_model_name('genre', pid, **kwargs) def delete_genre(self, pid, **kwargs): self.delete_by_model_name('genre', pid, **kwargs) def filter_by(self, **kwargs): return self.filter_by_model_name('genre', **kwargs)
29.446154
98
0.631139
457
3,828
5.175055
0.262582
0.034249
0.035518
0.038055
0.413953
0.366596
0.279493
0.217336
0.217336
0.186892
0
0.006424
0.268025
3,828
129
99
29.674419
0.837616
0.388976
0
0.367347
0
0
0.068846
0
0
0
0
0.007752
0
1
0.22449
false
0.163265
0.102041
0.061224
0.489796
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
2df31a29b7b8de55930b8e265b17dab114002b6a
610
py
Python
stock_activities/migrations/0008_auto_20210524_0215.py
ericpesto/Archeon-Django-REST-API
e02b871b95c5247d83580acfe25f6ec299fdb9b1
[ "MIT" ]
1
2021-06-07T17:31:23.000Z
2021-06-07T17:31:23.000Z
stock_activities/migrations/0008_auto_20210524_0215.py
ericpesto/Archeon-Django-REST-API
e02b871b95c5247d83580acfe25f6ec299fdb9b1
[ "MIT" ]
null
null
null
stock_activities/migrations/0008_auto_20210524_0215.py
ericpesto/Archeon-Django-REST-API
e02b871b95c5247d83580acfe25f6ec299fdb9b1
[ "MIT" ]
null
null
null
# Generated by Django 3.2.3 on 2021-05-24 02:15 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stock_activities', '0007_auto_20210522_1833'), ] operations = [ migrations.AlterField( model_name='stockactivity', name='activity_type_id', field=models.IntegerField(blank=True, null=True), ), migrations.AlterField( model_name='stockactivity', name='stock_code', field=models.CharField(blank=True, max_length=20, null=True), ), ]
25.416667
73
0.608197
64
610
5.640625
0.671875
0.110803
0.138504
0.160665
0.254848
0.254848
0
0
0
0
0
0.075171
0.280328
610
23
74
26.521739
0.747153
0.07377
0
0.352941
1
0
0.161634
0.040853
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2df5518865fd89345cea0e35c89a91387c0ff281
357
py
Python
src/jbdl/rbdl/math/cross_force_space.py
yz-mao/jbdl
a5380233b3795c8aaa9acd9e5c07fa44f8a5dadb
[ "MIT" ]
21
2021-08-29T06:59:18.000Z
2022-01-13T22:53:02.000Z
src/jbdl/rbdl/math/cross_force_space.py
yz-mao/jbdl
a5380233b3795c8aaa9acd9e5c07fa44f8a5dadb
[ "MIT" ]
2
2021-08-31T08:34:09.000Z
2021-09-06T07:40:51.000Z
src/jbdl/rbdl/math/cross_force_space.py
yz-mao/jbdl
a5380233b3795c8aaa9acd9e5c07fa44f8a5dadb
[ "MIT" ]
4
2021-08-29T06:59:22.000Z
2021-10-04T05:59:41.000Z
import jax.numpy as jnp from jax.api import jit from jbdl.rbdl.math import cross_motion_space @jit def cross_force_space(v): vcross = -jnp.transpose(cross_motion_space(v)) return vcross if __name__ == "__main__": from jax import make_jaxpr a = jnp.ones((6, 1)) print(make_jaxpr(cross_force_space)(a)) print(cross_force_space(a))
21
50
0.722689
58
357
4.103448
0.517241
0.12605
0.189076
0.134454
0
0
0
0
0
0
0
0.00678
0.173669
357
16
51
22.3125
0.8
0
0
0
0
0
0.022409
0
0
0
0
0
0
1
0.083333
false
0
0.333333
0
0.5
0.166667
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
2df97e060f8e316c4080d64e9a478c6ed11edf50
2,423
py
Python
opbasm/color.py
1Maxnet1/opbasm
bef9e446f089a6bc6cfc21f6c8e799010572daf5
[ "MIT" ]
50
2015-06-02T11:32:11.000Z
2022-03-28T19:12:00.000Z
opbasm/color.py
1Maxnet1/opbasm
bef9e446f089a6bc6cfc21f6c8e799010572daf5
[ "MIT" ]
22
2015-06-15T15:21:45.000Z
2022-01-19T09:18:00.000Z
opbasm/color.py
1Maxnet1/opbasm
bef9e446f089a6bc6cfc21f6c8e799010572daf5
[ "MIT" ]
13
2015-06-02T11:51:03.000Z
2022-01-19T10:16:24.000Z
#!/usr/bin/python # -*- coding: utf-8 -*- '''Color formatting ''' # Copyright © 2014 Kevin Thibedeau # (kevin 'period' thibedeau 'at' gmail 'punto' com) # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
from __future__ import print_function, division try: import colorama colorama.init() from colorama import Fore, Back, Style except ImportError: def note(t): return t def success(t): return t def warn(t): return t def error(t): return t else: import os _no_color = os.getenv('NO_COLOR', 'false') _no_color = True if _no_color.lower() in ['1', 'true', 't', 'y', 'yes'] else False def stdout_redirected(): return os.fstat(0) != os.fstat(1) _redir_stdout = stdout_redirected() def colorize(t, code): if _no_color or _redir_stdout: return t return ''.join([code, t, Style.RESET_ALL]) def note(t): return colorize(t, Fore.MAGENTA) def success(t): return colorize(t, Fore.GREEN) def warn(t): return colorize(t, Fore.YELLOW + Style.BRIGHT) def error(t): return colorize(t, Fore.RED + Style.BRIGHT) if __name__ == '__main__': print('Colorized text:\n') print('note("foobar") : ' + note('foobar')) print('success("foobar") : ' + success('foobar')) print('warn("foobar") : ' + warn('foobar')) print('error("foobar") : ' + error('foobar'))
29.91358
86
0.680974
343
2,423
4.728863
0.460641
0.038841
0.019729
0.039457
0.049322
0
0
0
0
0
0
0.004188
0.211721
2,423
80
87
30.2875
0.844503
0.478333
0
0
0
0
0.123077
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0.257143
0.628571
0.171429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
2dfce4eeb0b1fe6364c68900f08cc196107998fc
1,487
py
Python
osf/models/rdm_announcement.py
yuanyuan-deng/RDM-osf.io
e1c54e97c898d26406d71129db7e4baf82802224
[ "Apache-2.0" ]
null
null
null
osf/models/rdm_announcement.py
yuanyuan-deng/RDM-osf.io
e1c54e97c898d26406d71129db7e4baf82802224
[ "Apache-2.0" ]
8
2018-11-09T05:57:09.000Z
2019-07-25T10:27:55.000Z
osf/models/rdm_announcement.py
yuanyuan-deng/RDM-osf.io
e1c54e97c898d26406d71129db7e4baf82802224
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from django.db import models from osf.models.base import BaseModel from osf.utils.fields import NonNaiveDateTimeField, EncryptedTextField class RdmAnnouncement(BaseModel): user = models.ForeignKey('OSFUser', null=True) title = models.CharField(max_length=256, blank=True, null=False) body = models.TextField(max_length=63206, null=False) announcement_type = models.CharField(max_length=256, null=False) date_sent = NonNaiveDateTimeField(auto_now_add=True) is_success = models.BooleanField(default=False) class RdmAnnouncementOption(BaseModel): user = models.ForeignKey('OSFUser', null=True) twitter_api_key = EncryptedTextField(blank=True, null=True) twitter_api_secret = EncryptedTextField(blank=True, null=True) twitter_access_token = EncryptedTextField(blank=True, null=True) twitter_access_token_secret = EncryptedTextField(blank=True, null=True) facebook_api_key = EncryptedTextField(blank=True, null=True) facebook_api_secret = EncryptedTextField(blank=True, null=True) facebook_access_token = EncryptedTextField(blank=True, null=True) redmine_api_url = EncryptedTextField(blank=True, null=True) redmine_api_key = EncryptedTextField(blank=True, null=True) class RdmFcmDevice(BaseModel): user = models.ForeignKey('OSFUser', null=True) device_token = EncryptedTextField(blank=True, null=True) date_created = NonNaiveDateTimeField(auto_now_add=True)
47.967742
76
0.763282
175
1,487
6.308571
0.32
0.094203
0.129529
0.280797
0.677536
0.565217
0.522645
0.096014
0
0
0
0.009382
0.139879
1,487
30
77
49.566667
0.853792
0.014122
0
0.12
0
0
0.014644
0
0
0
0
0
0
1
0
false
0
0.12
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
93073cad5cf341a185e1aab28773942026aba472
1,523
py
Python
custom_tasks/other/python/round_predictions/custom.py
geomex/custom-models
899292552d97c2132306c2e1e372d67ffb9c1a50
[ "Apache-2.0" ]
10
2021-07-18T20:13:21.000Z
2022-03-16T08:20:15.000Z
custom_tasks/other/python/round_predictions/custom.py
geomex/custom-models
899292552d97c2132306c2e1e372d67ffb9c1a50
[ "Apache-2.0" ]
6
2021-08-19T22:25:43.000Z
2022-02-04T15:48:36.000Z
custom_tasks/other/python/round_predictions/custom.py
geomex/custom-models
899292552d97c2132306c2e1e372d67ffb9c1a50
[ "Apache-2.0" ]
31
2021-06-30T18:14:55.000Z
2022-03-26T05:30:55.000Z
from typing import List, Optional import pickle import pandas as pd import numpy as np from pathlib import Path from sklearn.pipeline import Pipeline def fit( X: pd.DataFrame, y: pd.Series, output_dir: str, class_order: Optional[List[str]] = None, row_weights: Optional[np.ndarray] = None, **kwargs, ) -> None: estimator = pipeline(X) estimator.fit(X, y) output_dir_path = Path(output_dir) if output_dir_path.exists() and output_dir_path.is_dir(): with open("{}/artifact.pkl".format(output_dir), "wb") as fp: pickle.dump(estimator, fp) class RoundInput(): """ Goal is to round the output of a prior model, so using those unrounded predictions as inputs here. """ def __init__(self, X): self.X = X def fit(self, X, y=None, **kwargs): self.X = round(X) return self def transform(self, X): return np.array(round(X[X.columns[0]])).reshape(-1, 1) class EmptyEstimator(): """ This is empty because the rounding is done in the above step of the pipeline. Still need this for the pipeline to run though. """ def fit(self, X, y): return self def predict(self, data: pd.DataFrame): return data[:,0] def pipeline(X): return Pipeline(steps=[("preprocessing", RoundInput(X)), ("model", EmptyEstimator())]) def score(data: pd.DataFrame, model, **kwargs) -> pd.DataFrame: return pd.DataFrame(data=model.predict(data), columns = ['Predictions'])
24.174603
90
0.640184
213
1,523
4.502347
0.413146
0.056309
0.040667
0.022941
0.025026
0
0
0
0
0
0
0.003433
0.235062
1,523
62
91
24.564516
0.819742
0.147078
0
0.054054
0
0
0.036508
0
0
0
0
0
0
1
0.216216
false
0
0.162162
0.135135
0.594595
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
930e45f4d02f8de9aa6c0f3216f36d0e1467b66d
101
py
Python
nideep/eval/eval_utils.py
kashefy/caffe_sandbox
31afc409df14fece0ac21707185e586dd2d625a9
[ "BSD-2-Clause" ]
15
2015-08-26T21:15:15.000Z
2016-03-10T06:25:08.000Z
nideep/eval/eval_utils.py
nigroup/nideep
31afc409df14fece0ac21707185e586dd2d625a9
[ "BSD-2-Clause" ]
35
2016-05-24T13:57:01.000Z
2018-03-07T18:43:07.000Z
nideep/eval/eval_utils.py
nigroup/nideep
31afc409df14fece0ac21707185e586dd2d625a9
[ "BSD-2-Clause" ]
11
2016-05-24T13:42:55.000Z
2019-10-04T16:20:54.000Z
''' Created on Mar 1, 2016 @author: kashefy ''' class Phase: TRAIN = 'Train' TEST = 'Test'
10.1
22
0.574257
13
101
4.461538
0.846154
0
0
0
0
0
0
0
0
0
0
0.067568
0.267327
101
9
23
11.222222
0.716216
0.39604
0
0
0
0
0.169811
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
93126857683317270dcf8dc5f7d2a82755e17de6
2,144
py
Python
App/migrations/0007_categories_test_data.py
avivz450/A-GBusinessesPromotions
0dd6e678af5a95dd0246fd1d448c099d86774263
[ "MIT" ]
1
2021-08-18T22:23:57.000Z
2021-08-18T22:23:57.000Z
App/migrations/0007_categories_test_data.py
avivz450/A-GBusinessesPromotions
0dd6e678af5a95dd0246fd1d448c099d86774263
[ "MIT" ]
72
2021-05-22T18:04:54.000Z
2021-09-18T16:32:15.000Z
App/migrations/0007_categories_test_data.py
avivz450/A-GBusinessesPromotions
0dd6e678af5a95dd0246fd1d448c099d86774263
[ "MIT" ]
null
null
null
from django.db import migrations, transaction class Migration(migrations.Migration): dependencies = [ ("App", "‏‏0006_sales_test_data"), ] def generate_data(apps, schema_editor): from App.models import Website, Business_Category categories_test_data = [ ( 1, "store", ), ( 2, "vegan", ), ( 2, "food", ), ( 2, "drinks", ), ( 2, "store", ), ( 3, "food", ), ( 3, "drinks", ), ( 3, "store", ), ( 4, "store", ), ( 5, "food", ), ( 5, "store", ), ( 6, "store", ), ( 6, "food", ), ( 7, "store", ), ( 7, "food", ), ( 8, "store", ), ( 9, "store", ), ( 9, "food", ), ( 9, "drinks", ), ] with transaction.atomic(): for ( website_id, category_name, ) in categories_test_data: business_Category = Business_Category( website=Website.objects.filter(pk=website_id)[0], category_name=category_name, ) business_Category.save() operations = [ migrations.RunPython(generate_data), ]
20.419048
69
0.254664
107
2,144
4.943925
0.476636
0.120983
0.068053
0
0
0
0
0
0
0
0
0.032432
0.654851
2,144
104
70
20.615385
0.67973
0
0
0.540816
1
0
0.054571
0.010261
0
0
0
0
0
1
0.010204
false
0
0.020408
0
0.061224
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
93269f941a904401bd0f2abb5f4c334094fdc6b5
173
py
Python
pokemon/objects/pokemon.py
nanato12/pokemon-stats
0701a2fc720d41ef9294e674cdf8917b3b996379
[ "Apache-2.0" ]
null
null
null
pokemon/objects/pokemon.py
nanato12/pokemon-stats
0701a2fc720d41ef9294e674cdf8917b3b996379
[ "Apache-2.0" ]
null
null
null
pokemon/objects/pokemon.py
nanato12/pokemon-stats
0701a2fc720d41ef9294e674cdf8917b3b996379
[ "Apache-2.0" ]
null
null
null
from dataclasses import dataclass @dataclass class Pokemon: number: int name: str H: int A: int B: int C: int D: int S: int total: int
11.533333
33
0.572254
25
173
3.96
0.68
0
0
0
0
0
0
0
0
0
0
0
0.364162
173
14
34
12.357143
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.083333
0
0.916667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
933bb32a0d7e5f70b1f951b7b455900ce0857405
4,148
py
Python
upass.py
yoshiumw/upass
e8847661ec4f7401564240ff3dff9c744e693f1b
[ "MIT" ]
null
null
null
upass.py
yoshiumw/upass
e8847661ec4f7401564240ff3dff9c744e693f1b
[ "MIT" ]
null
null
null
upass.py
yoshiumw/upass
e8847661ec4f7401564240ff3dff9c744e693f1b
[ "MIT" ]
null
null
null
"""U-Pass Auto Completer (upass.py) This script allows university students in Vancouver to renew their U-Pass automatically. This tool requires an additional Python script (init.py) that creates a textfile that this script reads. This script requires that 'selenium' be installed within the Python environment you are running this script to. """ from selenium import webdriver from selenium.webdriver.support.select import Select from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as EC from selenium.common import exceptions import time import sys #Tries to open info.txt, if doesn't exist throws error. try: f = open("info.txt", "r") school = int(f.readline()) email = f.readline() pw = f.readline() except: print("Please run init.py before this script.") sys.exit() #Only chrome right now #TODO: Other browser support. driver = webdriver.Chrome("drivers\chromedriver.exe") wait = WebDriverWait(driver, 10) driver.set_page_load_timeout(10) driver.get("https://upassbc.translink.ca/") #U-Pass initial page select_school = Select( driver.find_element_by_id("PsiId") ) select_school.select_by_index(school) driver.find_element_by_id("goButton").click() #School portal page, depends on what user put in text file. 
if (school == 9 or school == 2 or school == 5): #sfu, kpu, bcit user_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("username")) password_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("password")) submit_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_name("submit")) user_element.send_keys(email) password_element.send_keys(pw) try: submit_element.click() except exceptions.StaleElementReferenceException: pass elif (school == 4 or school == 7): #ubc, ecarr user_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("username")) password_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("password")) submit_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_name("_eventId_proceed")) user_element.send_keys(email) password_element.send_keys(pw) try: submit_element.click() except exceptions.StaleElementReferenceException: pass elif (school == 1 or school == 3, school == 10): #douglas, nicola, vcc user_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("ctl00_ContentPlaceHolder1_UsernameTextBox")) password_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("ctl00_ContentPlaceHolder1_PasswordTextBox")) submit_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_name("ctl00$ContentPlaceHolder1$SubmitButton")) user_element.send_keys(email) password_element.send_keys(pw) try: submit_element.click() except exceptions.StaleElementReferenceException: pass elif (school == 6 or school == 8): #capu, langara user_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("userNameInput")) password_element = WebDriverWait(driver, 10).until(lambda driver: driver.find_element_by_id("passwordInput")) submit_element = WebDriverWait(driver, 10).until(lambda driver: 
driver.find_element_by_name("submitButton")) user_element.send_keys(email) password_element.send_keys(pw) try: submit_element.click() except exceptions.StaleElementReferenceException: pass #clicks everything on the page that has tag input but works... checkboxes = wait.until(lambda driver: driver.find_elements_by_tag_name("input")) for cb in checkboxes: try: cb.click() except exceptions.ElementNotVisibleException: print("element not interactable") pass time.sleep(4) driver.quit()
41.48
143
0.735776
530
4,148
5.584906
0.320755
0.050676
0.080405
0.089865
0.525338
0.502027
0.502027
0.502027
0.502027
0.502027
0
0.014178
0.166827
4,148
99
144
41.89899
0.842303
0.154291
0
0.430556
0
0
0.104363
0.042453
0
0
0
0.010101
0
1
0
false
0.194444
0.111111
0
0.111111
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
0
0
0
2
9342126eed723bd86d6cab909dad9bf0e956e23c
4,092
py
Python
printers.py
falouu/el4000
2ec0472ef259519173bc755765abb5a0199e9978
[ "MIT" ]
null
null
null
printers.py
falouu/el4000
2ec0472ef259519173bc755765abb5a0199e9978
[ "MIT" ]
null
null
null
printers.py
falouu/el4000
2ec0472ef259519173bc755765abb5a0199e9978
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Output formatters (for printing info and data files) # # Copyright (C) 2014 Peter Wu <peter@lekensteyn.nl> import math from defs import info, data # Python 2.7 compatibility if b'' == '': import functools, itertools iterbytes = functools.partial(itertools.imap, ord) else: iterbytes = iter class BasePrinter(object): """Prints the info, data header or data in verbose form.""" def __init__(self, filename): pass def print_info(self, t): print_namedtuple(t, info) def print_data_header(self, t): print_namedtuple(t, data) def print_data(self, t, date): print_namedtuple(t, data) class RawPrinter(BasePrinter): """Prints raw bytes in hex form, possibly with headers.""" def print_data(self, t, date): # Convert interpreted numbers back to bytes... all_bs = [data.pack_as_bytes(name, getattr(t, name)) for name in data.names] # Convert bytes to hex and print them print(date + ' ' + ' '.join( ''.join('{0:02x}'.format(b) for b in iterbytes(bs)) for bs in all_bs)) class CSVPrinter(BasePrinter): """Prints data separated by a semicolon.""" def __init__(self, filename, separator=','): self.separator = separator self.printed_header = False def print_data_header(self, t): pass def print_data(self, t, date): if not self.printed_header: print(self.separator.join(["timestamp"] + data.names)) self.printed_header = True print('{1}{0}{2:5.1f}{0}{3:5.3f}{0}{4:5.3f}' .format(self.separator, date, *t)) class EffectivePowerPrinter(BasePrinter): """ Prints the effective power in Watt, computed from voltage, current and the power factor. 
""" def __init__(self, filename, separator=','): self.separator = separator def print_data_header(self, t): pass def print_data(self, t, date): effective_power = t.voltage * t.current * t.power_factor print('{1}{0}{2:.1f}'.format(self.separator, date, effective_power)) class ApparentPowerPrinter(BasePrinter): """Prints the calculated apparent power in VA.""" def __init__(self, filename, separator=','): self.separator = separator def print_data_header(self, t): pass def print_data(self, t, date): apparent_power = t.voltage * t.current print('{1}{0}{2:.1f}'.format(self.separator, date, apparent_power)) class MemoryPrinter(BasePrinter): def __init__(self): self.info = [] self.data = [] pass def print_info(self, t): for n, v in zip(t._fields, t): # Print literals in displayable characters if isinstance(v, bytes): v = repr(v) self.info += [{ "key": n, "val": info.unitify(n, v) }] def print_data_header(self, t): pass def print_data(self, t, date): # data_file_index = len(self.data) - 1 # if data_file_index < 0: # raise Exception("data_file_index < 0. print_data_header not called?") apparent_power = t.voltage * t.current effective_power = t.voltage * t.current * t.power_factor self.data.append({ "date": date, "voltage": t.voltage, "current": t.current, "power_factor": t.power_factor, "apparent_power": apparent_power, "effective_power": effective_power }) def round_up(n, multiple): return int(math.ceil(1.0 * n / multiple) * multiple) def print_namedtuple(t, formatter): # Align at columns of four chars with at least two spaces as separator name_width = round_up(max(len(name) for name in t._fields) + 2, 4) format = '{0:' + str(name_width) + '}{1}' for n, v in zip(t._fields, t): # Print literals in displayable characters if isinstance(v, bytes): v = repr(v) print(format.format(n, formatter.unitify(n, v)))
31.96875
83
0.604106
531
4,092
4.508475
0.276836
0.046784
0.055138
0.0401
0.352966
0.339599
0.270677
0.270677
0.249791
0.187135
0
0.013118
0.27346
4,092
127
84
32.220472
0.792129
0.193793
0
0.430233
0
0.011628
0.047751
0.011091
0
0
0
0
0
1
0.232558
false
0.069767
0.034884
0.011628
0.348837
0.302326
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
93443bea2d8f28d75d196bc6e1f9586cfbcf32af
1,295
py
Python
mayan/apps/authentication/urls.py
eshbeata/open-paperless
6b9ed1f21908116ad2795b3785b2dbd66713d66e
[ "Apache-2.0" ]
2,743
2017-12-18T07:12:30.000Z
2022-03-27T17:21:25.000Z
mayan/apps/authentication/urls.py
eshbeata/open-paperless
6b9ed1f21908116ad2795b3785b2dbd66713d66e
[ "Apache-2.0" ]
15
2017-12-18T14:58:07.000Z
2021-03-01T20:05:05.000Z
mayan/apps/authentication/urls.py
eshbeata/open-paperless
6b9ed1f21908116ad2795b3785b2dbd66713d66e
[ "Apache-2.0" ]
257
2017-12-18T03:12:58.000Z
2022-03-25T08:59:10.000Z
from __future__ import unicode_literals from django.conf import settings from django.conf.urls import url from django.contrib.auth.views import logout from .views import ( login_view, password_change_done, password_change_view, password_reset_complete_view, password_reset_confirm_view, password_reset_done_view, password_reset_view ) urlpatterns = [ url(r'^login/$', login_view, name='login_view'), url( r'^password/change/done/$', password_change_done, name='password_change_done' ), url( r'^password/change/$', password_change_view, name='password_change_view' ), url( r'^logout/$', logout, {'next_page': settings.LOGIN_REDIRECT_URL}, name='logout_view' ), url( r'^password/reset/$', password_reset_view, name='password_reset_view' ), url( r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', password_reset_confirm_view, name='password_reset_confirm_view' ), url( r'^password/reset/complete/$', password_reset_complete_view, name='password_reset_complete_view' ), url( r'^password/reset/done/$', password_reset_done_view, name='password_reset_done_view' ), ]
29.431818
112
0.669498
167
1,295
4.844311
0.227545
0.257108
0.059333
0.098888
0.200247
0
0
0
0
0
0
0.013423
0.194595
1,295
43
113
30.116279
0.762224
0
0
0.358974
0
0.025641
0.301931
0.19305
0
0
0
0
0
1
0
false
0.358974
0.128205
0
0.128205
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
9364a51bd8ed01cbafc6bda7f669e3128903d538
1,631
py
Python
latestos/client/base.py
ProfessorManhattan/latestos
1ca0636aaa1a5f94ae2fd25a60fffbf4d3aff752
[ "MIT" ]
null
null
null
latestos/client/base.py
ProfessorManhattan/latestos
1ca0636aaa1a5f94ae2fd25a60fffbf4d3aff752
[ "MIT" ]
null
null
null
latestos/client/base.py
ProfessorManhattan/latestos
1ca0636aaa1a5f94ae2fd25a60fffbf4d3aff752
[ "MIT" ]
null
null
null
from abc import ABC from typing import Any, Dict, List, Tuple class BaseClient(ABC): """Defines the interface used to make requests through the web""" def get(self, url: str, *args, **kwargs) -> Tuple[Any, str]: """ Makes a GET request to the given URL. Args: url (str): url to get Returns: (Tuple[Any, str]): response object, raw response text """ raise NotImplementedError() def post(self, url: str, data: dict, content_type: str) -> Tuple[Any, str]: """ Makes a POST request to the given URL. Args: url (str): url to post to data (dict): data to send content_type (str): content type of the request Returns: (Tuple[Any, str]): response object, raw response text """ raise NotImplementedError() def get_cookies(self) -> List[Dict[str, str]]: """ Get cookies in use. Returns: (List[Dict[str, str]]): cookies - each of 'em is a dict {name: val} """ raise NotImplementedError() def set_cookies(self, cookies: list, url: str): """ Set cookies. Args: cookies (List[Dict[str, str]]): cookies - each is a dict {name: val} url (str): domain's base url """ raise NotImplementedError() def close(self): """ Closes the client / ends sessions. """ raise NotImplementedError() def restart(self): """ Restarts the client """ raise NotImplementedError()
25.484375
80
0.53832
188
1,631
4.648936
0.329787
0.04119
0.154462
0.048055
0.377574
0.306636
0.249428
0.249428
0.249428
0.249428
0
0
0.351931
1,631
63
81
25.888889
0.826869
0.433476
0
0.4
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.133333
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
9364a9f12012275af5db5c9088ca38f8457d6da0
2,471
py
Python
vision/utils/model_book.py
minsu1206/ssdlite_revision
ed7ab8d1aa87fd75c299f5994475a564b50c0d3d
[ "MIT" ]
null
null
null
vision/utils/model_book.py
minsu1206/ssdlite_revision
ed7ab8d1aa87fd75c299f5994475a564b50c0d3d
[ "MIT" ]
null
null
null
vision/utils/model_book.py
minsu1206/ssdlite_revision
ed7ab8d1aa87fd75c299f5994475a564b50c0d3d
[ "MIT" ]
null
null
null
from collections import OrderedDict import torch.nn as nn class ModelBook: """Maintain the mapping between modules and their paths. Example: book = ModelBook(model_ft) for p, m in book.conv2d_modules(): print('path:', p, 'num of filters:', m.out_channels) assert m is book.get_module(p) """ def __init__(self, model): self._model = model self._modules = OrderedDict() self._paths = OrderedDict() path = [] self._construct(self._model, path) def _construct(self, module, path): if not module._modules: return for name, m in module._modules.items(): cur_path = tuple(path + [name]) self._paths[m] = cur_path self._modules[cur_path] = m self._construct(m, path + [name]) def conv2d_modules(self): return self.modules(nn.Conv2d) def linear_modules(self): return self.modules(nn.Linear) def modules(self, module_type=None): for p, m in self._modules.items(): if not module_type or isinstance(m, module_type): yield p, m def num_of_conv2d_modules(self): return self.num_of_modules(nn.Conv2d) def num_of_conv2d_filters(self): """Return the sum of out_channels of all conv2d layers. Here we treat the sub weight with size of [in_channels, h, w] as a single filter. """ num_filters = 0 for _, m in self.conv2d_modules(): num_filters += m.out_channels return num_filters def num_of_linear_modules(self): return self.num_of_modules(nn.Linear) def num_of_linear_filters(self): num_filters = 0 for _, m in self.linear_modules(): num_filters += m.out_features return num_filters def num_of_modules(self, module_type=None): num = 0 for p, m in self._modules.items(): if not module_type or isinstance(m, module_type): num += 1 return num def get_module(self, path): return self._modules.get(path) def get_path(self, module): return self._paths.get(module) def update(self, path, module): old_module = self._modules[path] del self._paths[old_module] self._paths[module] = path self._modules[path] = module
30.134146
90
0.585593
318
2,471
4.31761
0.235849
0.072105
0.029133
0.06118
0.333576
0.24909
0.17043
0.13984
0.088857
0.088857
0
0.007186
0.32416
2,471
81
91
30.506173
0.81497
0.149332
0
0.148148
0
0
0
0
0
0
0
0
0
1
0.240741
false
0
0.037037
0.111111
0.481481
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
937bd8ca02e74c48d36e5966c07d43bb07f2f036
3,805
py
Python
lib/gcn/sparse/torch_vertex.py
ChenFengYe/relightable-nr
239a97406f4df01cf5786dcdde58e464395a682d
[ "MIT" ]
105
2020-06-15T08:51:32.000Z
2022-03-24T08:27:40.000Z
lib/gcn/sparse/torch_vertex.py
ChenFengYe/relightable-nr
239a97406f4df01cf5786dcdde58e464395a682d
[ "MIT" ]
8
2020-06-15T08:53:04.000Z
2021-04-06T15:20:37.000Z
gcn_lib/sparse/torch_vertex.py
LansburyCH/relightable-nr
d5953029eeb41b40f53995000c6b010dcf5d5120
[ "MIT" ]
22
2020-06-17T03:17:18.000Z
2022-03-26T18:01:28.000Z
import torch from torch import nn import torch_geometric as tg from .torch_nn import MLP from .torch_edge import DilatedKnnGraph class MRConv(nn.Module): """ Max-Relative Graph Convolution (Paper: https://arxiv.org/abs/1904.03751) """ def __init__(self, in_channels, out_channels, act_type='relu', norm_type=None, bias=True, aggr='max'): super(MRConv, self).__init__() self.nn = MLP([in_channels*2, out_channels], act_type, norm_type, bias) self.aggr = aggr def forward(self, x, edge_index): """""" x_j = tg.utils.scatter_(self.aggr, torch.index_select(x, 0, edge_index[0]) - torch.index_select(x, 0, edge_index[1]), edge_index[1]) return self.nn(torch.cat([x, x_j], dim=1)) class EdgConv(tg.nn.EdgeConv): """ Edge convolution layer (with activation, batch normalization) """ def __init__(self, in_channels, out_channels, act_type='relu', norm_type=None, bias=True, aggr='max'): super(EdgConv, self).__init__(MLP([in_channels*2, out_channels], act_type, norm_type, bias), aggr) def forward(self, x, edge_index): return super(EdgConv, self).forward(x, edge_index) class GraphConv(nn.Module): """ Static graph convolution layer """ def __init__(self, in_channels, out_channels, conv_type='edge', act_type='relu', norm_type=None, bias=True): super(GraphConv, self).__init__() if conv_type == 'edge': self.gconv = EdgConv(in_channels, out_channels, act_type, norm_type, bias) elif conv_type == 'mr': self.gconv = MRConv(in_channels, out_channels, act_type, norm_type, bias) def forward(self, x, edge_index): return self.gconv(x, edge_index) class DynConv(GraphConv): """ Dynamic graph convolution layer """ def __init__(self, in_channels, out_channels, kernel_size=9, dilation=1, conv_type='edge', act_type='relu', norm_type=None, bias=True, stochastic=False, epsilon=1.0, knn_type='matrix'): super(DynConv, self).__init__(in_channels, out_channels, conv_type, act_type, norm_type, bias) self.k = kernel_size self.d = dilation self.dilated_knn_graph = DilatedKnnGraph(kernel_size, dilation, stochastic, 
epsilon, knn_type) def forward(self, x, batch=None): edge_index = self.dilated_knn_graph(x, batch) return super(DynConv, self).forward(x, edge_index) class ResDynBlock(nn.Module): """ Residual Dynamic graph convolution block :input: (x0, x1, x2, ... , xi), batch :output:(x0, x1, x2, ... , xi ,xi+1) , batch """ def __init__(self, channels, kernel_size=9, dilation=1, conv_type='edge', act_type='relu', norm_type=None, bias=True, stochastic=False, epsilon=1.0, knn_type='matrix'): super(ResDynBlock, self).__init__() self.body = DynConv(channels, channels, kernel_size, dilation, conv_type, act_type, norm_type, bias, stochastic, epsilon, knn_type) # input: (x0, x1, x2, ..., xi); (xi-1, xi), output is (xi, xi+1) def forward(self, x, batch): return self.body(x, batch) + x, batch class DenseDynBlock(nn.Module): """ Dense Dynamic graph convolution block """ def __init__(self, channels, kernel_size=9, dilation=1, conv_type='edge', act_type='relu', norm_type=None, bias=True, stochastic=False, epsilon=1.0, knn_type='matrix'): super(DenseDynBlock, self).__init__() self.body = DynConv(channels*2, channels, kernel_size, dilation, conv_type, act_type, norm_type, bias, stochastic, epsilon, knn_type) def forward(self, x, batch): dense = self.body(batch) return torch.cat((x, dense), 1), batch
38.434343
140
0.646518
518
3,805
4.486486
0.179537
0.039157
0.039157
0.063253
0.608434
0.592083
0.521084
0.446644
0.446644
0.384682
0
0.014493
0.220237
3,805
98
141
38.826531
0.76879
0.113535
0
0.236364
0
0
0.02144
0
0
0
0
0
0
1
0.218182
false
0
0.090909
0.054545
0.527273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
fa763e0593e535916f24919e83f2858a5f58d7e1
3,171
py
Python
tests/unit2/test_view_old.py
LiorAvrahami/arcade
fce254a9eb89629de1f99d57a63759a2953184e9
[ "MIT" ]
1
2020-01-18T04:48:38.000Z
2020-01-18T04:48:38.000Z
tests/unit2/test_view_old.py
LiorAvrahami/arcade
fce254a9eb89629de1f99d57a63759a2953184e9
[ "MIT" ]
1
2019-08-11T18:47:27.000Z
2019-08-12T03:02:11.000Z
tests/unit2/test_view_old.py
LiorAvrahami/arcade
fce254a9eb89629de1f99d57a63759a2953184e9
[ "MIT" ]
null
null
null
# import pytest # import arcade # # # class FakeWindow(arcade.Window): # """A test double Window that is never displayed""" # # def __init__(self): # super().__init__() # # Track the behavior of Views: # self.draw_calls = [] # records sequence of calls to on_draw() # self.update_calls = [] # records sequence of calls to update() # self.mouse_motion_calls = [] # records sequence of calls to on_mouse_motion() # # def set_visible(self, visible=True): # pass # Make set_visible do nothing for testing # # def test(self, frames: int = 10): # """The order of the event loop in the original Window.test method # was producing unintuitive results for the purpose of these tests. # """ # for i in range(frames): # self.switch_to() # self.update(1 / 60) # self.dispatch_event('on_draw') # self.dispatch_events() # self.flip() # # # class BaseView(arcade.View): # def update(self, delta_time): # self.window.update_calls.append(self.__class__.__name__) # self.window.dispatch_event("on_mouse_motion", 42, 43, 1, 1) # change # # def on_draw(self): # self.window.draw_calls.append(self.__class__.__name__) # # # class ViewOne(BaseView): # def on_mouse_motion(self, _x, _y, _dx, _dy): # self.window.mouse_motion_calls.append(self.__class__.__name__) # # # class ViewTwo(BaseView): # pass # # # def test_it_asserts_showing_view_of_none(): # win = FakeWindow() # with pytest.raises(Exception): # # noinspection PyTypeChecker # win.show_view(None) # # # def test_single_view(): # win = FakeWindow() # view = ViewOne() # win.show_view(view) # win.test(2) # assert win.update_calls == ['ViewOne', 'ViewOne'] # assert win.draw_calls == ['ViewOne', 'ViewOne'] # assert win.mouse_motion_calls == ['ViewOne', 'ViewOne'] # # # def test_multiple_views(): # win = FakeWindow() # view_one = ViewOne() # view_two = ViewTwo() # win.show_view(view_one) # win.test(2) # win.show_view(view_two) # win.test(1) # win.show_view(view_one) # win.test(2) # assert win.update_calls == ['ViewOne', 'ViewOne', 'ViewTwo', 'ViewOne', 
'ViewOne'] # assert win.draw_calls == ['ViewOne', 'ViewOne', 'ViewTwo', 'ViewOne', 'ViewOne'] # # # def test_show_view_improper_argument_raises_value_error(): # window = FakeWindow() # # with pytest.raises(ValueError): # # noinspection PyTypeChecker # window.show_view(None) # # # # def test_show_view_sets_window_if_none(): # # window = FakeWindow() # # view_one = ViewOne() # # assert view_one.window is None # # # # window.show_view(view_one) # # assert view_one.window is window # # # # # # def test_show_view_does_not_allow_multiple_windows_of_one_view_object(): # # window1 = FakeWindow() # # window2 = FakeWindow() # # view_one = ViewOne() # # # # window1.show_view(view_one) # # assert view_one.window is window1 # # # # with pytest.raises(RuntimeError): # # window2.show_view(view_one)
29.915094
88
0.624093
386
3,171
4.810881
0.287565
0.051696
0.045234
0.040388
0.294561
0.245019
0.183091
0.149704
0.084006
0
0
0.008268
0.237149
3,171
105
89
30.2
0.759405
0.924945
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
fa7b49d4ed0d86719b5164c46d5e106a7bb63b9f
701
py
Python
geom/SinCosTan.py
JesterOrNot/python_calc
3e939046930eca2204f8354d9f221f6127d2db91
[ "MIT" ]
1
2019-07-22T19:42:15.000Z
2019-07-22T19:42:15.000Z
geom/SinCosTan.py
JesterOrNot/python_calc
3e939046930eca2204f8354d9f221f6127d2db91
[ "MIT" ]
1
2019-07-23T17:49:48.000Z
2019-07-23T17:52:14.000Z
geom/SinCosTan.py
JesterOrNot/python_calc
3e939046930eca2204f8354d9f221f6127d2db91
[ "MIT" ]
1
2019-07-23T17:45:40.000Z
2019-07-23T17:45:40.000Z
#! /usr/bin/python3.7 # soh cah toa def sin(): o = float(input('What is the oppisite?: ')) h = float(input("What is the hypotnuse?: ")) s = o / h print("sin = {}".format(s)) def cos(): a = float(input('What is the ajacent?: ')) h = float(input("What is the hypotnuse?: ")) c = a / h print('cos = {}'.format(c)) def tan(): o = float(input("What is the oppisite?: ")) a = float(input("What is the ajacent?: ")) t = o / a print("tan = {}".format(t)) def main(): userInt = input('What are we solving for (sin cos or tan)?: ').lower() if userInt == 'sin': sin() elif userInt == 'cos': cos() elif userInt == 'tan': tan() else: print('An error has occured please try again') main()
23.366667
71
0.584879
112
701
3.660714
0.401786
0.153659
0.204878
0.234146
0.409756
0.409756
0.409756
0
0
0
0
0.003578
0.202568
701
30
72
23.366667
0.729875
0.045649
0
0.076923
0
0
0.375749
0
0
0
0
0
0
1
0.153846
false
0
0
0
0.153846
0.153846
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa7e05c3a1ae8006eb7088431795f720953c2e82
1,609
py
Python
04_Data Manipulation with pandas/04_Creating and Visualizing DataFrames/04_Price of conventional vs. organic avocados.py
mohd-faizy/DataScience-With-Python
13ebb10cf9083343056d5b782957241de1d595f9
[ "MIT" ]
5
2021-02-03T14:36:58.000Z
2022-01-01T10:29:26.000Z
04_Data Manipulation with pandas/04_Creating and Visualizing DataFrames/04_Price of conventional vs. organic avocados.py
mohd-faizy/DataScience-With-Python
13ebb10cf9083343056d5b782957241de1d595f9
[ "MIT" ]
null
null
null
04_Data Manipulation with pandas/04_Creating and Visualizing DataFrames/04_Price of conventional vs. organic avocados.py
mohd-faizy/DataScience-With-Python
13ebb10cf9083343056d5b782957241de1d595f9
[ "MIT" ]
3
2021-02-08T00:31:16.000Z
2022-03-17T13:52:32.000Z
''' 04 - Price of conventional vs. organic avocados: Creating multiple plots for different subsets of data allows you to compare groups. In this exercise, you'll create multiple histograms to compare the prices of conventional and organic avocados. matplotlib.pyplot has been imported as plt and pandas has been imported as pd. Instructions: - Subset avocados for the conventional type, and the average price column. Create a histogram. - Create a histogram of avg_price for organic type avocados. - Add a legend to your plot, with the names "conventional" and "organic". - Modify your code to adjust the transparency of both histograms to 0.5. - Modify your code to use 20 bins in both histograms. - Show your plot. -------------------------------------------------------------- avocados.head() date type year avg_price size nb_sold 0 2015-12-27 conventional 2015 0.95 small 9.627e+06 1 2015-12-20 conventional 2015 0.98 small 8.710e+06 2 2015-12-13 conventional 2015 0.93 small 9.855e+06 3 2015-12-06 conventional 2015 0.89 small 9.405e+06 4 2015-11-29 conventional 2015 0.99 small 8.095e+06 --------------------------------------------------------------- ''' # Import matplotlib.pyplot with alias plt import matplotlib.pyplot as plt # Modify bins to 20 avocados[avocados["type"] == "conventional"]["avg_price"].hist(alpha=0.5, bins=20) # Modify bins to 20 avocados[avocados["type"] == "organic"]["avg_price"].hist(alpha=0.5, bins=20) # Add a legend plt.legend(["conventional", "organic"]) # Show the plot plt.show()
36.568182
94
0.662523
239
1,609
4.439331
0.401674
0.075401
0.080113
0.032045
0.111216
0.111216
0.111216
0.047125
0
0
0
0.097191
0.181479
1,609
43
95
37.418605
0.708428
0.83468
0
0
0
0
0.251969
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
fa8030134b2a278ad1492e594e32d2c14d287183
2,428
py
Python
pycbc/handlers/check.py
mchestr/pycbc
c215c1f177fe383ec6e797437fa2d5f4727eb9f3
[ "Unlicense" ]
null
null
null
pycbc/handlers/check.py
mchestr/pycbc
c215c1f177fe383ec6e797437fa2d5f4727eb9f3
[ "Unlicense" ]
null
null
null
pycbc/handlers/check.py
mchestr/pycbc
c215c1f177fe383ec6e797437fa2d5f4727eb9f3
[ "Unlicense" ]
null
null
null
import logging import logging.config from pycbc import client, token from pycbc.config import load log = logging.getLogger(__name__) _AVAILABLE = b'iVBORw0KGgoAAAANSUhEUgAAAAwAAAAQCAYAAAAiYZ4HAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEwAACxMBAJqcGAAAAOhJREFUKJHNkMFNw0AURN//u6QJo0Qy9BFbpA0HEeoKSJgyjPYAbRAUQ5og6/0ckCM7yiUXxJzfSG8G/l0EYFsur1wna5mkZdbU7RDY3tzlrmMtPt5mTd0qgDP3jOrc9hp2i2o6hCUSgKL79g8AHiCZVioxIJrbnrBbVGU07yUSVJhiaeMmaXVQAmjn99cqMYBeGvZhJq6H5SKVvaoMdEclgGMYQM996VA4Vkrw+btpfISegtV3hTmKUyUFUEn1EM6aup29PG5MuhLSF6J5f6v2txrpzYmOBs7C03snVhi8uklcnbv3j/IDk1qBhz0yl7oAAAAASUVORK5CYII=' _UNAVAILABLE = b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEwAACxMBAJqcGAAAAbhJREFUOI2Nkz1oU2EUhp9z0lsT0duiGBDUUkQJVTDGSesPZLHioHHJXIcUETE1k7uDIDSDYNuMzoaCizg01IAQEGltB7fa2Um7JCU353MI19xUG/JOH+ec532/Hz5hnzLVhYwhcw7NojYBoOiOc67mxC19y81vROclXFx5v3y402m+FuThftOonFklHlixkS81/xp04dZHgeuD4FDmqCfawUwjX2oqQDd5OBhAhZstL1YGkEx1IeM09nU4UDDnejvBLqshc8PAY6Nx3t7Kc/vU+Z4hUlCHZoeBF6dzTI0nSR872WuYZDV8KoDUeJJnF2+gIv/AqbETvPuxxauteuRMTI5Ek55MXeNq8gxHvFFebNQ46h3qg19urvXdAcCIojvAOYDnXz6wOJ3j/sQFPI1x1j8+EDZsW51ztbCw297j0ecVvv/6yd3TqYEwgCCr6sQtRYtRk0EwgCgVAUhXy8uiWog2EzGPPQsOhHG8WX/w9LECxAMrmqMe7Tc77YNhY83/7ZcAFKCRLzUT7WDGmVX+T/Qn+7v+nU+zsy2I/MZQl1bKaUUKmGRRJruBti3IqiiV9XvFzej8H4xUvW+Yvt46AAAAAElFTkSuQmCC' def handler(event, context): config = load(event) logging.config.dictConfig(config.logging) return { 'statusCode': 200, 'body': _generate_icon(config, event), 'headers': { 'Content-Type': 'image/png' }, 'isBase64Encoded': True, } def _check_timeslot_available(service_id, branch_id, date, time): pycbc = client.WebBookingClient() available_times = pycbc.branches_times_search(service_id, branch_id, date) return any(time in t.time for t in available_times) def _generate_icon(config, event): try: query_params = event['queryStringParameters'] query_token = query_params.pop('token') payload = token.decrypt(config.encrypt_key, query_token, ttl=60 * 60 * 4) except Exception as exc: 
log.exception(exc) return _UNAVAILABLE log.info(f'Payload: {payload}') if _check_timeslot_available(payload.service.publicId, query_params['branch_id'], query_params['date'], query_params['time']): return _AVAILABLE return _UNAVAILABLE
50.583333
730
0.812191
177
2,428
10.937853
0.536723
0.028409
0.018595
0.02376
0.021694
0
0
0
0
0
0
0.080169
0.121499
2,428
47
731
51.659574
0.827473
0
0
0.057143
0
0.028571
0.521417
0.481466
0
0
0
0
0
1
0.085714
false
0
0.114286
0
0.342857
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa879bee35452a5e27262c621b3196b591d11b24
5,387
py
Python
ipymd/formats/python.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
521
2015-01-01T09:22:05.000Z
2022-02-06T23:54:10.000Z
ipymd/formats/python.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
79
2015-01-04T21:17:29.000Z
2018-08-28T16:39:21.000Z
ipymd/formats/python.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
56
2015-03-01T08:48:26.000Z
2022-01-29T03:11:13.000Z
# -*- coding: utf-8 -*- """Python reader and writer.""" #------------------------------------------------------------------------------ # Imports #------------------------------------------------------------------------------ import re import ast from collections import OrderedDict from ..lib.base_lexer import BaseGrammar, BaseLexer from ..lib.markdown import MarkdownFilter from ..lib.python import _is_python from ..ext.six import StringIO from ..utils.utils import _ensure_string, _preprocess #------------------------------------------------------------------------------ # Python reader and writer #------------------------------------------------------------------------------ class PythonSplitGrammar(BaseGrammar): """Grammar used to split Python code into chunks while not cutting long Python strings.""" _triple_quotes = "'''" _triple_doublequotes = '"""' _triple = _triple_quotes + '|' + _triple_doublequotes # '''text''' or """text""". text_var = re.compile(r"^({0})((?!{0}).|\n)*?\1".format(_triple)) # Two new lines followed by non-space newline = re.compile(r'^[\n]{2,}(?=[^ ])') linebreak = re.compile(r'^\n+') other = re.compile(r'^(?!{0}).'.format(_triple)) class PythonSplitLexer(BaseLexer): """Lexer for splitting Python code into chunks.""" grammar_class = PythonSplitGrammar default_rules = ['text_var', 'newline', 'linebreak', 'other'] def __init__(self): super(PythonSplitLexer, self).__init__() self._chunks = [''] @property def current(self): if not self._chunks: return None else: return self._chunks[-1] @property def chunks(self): return [chunk for chunk in self._chunks if chunk] @current.setter def current(self, value): self._chunks[-1] = value def new_chunk(self): self._chunks.append('') def append(self, text): self.current += text def parse_newline(self, m): self.new_chunk() def parse_linebreak(self, m): self.append(m.group(0)) def parse_text_var(self, m): self.append(m.group(0)) def parse_other(self, m): self.append(m.group(0)) def _split_python(python): 
"""Split Python source into chunks. Chunks are separated by at least two return lines. The break must not be followed by a space. Also, long Python strings spanning several lines are not splitted. """ python = _preprocess(python) if not python: return [] lexer = PythonSplitLexer() lexer.read(python) return lexer.chunks def _is_chunk_markdown(source): """Return whether a chunk contains Markdown contents.""" lines = source.splitlines() if all(line.startswith('# ') for line in lines): # The chunk is a Markdown *unless* it is commented Python code. source = '\n'.join(line[2:] for line in lines if not line[2:].startswith('#')) # skip headers if not source: return True # Try to parse the chunk: if it fails, it is Markdown, otherwise, # it is Python. return not _is_python(source) return False def _remove_hash(source): """Remove the leading '#' of every line in the source.""" return '\n'.join(line[2:].rstrip() for line in source.splitlines()) def _add_hash(source): """Add a leading hash '#' at the beginning of every line in the source.""" source = '\n'.join('# ' + line.rstrip() for line in source.splitlines()) return source class PythonReader(object): """Python reader.""" def read(self, python): chunks = _split_python(python) for chunk in chunks: if _is_chunk_markdown(chunk): yield self._markdown_cell(_remove_hash(chunk)) else: yield self._code_cell(chunk) def _code_cell(self, source): return {'cell_type': 'code', 'input': source, 'output': None} def _markdown_cell(self, source): return {'cell_type': 'markdown', 'source': source} class PythonWriter(object): """Python writer.""" def __init__(self, keep_markdown=None): self._output = StringIO() self._markdown_filter = MarkdownFilter(keep_markdown) def _new_paragraph(self): self._output.write('\n\n') def append_comments(self, source): source = source.rstrip() # Filter Markdown contents. source = self._markdown_filter(source) # Skip empty cells. 
if not source: return comments = _add_hash(source) self._output.write(comments) self._new_paragraph() def append_code(self, input): self._output.write(input) self._new_paragraph() def write(self, cell): if cell['cell_type'] == 'markdown': self.append_comments(cell['source']) elif cell['cell_type'] == 'code': self.append_code(cell['input']) @property def contents(self): return self._output.getvalue().rstrip() + '\n' # end of file \n def close(self): self._output.close() def __del__(self): self.close() PYTHON_FORMAT = dict( reader=PythonReader, writer=PythonWriter, file_extension='.py', file_type='text', )
27.484694
79
0.575645
613
5,387
4.882545
0.251223
0.028065
0.013365
0.015035
0.082526
0.082526
0.0284
0.0284
0.020047
0
0
0.003455
0.247819
5,387
195
80
27.625641
0.735193
0.221645
0
0.101695
0
0
0.048905
0.005596
0
0
0
0
0
1
0.211864
false
0
0.067797
0.033898
0.508475
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
fa9af22fa7934795222ad549da06fc9ad593a98d
2,080
py
Python
partnerweb_parser/date_func.py
ChanTerelLy/partnerweb_parser
4bebe39318c43e069e322c04e2d402db317ab7db
[ "MIT" ]
null
null
null
partnerweb_parser/date_func.py
ChanTerelLy/partnerweb_parser
4bebe39318c43e069e322c04e2d402db317ab7db
[ "MIT" ]
null
null
null
partnerweb_parser/date_func.py
ChanTerelLy/partnerweb_parser
4bebe39318c43e069e322c04e2d402db317ab7db
[ "MIT" ]
null
null
null
import calendar import datetime from datetime import datetime as dt import pytz moscow = pytz.timezone('Europe/Moscow') def current_date(): now_date = datetime.date.today() # Текущая дата (без времени) cur_year = now_date.year # Год текущий cur_month = now_date.month # Месяц текущий cur_day = now_date.day # День текущий return cur_day, cur_month, cur_year def last_day_current_month(): now_date = datetime.date.today() cur_year = now_date.year # Год текущий cur_month = now_date.month # Месяц текущий last_day = calendar.monthrange(cur_year, cur_month)[1] return last_day, cur_month, cur_year def url_formate_date(date): return date.strftime("%d.%m.%Y") def formate_date_schedule(str_date): return datetime.datetime.strptime(str_date, '%Y-%m-%dT%H:%M:%S+00:00').strftime('%H:%M') def delta_current_month(date_first, date_second): date_second = date_second - datetime.timedelta(days=30) date_first = date_first - datetime.timedelta(days=30) return date_first, date_second def range_current_month(): now_date = datetime.date.today() date_first = now_date - datetime.timedelta(days=30) date_second = now_date + datetime.timedelta(days=1) return date_first, date_second def current_year_date(): now_date = datetime.date.today() date_first = url_formate_date(now_date - datetime.timedelta(days=365)) date_second = url_formate_date(now_date + datetime.timedelta(days=1)) return date_first, date_second def dmYHM_to_date(ticket_call_time): return dt.strptime(ticket_call_time, "%d.%m.%Y %H:%M").date() if ticket_call_time else dt(1000, 1, 1) def dmYHM_to_datetime(ticket_call_time): d = dt.strptime(ticket_call_time, "%d.%m.%Y %H:%M") if ticket_call_time else dt(1000, 1, 1) return moscow.localize(d) def dmY_to_date(ticket_call_time): return dt.strptime(ticket_call_time, "%d.%m.%Y").date() def today(): return datetime.date.today() def convert_utc_string(date): format = '%Y-%m-%dT%H:%M:%S%z' return datetime.datetime.strptime(date, format)
32.5
105
0.725
327
2,080
4.345566
0.201835
0.064039
0.084448
0.053483
0.544687
0.501759
0.420127
0.339901
0.300493
0.261084
0
0.0159
0.153365
2,080
64
106
32.5
0.791028
0.04375
0
0.234043
0
0
0.052446
0.011599
0
0
0
0
0
1
0.255319
false
0
0.085106
0.106383
0.595745
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
fa9cda2a000dde9bc1bd4154c7f3f12394c65f7c
208
py
Python
minimum.py
Shivams9/pythoncodecamp
e6cd27f4704a407ee360414a8c9236b254117a59
[ "MIT" ]
null
null
null
minimum.py
Shivams9/pythoncodecamp
e6cd27f4704a407ee360414a8c9236b254117a59
[ "MIT" ]
null
null
null
minimum.py
Shivams9/pythoncodecamp
e6cd27f4704a407ee360414a8c9236b254117a59
[ "MIT" ]
null
null
null
a=int(input("Enter the first number:")) b=int(input("Enter the second number:")) c=int(input("Enter the third number:")) if a<b in c>a: min=a elif a<b in c>b: min=b else: min=c print(min)
13.866667
40
0.610577
42
208
3.02381
0.404762
0.188976
0.307087
0.377953
0
0
0
0
0
0
0
0
0.216346
208
14
41
14.857143
0.779141
0
0
0
0
0
0.343137
0
0
0
0
0
0
1
0
false
0
0
0
0
0.1
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fab60ff746e5ffe14bbca1134eac1bab1a29f37f
6,933
py
Python
src/m1/model.py
pvzteam/pvz_recsys2019
3fd14d3b82033474d2e172402abd0ebc5e7b0afc
[ "Apache-2.0" ]
1
2019-07-24T08:41:53.000Z
2019-07-24T08:41:53.000Z
src/m1/model.py
pvzteam/pvz_recsys2019
3fd14d3b82033474d2e172402abd0ebc5e7b0afc
[ "Apache-2.0" ]
null
null
null
src/m1/model.py
pvzteam/pvz_recsys2019
3fd14d3b82033474d2e172402abd0ebc5e7b0afc
[ "Apache-2.0" ]
1
2020-12-02T09:49:12.000Z
2020-12-02T09:49:12.000Z
# -*- coding: utf-8 -*- import gc import numpy as np import pandas as pd import lightgbm as lgb from data import * from feat import * from resource import * from utils import (load_dataframe, convert_dtype, CrossValidation, merge_all) def rank_feat_inside_session(df, cols): for col in cols: col_n = '%s_rank' % col df[col_n] = df.groupby(['session_id'])[col].rank(method='first').astype('float32') return df def rank_similarity_inside_session(df): cols_rank = [ 'item_id_interaction_last_similarity_wv_impression', 'item_id_impression_prev_item_meta_cos', 'item_id_impression_prev_co_appearence_impression_count', 'item_id_impression_first_co_appearence_interaction_count', 'item_id_impression_first_co_appearence_impression_count', 'item_id_interaction_last_co_appearence_impression_count', 'item_id_interaction_last_co_appearence_interaction_count', ] return rank_feat_inside_session(df, cols_rank) f_m2_top30 = TrainTestResource(FeatResource, 'm2_%s_top30_fea', fix=['tr', 'te'], fmt='ftr') f_m3_top30 = TrainTestResource(FeatResource, 'm3_%s_feat_top30', fix=['tr', 'te'], fmt='ftr') m_20190622 = TrainTestResource(ModelResource, '%s_m1_20190622', fix=['tr', 'te'], fmt='csv') m_20190624 = TrainTestResource(ModelResource, '%s_m1_20190624', fix=['tr', 'te'], fmt='csv') m_20190626 = TrainTestResource(ModelResource, '%s_m1_20190626', fix=['tr', 'te'], fmt='csv') @register(out=m_20190622, inp=[t_tr_te_classify, f_top100, f_si_sim]) def train_predict_lgb_20190622_2(): from feat_names import names_lgb_20190622_2 as feats def load_data(tt): df = merge_all([ t_tr_te_classify[tt].load().rename(columns={'item_id': 'impressions'}), f_top100[tt].load(), f_si_sim[tt].load().rename(columns={'item_id': 'impressions'}), ], on=['session_id', 'impressions'], how='left') df = rank_similarity_inside_session(df) return df train = load_data('train') cv = CrossValidation() model = lgb.LGBMClassifier(n_estimators=50000, objective="binary", metric='binary_logloss', num_leaves=31, min_child_samples=100, 
learning_rate=0.1, bagging_fraction=0.7, feature_fraction=0.7, bagging_frequency=5, seed=1, feature_fraction_seed=1, use_best_model=True, n_jobs=16) df_train = train[['session_id', 'impressions']] df_train['target'] = cv.validate(model, feats, train, train['target'], early_stopping_rounds=100, verbose=100) print('Validation Score:', np.mean(cv.scores)) del train gc.collect() test = load_data('test') df_test = test[['session_id', 'impressions']] df_test['target'] = cv.predict_proba(test) df_train.to_csv(m_20190622.train.path, index=False, float_format='%.4f') df_test.to_csv(m_20190622.test.path, index=False, float_format='%.4f') @register(out=m_20190624, inp=[t_tr_te_classify, f_top30, f_si_sim, f_m2_top30, f_m3_top30]) def train_predict_lgb_20190624_1(): from feat_names import names_lgb_20190624_1 as feats def load_data(tt): df = merge_all([ t_tr_te_classify[tt].load().rename(columns={'item_id': 'impressions'}), f_m2_top30[tt].load(), f_m3_top30[tt].load(), f_top30[tt].load(), f_si_sim[tt].load().rename(columns={'item_id': 'impressions'}), ], on=['session_id', 'impressions'], how='left') df = rank_similarity_inside_session(df) return df train = load_data('train') cv = CrossValidation() model = lgb.LGBMClassifier(n_estimators=50000, objective="binary", metric='binary_logloss', num_leaves=31, min_child_samples=100, learning_rate=0.1, bagging_fraction=0.7, feature_fraction=0.7, bagging_frequency=5, seed=1, use_best_model=True, n_jobs=16) df_train = train[['session_id', 'impressions']] df_train['target'] = cv.validate(model, feats, train, train['target'], early_stopping_rounds=100, verbose=100) print('Validation Score:', np.mean(cv.scores)) del train gc.collect() test = load_data('test') df_test = test[['session_id', 'impressions']] df_test['target'] = cv.predict_proba(test) df_train.to_csv(m_20190624.train.path, index=False, float_format='%.4f') df_test.to_csv(m_20190624.test.path, index=False, float_format='%.4f') @register(out=m_20190626, inp=[t_tr_te_classify, 
f_top30, f_si_sim, f_si_cmp, f_si_win, f_m2_top30, f_m3_top30]) def train_predict_lgb_20190626_2(): from feat_names import names_lgb_20190626_2 as feats def load_data(tt): df = merge_all([ t_tr_te_classify[tt].load().rename(columns={'item_id': 'impressions'}), f_m2_top30[tt].load(), f_m3_top30[tt].load(), f_top30[tt].load(), f_si_sim[tt].load().rename(columns={'item_id': 'impressions'}), f_si_cmp[tt].load().rename(columns={'item_id': 'impressions'}), f_si_win[tt].load().rename(columns={'item_id': 'impressions'}), ], on=['session_id', 'impressions'], how='left') df = rank_similarity_inside_session(df) cols_win = [ 'item_id_impression_prev_item_win_ratio', 'item_id_impression_first_item_win_ratio', 'item_id_interaction_last_item_win_ratio', 'item_id_interaction_most_item_win_ratio', ] df = rank_feat_inside_session(df, cols_win) return df train = load_data('train') cv = CrossValidation() model = lgb.LGBMClassifier(n_estimators=50000, objective="binary", metric='binary_logloss', num_leaves=31, min_child_samples=100, learning_rate=0.1, bagging_fraction=0.7, feature_fraction=0.7, bagging_frequency=5, seed=1, feature_fraction_seed=1, use_best_model=True, n_jobs=16) df_train = train[['session_id', 'impressions']] df_train['target'] = cv.validate(model, feats, train, train['target'], early_stopping_rounds=100, verbose=100) print('Validation Score:', np.mean(cv.scores)) del train gc.collect() test = load_data('test') df_test = test[['session_id', 'impressions']] df_test['target'] = cv.predict_proba(test) df_train.to_csv(m_20190626.train.path, index=False, float_format='%.4f') df_test.to_csv(m_20190626.test.path, index=False, float_format='%.4f')
38.516667
114
0.630607
905
6,933
4.474033
0.166851
0.028155
0.044455
0.03754
0.792047
0.763645
0.696962
0.659669
0.659669
0.649296
0
0.059973
0.240012
6,933
179
115
38.731844
0.708484
0.003029
0
0.580153
0
0
0.175832
0.074819
0
0
0
0
0
1
0.061069
false
0
0.083969
0
0.183206
0.022901
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fabcae7c7fe83da20e2a985001d5f1af459a81fb
1,088
py
Python
civil/apps/definitions/admin.py
christopinka/django-civil
d134624da9d36c4ba0bea2df8a21698df196bdf6
[ "Apache-2.0" ]
3
2020-06-15T21:01:06.000Z
2022-02-17T17:41:57.000Z
civil/apps/definitions/admin.py
christopinka/django-civil
d134624da9d36c4ba0bea2df8a21698df196bdf6
[ "Apache-2.0" ]
null
null
null
civil/apps/definitions/admin.py
christopinka/django-civil
d134624da9d36c4ba0bea2df8a21698df196bdf6
[ "Apache-2.0" ]
1
2021-11-06T18:33:29.000Z
2021-11-06T18:33:29.000Z
# -*- coding: utf-8 -*- from django.contrib import admin from django.utils.translation import ugettext_lazy as _ from civil.library.admin import BaseAdmin from .models import * #============================================================================== class NameOnlyAdmin(BaseAdmin): list_display = ('id', 'name', 'created', 'modified') list_display_links = ('id', 'name', ) list_filter = ('created', 'modified') search_fields = ['name'] #============================================================================== admin.site.register(AddressType, NameOnlyAdmin) admin.site.register(PhoneType, NameOnlyAdmin) admin.site.register(EmailType, NameOnlyAdmin) admin.site.register(WebsiteType, NameOnlyAdmin) admin.site.register(SexType, NameOnlyAdmin) admin.site.register(RelationshipType, NameOnlyAdmin) admin.site.register(PaymentType, NameOnlyAdmin) #============================================================================== admin.site.register(ContactType, BaseAdmin) admin.site.register(PrefixType, BaseAdmin) admin.site.register(SuffixType, BaseAdmin)
36.266667
79
0.604779
96
1,088
6.78125
0.427083
0.138249
0.261137
0.322581
0
0
0
0
0
0
0
0.001
0.080882
1,088
29
80
37.517241
0.65
0.234375
0
0
0
0
0.055556
0
0
0
0
0
0
1
0
false
0
0.210526
0
0.473684
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fac94055cd160a82d0e5dc330f3df39c005a9bca
225
py
Python
tworaven_apps/image_utils/urls.py
TwoRavens/TwoRavens
e5f820557d6646df525ceed15e17d79f4159cf0a
[ "Apache-2.0" ]
20
2017-12-11T07:26:06.000Z
2021-11-22T16:16:20.000Z
tworaven_apps/image_utils/urls.py
TwoRavens/TwoRavens
e5f820557d6646df525ceed15e17d79f4159cf0a
[ "Apache-2.0" ]
849
2017-10-20T18:21:18.000Z
2022-02-18T02:45:44.000Z
tworaven_apps/image_utils/urls.py
TwoRavens/TwoRavens
e5f820557d6646df525ceed15e17d79f4159cf0a
[ "Apache-2.0" ]
1
2020-05-18T06:02:13.000Z
2020-05-18T06:02:13.000Z
from django.conf.urls import url from tworaven_apps.image_utils import views urlpatterns = ( # Create new log entry # url(r'^markup-image$', views.view_markup_image, name='view_markup_image'), )
18.75
43
0.675556
30
225
4.866667
0.666667
0.226027
0.205479
0
0
0
0
0
0
0
0
0
0.226667
225
11
44
20.454545
0.83908
0.088889
0
0
0
0
0.153465
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
faca17f369309dc144868a867918cfe16a0f2f8b
11,139
py
Python
Tools/unicode/makeunicodedata.py
marcosptf/cpython-2.0.1
73c739a764e8b1dc84640e73b880bc66e1916bca
[ "PSF-2.0" ]
5
2022-03-26T21:53:36.000Z
2022-03-30T21:47:20.000Z
Tools/unicode/makeunicodedata.py
marcosptf/cpython-2.0.1
73c739a764e8b1dc84640e73b880bc66e1916bca
[ "PSF-2.0" ]
6
2020-11-18T15:48:14.000Z
2021-05-03T21:20:50.000Z
Tools/unicode/makeunicodedata.py
marcosptf/cpython-2.0.1
73c739a764e8b1dc84640e73b880bc66e1916bca
[ "PSF-2.0" ]
2
2015-07-16T08:14:13.000Z
2022-03-27T01:55:17.000Z
# # (re)generate unicode property and type databases # # this script converts a unicode 3.0 database file to # Modules/unicodedata_db.h and Objects/unicodetype_db.h # # history: # 2000-09-24 fl created (based on bits and pieces from unidb) # 2000-09-25 fl merged tim's splitbin fixes, separate decomposition table # 2000-09-25 fl added character type table # 2000-09-26 fl added LINEBREAK, DECIMAL, and DIGIT flags/fields # # written by Fredrik Lundh (fredrik@pythonware.com), September 2000 # import sys SCRIPT = sys.argv[0] VERSION = "1.1" UNICODE_DATA = "UnicodeData-Latest.txt" CATEGORY_NAMES = [ "Cn", "Lu", "Ll", "Lt", "Mn", "Mc", "Me", "Nd", "Nl", "No", "Zs", "Zl", "Zp", "Cc", "Cf", "Cs", "Co", "Cn", "Lm", "Lo", "Pc", "Pd", "Ps", "Pe", "Pi", "Pf", "Po", "Sm", "Sc", "Sk", "So" ] BIDIRECTIONAL_NAMES = [ "", "L", "LRE", "LRO", "R", "AL", "RLE", "RLO", "PDF", "EN", "ES", "ET", "AN", "CS", "NSM", "BN", "B", "S", "WS", "ON" ] # note: should match definitions in Objects/unicodectype.c ALPHA_MASK = 0x01 DECIMAL_MASK = 0x02 DIGIT_MASK = 0x04 LOWER_MASK = 0x08 LINEBREAK_MASK = 0x10 SPACE_MASK = 0x20 TITLE_MASK = 0x40 UPPER_MASK = 0x80 def maketables(): unicode = UnicodeData(UNICODE_DATA) # extract unicode properties dummy = (0, 0, 0, 0) table = [dummy] cache = {0: dummy} index = [0] * len(unicode.chars) # 1) database properties for char in unicode.chars: record = unicode.table[char] if record: # extract database properties category = CATEGORY_NAMES.index(record[2]) combining = int(record[3]) bidirectional = BIDIRECTIONAL_NAMES.index(record[4]) mirrored = record[9] == "Y" item = ( category, combining, bidirectional, mirrored ) # add entry to index and item tables i = cache.get(item) if i is None: cache[item] = i = len(table) table.append(item) index[char] = i # 2) decomposition data # FIXME: <fl> using the encoding stuff from unidb would save # another 50k or so, but I'll leave that for 2.1... 
decomp_data = [""] decomp_index = [0] * len(unicode.chars) for char in unicode.chars: record = unicode.table[char] if record: if record[5]: try: i = decomp_data.index(record[5]) except ValueError: i = len(decomp_data) decomp_data.append(record[5]) else: i = 0 decomp_index[char] = i FILE = "Modules/unicodedata_db.h" sys.stdout = open(FILE, "w") print "/* this file was generated by %s %s */" % (SCRIPT, VERSION) print print "/* a list of unique database records */" print "const _PyUnicode_DatabaseRecord _PyUnicode_Database_Records[] = {" for item in table: print " {%d, %d, %d, %d}," % item print "};" print # FIXME: the following tables should be made static, and # the support code moved into unicodedatabase.c print "/* string literals */" print "const char *_PyUnicode_CategoryNames[] = {" for name in CATEGORY_NAMES: print " \"%s\"," % name print " NULL" print "};" print "const char *_PyUnicode_BidirectionalNames[] = {" for name in BIDIRECTIONAL_NAMES: print " \"%s\"," % name print " NULL" print "};" print "static const char *decomp_data[] = {" for name in decomp_data: print " \"%s\"," % name print " NULL" print "};" # split record index table index1, index2, shift = splitbins(index) print "/* index tables for the database records */" print "#define SHIFT", shift Array("index1", index1).dump(sys.stdout) Array("index2", index2).dump(sys.stdout) # split decomposition index table index1, index2, shift = splitbins(decomp_index) print "/* index tables for the decomposition data */" print "#define DECOMP_SHIFT", shift Array("decomp_index1", index1).dump(sys.stdout) Array("decomp_index2", index2).dump(sys.stdout) sys.stdout = sys.__stdout__ # # 3) unicode type data # extract unicode types dummy = (0, 0, 0, 0, 0, 0) table = [dummy] cache = {0: dummy} index = [0] * len(unicode.chars) for char in unicode.chars: record = unicode.table[char] if record: # extract database properties category = record[2] bidirectional = record[4] flags = 0 if category in ["Lm", "Lt", "Lu", "Ll", 
"Lo"]: flags |= ALPHA_MASK if category == "Ll": flags |= LOWER_MASK if category == "Zl" or bidirectional == "B": flags |= LINEBREAK_MASK if category == "Zs" or bidirectional in ("WS", "B", "S"): flags |= SPACE_MASK if category == "Lt": flags |= TITLE_MASK if category == "Lu": flags |= UPPER_MASK # use delta predictor for upper/lower/title if record[12]: upper = (int(record[12], 16) - char) & 0xffff else: upper = 0 if record[13]: lower = (int(record[13], 16) - char) & 0xffff else: lower = 0 if record[14]: title = (int(record[14], 16) - char) & 0xffff else: title = 0 # decimal digit, integer digit decimal = 0 if record[6]: flags |= DECIMAL_MASK decimal = int(record[6]) digit = 0 if record[7]: flags |= DIGIT_MASK digit = int(record[7]) item = ( flags, upper, lower, title, decimal, digit ) # add entry to index and item tables i = cache.get(item) if i is None: cache[item] = i = len(table) table.append(item) index[char] = i print len(table), "ctype entries" FILE = "Objects/unicodetype_db.h" sys.stdout = open(FILE, "w") print "/* this file was generated by %s %s */" % (SCRIPT, VERSION) print print "/* a list of unique character type descriptors */" print "const _PyUnicode_TypeRecord _PyUnicode_TypeRecords[] = {" for item in table: print " {%d, %d, %d, %d, %d, %d}," % item print "};" print # split decomposition index table index1, index2, shift = splitbins(index) print "/* type indexes */" print "#define SHIFT", shift Array("index1", index1).dump(sys.stdout) Array("index2", index2).dump(sys.stdout) sys.stdout = sys.__stdout__ # -------------------------------------------------------------------- # the following support code is taken from the unidb utilities # Copyright (c) 1999-2000 by Secret Labs AB # load a unicode-data file from disk import string, sys class UnicodeData: def __init__(self, filename): file = open(filename) table = [None] * 65536 while 1: s = file.readline() if not s: break s = string.split(string.strip(s), ";") char = string.atoi(s[0], 16) table[char] = 
s # public attributes self.filename = filename self.table = table self.chars = range(65536) # unicode def uselatin1(self): # restrict character range to ISO Latin 1 self.chars = range(256) # stuff to deal with arrays of unsigned integers class Array: def __init__(self, name, data): self.name = name self.data = data def dump(self, file): # write data to file, as a C array size = getsize(self.data) # print >>sys.stderr, self.name+":", size*len(self.data), "bytes" file.write("static ") if size == 1: file.write("unsigned char") elif size == 2: file.write("unsigned short") else: file.write("unsigned int") file.write(" " + self.name + "[] = {\n") if self.data: s = " " for item in self.data: i = str(item) + ", " if len(s) + len(i) > 78: file.write(s + "\n") s = " " + i else: s = s + i if string.strip(s): file.write(s + "\n") file.write("};\n\n") def getsize(data): # return smallest possible integer size for the given array maxdata = max(data) if maxdata < 256: return 1 elif maxdata < 65536: return 2 else: return 4 def splitbins(t, trace=0): """t, trace=0 -> (t1, t2, shift). Split a table to save space. t is a sequence of ints. This function can be useful to save space if many of the ints are the same. t1 and t2 are lists of ints, and shift is an int, chosen to minimize the combined size of t1 and t2 (in C code), and where for each i in range(len(t)), t[i] == t2[(t1[i >> shift] << shift) + (i & mask)] where mask is a bitmask isolating the last "shift" bits. If optional arg trace is true (default false), progress info is printed to sys.stderr. 
""" import sys if trace: def dump(t1, t2, shift, bytes): print >>sys.stderr, "%d+%d bins at shift %d; %d bytes" % ( len(t1), len(t2), shift, bytes) print >>sys.stderr, "Size of original table:", len(t)*getsize(t), \ "bytes" n = len(t)-1 # last valid index maxshift = 0 # the most we can shift n and still have something left if n > 0: while n >> 1: n >>= 1 maxshift += 1 del n bytes = sys.maxint # smallest total size so far t = tuple(t) # so slices can be dict keys for shift in range(maxshift + 1): t1 = [] t2 = [] size = 2**shift bincache = {} for i in range(0, len(t), size): bin = t[i:i+size] index = bincache.get(bin) if index is None: index = len(t2) bincache[bin] = index t2.extend(bin) t1.append(index >> shift) # determine memory size b = len(t1)*getsize(t1) + len(t2)*getsize(t2) if trace: dump(t1, t2, shift, b) if b < bytes: best = t1, t2, shift bytes = b t1, t2, shift = best if trace: print >>sys.stderr, "Best:", dump(t1, t2, shift, bytes) if __debug__: # exhaustively verify that the decomposition is correct mask = ~((~0) << shift) # i.e., low-bit mask of shift bits for i in xrange(len(t)): assert t[i] == t2[(t1[i >> shift] << shift) + (i & mask)] return best if __name__ == "__main__": maketables()
30.434426
77
0.529132
1,378
11,139
4.218433
0.256168
0.018579
0.003097
0.002752
0.249613
0.24342
0.217616
0.212455
0.172544
0.146568
0
0.03157
0.343119
11,139
365
78
30.517808
0.762881
0.166083
0
0.28125
1
0
0.126772
0.026276
0
0
0.005762
0.00274
0.003906
0
null
null
0
0.011719
null
null
0.144531
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
facf5ba9d9c54f6351e3a588f530398889e61c19
1,723
py
Python
ontask/oauth/views.py
pinheiroo27/ontask_b
23fee8caf4e1c5694a710a77f3004ca5d9effeac
[ "MIT" ]
33
2017-12-02T04:09:24.000Z
2021-11-07T08:41:57.000Z
ontask/oauth/views.py
pinheiroo27/ontask_b
23fee8caf4e1c5694a710a77f3004ca5d9effeac
[ "MIT" ]
189
2017-11-16T04:06:29.000Z
2022-03-11T23:35:59.000Z
ontask/oauth/views.py
pinheiroo27/ontask_b
23fee8caf4e1c5694a710a77f3004ca5d9effeac
[ "MIT" ]
30
2017-11-30T03:35:44.000Z
2022-01-31T03:08:08.000Z
# -*- coding: utf-8 -*- """Call back view for OAuth2 authentication.""" from django import http from django.contrib import messages from django.contrib.auth.decorators import user_passes_test from django.core.handlers.wsgi import WSGIRequest from django.shortcuts import redirect, reverse from django.utils.translation import ugettext, ugettext_lazy as _ from ontask.core import SessionPayload from ontask.core.permissions import is_instructor from ontask.oauth import services @user_passes_test(is_instructor) def callback(request: WSGIRequest) -> http.HttpResponse: """Process the call received from the server. This is supposed to contain the token so it is saved to the database and then redirects to a page previously stored in the session object. :param request: Request object :return: Redirection to the stored page """ payload = SessionPayload(request.session) # If there is no payload, something went wrong. if payload is None: # Something is wrong with this execution. Return to action table. messages.error( request, _('Incorrect Canvas callback invocation.')) return redirect('action:index') # Check first if there has been some error error_string = request.GET.get('error') if error_string: messages.error( request, ugettext('Error in OAuth2 step 1 ({0})').format(error_string)) return redirect('action:index') status = services.process_callback(request, payload) if status: messages.error(request, status) return redirect('action:index') return redirect( request.session.get(services.return_url_key, reverse('action:index')))
33.784314
78
0.714452
219
1,723
5.552511
0.456621
0.049342
0.049342
0.061678
0
0
0
0
0
0
0
0.003658
0.206616
1,723
50
79
34.46
0.885881
0.27278
0
0.241379
0
0
0.09688
0
0
0
0
0
0
1
0.034483
false
0.068966
0.310345
0
0.482759
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
facf71d5d2af872c34d173c4d9e46ca81c6811b0
933
py
Python
_utils.py
catalinbotean/LicentaRosustnet
c59c1b539f3ff602827a1d2613c4684f5cf91d65
[ "BSD-3-Clause" ]
null
null
null
_utils.py
catalinbotean/LicentaRosustnet
c59c1b539f3ff602827a1d2613c4684f5cf91d65
[ "BSD-3-Clause" ]
null
null
null
_utils.py
catalinbotean/LicentaRosustnet
c59c1b539f3ff602827a1d2613c4684f5cf91d65
[ "BSD-3-Clause" ]
null
null
null
import enum from typing import Sequence, TypeVar, Type T = TypeVar("T", bound=enum.Enum) class StrEnumMeta(enum.EnumMeta): auto = enum.auto def from_str(self: Type[T], member: str) -> T: # type: ignore[misc] try: return self[member] except KeyError: # TODO: use `add_suggestion` from torchvision.prototype.utils._internal to improve the error message as # soon as it is migrated. raise ValueError(f"Unknown value '{member}' for {self.__name__}.") from None class StrEnum(enum.Enum, metaclass=StrEnumMeta): pass def sequence_to_str(seq: Sequence, separate_last: str = "") -> str: if not seq: return "" if len(seq) == 1: return f"'{seq[0]}'" head = "'" + "', '".join([str(item) for item in seq[:-1]]) + "'" tail = f"{'' if separate_last and len(seq) == 2 else ','} {separate_last}'{seq[-1]}'" return head + tail
29.15625
115
0.60343
125
933
4.408
0.544
0.065336
0.036298
0
0
0
0
0
0
0
0
0.007143
0.249732
933
32
116
29.15625
0.78
0.155413
0
0
0
0
0.174522
0.033121
0.05
0
0
0.03125
0
1
0.1
false
0.05
0.1
0
0.55
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
fadc9bf4539325b36b93f4d31e6ebd90427b62db
7,743
py
Python
src/bo4e/bo/marktlokation.py
bo4e/BO4E-python
28b12f853c8a496d14b133759b7aa2d6661f79a0
[ "MIT" ]
1
2022-03-02T12:49:44.000Z
2022-03-02T12:49:44.000Z
src/bo4e/bo/marktlokation.py
bo4e/BO4E-python
28b12f853c8a496d14b133759b7aa2d6661f79a0
[ "MIT" ]
21
2022-02-04T07:38:46.000Z
2022-03-28T14:01:53.000Z
src/bo4e/bo/marktlokation.py
bo4e/BO4E-python
28b12f853c8a496d14b133759b7aa2d6661f79a0
[ "MIT" ]
null
null
null
""" Contains Marktlokation class and corresponding marshmallow schema for de-/serialization """ import attr from marshmallow import fields from marshmallow_enum import EnumField # type:ignore[import] from bo4e.bo.geschaeftsobjekt import Geschaeftsobjekt, GeschaeftsobjektSchema from bo4e.bo.geschaeftspartner import Geschaeftspartner, GeschaeftspartnerSchema from bo4e.com.adresse import Adresse, AdresseSchema from bo4e.com.geokoordinaten import Geokoordinaten, GeokoordinatenSchema from bo4e.com.katasteradresse import Katasteradresse, KatasteradresseSchema from bo4e.com.messlokationszuordnung import Messlokationszuordnung, MesslokationszuordnungSchema from bo4e.enum.bilanzierungsmethode import Bilanzierungsmethode from bo4e.enum.botyp import BoTyp from bo4e.enum.energierichtung import Energierichtung from bo4e.enum.gasqualitaet import Gasqualitaet from bo4e.enum.gebiettyp import Gebiettyp from bo4e.enum.netzebene import Netzebene from bo4e.enum.sparte import Sparte from bo4e.enum.verbrauchsart import Verbrauchsart from bo4e.validators import validate_marktlokations_id # pylint: disable=too-many-instance-attributes, too-few-public-methods @attr.s(auto_attribs=True, kw_only=True) class Marktlokation(Geschaeftsobjekt): """ Object containing information about a Marktlokation .. HINT:: `Marktlokation JSON Schema <https://json-schema.app/view/%23?url=https://raw.githubusercontent.com/Hochfrequenz/BO4E-python/main/json_schemas/bo/MarktlokationSchema.json>`_ """ # required attributes bo_typ: BoTyp = attr.ib(default=BoTyp.MARKTLOKATION) #: Identifikationsnummer einer Marktlokation, an der Energie entweder verbraucht, oder erzeugt wird. marktlokations_id: str = attr.ib(validator=validate_marktlokations_id) #: Sparte der Marktlokation, z.B. 
Gas oder Strom sparte: Sparte #: Kennzeichnung, ob Energie eingespeist oder entnommen (ausgespeist) wird energierichtung: Energierichtung #: Die Bilanzierungsmethode, RLM oder SLP bilanzierungsmethode: Bilanzierungsmethode netzebene: Netzebene """ Netzebene, in der der Bezug der Energie erfolgt. Bei Strom Spannungsebene der Lieferung, bei Gas Druckstufe. Beispiel Strom: Niederspannung Beispiel Gas: Niederdruck. """ # optional attributes #: Verbrauchsart der Marktlokation. verbrauchsart: Verbrauchsart = attr.ib(default=None) #: Gibt an, ob es sich um eine unterbrechbare Belieferung handelt unterbrechbar: bool = attr.ib(default=None) #: Codenummer des Netzbetreibers, an dessen Netz diese Marktlokation angeschlossen ist. netzbetreibercodenr: str = attr.ib(default=None) #: Typ des Netzgebietes, z.B. Verteilnetz gebietstyp: Gebiettyp = attr.ib(default=None) #: Die ID des Gebietes in der ene't-Datenbank netzgebietsnr: str = attr.ib(default=None) # todo: rename to "id" (see 2021-12-15 update) #: Bilanzierungsgebiet, dem das Netzgebiet zugeordnet ist - im Falle eines Strom Netzes bilanzierungsgebiet: str = attr.ib(default=None) #: Codenummer des Grundversorgers, der für diese Marktlokation zuständig ist grundversorgercodenr: str = attr.ib(default=None) #: Die Gasqualität in diesem Netzgebiet. H-Gas oder L-Gas. Im Falle eines Gas-Netzes gasqualitaet: Gasqualitaet = attr.ib(default=None) #: Geschäftspartner, dem diese Marktlokation gehört endkunde: Geschaeftspartner = attr.ib(default=None) zugehoerige_messlokation: Messlokationszuordnung = attr.ib(default=None) # todo: rename to plural """ Aufzählung der Messlokationen, die zu dieser Marktlokation gehören. Es können 3 verschiedene Konstrukte auftreten: Beziehung 1 : 0 : Hier handelt es sich um Pauschalanlagen ohne Messung. D.h. die Verbrauchsdaten sind direkt über die Marktlokation abgreifbar. Beziehung 1 : 1 : Das ist die Standard-Beziehung für die meisten Fälle. 
In diesem Fall gibt es zu einer Marktlokation genau eine Messlokation. Beziehung 1 : N : Hier liegt beispielsweise eine Untermessung vor. Der Verbrauch einer Marklokation berechnet sich hier aus mehreren Messungen. Es gibt praktisch auch noch die Beziehung N : 1, beispielsweise bei einer Zweirichtungsmessung bei der durch eine Messeinrichtung die Messung sowohl für die Einspreiseseite als auch für die Aussspeiseseite erfolgt. Da Abrechnung und Bilanzierung jedoch für beide Marktlokationen getrennt erfolgt, werden nie beide Marktlokationen gemeinsam betrachtet. Daher lässt sich dieses Konstrukt auf zwei 1:1-Beziehung zurückführen, wobei die Messlokation in beiden Fällen die gleiche ist. In den Zuordnungen sind ist die arithmetische Operation mit der der Verbrauch einer Messlokation zum Verbrauch einer Marktlokation beitrögt mit aufgeführt. Der Standard ist hier die Addition. """ # only one of the following three optional attributes can be set #: Die Adresse, an der die Energie-Lieferung oder -Einspeisung erfolgt lokationsadresse: Adresse = attr.ib(default=None) geoadresse: Geokoordinaten = attr.ib(default=None) """ Alternativ zu einer postalischen Adresse kann hier ein Ort mittels Geokoordinaten angegeben werden (z.B. zur Identifikation von Sendemasten). """ katasterinformation: Katasteradresse = attr.ib(default=None) """ Alternativ zu einer postalischen Adresse und Geokoordinaten kann hier eine Ortsangabe mittels Gemarkung und Flurstück erfolgen. 
""" # todo: add kundengruppe # pylint:disable=unused-argument @lokationsadresse.validator @geoadresse.validator @katasterinformation.validator def validate_address_info(self, address_attribute, value): """Checks that there is one and only one valid adress given.""" all_address_attributes = [ self.lokationsadresse, self.geoadresse, self.katasterinformation, ] amount_of_given_address_infos = len([i for i in all_address_attributes if i is not None]) if amount_of_given_address_infos != 1: raise ValueError("No or more than one address information is given.") class MarktlokationSchema(GeschaeftsobjektSchema): """ Schema for de-/serialization of Marktlokation. Inherits from GeschaeftsobjektSchema. """ # class_name is needed to use the correct schema for deserialisation. # see function `deserialize` in geschaeftsobjekt.py class_name = Marktlokation # required attributes marktlokations_id = fields.Str(data_key="marktlokationsId") sparte = EnumField(Sparte) energierichtung = EnumField(Energierichtung) bilanzierungsmethode = EnumField(Bilanzierungsmethode) netzebene = EnumField(Netzebene) # optional attributes verbrauchsart = EnumField(Verbrauchsart, load_default=None) unterbrechbar = fields.Bool(load_default=None) netzbetreibercodenr = fields.Str(load_default=None) gebietstyp = EnumField(Gebiettyp, load_default=None) netzgebietsnr = fields.Str(load_default=None) bilanzierungsgebiet = fields.Str(load_default=None) grundversorgercodenr = fields.Str(load_default=None) gasqualitaet = EnumField(Gasqualitaet, load_default=None) endkunde = fields.Nested(GeschaeftspartnerSchema, load_default=None) zugehoerige_messlokation = fields.List( fields.Nested(MesslokationszuordnungSchema), load_default=None, data_key="zugehoerigeMesslokation" ) # only one of the following three optional attributes can be set lokationsadresse = fields.Nested(AdresseSchema, load_default=None) geoadresse = fields.Nested(GeokoordinatenSchema, load_default=None) katasterinformation = 
fields.Nested(KatasteradresseSchema, load_default=None)
47.213415
180
0.767661
883
7,743
6.679502
0.374858
0.048491
0.030858
0.03747
0.092065
0.055612
0.045439
0.035605
0.035605
0.017633
0
0.005585
0.167506
7,743
163
181
47.503067
0.909401
0.244866
0
0
0
0
0.022166
0.005793
0
0
0
0.01227
0
1
0.013699
false
0
0.246575
0
0.808219
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
faded8c9f8cb40cb8a3b311954261cad01666c3c
1,446
py
Python
db/csvs_test_examples/project/project_new_potential/doc.py
souissim/gridpath
4eeca2be24b485edc56026e38cfda83f4a6b27ea
[ "Apache-2.0" ]
44
2020-10-27T19:05:44.000Z
2022-03-22T17:17:37.000Z
db/csvs_test_examples/project/project_new_potential/doc.py
souissim/gridpath
4eeca2be24b485edc56026e38cfda83f4a6b27ea
[ "Apache-2.0" ]
67
2020-10-08T22:36:53.000Z
2022-03-22T22:58:33.000Z
db/csvs_test_examples/project/project_new_potential/doc.py
souissim/gridpath
4eeca2be24b485edc56026e38cfda83f4a6b27ea
[ "Apache-2.0" ]
21
2020-10-08T23:23:48.000Z
2022-03-28T01:21:21.000Z
# Copyright 2016-2020 Blue Marble Analytics LLC. All rights reserved. """ **Relevant tables:** +--------------------------------+----------------------------------------------+ |:code:`scenarios` table column |:code:`project_new_potential_scenario_id` | +--------------------------------+----------------------------------------------+ |:code:`scenarios` table feature |N/A | +--------------------------------+----------------------------------------------+ |:code:`subscenario_` table |:code:`subscenarios_project_new_potential` | +--------------------------------+----------------------------------------------+ |:code:`input_` tables |:code:`inputs_project_new_potential` | +--------------------------------+----------------------------------------------+ If the project portfolio includes projects of a 'new' capacity type (:code:`gen_new_bin`, :code:`gen_new_lin`, :code:`stor_new_bin`, or :code:`stor_new_lin`), the user may specify the minimum and maximum cumulative new capacity to be built in each period in the :code:`inputs_project_new_potential` table. For storage project, the minimum and maximum energy capacity may also be specified. All columns are optional and NULL values are interpreted by GridPath as no constraint. Projects that don't either a minimum or maximum cumulative new capacity constraints can be omitted from this table completely. """
51.642857
81
0.520055
144
1,446
5.0625
0.555556
0.05487
0.104252
0.05487
0.079561
0
0
0
0
0
0
0.006415
0.137621
1,446
27
82
53.555556
0.578188
0.99101
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
fadf886a8953595d93a3380cd1443e99e570dbb1
1,156
py
Python
src/credentials.py
LRY579/NOJ
4b63a9dd98290058d8f7af56e91a58f1a25c1292
[ "BSD-2-Clause" ]
null
null
null
src/credentials.py
LRY579/NOJ
4b63a9dd98290058d8f7af56e91a58f1a25c1292
[ "BSD-2-Clause" ]
null
null
null
src/credentials.py
LRY579/NOJ
4b63a9dd98290058d8f7af56e91a58f1a25c1292
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from keystoneauth1 import loading from keystoneauth1 import session import novaclient.client as nova_client import glanceclient.client as glance_client import neutronclient.v2_0.client as neutron_client import os # I think it's not a good idea to let the user change the code to provide their credentials # instead, I think the user should just source their own openstack-rc file # and then everything works def get_session(): loader = loading.get_plugin_loader('password') auth = loader.load_from_options( auth_url = os.environ['OS_AUTH_URL'], username = os.environ['OS_USERNAME'], password = os.environ['OS_PASSWORD'], project_id = os.environ['OS_TENANT_ID']) return session.Session(auth = auth) def get_nova_client(): VERSION = '2' sess = get_session() return nova_client.Client(VERSION, session = sess) def get_glance_client(): VERSION = '2' sess = get_session() return glance_client.Client(VERSION, session = sess) def get_neutron_client(): sess = get_session() return neutron_client.Client(session = sess)
31.243243
91
0.715398
161
1,156
4.956522
0.434783
0.030075
0.055138
0.075188
0.175439
0.175439
0.175439
0
0
0
0
0.007503
0.192907
1,156
36
92
32.111111
0.847803
0.199827
0
0.2
0
0
0.059783
0
0
0
0
0
0
1
0.16
false
0.08
0.24
0
0.56
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
faf0f965f51beb439bcbbdf93d32f142afb3a16b
446
py
Python
Extension.py
Shridat/Assignment1
6f52f6477260ff19030fce0d7d56a2f464e00aef
[ "MIT" ]
null
null
null
Extension.py
Shridat/Assignment1
6f52f6477260ff19030fce0d7d56a2f464e00aef
[ "MIT" ]
null
null
null
Extension.py
Shridat/Assignment1
6f52f6477260ff19030fce0d7d56a2f464e00aef
[ "MIT" ]
2
2021-10-30T16:04:58.000Z
2021-10-30T18:52:40.000Z
file_name=str(input("Input the Filename:")) if(file_name.split('.')[1]=='c'): print('The extension of the file is C') if(file_name.split('.')[1]=='cpp'): print('The extension of the file is C++') if(file_name.split('.')[1]=='java'): print('The extension of the file is Java') if(file_name.split('.')[1]=='py'): print('The extension of the file is python') if(file_name.split('.')[1]=='html'): print('The extension of the file is HTMl')
37.166667
46
0.64574
77
446
3.662338
0.246753
0.170213
0.177305
0.265957
0.787234
0.617021
0.617021
0.319149
0.319149
0.319149
0
0.012723
0.118834
446
11
47
40.545455
0.704835
0
0
0
0
0
0.450673
0
0
0
0
0
0
1
0
false
0
0
0
0
0.454545
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
faf8e00376f3cd6b9ca1d71894b7dd4affe771aa
28,077
py
Python
pysnmp-with-texts/INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB # Produced by pysmi-0.3.4 at Wed May 1 13:54:55 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint") chassis, = mibBuilder.importSymbols("INTELCORPORATION-MULTI-FLEX-SERVER-MIB", "chassis") groups, regModule = mibBuilder.importSymbols("INTELCORPORATION-MULTI-FLEX-SERVER-REG", "groups", "regModule") Index, INT32withException, Power, PowerLedStates, IdromBinary16, FaultLedStates, FeatureSet, PresenceLedStates, Presence = mibBuilder.importSymbols("INTELCORPORATION-MULTI-FLEX-SERVER-TC", "Index", "INT32withException", "Power", "PowerLedStates", "IdromBinary16", "FaultLedStates", "FeatureSet", "PresenceLedStates", "Presence") ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") Unsigned32, ModuleIdentity, Integer32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, ObjectIdentity, Counter64, TimeTicks, Counter32, Bits, iso, IpAddress, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "ModuleIdentity", "Integer32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "ObjectIdentity", "Counter64", "TimeTicks", "Counter32", "Bits", "iso", "IpAddress", 
"MibIdentifier") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") multiFlexServerDrivesMibModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 1, 1, 15)) multiFlexServerDrivesMibModule.setRevisions(('2007-08-16 12:00', '2007-07-20 16:45', '2007-06-07 20:30', '2007-06-07 13:30', '2007-05-30 17:00', '2007-04-18 19:05', '2007-04-09 15:45', '2007-04-09 15:30', '2007-03-27 11:30', '2007-03-14 11:30', '2007-03-06 10:30', '2007-02-22 17:00', '2006-12-28 17:30', '2006-12-05 10:30', '2006-11-27 15:30', '2006-11-20 13:30', '2006-11-07 11:30', '2006-10-02 10:24',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: multiFlexServerDrivesMibModule.setRevisionsDescriptions(('Fixed up minor errors causing some managers grief (ommission or addition of commas in lists) Corrected a few entries that were marked as read-write when they should have been read-only', 'Minor edit to make MIB SMIv2 compliant Dropped driveBackplaneBmcFirmwareVersion as there is no BMC for the drive backplane', 'Added the IdromBinary16 to represent the asset tag, part number, and serial number fields within the IDROM fields.', 'Corrected maximum/nominal IDROM parameters and comments', 'Added enumeration for exceptions Added missing Presence column to sharedDriveTable', 'Moved the trees and chassis nodes around to accomodate the unique power supply characteristics', 'Moved driveBackplane IDROM data to sharedDrives tree from storage tree where it makes more logical sense Relocated both tables after the driveBackplane tree', 'Dropped sDriveAlias', 'Renamed all references of Array to StoragePool', 'Renamed all references of Disk to Drive Dropped redundant sDriveIndex (moved and replaced it with sDriveSlotNumber)', "Changed Mask representation from an Opaque to a DisplayString at the request of the architects such that it now is an ASCII representation of bit string reflecting the presence with the left most 'bit' 
being bit 1 and max* bits being represented.", 'Renamed MIB file and updated internal relevance to formal product name Multi-Flex Server', 'Corrected sharedDiskStatsTable INDEX to AUGMENTS.', "Updated several object types to reflect changes in the OEM objects. sDiskArrayID Integer32 -> INTEGER sDiskSequenceNumber Integer32 -> INTEGER sDiskDriveType DisplayString -> INTEGER Cleaned up some illegal character usage to make it SMIv2 compliant. Renamed all of the *Transfered to *Transferred Renumbered sharedDiskStatsTable to match OEM's.", 'Removed nolonger supported SATA & SAS drive feature tables Renumbered Stats from { sharedDisks 4 } to { sharedDisks 2 }', 'Replaced sharedDisksStats table index with sDiskIndex to be consistent with the rest of the tables. All tables are indexed by the drive ID', "Consolodated use of Presence datatype and changed 'chassis' to 'chassis'", "Partitioned off and created as it's own module",)) if mibBuilder.loadTexts: multiFlexServerDrivesMibModule.setLastUpdated('200708161200Z') if mibBuilder.loadTexts: multiFlexServerDrivesMibModule.setOrganization('Intel Corporation') if mibBuilder.loadTexts: multiFlexServerDrivesMibModule.setContactInfo('Brian Kurle Intel Corporation JF5-2-C3 Tel: 503-712-5032 E-Mail: brianx.j.kurle@intel.com') if mibBuilder.loadTexts: multiFlexServerDrivesMibModule.setDescription('Shared Disks Module of the Multi-Flex Server') maxSharedDrives = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 15), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: maxSharedDrives.setStatus('current') if mibBuilder.loadTexts: maxSharedDrives.setDescription('Maximum number of Shared Drives possible in this chassis') numOfSharedDrives = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 25), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numOfSharedDrives.setStatus('current') if mibBuilder.loadTexts: numOfSharedDrives.setDescription('The number of Shared Drives in the system') sDrivePresenceMask = 
MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 35), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDrivePresenceMask.setStatus('current') if mibBuilder.loadTexts: sDrivePresenceMask.setDescription("ASCII representation of bit string reflecting the presence of the shared drives with the left most 'bit' being bit 1 and maxSharedDrives bits being represented. Thus, '11001111111111' would express that all the shared drives (of fourteen shared drives) are present with exception of drives 3 & 4") sharedDrives = ObjectIdentity((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205)) if mibBuilder.loadTexts: sharedDrives.setStatus('current') if mibBuilder.loadTexts: sharedDrives.setDescription('Container for Shared Drive specific information as well as all components logically contained within.') driveBackplane = ObjectIdentity((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1)) if mibBuilder.loadTexts: driveBackplane.setStatus('current') if mibBuilder.loadTexts: driveBackplane.setDescription('IDROM information from the Drive Backplane') driveBackplaneVendor = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 1), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneVendor.setStatus('current') if mibBuilder.loadTexts: driveBackplaneVendor.setDescription('Device manufacturer') driveBackplaneMfgDate = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneMfgDate.setStatus('current') if mibBuilder.loadTexts: driveBackplaneMfgDate.setDescription('Manufacture date/time') driveBackplaneDeviceName = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneDeviceName.setStatus('current') if mibBuilder.loadTexts: driveBackplaneDeviceName.setDescription('Device Name') driveBackplanePart = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 4), 
IdromBinary16()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplanePart.setStatus('current') if mibBuilder.loadTexts: driveBackplanePart.setDescription('Device Part Number') driveBackplaneSerialNo = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 5), IdromBinary16()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneSerialNo.setStatus('current') if mibBuilder.loadTexts: driveBackplaneSerialNo.setDescription('Device Serial Number') driveBackplaneMaximumPower = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 6), Power()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneMaximumPower.setStatus('current') if mibBuilder.loadTexts: driveBackplaneMaximumPower.setDescription('Static maximum power generation / consumption (in watts): <0 - Negative numbers indicate device consumes power (in watts) >0 - Positive numbers indicate device generates power (in watts) 0 - Device is passive (does not not consume or generate power) -1 - Maximum power generation/consumption not known or specified') driveBackplaneNominalPower = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 7), Power()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneNominalPower.setStatus('current') if mibBuilder.loadTexts: driveBackplaneNominalPower.setDescription('Static Nominal power generation / consumption (in watts): <0 - Negative numbers indicate device consumes power (in watts) >0 - Positive numbers indicate device generates power (in watts) 0 - Device is passive (does not not consume or generate power) -1 - Nominal power generation/consumption not known or specified') driveBackplaneAssetTag = MibScalar((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 1, 8), IdromBinary16()).setMaxAccess("readonly") if mibBuilder.loadTexts: driveBackplaneAssetTag.setStatus('current') if mibBuilder.loadTexts: driveBackplaneAssetTag.setDescription('Asset Tag # of device') sharedDriveTable = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 
205, 2), ) if mibBuilder.loadTexts: sharedDriveTable.setStatus('current') if mibBuilder.loadTexts: sharedDriveTable.setDescription('Each row describes a shared drive in the chassis') sharedDriveEntry = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1), ).setIndexNames((0, "INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveSlotNumber")) if mibBuilder.loadTexts: sharedDriveEntry.setStatus('current') if mibBuilder.loadTexts: sharedDriveEntry.setDescription('The parameters of a physical drive.') sDriveSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 1), Index()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveSlotNumber.setStatus('current') if mibBuilder.loadTexts: sDriveSlotNumber.setDescription('The slot number on the enclosure where the drive is located (drive ID)') sDrivePresence = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 2), Presence()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDrivePresence.setStatus('current') if mibBuilder.loadTexts: sDrivePresence.setDescription('column used to flag the existence of a particular FRU') sDriveInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveInterface.setStatus('current') if mibBuilder.loadTexts: sDriveInterface.setDescription('The Drive Interface of the physical drive.') sDriveModelNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 4), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveModelNumber.setStatus('current') if mibBuilder.loadTexts: sDriveModelNumber.setDescription('The Model Number of the physical drive.') sDriveSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 5), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveSerialNumber.setStatus('current') if mibBuilder.loadTexts: sDriveSerialNumber.setDescription('The Serial 
Number of the physical drive.') sDriveFirmwareVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 6), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveFirmwareVersion.setStatus('current') if mibBuilder.loadTexts: sDriveFirmwareVersion.setDescription('The Firmware Version of the physical drive.') sDriveProtocolVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 7), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveProtocolVersion.setStatus('current') if mibBuilder.loadTexts: sDriveProtocolVersion.setDescription('The Protocol Version of the physical drive.') sDriveOperationalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 8), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveOperationalStatus.setStatus('current') if mibBuilder.loadTexts: sDriveOperationalStatus.setDescription('The Operational Status of the physical drive.') sDriveCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 9), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveCondition.setStatus('current') if mibBuilder.loadTexts: sDriveCondition.setDescription('The condition of the physical drive, e.g. PFA.') sDriveOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 10), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveOperation.setStatus('current') if mibBuilder.loadTexts: sDriveOperation.setDescription('The current operation on the physical drive, e.g. mediapatrolling, migrating.') sDriveConfiguration = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 11), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveConfiguration.setStatus('current') if mibBuilder.loadTexts: sDriveConfiguration.setDescription('The configuration on the physical drive, e.g. 
array %d seqno %d, or dedicated spare.') sDriveStoragePoolID = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-32, -16, -1, 256))).clone(namedValues=NamedValues(("notApplicable", -32), ("unknown", -16), ("unavailable", -1), ("notavailable", 256)))).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStoragePoolID.setStatus('current') if mibBuilder.loadTexts: sDriveStoragePoolID.setDescription('The drive array id, if the physical drive is part of a drive array; the spare id, if the drive is a spare.') sDriveSequenceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-32, -16, -1))).clone(namedValues=NamedValues(("notApplicable", -32), ("unknown", -16), ("unavailable", -1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveSequenceNumber.setStatus('current') if mibBuilder.loadTexts: sDriveSequenceNumber.setDescription('The sequence number of the drive in the drive array. 
Valid only when the drive is part of a drive array.') sDriveEnclosureID = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 14), INT32withException()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveEnclosureID.setStatus('current') if mibBuilder.loadTexts: sDriveEnclosureID.setDescription('The id of the enclosure to which the drive is inserted.') sDriveBlockSize = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 15), INT32withException()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveBlockSize.setStatus('current') if mibBuilder.loadTexts: sDriveBlockSize.setDescription(' The Block Size in bytes of the physical drive.') sDrivePhysicalCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 16), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDrivePhysicalCapacity.setStatus('current') if mibBuilder.loadTexts: sDrivePhysicalCapacity.setDescription(' The Physical Size in bytes of the physical drive.') sDriveConfigurableCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 17), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveConfigurableCapacity.setStatus('current') if mibBuilder.loadTexts: sDriveConfigurableCapacity.setDescription(' The Configurable Size in bytes of the physical drive.') sDriveUsedCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 18), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveUsedCapacity.setStatus('current') if mibBuilder.loadTexts: sDriveUsedCapacity.setDescription('The Used Size in bytes of the physical drive.') sDriveType = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 2, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-32, -16, -1, 1, 4))).clone(namedValues=NamedValues(("notApplicable", -32), ("unknown", -16), ("unavailable", -1), ("sata", 1), ("sas", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: 
sDriveType.setStatus('current') if mibBuilder.loadTexts: sDriveType.setDescription('The type of the physical drive. e.g. SATA or SAS') sharedDriveStatsTable = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3), ) if mibBuilder.loadTexts: sharedDriveStatsTable.setStatus('current') if mibBuilder.loadTexts: sharedDriveStatsTable.setDescription('A table of Physical Drive Statistics (augments sharedDriveTable)') sharedDriveStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1), ) sharedDriveEntry.registerAugmentions(("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sharedDriveStatsEntry")) sharedDriveStatsEntry.setIndexNames(*sharedDriveEntry.getIndexNames()) if mibBuilder.loadTexts: sharedDriveStatsEntry.setStatus('current') if mibBuilder.loadTexts: sharedDriveStatsEntry.setDescription('The statistics of a physical drive since its last reset or statistics rest.') sDriveStatsDataTransferred = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 1), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsDataTransferred.setStatus('current') if mibBuilder.loadTexts: sDriveStatsDataTransferred.setDescription('The total number of bytes of data transfered to and from the controller.') sDriveStatsReadDataTransferred = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsReadDataTransferred.setStatus('current') if mibBuilder.loadTexts: sDriveStatsReadDataTransferred.setDescription('The total number of bytes of data transfered from the controller.') sDriveStatsWriteDataTransferred = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsWriteDataTransferred.setStatus('current') if mibBuilder.loadTexts: sDriveStatsWriteDataTransferred.setDescription('The total number of bytes of data transfered to the controller.') sDriveStatsNumOfErrors = 
MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfErrors.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfErrors.setDescription('The total number of errors.') sDriveStatsNumOfNonRWErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfNonRWErrors.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfNonRWErrors.setDescription('The total number of non-RW errors.') sDriveStatsNumOfReadErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfReadErrors.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfReadErrors.setDescription('The total number of Read errors.') sDriveStatsNumOfWriteErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfWriteErrors.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfWriteErrors.setDescription('The total number of Write errors.') sDriveStatsNumOfIORequests = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfIORequests.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfIORequests.setDescription('The total number of IO requests.') sDriveStatsNumOfNonRWRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 9), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfNonRWRequests.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfNonRWRequests.setDescription('The total number of non-RW requests.') sDriveStatsNumOfReadRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 10), 
Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfReadRequests.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfReadRequests.setDescription('The total number of read requests.') sDriveStatsNumOfWriteRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 11), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsNumOfWriteRequests.setStatus('current') if mibBuilder.loadTexts: sDriveStatsNumOfWriteRequests.setDescription('The total number of write requests.') sDriveStatsStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsStartTime.setStatus('current') if mibBuilder.loadTexts: sDriveStatsStartTime.setDescription('The time when the statistics date starts to accumulate since last statistics reset.') sDriveStatsCollectionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 10, 205, 3, 1, 13), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: sDriveStatsCollectionTime.setStatus('current') if mibBuilder.loadTexts: sDriveStatsCollectionTime.setDescription('The time when the statistics data was collected or updated last time.') sDriveGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 343, 2, 19, 1, 2, 2, 2, 15)).setObjects(("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "maxSharedDrives"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "numOfSharedDrives"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDrivePresenceMask"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneVendor"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneMfgDate"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneDeviceName"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplanePart"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneSerialNo"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneMaximumPower"), 
("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneNominalPower"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "driveBackplaneAssetTag"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveSlotNumber"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDrivePresence"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveInterface"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveModelNumber"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveSerialNumber"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveFirmwareVersion"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveProtocolVersion"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveOperationalStatus"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveCondition"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveOperation"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveConfiguration"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStoragePoolID"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveSequenceNumber"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveEnclosureID"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveBlockSize"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDrivePhysicalCapacity"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveConfigurableCapacity"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveUsedCapacity"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveType"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsDataTransferred"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsReadDataTransferred"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsWriteDataTransferred"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfErrors"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfNonRWErrors"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", 
"sDriveStatsNumOfReadErrors"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfWriteErrors"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfIORequests"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfNonRWRequests"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfReadRequests"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsNumOfWriteRequests"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsStartTime"), ("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", "sDriveStatsCollectionTime")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): sDriveGroup = sDriveGroup.setStatus('current') if mibBuilder.loadTexts: sDriveGroup.setDescription('Description.') mibBuilder.exportSymbols("INTELCORPORATION-MULTI-FLEX-SERVER-DRIVE-MIB", sharedDriveStatsEntry=sharedDriveStatsEntry, sDriveStoragePoolID=sDriveStoragePoolID, driveBackplanePart=driveBackplanePart, multiFlexServerDrivesMibModule=multiFlexServerDrivesMibModule, driveBackplaneMaximumPower=driveBackplaneMaximumPower, driveBackplaneSerialNo=driveBackplaneSerialNo, driveBackplaneAssetTag=driveBackplaneAssetTag, sDriveStatsNumOfWriteErrors=sDriveStatsNumOfWriteErrors, sDriveUsedCapacity=sDriveUsedCapacity, driveBackplaneNominalPower=driveBackplaneNominalPower, sharedDrives=sharedDrives, sDriveStatsCollectionTime=sDriveStatsCollectionTime, sDriveOperationalStatus=sDriveOperationalStatus, sDrivePresence=sDrivePresence, sDriveInterface=sDriveInterface, sDrivePhysicalCapacity=sDrivePhysicalCapacity, maxSharedDrives=maxSharedDrives, sharedDriveStatsTable=sharedDriveStatsTable, sDriveStatsNumOfReadErrors=sDriveStatsNumOfReadErrors, sDriveType=sDriveType, sDriveStatsNumOfIORequests=sDriveStatsNumOfIORequests, sDriveSerialNumber=sDriveSerialNumber, sDriveGroup=sDriveGroup, sDrivePresenceMask=sDrivePresenceMask, sDriveModelNumber=sDriveModelNumber, driveBackplane=driveBackplane, numOfSharedDrives=numOfSharedDrives, 
sDriveConfiguration=sDriveConfiguration, sDriveStatsNumOfErrors=sDriveStatsNumOfErrors, driveBackplaneVendor=driveBackplaneVendor, sDriveStatsNumOfNonRWRequests=sDriveStatsNumOfNonRWRequests, sDriveStatsNumOfWriteRequests=sDriveStatsNumOfWriteRequests, sharedDriveEntry=sharedDriveEntry, sDriveBlockSize=sDriveBlockSize, sDriveSlotNumber=sDriveSlotNumber, driveBackplaneMfgDate=driveBackplaneMfgDate, sDriveFirmwareVersion=sDriveFirmwareVersion, sDriveSequenceNumber=sDriveSequenceNumber, driveBackplaneDeviceName=driveBackplaneDeviceName, sDriveConfigurableCapacity=sDriveConfigurableCapacity, sDriveStatsStartTime=sDriveStatsStartTime, sDriveProtocolVersion=sDriveProtocolVersion, sDriveEnclosureID=sDriveEnclosureID, sDriveStatsDataTransferred=sDriveStatsDataTransferred, sDriveStatsWriteDataTransferred=sDriveStatsWriteDataTransferred, sDriveStatsNumOfNonRWErrors=sDriveStatsNumOfNonRWErrors, PYSNMP_MODULE_ID=multiFlexServerDrivesMibModule, sDriveStatsNumOfReadRequests=sDriveStatsNumOfReadRequests, sDriveCondition=sDriveCondition, sDriveOperation=sDriveOperation, sDriveStatsReadDataTransferred=sDriveStatsReadDataTransferred, sharedDriveTable=sharedDriveTable)
155.983333
3,322
0.785625
3,272
28,077
6.740831
0.14945
0.056583
0.099021
0.071681
0.474973
0.363801
0.182989
0.163312
0.13697
0.113937
0
0.060649
0.087403
28,077
179
3,323
156.854749
0.800141
0.013605
0
0.011696
0
0.081871
0.369898
0.104096
0
0
0
0
0
1
0
false
0.011696
0.052632
0
0.052632
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
4f028c1f594b3043319b005f48b66bbffcdbcaa4
2,750
py
Python
bench_press/run/actions/action.py
s-tian/bench-press
04240a8c9402c2b8639900567d6bfbee3cf0222e
[ "MIT" ]
9
2020-03-18T05:11:07.000Z
2020-11-21T03:17:30.000Z
bench_press/run/actions/action.py
s-tian/bench-press
04240a8c9402c2b8639900567d6bfbee3cf0222e
[ "MIT" ]
6
2020-11-13T17:45:10.000Z
2022-02-09T23:36:40.000Z
bench_press/run/actions/action.py
s-tian/bench-press
04240a8c9402c2b8639900567d6bfbee3cf0222e
[ "MIT" ]
4
2020-03-23T07:51:40.000Z
2020-11-10T03:15:14.000Z
import time import numpy as np class Action: def apply(self, environment): raise NotImplementedError class EndAction: def apply(self, environment): pass class DeltaAction(Action): def __init__(self, delta): assert len(delta) == 3, 'A delta action must contain three dimensions (x, y, z)' self.delta = np.array(delta) def apply(self, environment): environment.move_delta(self.delta) def inverse(self): return DeltaAction(-self.delta) def __str__(self): return f'[Action: DeltaAction {self.delta}]' class AbsoluteAction(Action): def __init__(self, pos): assert len(pos) == 3, 'A position command must contain three dimensions (x, y, z)' self.pos = np.array(pos) def apply(self, environment): environment.move_to(self.pos) def __str__(self): return f'[Action: AbsoluteAction {self.pos}]' class SleepAction(Action): def __init__(self, time): assert 0 <= time, 'Sleep time has to be nonnegative' self.time = time def apply(self, environment): time.sleep(self.time) def inverse(self): return self def __str__(self): return f'[Action: Sleep for {self.time} seconds]' class DebugAction(Action): def apply(self, environment): import ipdb; ipdb.set_trace() def inverse(self): return self class SequentialAction(Action): # A sequence of actions, taken one after the other def __init__(self, action_list): assert all([isinstance(action, Action) for action in action_list]), 'All list elems must be actions!' self.action_list = action_list def apply(self, environment): for action in self.action_list: action.apply(environment) def inverse(self): """ Compute the "inverse" of a sequence of actions. This just computes the inverse of each action individually, and returns them in reverse order. This is so that applying the inverse right after an action undoes the most recent action first, then the second most recent, etc. 
:return: the SequentialAction representing the inverse """ inverse_actions = [] for action in reversed(self.action_list): inverse_actions.append(action.inverse()) return SequentialAction(inverse_actions) def __str__(self): return '\n'.join([str(a) for a in self.action_list]) class DynamixelAngleAction(Action): def __init__(self, angle): self.angle = angle def apply(self, environment): environment.move_dyna_to_angle(self.angle) def __str__(self): return f'[Action: Dynamixel to {self.angle} degrees]'
25
122
0.651273
346
2,750
5.014451
0.289017
0.036888
0.055331
0.106052
0.217867
0.156772
0.03804
0.03804
0
0
0
0.001464
0.254909
2,750
109
123
25.229358
0.84529
0.137091
0
0.301587
0
0
0.141075
0
0
0
0
0
0.063492
1
0.349206
false
0.015873
0.047619
0.126984
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
4f0711bd2678b5ff53245846eae56f46d25130f6
4,382
py
Python
NER/src/CH/predict_update.py
STHSF/DeepNaturallanguageprocessing
1fe79a961abf3e55fb3ce2b0266b26f56ade2483
[ "Apache-2.0" ]
15
2016-09-13T05:41:53.000Z
2019-12-30T13:01:33.000Z
NER/src/CH/predict_update.py
STHSF/DeepNaturallanguageprocessing
1fe79a961abf3e55fb3ce2b0266b26f56ade2483
[ "Apache-2.0" ]
20
2020-01-28T21:42:25.000Z
2022-02-10T00:44:02.000Z
NER/src/CH/predict_update.py
STHSF/DeepNaturallanguageprocessing
1fe79a961abf3e55fb3ce2b0266b26f56ade2483
[ "Apache-2.0" ]
10
2016-09-28T02:56:11.000Z
2022-03-12T16:41:20.000Z
# coding=utf-8 """ Training model """ import pickle import time import numpy as np import tensorflow as tf from sklearn.model_selection import train_test_split import config from batch_generate import BatchGenerator from sequence_labelling_ner_crf import SequenceLabelingModel # 数据导入 # data_path = "/Users/li/workshop/MyRepository/DeepNaturalLanguageProcessing/NER/src/CH/" with open('data.pkl', 'rb') as pk: X = pickle.load(pk) y = pickle.load(pk) word2id = pickle.load(pk) id2word = pickle.load(pk) tag2id = pickle.load(pk) id2tag = pickle.load(pk) # 划分训练集、测试集、和验证集 X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=0.2, random_state=42) print('X_train.shape={}, y_train.shape={}; ' '\nX_valid.shape={}, y_valid.shape={};' '\nX_test.shape={}, y_test.shape={}'.format(X_train.shape, y_train.shape, X_valid.shape, y_valid.shape, X_test.shape, y_test.shape)) print('Creating the data generator ...') data_train = BatchGenerator(X_train, y_train, shuffle=True) data_valid = BatchGenerator(X_valid, y_valid, shuffle=False) data_test = BatchGenerator(X_test, y_test, shuffle=False) print('Finished creating the data generator.') class configuration(object): # hyper-parameter init_scale = config.FLAGS.init_scale batch_size = config.FLAGS.batch_size # size of per batch num_steps = config.FLAGS.max_sequence vocab_size = config.FLAGS.vocab_size embedding_size = config.FLAGS.embedding_size hidden_units = config.FLAGS.hidden_units layers_num = config.FLAGS.layers_num num_classes = config.FLAGS.num_classes max_grad_norm = config.FLAGS.max_grad_norm model_save_path = config.FLAGS.model_save_path lr = config.FLAGS.lr keep_pro = config.FLAGS.dropout train_config = configuration() eval_config = configuration() eval_config.batch_size = 1 decay = 0.85 max_epoch = 3 max_max_epoch = 10 def main(): with tf.Graph().as_default(): initializer = 
tf.random_uniform_initializer(-train_config.init_scale, train_config.init_scale) with tf.name_scope("Train") as train_scope: with tf.variable_scope("Train_Model", reuse=None, initializer=initializer): train_model = SequenceLabelingModel(train_scope, is_training=True, config=train_config) tf.summary.scalar("Training Loss", train_model.loss) tf.summary.scalar("Learning Rate", train_model.lr) with tf.name_scope("Valid") as valid_scope: with tf.variable_scope("Valid_Model", reuse=True, initializer=initializer): valid_model = SequenceLabelingModel(valid_scope, is_training=False, config=train_config) tf.summary.scalar("Validation Loss", valid_model.loss) with tf.Session() as session: train_summary_writer = tf.summary.FileWriter('data/model/tensorflowlogs/train', session.graph) valid_summary_writer = tf.summary.FileWriter('data/model/tensorflowlogs/valid', session.graph) session.run(tf.global_variables_initializer()) for epoch in range(max_max_epoch): _batch_size = train_model.batch_size print("model_batch_size in run_epoch", _batch_size) data_size = data_train.y.shape[0] print("data_size %s", data_size) batch_num = int(data_size / _batch_size) print("batch_num: %d", batch_num) for batch in range(batch_num): fetches = [train_model.logits, train_model.transition_params, train_model.loss, train_model.train_op] X_batch, y_batch = data_train.next_batch(_batch_size) feed_dict = {train_model.source_input: X_batch, train_model.target_input: y_batch} _logits, _transition_params, _loss, _ = session.run(fetches, feed_dict) print _loss if __name__ == '__main__': main()
40.201835
121
0.644455
550
4,382
4.830909
0.267273
0.04968
0.027098
0.011291
0.147535
0.09936
0.058713
0.0414
0
0
0
0.006481
0.260612
4,382
109
122
40.201835
0.81358
0.035144
0
0
1
0
0.090952
0.014762
0
0
0
0
0
0
null
null
0
0.097561
null
null
0.085366
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
4f0c55ea8fcd7f4363f7d9e6b60d178dfba69d15
689
py
Python
kiwiii/test/parser/test_helper.py
mojaie/kiwiii-server
2f550d58bfc9876b9153a2899610e8ca256c294e
[ "MIT" ]
null
null
null
kiwiii/test/parser/test_helper.py
mojaie/kiwiii-server
2f550d58bfc9876b9153a2899610e8ca256c294e
[ "MIT" ]
3
2017-09-08T02:00:30.000Z
2017-10-05T08:14:59.000Z
kiwiii/test/parser/test_helper.py
mojaie/kiwiii-server
2f550d58bfc9876b9153a2899610e8ca256c294e
[ "MIT" ]
null
null
null
# # (C) 2014-2017 Seiji Matsuoka # Licensed under the MIT License (MIT) # http://opensource.org/licenses/MIT # import os import unittest from kiwiii.parser import helper TEST_FILE = os.path.join( os.path.dirname(__file__), "../../../resources/raw/instruments/SpectraMaxM2.txt" ) class TestHelper(unittest.TestCase): def test_well_index(self): self.assertEqual(helper.well_index("A1"), 0) self.assertEqual(helper.well_index("A24"), 23) self.assertEqual(helper.well_index("P1"), 360) self.assertEqual(helper.well_index("P24"), 383) self.assertEqual(helper.well_index("A01"), 0) self.assertEqual(helper.well_index("p24"), 383)
26.5
57
0.689405
90
689
5.133333
0.533333
0.136364
0.272727
0.324675
0.419913
0.225108
0.155844
0
0
0
0
0.055363
0.161103
689
25
58
27.56
0.743945
0.145138
0
0
0
0
0.114923
0.087479
0
0
0
0
0.4
1
0.066667
false
0
0.2
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
4f0d2c131ccff9a41d0a446c111066b2fe28c87f
3,104
py
Python
src/3rdparty/sheets/plugins/scripting/scripts/logger.py
afarcat/QtSheetView
6d5ef3418238e9402c5a263a6f499557cc7215bf
[ "Apache-2.0" ]
null
null
null
src/3rdparty/sheets/plugins/scripting/scripts/logger.py
afarcat/QtSheetView
6d5ef3418238e9402c5a263a6f499557cc7215bf
[ "Apache-2.0" ]
null
null
null
src/3rdparty/sheets/plugins/scripting/scripts/logger.py
afarcat/QtSheetView
6d5ef3418238e9402c5a263a6f499557cc7215bf
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env kross import os, time, Kross, KSpread T = Kross.module("kdetranslation") class Logger: def __init__(self, scriptaction): self.scriptaction = scriptaction #self.currentpath = self.scriptaction.currentPath() #self.undostack = KSpread.undoStack() #self.undostack.connect("indexChanged(int)",self.indexChanged) #file = os.path(self.getLogDir(),"KSpread.log") self.forms = Kross.module("forms") self.dialog = self.forms.createDialog(T.i18n("Logger")) self.dialog.setButtons("Ok|Cancel") self.dialog.setFaceType("Plain") #Auto Plain List Tree Tabbed savepage = self.dialog.addPage(T.i18nc("Options page name", "Save"),T.i18n("Save to Log File"),"document-save") self.savewidget = self.forms.createFileWidget(savepage, "kfiledialog:///kspreadlogger") self.savewidget.setMode("Saving") self.savewidget.setFilter("*.txt *.log|%(1)s\n*|%(2)s" % { '1' : T.i18n("Log File"), '2' : T.i18n("All Files") } ) if self.dialog.exec_loop(): filename = self.savewidget.selectedFile() if os.path.isfile(filename): if self.forms.showMessageBox("WarningContinueCancel", T.i18n("Overwrite file?"), T.i18n("The file \"%1\" does already exist. 
Overwrite the file?", [filename])) != "Continue": raise Exception, T.i18n("Aborted.") sheetname = KSpread.currentSheet().sheetName() cellrange = "A1:F50" #FIXME try: self.file = open(filename, "w") self.startLogging(sheetname, cellrange) except IOError, (errno, strerror): raise Exception, T.i18n("Failed to write Log File \"%1\":\n%2", [filename], [strerror]) def addLog(self, message, flush = True): date = time.strftime("%Y-%M-%d %H:%M.%S") self.file.write( "%s %s\n" % (date,message) ) if flush: self.file.flush() def startLogging(self, sheetname, cellrange = ""): self.sheet = KSpread.sheetByName(sheetname) self.listener = KSpread.createListener(sheetname, cellrange) if not self.listener: raise Exception, T.i18n("Failed to create listener for sheetname '%1' and range '%2'", [sheetname], [cellrange]) self.addLog( "Start logging sheet='%s' range='%s'" % (sheetname,cellrange) ) self.listener.connect("regionChanged(QVariantList)", self.regionChanged) self.listener.connect("cellChanged(int,int)", self.cellChanged) def regionChanged(self, regions): self.lastCount = len(regions) print "Logger: Region changed %s" % regions self.addLog( "regions=%s" % regions ) def cellChanged(self, column, row): text = self.sheet.text(column, row) if self.lastCount > 1: flush = False self.lastCount -= 1 else: flush = True print "Logger: Cell changed column=%i row=%i text=%s" % (column,row,text) self.addLog( "column=%i row=%i text=%s" % (column,row,text), flush ) Logger( self )
43.71831
190
0.610825
357
3,104
5.296919
0.369748
0.023797
0.023797
0.030143
0.059228
0.059228
0.030672
0.030672
0.030672
0
0
0.014388
0.238724
3,104
70
191
44.342857
0.785865
0.078608
0
0
0
0
0.202243
0.026639
0
0
0
0.014286
0
0
null
null
0
0.019231
null
null
0.038462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
2
4f0e7657d15b4f76d2bb38999b9c06b6e60156b5
1,603
py
Python
pyannote/audio/train/checkpoint.py
Mymoza/pyannote-audio
9ac612ee6b854a1a65c3d8992856550304969674
[ "MIT" ]
1
2020-09-30T23:47:27.000Z
2020-09-30T23:47:27.000Z
pyannote/audio/train/checkpoint.py
picheny-nyu/pyannote-audio
9ac612ee6b854a1a65c3d8992856550304969674
[ "MIT" ]
null
null
null
pyannote/audio/train/checkpoint.py
picheny-nyu/pyannote-audio
9ac612ee6b854a1a65c3d8992856550304969674
[ "MIT" ]
1
2020-02-06T16:22:54.000Z
2020-02-06T16:22:54.000Z
#!/usr/bin/env python # encoding: utf-8 # The MIT License (MIT) # Copyright (c) 2016-2019 CNRS # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # AUTHORS # Hervé BREDIN - http://herve.niderb.fr from pyannote.audio.util import mkdir_p from .callback import Callback class Checkpoint(Callback): """Model checkpoints""" def __init__(self): super().__init__() def on_train_start(self, trainer): mkdir_p(trainer.log_dir_) def load_epoch(self, trainer, epoch): trainer.load_epoch(epoch) def on_epoch_end(self, trainer): trainer.save_epoch()
33.395833
79
0.746725
237
1,603
4.970464
0.56962
0.074703
0.022071
0
0
0
0
0
0
0
0
0.00687
0.182782
1,603
47
80
34.106383
0.892366
0.731129
0
0
0
0
0
0
0
0
0
0
0
1
0.363636
false
0
0.181818
0
0.636364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
87815daadc010085e501b4708152cf7fe5cd2adb
15,779
py
Python
ecommerce_recommender_system_with_word2vec.py
ryosuke1224-haas/ecommerce_recommender_system_with_ward2vec
b097f5c0e7ccaedf3edec5a7eb6781ecf4d9320d
[ "MIT" ]
null
null
null
ecommerce_recommender_system_with_word2vec.py
ryosuke1224-haas/ecommerce_recommender_system_with_ward2vec
b097f5c0e7ccaedf3edec5a7eb6781ecf4d9320d
[ "MIT" ]
null
null
null
ecommerce_recommender_system_with_word2vec.py
ryosuke1224-haas/ecommerce_recommender_system_with_ward2vec
b097f5c0e7ccaedf3edec5a7eb6781ecf4d9320d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Ecommerce Recommender System with Word2Vec.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/1HiQz7t-5RXlehJtTWjSvuU2HuVQul2oc # Ecommerce Recommender System with Word2Vec """ # Commented out IPython magic to ensure Python compatibility. import numpy as np import pandas as pd import matplotlib.pyplot as plt import seaborn as sns from sklearn.model_selection import train_test_split import gensim from gensim import corpora, models, similarities # %matplotlib inline import warnings; warnings.filterwarnings('ignore') df = pd.read_excel('https://drive.google.com/uc?export=download&id=1icqOqpQ9fcBIbN2E_dWDJzXaF1Js2laf') #Define ntile function def ntile(a,n): q = a.quantile(np.linspace(1/n,1,n)) output = [] for i in a: if np.isnan(i): k = np.nan else: k = 0 for j in q: if i<=j: break k += 1 output.append(k) return np.array(output) """# Preprocessing & Exploration""" df.head() # some data exploration # top 10 product print(df['Description'].value_counts()[:10]) print('---------') # top 10 country print(df['Country'].value_counts()[:10]) #average price by country df[['UnitPrice','Country']].groupby('Country').mean().sort_values(by=['UnitPrice']) #average Quantity by country df[['Quantity','Country']].groupby('Country').mean().sort_values(by=['Quantity']) df.info() #Appears to be some nulls in CustomerID and Description df.isna().sum() #Drop these rows from the dataset df = df.dropna() #Number of unique customers df['CustomerID'].nunique() df['Description'].nunique() df.columns df['Quantity'].describe() df['UnitPrice'].describe() sns.distplot(df['Quantity']) sns.distplot(df['UnitPrice']) plt.figure(figsize=(20,6)) sns.boxplot(x='Country',y='Quantity',data=df) plt.figure(figsize=(20,6)) sns.boxplot(x='Country',y='UnitPrice',data=df) df[['Country','InvoiceNo']].groupby('Country').count().plot(kind='bar') df[['Country','Quantity']].groupby('Country').sum().plot(kind='bar') 
df[['Country','UnitPrice']].groupby('Country').mean().plot(kind='bar') df[['Description','Quantity']].groupby('Description').sum().sort_values(by='Quantity',ascending=False) df[['Description','InvoiceNo']].groupby('Description').count().sort_values(by='InvoiceNo',ascending=False) df[['Description','UnitPrice']].groupby('Description').mean().sort_values(by='UnitPrice',ascending=False) """# Product Recommender""" #Break unique customers into a train and validation set customers_train, customers_test = train_test_split(df['CustomerID'].unique(),test_size=0.1,random_state=42) #Create train and test datasets df_train = df[df['CustomerID'].isin(customers_train)] df_test = df[df['CustomerID'].isin(customers_test)] print(len(customers_train)) print(len(customers_test)) #For each customer, create sequence of purchases - training set purchase_list_train = [] for customer in customers_train: purch_list = df_train[df_train['CustomerID']==customer]['Description'].tolist() purchase_list_train.append(purch_list) #Quick validation: check the first customer in customers_train customers_train[0] #Validate the number of instances in the training set for customer 17007 len(df_train[df_train['CustomerID']==17007.0]) #Validate the number of purchases in the purchases list for customer 17007: matches above len(purchase_list_train[0]) purchase_list_train[0] #For each customer, create sequence of purchases - test set purchase_list_test = [] for customer in customers_test: purch_list = df_test[df_test['CustomerID']==customer]['Description'].tolist() purchase_list_test.append(purch_list) #Train a word2vec model #Starting with a vector size of 10 product_model = gensim.models.Word2Vec(purchase_list_train,min_count=1,size=100,window=10,seed=7) #Define a function to give the n-most similar products based a single description #The use case here is to recommend products to the consumer based on their most recent purchase def n_most_similar(Description,n): return 
product_model.most_similar(positive=Description)[:n] #Find 5 most similar products to first customer in training set's most recent purchase n_most_similar(purchase_list_train[0][-1],5) #Define a function to give the n-most similar products based on ALL products a consumer has purchased #The use case here is to recommend products to the consumer based on ALL purchases that they have made def n_most_similar_list(purchase_list,n): product_vec = [] for i in purchase_list: vec = product_model[i] product_vec.append(vec) mean_vec = np.mean(product_vec, axis=0) return product_model.similar_by_vector(mean_vec,n) #Find 5 most similar products to first customer in training set's entire purchasing history n_most_similar_list(purchase_list_train[0],5) #Let's examine our two functions to see how they're working #List of purchases for second customer in our training set list_of_purchases = purchase_list_train[1] print('Description of products purchased by this customer:\n', list_of_purchases,'\n') print('Five top recommended products based on most recent purchase:\n', n_most_similar(list_of_purchases[-1],5),'\n') print('Five top recommended products based on all purchases:\n', n_most_similar_list(list_of_purchases,5),'\n') #Test similarity of different products product_model.similarity('WHITE HANGING HEART T-LIGHT HOLDER','WHITE METAL LANTERN') #Analogy product finder - potential for a consumer facing product discovery app product_model.most_similar(positive=['PINK PARTY SUNGLASSES'],negative=["SILK PURSE BABUSHKA PINK"])[:5] #Dimensionality reduction for visualization vector_list = product_model[df_train['Description'].unique().tolist()] #Reduce the vector list to 2 dimensions for visualization from sklearn.manifold import TSNE data_embed=TSNE(n_components=2, perplexity=50, verbose=2, method='barnes_hut').fit_transform(vector_list) #put the reduced vectors into a dataframe reduced_df = pd.DataFrame(data_embed,columns=['x','y']) vocab = pd.DataFrame(list(product_model.wv.vocab)) 
df_forviz = pd.concat([reduced_df,vocab],axis=1) len(vocab) df_train['Description'].nunique() df_forviz #Adding features to the dataframe to color the visualization; here is avg unit price for each product product_price_bins = df_train[['Description','UnitPrice']].groupby('Description').mean().reset_index() product_price_bins['Price_Ntile'] = ntile(product_price_bins['UnitPrice'],5) product_price_bins len(df_train[df_train['Description']==' 4 PURPLE FLOCK DINNER CANDLES']) #Adding features to the dataframe to color the visualization; here total order count for each product product_order_bins = df_train[['Description','InvoiceNo']].groupby('Description').count().reset_index() product_order_bins['Orders_Ntile'] = ntile(product_order_bins['InvoiceNo'],5) product_order_bins #Adding features to the dataframe to color the visualization; here is total quantity ordered for each product total_quantity = df_train[['Description','Quantity']].groupby('Description').sum().reset_index() total_quantity['Quantity_Ntile'] = ntile(total_quantity['Quantity'],5) total_quantity df_forviz_temp = df_forviz.merge(product_price_bins,left_on=0,right_on='Description') df_forviz_temp df_forviz_temp2 = df_forviz_temp.merge(product_order_bins,left_on='Description',right_on='Description') df_forviz_temp2 df_forviz_final = df_forviz_temp2.merge(total_quantity,left_on='Description',right_on='Description') df_forviz_final df_forviz_final.to_csv('mytext.tsv',sep='\t',index=False) from google.colab.output import eval_js from IPython.display import Javascript !git clone https://github.com/CAHLR/d3-scatterplot.git def show_port(port, data_file, width=600, height=800): display(Javascript(""" (async ()=>{ fm = document.createElement('iframe') fm.src = await google.colab.kernel.proxyPort(%d) + '/index.html?dataset=%s' fm.width = '90%%' fm.height = '%d' fm.frameBorder = 0 document.body.append(fm) })(); """ % (port, data_file, height))) port = 8000 data_file = 'mytext.tsv' height = 1500 
get_ipython().system_raw('cd d3-scatterplot && python3 -m http.server %d &' % port) show_port(port, data_file, height) """#Product Recommender Validation""" def aggregate_vectors(products): product_vec = [] for i in products: try: product_vec.append(product_model[i]) except KeyError: continue return np.mean(product_vec, axis=0) purchase_list_test_2=[] for i in purchase_list_test: if len(i)>1: purchase_list_test_2.append(i) purchased_item_without_last=[] last_purchase_item=[] for i in range(len(purchase_list_test_2)): last_purchase=purchase_list_test_2[i][-1] last_purchase_item.append(last_purchase) without_last=purchase_list_test_2[i][:-1] purchased_item_without_last.append(without_last) last_purchase=pd.DataFrame(last_purchase_item) last_purchase.columns=['last_purchase'] average_item=[] for i in purchased_item_without_last: average=aggregate_vectors(i) average_item.append(average) pred = pd.DataFrame() n=100 for i in range(len(purchase_list_test_2)): a=pd.DataFrame(product_model.similar_by_vector(average_item[i],n)) a=a.drop(1, axis=1).T pred=pd.concat([pred, a],axis=0) pred=pred.reset_index().drop('index', axis=1) pred=pd.concat([pred,last_purchase],axis=1) a=pd.DataFrame() for i in range(n): a['correct_'+str(i)]=(pred[i]==pred['last_purchase']).astype(int) a['correct']=a.sum(axis=1) pred['correct']=a['correct'] accuracy_rate = pred['correct'].sum()/len(pred) accuracy_rate """# Customer Recommender""" #copy dataframe to be used for customer recommender df2 = pd.DataFrame(df) #Convert CustomerID to string df['CustomerID'] = df['CustomerID'].astype(str) #Break unique products into a train and validation set products_train, products_test = train_test_split(df2['StockCode'].unique(),test_size=0.1,random_state=42) #Create train and test datasets df2_train = df2[df2['StockCode'].isin(products_train)] df2_test = df2[df2['StockCode'].isin(products_test)] print(len(products_train)) print(len(products_test)) #For each product, create sequence of purchases (customers who 
purchased that product) - training set customer_list_train = [] for product in products_train: cust_list = df2_train[df2_train['StockCode'] == product]['CustomerID'].tolist() customer_list_train.append(cust_list) #Validate first StockCode print('First StockCode:', products_train[0]) #Number of instances of stock code 23006 in the training set print('Number of instances of 23006:', len(df2_train[df2_train['StockCode']==23006])) #Validate same number in customer list - checks out print('Number of instances of 23006 in customer list:', len(customer_list_train[0])) #For each product, create sequence of purchases (customers who purchased that product) - test set customer_list_test = [] for product in products_test: cust_list = df2_test[df2_test['StockCode']==product]['CustomerID'].tolist() customer_list_test.append(cust_list) df2.head(60) #Train a word2vec model #Starting with a vector size of 10 customer_model = gensim.models.Word2Vec(customer_list_train, min_count=1, size=100, window=100,seed=7) #Find 5 most similar customers customer_model.most_similar(positive='17850.0')[:5] #Seeing this customer's history df2[df2.CustomerID == '17850.0']['Description'][:10].tolist() #Seeing this customer's most similar customer's history most_similar_customer = customer_model.most_similar(positive='17850.0')[:1][0][0] df2[df2.CustomerID == most_similar_customer]['Description'][:10].tolist() #Dimensionality reduction vector_list = customer_model[df2_train['CustomerID'].unique().tolist()] #function to output list of customers likely to buy a product def potential_customers(stock_code): past_cust = df2_train[df2_train['StockCode']==stock_code]['CustomerID'].tolist()[:10] potential_customers = customer_model.most_similar(positive=past_cust)[:10] return potential_customers #Example product product_id = '84406B' #Description of the product code we are using print('Description of produce we are looking at:\n', df2[df2.StockCode == product_id]['Description'][:1].tolist()[0], '\n') #Testing 
the function by sending in the product code potential_cust = potential_customers(product_id) print('Customers likely to want to buy this product:\n', potential_cust[:5], '\n') #Purchase history of the most likely next customer print('Purchase history of', potential_cust[0][0], ':') print(df2[df2.CustomerID == potential_cust[0][0]]['Description'][:10].tolist()) #Purchase history of the second most likely next customer print('Purchase history of', potential_cust[1][0], ':') print(df2[df2.CustomerID == potential_cust[1][0]]['Description'][:10].tolist(), '\n') from sklearn.manifold import TSNE data_embed = TSNE(n_components=2, perplexity=50, verbose=2, method='barnes_hut').fit_transform(vector_list) data_embed vector_list df2_train.head() df2_train[['CustomerID','Quantity']].groupby('CustomerID').sum().reset_index()['Quantity'] reduced_df = pd.DataFrame(data_embed,columns=['x','y']) vocab = pd.DataFrame(list(customer_model.wv.vocab)) df2_forviz = pd.concat([reduced_df,vocab],axis=1) df2_forviz['country'] = df2_train[['CustomerID','Country']].groupby('CustomerID').first().reset_index()['Country'] df2_forviz['quantity'] = df2_train[['CustomerID','Quantity']].groupby('CustomerID').sum().reset_index()['Quantity'] df2_train['total_spend'] = df2_train.Quantity * df2_train.UnitPrice df2_forviz['total_spend'] = df2_train[['CustomerID','total_spend']].groupby('CustomerID').sum().reset_index()['total_spend'] df2_forviz = df2_forviz.rename(columns={0: 'CustomerId'}) df2_forviz.head() df2_forviz.to_csv('mytext2.tsv',sep='\t',index=False) def show_port(port, data_file, width=600, height=800): display(Javascript(""" (async ()=>{ fm = document.createElement('iframe') fm.src = await google.colab.kernel.proxyPort(%d) + '/index.html?dataset=%s' fm.width = '90%%' fm.height = '%d' fm.frameBorder = 0 document.body.append(fm) })(); """ % (port, data_file, height))) port = 8000 data_file = 'mytext2.tsv' height = 1600 get_ipython().system_raw('cd d3-scatterplot && python3 -m http.server %d &' 
% port) show_port(port, data_file, height) """#Customer Recommender Validation""" def aggregate_vectors_cust(customers): customer_vec = [] for i in customers: try: customer_vec.append(customer_model[i]) except KeyError: continue return np.mean(customer_vec, axis=0) customer_list_test_2=[] for i in customer_list_test: if len(i)>1: customer_list_test_2.append(i) purchased_cust_without_last=[] last_purchase_cust=[] for i in range(len(customer_list_test_2)): last_purchase=customer_list_test_2[i][-1] last_purchase_cust.append(last_purchase) without_last=customer_list_test_2[i][:-1] purchased_cust_without_last.append(without_last) last_purchase=pd.DataFrame(last_purchase_cust) last_purchase.columns=['last_purchase'] average_cust=[] for i in purchased_cust_without_last: average=aggregate_vectors_cust(i) average_cust.append(average) pred = pd.DataFrame() n=100 for i in range(len(customer_list_test_2)): a=pd.DataFrame(customer_model.similar_by_vector(average_cust[i],n)) a=a.drop(1, axis=1).T pred=pd.concat([pred, a],axis=0) pred=pred.reset_index().drop('index', axis=1) pred=pd.concat([pred,last_purchase],axis=1) pred a=pd.DataFrame() for i in range(n): a['correct_'+str(i)]=(pred[i]==pred['last_purchase']).astype(int) a['correct']=a.sum(axis=1) pred['correct']=a['correct'] accuracy_rate = pred['correct'].sum()/len(pred) accuracy_rate
32.534021
124
0.751315
2,329
15,779
4.908974
0.169171
0.022041
0.007347
0.008922
0.49742
0.381265
0.318639
0.288376
0.255838
0.247442
0
0.02224
0.110907
15,779
485
125
32.534021
0.792715
0.189999
0
0.278571
1
0.007143
0.202957
0.018362
0
0
0
0
0
0
null
null
0
0.042857
null
null
0.067857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
878680ee9ff25d5ab51c0b265659b965e99632c2
141
py
Python
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s06a_s3.py
shawnduong/manimations
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
[ "MIT" ]
null
null
null
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s06a_s3.py
shawnduong/manimations
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
[ "MIT" ]
null
null
null
2021-10-Python-SEA-ENGRSL-Workshops/Workshop_1-Hello-Python/code/s06a_s3.py
shawnduong/manimations
1d36d9d1e7dff90a1a8da1e687ef442f750e29c5
[ "MIT" ]
null
null
null
if condition1: ... elif condition2: ... elif condition3: ... elif condition4: ... elif condition5: ... elif condition6: ... else: ...
9.4
16
0.609929
13
141
6.615385
0.692308
0
0
0
0
0
0
0
0
0
0
0.052632
0.191489
141
14
17
10.071429
0.701754
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
87930bc3d45493f93ea60f44e2d9fd9c57a9941c
886
py
Python
checkout_sdk/sessions/session_secret_credentials.py
riaz-bordie-cko/checkout-sdk-python
d9bc073306c1a98544c326be693ed722576ea895
[ "MIT" ]
null
null
null
checkout_sdk/sessions/session_secret_credentials.py
riaz-bordie-cko/checkout-sdk-python
d9bc073306c1a98544c326be693ed722576ea895
[ "MIT" ]
null
null
null
checkout_sdk/sessions/session_secret_credentials.py
riaz-bordie-cko/checkout-sdk-python
d9bc073306c1a98544c326be693ed722576ea895
[ "MIT" ]
null
null
null
from __future__ import absolute_import from checkout_sdk.authorization_type import AuthorizationType from checkout_sdk.exception import CheckoutAuthorizationException from checkout_sdk.platform_type import PlatformType from checkout_sdk.sdk_authorization import SdkAuthorization from checkout_sdk.sdk_credentials import SdkCredentials class SessionSecretSdkCredentials(SdkCredentials): def __init__(self, secret): self.secret = secret def get_authorization(self, authorization_type: AuthorizationType): if AuthorizationType.CUSTOM == authorization_type: if self.secret is None: raise CheckoutAuthorizationException.invalid_key(AuthorizationType.CUSTOM) return SdkAuthorization(PlatformType.CUSTOM, self.secret) raise CheckoutAuthorizationException.invalid_authorization(authorization_type=authorization_type)
42.190476
105
0.812641
86
886
8.081395
0.360465
0.086331
0.107914
0.051799
0
0
0
0
0
0
0
0
0.14447
886
20
106
44.3
0.916887
0
0
0
0
0
0
0
0
0
0
0
0
1
0.133333
false
0
0.4
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
87b089b4b628602fe4f969daeef363813d0674cc
1,164
py
Python
setup.py
texodus/perspective-python
5f51ade90c2933c75cd0710e65fc8c7a5ed95cfa
[ "Apache-2.0" ]
1
2019-01-23T21:01:44.000Z
2019-01-23T21:01:44.000Z
setup.py
texodus/perspective-python
5f51ade90c2933c75cd0710e65fc8c7a5ed95cfa
[ "Apache-2.0" ]
null
null
null
setup.py
texodus/perspective-python
5f51ade90c2933c75cd0710e65fc8c7a5ed95cfa
[ "Apache-2.0" ]
null
null
null
import os import os.path from setuptools import setup, find_packages from codecs import open here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='perspective-python', version='0.0.14', description='Analytics library', long_description=long_description, url='https://github.com/timkpaine/perspective-python', download_url='https://github.com/timkpaine/perspective-python/archive/v0.0.14.tar.gz', author='Tim Paine', author_email='timothy.k.paine@gmail.com', license='Apache 2.0', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', ], keywords='analytics tools plotting', # packages=find_packages(exclude=['tests', ]), packages=find_packages(), include_package_data=True, zip_safe=False, )
30.631579
90
0.659794
143
1,164
5.265734
0.538462
0.151394
0.199203
0.207171
0.185923
0.11421
0.11421
0
0
0
0
0.02439
0.189863
1,164
37
91
31.459459
0.774125
0.037801
0
0
0
0.032258
0.439177
0.022361
0
0
0
0
0
1
0
false
0
0.129032
0
0.129032
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
87bbb1ab8ef4f323b33305c4741c032eeb535c52
732
py
Python
tests.py
julzhk/minimal_django
f7105fd7a12953259738b4771a48f71460b730cd
[ "MIT" ]
1
2016-07-27T13:39:03.000Z
2016-07-27T13:39:03.000Z
tests.py
julzhk/minimal_django
f7105fd7a12953259738b4771a48f71460b730cd
[ "MIT" ]
null
null
null
tests.py
julzhk/minimal_django
f7105fd7a12953259738b4771a48f71460b730cd
[ "MIT" ]
1
2016-07-27T13:40:04.000Z
2016-07-27T13:40:04.000Z
from django.core.urlresolvers import resolve from django.test import TestCase from django.http import HttpRequest, QueryDict from minimal_django import index, urlpatterns class HomePageTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/', urlconf = urlpatterns) self.assertEqual(found.func, index) def test_home_page_returns_correct_html(self): request = HttpRequest() response = index(request) self.assertIn(b'Hello', response.content) # self.assertTrue(response.content.startswith(b'<html>')) # self.assertIn(b'<title>Artist Search API</title>', response.content) # self.assertTrue(response.content.endswith(b'</html>'))
40.666667
78
0.724044
88
732
5.875
0.511364
0.116054
0.05029
0.112186
0.170213
0.170213
0
0
0
0
0
0
0.168033
732
17
79
43.058824
0.848933
0.244536
0
0
0
0
0.010929
0
0
0
0
0
0.166667
1
0.166667
false
0
0.333333
0
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
87decfb6904dc64c9921334b229518f0cbfef6bc
1,311
py
Python
Cardio/cardio.py
abrahamwebdev/covid_cardio
24dc641d7249d702824bc2c60c4a72d14e1b32c3
[ "MIT" ]
null
null
null
Cardio/cardio.py
abrahamwebdev/covid_cardio
24dc641d7249d702824bc2c60c4a72d14e1b32c3
[ "MIT" ]
null
null
null
Cardio/cardio.py
abrahamwebdev/covid_cardio
24dc641d7249d702824bc2c60c4a72d14e1b32c3
[ "MIT" ]
null
null
null
import numpy as np import pandas as pd import tensorflow as tf import matplotlib.pyplot as plt import keras import time import keras.backend as K import os import shutil import random from keras.optimizers import Adam from tensorflow.keras.optimizers.schedules import InverseTimeDecay,ExponentialDecay from tensorflow.keras.applications.resnet50 import ResNet50 from tensorflow.keras.applications import VGG16 from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions from tensorflow.keras.utils import Progbar from keras.losses import CategoricalCrossentropy,BinaryCrossentropy from keras.optimizers import Adam,RMSprop from keras.applications.densenet import DenseNet121 from keras.applications import InceptionV3 from keras.preprocessing.image import ImageDataGenerator from tensorflow.keras.models import Model, Sequential from keras.metrics import CategoricalAccuracy, Precision, Recall from keras.layers import Dense, Conv2D, BatchNormalization, MaxPool2D, AveragePooling2D, Flatten, Input,GlobalAveragePooling2D from keras.utils.vis_utils import plot_model from keras.optimizers.schedules import InverseTimeDecay,ExponentialDecay from sklearn.metrics import classification_report,confusion_matrix,ConfusionMatrixDisplay model = keras.models.load_model('CNN_Deep_vgg8.h5')
42.290323
126
0.870328
162
1,311
6.987654
0.438272
0.079505
0.100707
0.082155
0.243816
0.19258
0.116608
0
0
0
0
0.015025
0.086194
1,311
31
127
42.290323
0.929883
0
0
0
0
0
0.012195
0
0
0
0
0
0
1
0
false
0
0.964286
0
0.964286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
87ed877d50ca502ff3f758d0feacf2158a3ce4a3
842
py
Python
checkov/kubernetes/checks/resource/k8s/ContainerSecurityContext.py
jamesholland-uk/checkov
d73fd4bd7096d48ab3434a92a177bcc55605460a
[ "Apache-2.0" ]
1
2021-02-13T15:24:42.000Z
2021-02-13T15:24:42.000Z
checkov/kubernetes/checks/resource/k8s/ContainerSecurityContext.py
jamesholland-uk/checkov
d73fd4bd7096d48ab3434a92a177bcc55605460a
[ "Apache-2.0" ]
7
2021-04-12T06:54:07.000Z
2022-03-21T14:04:14.000Z
checkov/kubernetes/checks/resource/k8s/ContainerSecurityContext.py
jamesholland-uk/checkov
d73fd4bd7096d48ab3434a92a177bcc55605460a
[ "Apache-2.0" ]
1
2021-12-16T03:09:55.000Z
2021-12-16T03:09:55.000Z
from typing import Any, Dict from checkov.common.models.enums import CheckResult from checkov.kubernetes.checks.resource.base_container_check import BaseK8sContainerCheck class ContainerSecurityContext(BaseK8sContainerCheck): def __init__(self) -> None: # CIS-1.5 5.7.3 name = "Apply security context to your pods and containers" # Security context can be set at pod or container level. # Location: container .securityContext id = "CKV_K8S_30" super().__init__(name=name, id=id) def scan_container_conf(self, metadata: Dict[str, Any], conf: Dict[str, Any]) -> CheckResult: self.evaluated_container_keys = ["securityContext"] if conf.get("securityContext"): return CheckResult.PASSED return CheckResult.FAILED check = ContainerSecurityContext()
35.083333
97
0.710214
98
842
5.938776
0.632653
0.037801
0.034364
0
0
0
0
0
0
0
0
0.014925
0.204276
842
23
98
36.608696
0.853731
0.124703
0
0
0
0
0.122783
0
0
0
0
0
0
1
0.142857
false
0.071429
0.214286
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2