hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
dea12a2ca4fac67f05420974cd972936ed10948b
170
py
Python
ex110/teste.py
danoliveiradev/PythonExercicios
e788191e755f54cd3c51ca24a2d3a2ed32923b12
[ "MIT" ]
null
null
null
ex110/teste.py
danoliveiradev/PythonExercicios
e788191e755f54cd3c51ca24a2d3a2ed32923b12
[ "MIT" ]
null
null
null
ex110/teste.py
danoliveiradev/PythonExercicios
e788191e755f54cd3c51ca24a2d3a2ed32923b12
[ "MIT" ]
null
null
null
from ex110 import moeda p = float(input('Digite o preço: R$')) a = int(input('Digite a % de aumento: ')) r = int(input('Digite a % de redução: ')) moeda.resumo(p, a, r)
24.285714
41
0.635294
30
170
3.6
0.566667
0.305556
0.259259
0.277778
0.314815
0
0
0
0
0
0
0.021429
0.176471
170
6
42
28.333333
0.75
0
0
0
0
0
0.376471
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
deb62e907cbc4901b69b9cd21fb14a11b5e94d40
158
py
Python
openpipe/tests/lib/openpipe/actions/sample_.py
OpenPipe/openpipe
bf00bafc1d9240459dd26873141074b89b53d36f
[ "Apache-2.0" ]
2
2021-01-28T15:33:15.000Z
2022-03-09T10:20:27.000Z
openpipe/tests/lib/openpipe/actions/sample_.py
Openpipe/openpipe
bf00bafc1d9240459dd26873141074b89b53d36f
[ "Apache-2.0" ]
35
2019-03-22T10:49:21.000Z
2019-11-29T09:23:47.000Z
openpipe/tests/lib/openpipe/actions/sample_.py
OpenPipe/openpipe
bf00bafc1d9240459dd26873141074b89b53d36f
[ "Apache-2.0" ]
1
2022-01-26T22:08:40.000Z
2022-01-26T22:08:40.000Z
from openpipe.pipeline.engine import ActionRuntime class Action(ActionRuntime): def on_input(self, item): print("SAMPLE TEST LIB", self.config)
22.571429
50
0.734177
20
158
5.75
0.9
0
0
0
0
0
0
0
0
0
0
0
0.170886
158
6
51
26.333333
0.877863
0
0
0
0
0
0.094937
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
decffaa1e3b8fea3d366c7b31622b5d7841dd99a
67
py
Python
python/testData/refactoring/changeSignature/fixSphinxDocStringRemoveMultiple.after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2018-12-29T09:53:39.000Z
2018-12-29T09:53:42.000Z
python/testData/refactoring/changeSignature/fixSphinxDocStringRemoveMultiple.after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/changeSignature/fixSphinxDocStringRemoveMultiple.after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def f(a, d): """ :param a : foo :param d : quux """
13.4
19
0.373134
10
67
2.5
0.7
0
0
0
0
0
0
0
0
0
0
0
0.41791
67
5
20
13.4
0.641026
0.447761
0
0
0
0
0
0
0
0
0
0
0
1
1
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
ded167d97ac4d04ed401b529e9e1dac376eb8450
307
py
Python
optims/helpers.py
R3NI3/pytorch-rl
20b3b738ca400b1916197f27a91367878b09803c
[ "MIT" ]
851
2017-04-09T19:01:27.000Z
2022-03-30T17:57:01.000Z
optims/helpers.py
R3NI3/pytorch-rl
20b3b738ca400b1916197f27a91367878b09803c
[ "MIT" ]
10
2017-05-12T14:15:54.000Z
2020-09-24T12:30:42.000Z
optims/helpers.py
R3NI3/pytorch-rl
20b3b738ca400b1916197f27a91367878b09803c
[ "MIT" ]
161
2017-04-12T16:11:30.000Z
2022-01-12T07:55:46.000Z
from __future__ import absolute_import from __future__ import division from __future__ import print_function # NOTE: refer to: https://discuss.pytorch.org/t/adaptive-learning-rate/320/31 def adjust_learning_rate(optimizer, lr): for param_group in optimizer.param_groups: param_group['lr'] = lr
34.111111
77
0.788274
44
307
5.068182
0.659091
0.134529
0.215247
0
0
0
0
0
0
0
0
0.018727
0.130293
307
8
78
38.375
0.816479
0.2443
0
0
0
0
0.008696
0
0
0
0
0
0
1
0.166667
false
0
0.5
0
0.666667
0.166667
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
dee93a905508ec960f7f65c438caff0e55de96c1
23
py
Python
shellfoundry/version.py
Akashdeepsingh1/shellfoundry
873844e51c00bdf71f865493ce836edd75ba5ff5
[ "Apache-2.0" ]
null
null
null
shellfoundry/version.py
Akashdeepsingh1/shellfoundry
873844e51c00bdf71f865493ce836edd75ba5ff5
[ "Apache-2.0" ]
1
2021-03-25T23:21:02.000Z
2021-03-25T23:21:02.000Z
shellfoundry/version.py
Akashdeepsingh1/shellfoundry
873844e51c00bdf71f865493ce836edd75ba5ff5
[ "Apache-2.0" ]
null
null
null
__version__ = '1.2.16'
11.5
22
0.652174
4
23
2.75
1
0
0
0
0
0
0
0
0
0
0
0.2
0.130435
23
1
23
23
0.35
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
def10bcaa2701c2bbfd7a082ce36b6ea61e573d7
448
py
Python
23_Aug_2021_re-exam/project_astro/planet/planet_repository.py
vasetousa/OOP
e4fedc497dd149c9800613ea11846e0e770d122c
[ "MIT" ]
null
null
null
23_Aug_2021_re-exam/project_astro/planet/planet_repository.py
vasetousa/OOP
e4fedc497dd149c9800613ea11846e0e770d122c
[ "MIT" ]
null
null
null
23_Aug_2021_re-exam/project_astro/planet/planet_repository.py
vasetousa/OOP
e4fedc497dd149c9800613ea11846e0e770d122c
[ "MIT" ]
null
null
null
from project_astro.planet.planet import Planet class PlanetRepository: def __init__(self): self.planets = [] def add(self, planet: Planet): if planet in self.planets: self.planets.remove(planet) def remove(self, planet): self.planets.remove(planet) def find_by_name(self, name): planet = [pl for pl in self.planets if pl.name == name] if planet: return planet[0]
23.578947
63
0.620536
58
448
4.672414
0.37931
0.202952
0.095941
0.169742
0.191882
0
0
0
0
0
0
0.003115
0.283482
448
18
64
24.888889
0.841122
0
0
0.153846
0
0
0
0
0
0
0
0
0
1
0.307692
false
0
0.076923
0
0.538462
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
720406bbeb697649175a5dc55a1259b26d816f69
1,787
py
Python
phovea_security_flask/dummy_store.py
phovea/phovea_security_flask
60434e26352dabe45c6ddae4a41b2838df10154f
[ "BSD-3-Clause" ]
null
null
null
phovea_security_flask/dummy_store.py
phovea/phovea_security_flask
60434e26352dabe45c6ddae4a41b2838df10154f
[ "BSD-3-Clause" ]
29
2016-11-11T14:18:22.000Z
2021-02-04T09:25:34.000Z
phovea_security_flask/dummy_store.py
phovea/phovea_security_flask
60434e26352dabe45c6ddae4a41b2838df10154f
[ "BSD-3-Clause" ]
3
2017-11-30T16:39:21.000Z
2018-11-21T08:48:17.000Z
from . import flask_login_impl import hashlib __author__ = 'Samuel Gratzl' def hash_password(password, salt): return hashlib.sha512((password + salt).encode('utf-8')).hexdigest() class User(flask_login_impl.User): def __init__(self, id, password, salt, roles): super(User, self).__init__(id) self.name = id self._password = password self._salt = salt self.roles = roles @property def is_authenticated(self): return True @property def is_active(self): return True def is_password(self, given): given_h = hash_password(given, self._salt) return given_h == self._password def from_env_var(k, v): elems = v.split(';') name = k[12:] # PHOVEA_USER_ salt = elems[0] password = elems[1] roles = elems[2:] return User(name, password, salt, roles) class UserStore(object): def __init__(self): import phovea_server.config import os # define users via env variables env_users = [from_env_var(k, v) for k, v in os.environ.items() if k.startswith('PHOVEA_USER_')] if env_users: self._users = env_users else: self._users = [User(v['name'], v['password'], v['salt'], v['roles']) for v in phovea_server.config.get('phovea_security_flask.users')] def load(self, id): return next((u for u in self._users if u.id == id), None) def load_from_key(self, api_key): parts = api_key.split(':') if len(parts) != 2: return None return next((u for u in self._users if u.id == parts[0] and u.is_password(parts[1])), None) def login(self, username, extra_fields={}): return next((u for u in self._users if u.id == username and u.is_password(extra_fields['password'])), None) def logout(self, user): pass def create(): return UserStore()
24.819444
111
0.659765
269
1,787
4.163569
0.289963
0.040179
0.029464
0.0375
0.104464
0.083036
0.083036
0.083036
0.083036
0.083036
0
0.008475
0.207611
1,787
71
112
25.169014
0.782486
0.024063
0
0.078431
0
0
0.050546
0.015508
0
0
0
0
0
1
0.235294
false
0.254902
0.078431
0.117647
0.54902
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
7209d0e8f58a40bf4b3f3f4be4f29f703d372f5f
944
py
Python
starter/core/commands.py
ManfredBalvet/bomberjam-bot
1929c7dfa888791e96f79b42329d4a809dfd85cd
[ "MIT" ]
null
null
null
starter/core/commands.py
ManfredBalvet/bomberjam-bot
1929c7dfa888791e96f79b42329d4a809dfd85cd
[ "MIT" ]
null
null
null
starter/core/commands.py
ManfredBalvet/bomberjam-bot
1929c7dfa888791e96f79b42329d4a809dfd85cd
[ "MIT" ]
3
2021-03-08T01:12:29.000Z
2021-12-28T04:42:53.000Z
from abc import ABC, abstractmethod class Command(ABC): """ Base class for a command. All commands must implement their custom __str__ method to be able to be sent to the game via stdout. """ @abstractmethod def __str__(self) -> str: pass class RegisterBotCommand(Command): """ Command used to register your bot before starting a game. """ def __init__(self, name): """ :param name: The name of your bot """ self.name = name def __str__(self): return f"0:{self.name}" class ActionCommand(Command): """ Command used to send an Action during the game. """ def __init__(self, tick, action): """ :param tick: The current game tick :param action: The Action you want to do """ self.tick = tick self.action = action def __str__(self): return f"{self.tick}:{self.action}"
20.977778
96
0.591102
119
944
4.487395
0.436975
0.033708
0.05618
0.074906
0.06367
0
0
0
0
0
0
0.001538
0.311441
944
44
97
21.454545
0.82
0.363347
0
0.125
0
0
0.076459
0.050302
0
0
0
0
0
1
0.3125
false
0.0625
0.0625
0.125
0.6875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
7233bda8def42986b0788bb21f26480733c829f9
1,860
py
Python
pytorch/ScanPred_conv_only/model.py
kreimanlab/RefixationModel
94e5336ec2558ab0e22abdd2f4573015c153c74f
[ "MIT" ]
null
null
null
pytorch/ScanPred_conv_only/model.py
kreimanlab/RefixationModel
94e5336ec2558ab0e22abdd2f4573015c153c74f
[ "MIT" ]
null
null
null
pytorch/ScanPred_conv_only/model.py
kreimanlab/RefixationModel
94e5336ec2558ab0e22abdd2f4573015c153c74f
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue May 5 16:27:22 2020 @author: mengmi """ from torchvision import models import torch.nn as nn import torch.nn as nn import torch.nn.functional as F class Net(nn.Module): def __init__(self, n_channel, device, imgsize): self.imgsize = imgsize self.n_channel = n_channel super(Net, self).__init__() # self.conv1 = nn.Conv2d(self.n_channel, self.n_channel, 3, 1, 1) # self.conv2 = nn.Conv2d(self.n_channel, 1, 1, 1, 0) #self.fc1 = nn.Linear(1*self.imgsize*self.imgsize, self.imgsize*self.imgsize) #self.conv1 = nn.Conv2d(self.n_channel, self.n_channel, 3, 1, 1) #self.fc1 = nn.Linear(self.n_channel*self.imgsize*self.imgsize, self.imgsize*self.imgsize) self.conv1 = nn.Conv2d(self.n_channel, 1, 1, 1, 0) #self.pool = nn.MaxPool2d(2,2,0) #self.conv2 = nn.Conv2d(self.n_channel, 1, 3, 1, 1) #self.adaptpool = nn.AdaptiveMaxPool2d((7,7)) #self.fc2 = nn.Linear(self.imgsize*self.imgsize, self.imgsize*self.imgsize) #self.logsoft = nn.LogSoftmax(dim=1) def forward(self, x): # x = F.relu(self.conv1(x)) # x = F.relu(self.conv2(x)) # x = x.view(-1, 1*self.imgsize*self.imgsize) #x = self.fc1(x) #x = F.relu(self.conv1(x)) #x = x.view(-1, self.n_channel*self.imgsize*self.imgsize) #x = self.fc1(x) #x = self.pool(F.relu(self.conv1(x))) #x = self.pool(F.relu(self.conv2(x))) #x = self.pool(F.relu(self.conv1(x))) #x = F.relu(self.fc1(x)) #x = self.logsoft(x) #x = self.fc3(x) x = self.conv1(x) x = x.view(-1, 1*self.imgsize*self.imgsize) return x
31
98
0.559677
282
1,860
3.620567
0.219858
0.204701
0.220372
0.25857
0.647405
0.63761
0.627816
0.572968
0.475024
0.355534
0
0.051608
0.281183
1,860
60
99
31
0.712042
0.593548
0
0.142857
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0
0.571429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
a0dcb38ba7419fb87c85579c477a56c3a8cc3218
199
py
Python
divmod.py
PraghadeshManivannan/Built-in-Functions-Python
a3120641e03e7be8e1408dd467997ad6fdf04d87
[ "MIT" ]
null
null
null
divmod.py
PraghadeshManivannan/Built-in-Functions-Python
a3120641e03e7be8e1408dd467997ad6fdf04d87
[ "MIT" ]
null
null
null
divmod.py
PraghadeshManivannan/Built-in-Functions-Python
a3120641e03e7be8e1408dd467997ad6fdf04d87
[ "MIT" ]
null
null
null
#returns the quotient and remainder of the number #retturns two arguements #first arguement gives the quotient #second arguement gives the remainder print(divmod(9,2)) print(divmod(9.6,2.5))
24.875
50
0.763819
31
199
4.903226
0.645161
0.144737
0.223684
0
0
0
0
0
0
0
0
0.035714
0.155779
199
7
51
28.428571
0.869048
0.708543
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
a0e925ea61fa75c8d910ef41f49ccf39603d4a62
126
py
Python
muselsl/__init__.py
fusefactory/muse-lsl
b713b32e93b0ed13b6ffd4d431bb29450ec0db54
[ "BSD-3-Clause" ]
414
2017-01-27T16:01:40.000Z
2022-03-31T13:48:59.000Z
muselsl/__init__.py
fusefactory/muse-lsl
b713b32e93b0ed13b6ffd4d431bb29450ec0db54
[ "BSD-3-Clause" ]
160
2017-02-07T19:53:40.000Z
2022-03-30T08:11:04.000Z
muselsl/__init__.py
fusefactory/muse-lsl
b713b32e93b0ed13b6ffd4d431bb29450ec0db54
[ "BSD-3-Clause" ]
149
2017-02-09T02:03:16.000Z
2022-03-31T20:06:33.000Z
from .stream import stream, list_muses from .record import record, record_direct from .view import view __version__ = "1.0.0"
25.2
41
0.785714
20
126
4.65
0.55
0
0
0
0
0
0
0
0
0
0
0.027523
0.134921
126
4
42
31.5
0.825688
0
0
0
0
0
0.039683
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
9d1bed8188e4e8c3404049edd0638ecfbc4d2aeb
190
py
Python
config/__init__.py
Maples7/py-config
c6964bd3a12391904af6c6718326af0c9b6f8e33
[ "MIT" ]
1
2018-06-11T04:08:49.000Z
2018-06-11T04:08:49.000Z
config/__init__.py
Maples7/py-config
c6964bd3a12391904af6c6718326af0c9b6f8e33
[ "MIT" ]
null
null
null
config/__init__.py
Maples7/py-config
c6964bd3a12391904af6c6718326af0c9b6f8e33
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import os from py_config import set_config_dir, get_config config_dir = os.path.abspath(os.path.dirname(__file__)) set_config_dir(config_dir) config = get_config()
21.111111
55
0.763158
31
190
4.258065
0.483871
0.272727
0.181818
0
0
0
0
0
0
0
0
0.005917
0.110526
190
8
56
23.75
0.775148
0.110526
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
9d2dc6787028914fa65033a617aba1e945fb8442
933
py
Python
web/library/admin.py
py-paulo/community-library
b7400230ef166b5b0e122e7c10334827d4656cee
[ "Apache-2.0" ]
null
null
null
web/library/admin.py
py-paulo/community-library
b7400230ef166b5b0e122e7c10334827d4656cee
[ "Apache-2.0" ]
null
null
null
web/library/admin.py
py-paulo/community-library
b7400230ef166b5b0e122e7c10334827d4656cee
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from .models import (Category, PublishingCompany, Author, Book, BookAuthor, BookCategories, Review, Person, Comment, Lending) @admin.register(Author) class AuthorAdmin(admin.ModelAdmin): pass @admin.register(Category) class CategoryAdmin(admin.ModelAdmin): pass @admin.register(PublishingCompany) class PublishingCompanyAdmin(admin.ModelAdmin): pass @admin.register(Book) class BookAdmin(admin.ModelAdmin): pass @admin.register(BookAuthor) class BookAuthorAdmin(admin.ModelAdmin): pass @admin.register(BookCategories) class BookCategoriesAdmin(admin.ModelAdmin): pass @admin.register(Review) class ReviewAdmin(admin.ModelAdmin): pass @admin.register(Person) class PersonAdmin(admin.ModelAdmin): pass @admin.register(Comment) class CommentAdmin(admin.ModelAdmin): pass @admin.register(Lending) class LendingAdmin(admin.ModelAdmin): pass
17.277778
91
0.770632
98
933
7.336735
0.306122
0.180807
0.264256
0.300417
0.400556
0
0
0
0
0
0
0
0.129689
933
53
92
17.603774
0.885468
0
0
0.30303
0
0
0
0
0
0
0
0
0
1
0
true
0.30303
0.060606
0
0.363636
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
9d35469f8be1c004a4cd3189425d12e9a184e305
1,967
py
Python
src/portafolio_app/models.py
kmacho16/Portafolio-Django
2a4409dbf5c904ad146cd00734b8e7cf4993ec2a
[ "MIT" ]
null
null
null
src/portafolio_app/models.py
kmacho16/Portafolio-Django
2a4409dbf5c904ad146cd00734b8e7cf4993ec2a
[ "MIT" ]
null
null
null
src/portafolio_app/models.py
kmacho16/Portafolio-Django
2a4409dbf5c904ad146cd00734b8e7cf4993ec2a
[ "MIT" ]
1
2020-11-04T08:47:43.000Z
2020-11-04T08:47:43.000Z
from django.db import models # Create your models here. class empresa(models.Model): nombre = models.CharField(max_length=100) logo = models.ImageField(upload_to="logos/",blank=True) banner = models.ImageField(upload_to="banners/",blank=True) slogan = models.CharField(max_length=500) saludo = models.CharField(max_length=200) def __str__(self): return self.nombre class aboutme(models.Model): titulo = models.CharField(max_length=100) foto = models.ImageField(upload_to="about_me/",blank=True) contenido = models.TextField() def __str__(self): return self.titulo class skill(models.Model): skill = models.CharField(max_length=100) porcentaje = models.IntegerField() def __str__(self): return self.skill class servicio(models.Model): titulo = models.CharField(max_length=100) icono = models.CharField(max_length=50) descripcion = models.CharField(max_length=600) def __str__(self): return self.titulo class comments(models.Model): comentario = models.TextField() autor = models.CharField(max_length=100) cargo = models.CharField(max_length=100) foto_autor = models.ImageField("comentarios/") def __str__(self): return self.autor class categorias(models.Model): nombre_cat = models.CharField(max_length=100) descripcion = models.TextField(blank=True) def __str__(self): return self.nombre_cat class proyectos(models.Model): mi_cat = models.ForeignKey(categorias,on_delete = models.CASCADE,related_name="Proy_cat") nombre_proy = models.CharField(max_length=100) descripcion_corta = models.CharField(max_length=100) imagen = models.ImageField() descripcion = models.TextField() web_site = models.CharField(max_length=100,blank=True) def __str__(self): return self.nombre_proy class mensaje(models.Model): nombre_cli = models.CharField(max_length=200) email_cli = models.CharField(max_length=200) mensaje_cli = models.TextField() revisado = models.BooleanField() def __str__(self): return self.email_cli
28.1
90
0.774784
266
1,967
5.477444
0.274436
0.164722
0.197666
0.263555
0.438572
0.286205
0.150995
0.108442
0
0
0
0.026796
0.108287
1,967
69
91
28.507246
0.803877
0.012201
0
0.222222
0
0
0.022199
0
0
0
0
0
0
1
0.148148
false
0
0.018519
0.148148
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
9d5de4b8a069053a4c141d43532453cd495dce7e
304
py
Python
data/model/ability.py
Friff14/phoenixbuzz
d50f944e0aa6b317ac575c1881e849fdf72ae17f
[ "MIT" ]
null
null
null
data/model/ability.py
Friff14/phoenixbuzz
d50f944e0aa6b317ac575c1881e849fdf72ae17f
[ "MIT" ]
null
null
null
data/model/ability.py
Friff14/phoenixbuzz
d50f944e0aa6b317ac575c1881e849fdf72ae17f
[ "MIT" ]
null
null
null
class Ability: cost = None exhaustible = True constant = False def __init__(self, cost, context): self.cost = cost def get_context(self): return self.cost.context def is_valid(self, cost): return cost.context == self.get_context() and cost == self.cost
21.714286
71
0.628289
39
304
4.717949
0.435897
0.217391
0.163043
0
0
0
0
0
0
0
0
0
0.276316
304
13
72
23.384615
0.836364
0
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0
0.2
0.9
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
19b6bfee3f315c75fef3f10a48918789de678514
294
py
Python
kivymt/image_btn.py
inteplus/kivymt
e7d7e701ed568b3bae9bbb2d8a2980bf106dcd05
[ "MIT" ]
null
null
null
kivymt/image_btn.py
inteplus/kivymt
e7d7e701ed568b3bae9bbb2d8a2980bf106dcd05
[ "MIT" ]
null
null
null
kivymt/image_btn.py
inteplus/kivymt
e7d7e701ed568b3bae9bbb2d8a2980bf106dcd05
[ "MIT" ]
null
null
null
import kivy #kivy.require('1.10.0') # replace with your current Kivy version from kivy.uix.behaviors import ButtonBehavior from kivy.uix.image import Image from kivy.factory import Factory class ImageButton(ButtonBehavior, Image): pass Factory.register("ImageButton", cls=ImageButton)
21
64
0.789116
40
294
5.8
0.55
0.103448
0.094828
0
0
0
0
0
0
0
0
0.015564
0.12585
294
13
65
22.615385
0.88716
0.210884
0
0
0
0
0.047826
0
0
0
0
0
0
1
0
true
0.142857
0.571429
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
19d43843cab771833a5c3cdce02e4f008b3a1e21
171
py
Python
pyutils/test/test_solr.py
DANS-repo/pyutils
cf29aafb55723cd00e205318f95a0111917f4918
[ "Apache-2.0" ]
null
null
null
pyutils/test/test_solr.py
DANS-repo/pyutils
cf29aafb55723cd00e205318f95a0111917f4918
[ "Apache-2.0" ]
null
null
null
pyutils/test/test_solr.py
DANS-repo/pyutils
cf29aafb55723cd00e205318f95a0111917f4918
[ "Apache-2.0" ]
null
null
null
import unittest import pyutils.solr as solr class TestSolr(unittest.TestCase): def test_search(self): print(solr.search('emd_audience:"easy-discipline:2"'))
21.375
62
0.736842
23
171
5.391304
0.782609
0
0
0
0
0
0
0
0
0
0
0.006849
0.146199
171
8
62
21.375
0.842466
0
0
0
0
0
0.186047
0.186047
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
19e01b4b7240e4dbe066a9ec70088749776d168d
158
py
Python
Chapter01/example_1_2_1.py
PacktPublishing/Daniel-Arbuckles-Mastering-Python
ecd1e78d95aedc832d8f5ceec4794374459e890a
[ "MIT" ]
28
2017-08-10T18:21:52.000Z
2021-11-22T11:13:01.000Z
Chapter01/example_1_2_1.py
PacktPublishing/Daniel-Arbuckles-Mastering-Python
ecd1e78d95aedc832d8f5ceec4794374459e890a
[ "MIT" ]
2
2019-06-17T13:22:00.000Z
2019-06-17T14:06:18.000Z
Chapter01/example_1_2_1.py
PacktPublishing/Daniel-Arbuckles-Mastering-Python
ecd1e78d95aedc832d8f5ceec4794374459e890a
[ "MIT" ]
20
2017-07-07T16:25:50.000Z
2022-01-11T15:57:18.000Z
print(1) print(1 + 1) print(3 * 1 + 2) print(3 * (1 + 2)) if 2 > 1: print("One is the loneliest number") else: print('Two is the lonliest number?')
14.363636
40
0.582278
29
158
3.172414
0.448276
0.195652
0.152174
0.173913
0
0
0
0
0
0
0
0.092437
0.246835
158
10
41
15.8
0.680672
0
0
0
0
0
0.343949
0
0
0
0
0
0
1
0
true
0
0
0
0
0.75
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
19e16e64dba83377fd9d1182f3968ebadcb6edf5
552
py
Python
Python/[7 kyu] complete series.py
KonstantinosAng/CodeWars
9ec9da9ed95b47b9656a5ecf77f486230fd15e3a
[ "MIT" ]
null
null
null
Python/[7 kyu] complete series.py
KonstantinosAng/CodeWars
9ec9da9ed95b47b9656a5ecf77f486230fd15e3a
[ "MIT" ]
null
null
null
Python/[7 kyu] complete series.py
KonstantinosAng/CodeWars
9ec9da9ed95b47b9656a5ecf77f486230fd15e3a
[ "MIT" ]
null
null
null
# see https://www.codewars.com/kata/580a4001d6df740d61000301/solutions/python from TestFunction import Test def complete_series(seq): seq = sorted(seq) for number in seq: if seq.count(number) > 1: return [0] return [i for i in range(seq[-1]+1)] Test([0,1]).assert_result(complete_series([0,1])) Test([0,1,2,3,4,5,6]).assert_result(complete_series([1,4,6])) Test([0,1,2,3,4,5]).assert_result(complete_series([3,4,5])) Test([0,1,2]).assert_result(complete_series([2,1])) Test([0]).assert_result(complete_series([1,4,4,6]))
30.666667
77
0.684783
98
552
3.744898
0.357143
0.228883
0.27248
0.354223
0.207084
0.207084
0.054496
0
0
0
0
0.114754
0.115942
552
17
78
32.470588
0.637295
0.13587
0
0
0
0
0
0
0
0
0
0
0.416667
1
0.083333
false
0
0.083333
0
0.333333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
19e1b1c4c0a89dc41edc8b89695611dc52d3b700
113
py
Python
scripts/field/VisitorleaveDirectionMode.py
Snewmy/swordie
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
[ "MIT" ]
9
2021-04-26T11:59:29.000Z
2021-12-20T13:15:27.000Z
scripts/field/VisitorleaveDirectionMode.py
Snewmy/swordie
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
[ "MIT" ]
null
null
null
scripts/field/VisitorleaveDirectionMode.py
Snewmy/swordie
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
[ "MIT" ]
6
2021-07-14T06:32:05.000Z
2022-02-06T02:32:56.000Z
# Pink Zakum : Pink Zakum Entrance # Called when user enters pink zakum auto event lobby sm.sendAutoEventClock()
28.25
53
0.787611
16
113
5.5625
0.75
0.303371
0
0
0
0
0
0
0
0
0
0
0.159292
113
4
54
28.25
0.936842
0.743363
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
19ea8461097decd35d42a7aa4bf92fef7dde766e
247
py
Python
src/common/jinja_filters.py
DmitryBurnaev/podcast
48c7c60e2a46378f36635dc58222e5e7682f977f
[ "MIT" ]
1
2020-09-05T10:37:55.000Z
2020-09-05T10:37:55.000Z
src/common/jinja_filters.py
DmitryBurnaev/podcast
48c7c60e2a46378f36635dc58222e5e7682f977f
[ "MIT" ]
null
null
null
src/common/jinja_filters.py
DmitryBurnaev/podcast
48c7c60e2a46378f36635dc58222e5e7682f977f
[ "MIT" ]
null
null
null
import datetime def datetime_format(value: datetime.datetime, output_format="%d.%m.%Y %H:%M"): return value.strftime(output_format) if value else "-" def human_length(length: int) -> str: return str(datetime.timedelta(seconds=length))
24.7
78
0.728745
35
247
5.028571
0.571429
0.136364
0
0
0
0
0
0
0
0
0
0
0.129555
247
9
79
27.444444
0.818605
0
0
0
0
0
0.060729
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
c2167e357e4f1f7cfeeaa937a704441a0fd52a75
769
py
Python
bank/migrations/0009_auto_20180518_2157.py
Guilehm/expense-control-system
c0f8393497f54076cb15008f1bda9efc5081025b
[ "MIT" ]
1
2022-02-16T23:23:02.000Z
2022-02-16T23:23:02.000Z
bank/migrations/0009_auto_20180518_2157.py
guilehm/expense-control-system
c0f8393497f54076cb15008f1bda9efc5081025b
[ "MIT" ]
40
2018-07-01T15:49:05.000Z
2018-09-06T02:37:24.000Z
bank/migrations/0009_auto_20180518_2157.py
Guilehm/Expense-Control-System
c0f8393497f54076cb15008f1bda9efc5081025b
[ "MIT" ]
1
2019-05-05T13:43:55.000Z
2019-05-05T13:43:55.000Z
# Generated by Django 2.0.5 on 2018-05-19 00:57 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('bank', '0008_auto_20180510_2241'), ] operations = [ migrations.RemoveField( model_name='credit', name='account', ), migrations.RemoveField( model_name='credit', name='user', ), migrations.RemoveField( model_name='debit', name='account', ), migrations.RemoveField( model_name='debit', name='user', ), migrations.DeleteModel( name='Credit', ), migrations.DeleteModel( name='Debit', ), ]
21.361111
47
0.505852
64
769
5.96875
0.515625
0.219895
0.272251
0.314136
0.450262
0.450262
0
0
0
0
0
0.064854
0.378414
769
35
48
21.971429
0.73431
0.058518
0
0.689655
1
0
0.113573
0.031856
0
0
0
0
0
1
0
false
0
0.034483
0
0.137931
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c23262a251f2045d9c1a650e7888871d14a973fa
133
py
Python
print_params.py
choujujua/Semantic-Segmentation
98eae3c8465f8cd4d2cdeb708d81e9bb8941a90f
[ "Apache-2.0" ]
5
2020-07-14T01:39:05.000Z
2021-08-03T03:09:30.000Z
print_params.py
choujujua/Semantic-Segmentation
98eae3c8465f8cd4d2cdeb708d81e9bb8941a90f
[ "Apache-2.0" ]
null
null
null
print_params.py
choujujua/Semantic-Segmentation
98eae3c8465f8cd4d2cdeb708d81e9bb8941a90f
[ "Apache-2.0" ]
null
null
null
for k in model.state_dict(): print(k) for name,parameters in net.named_parameters(): print(name,':',parameters.size())
19
46
0.661654
19
133
4.526316
0.631579
0.325581
0
0
0
0
0
0
0
0
0
0
0.180451
133
6
47
22.166667
0.788991
0
0
0
0
0
0.007576
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
df998af8ae4cae641c216f33bd79e069fca917b7
146
py
Python
bin/csgui.py
glaroc/Circuitscape
90d9dad869abae619c74e8ccd9d2a2e43311a18c
[ "CC-BY-3.0" ]
13
2015-03-23T19:40:20.000Z
2019-03-04T16:56:16.000Z
bin/csgui.py
glaroc/Circuitscape
90d9dad869abae619c74e8ccd9d2a2e43311a18c
[ "CC-BY-3.0" ]
22
2015-01-19T21:05:29.000Z
2019-12-15T17:16:20.000Z
bin/csgui.py
glaroc/Circuitscape
90d9dad869abae619c74e8ccd9d2a2e43311a18c
[ "CC-BY-3.0" ]
7
2020-02-24T00:58:03.000Z
2022-03-27T18:26:33.000Z
#!/usr/bin/python ## ## Circuitscape (C) 2013, Brad McRae, Viral B. Shah. and Tanmay Mohapatra from circuitscape.gui import show_gui show_gui()
20.857143
74
0.732877
22
146
4.772727
0.818182
0.133333
0
0
0
0
0
0
0
0
0
0.032
0.143836
146
6
75
24.333333
0.808
0.59589
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dfbab151314dbc23d059928a4926faf143d67361
176
py
Python
app/model/cluster.py
pedrolp85/pydevice
39b961bb67f59ac9a9373ecc99748e07505b249e
[ "Apache-2.0" ]
null
null
null
app/model/cluster.py
pedrolp85/pydevice
39b961bb67f59ac9a9373ecc99748e07505b249e
[ "Apache-2.0" ]
null
null
null
app/model/cluster.py
pedrolp85/pydevice
39b961bb67f59ac9a9373ecc99748e07505b249e
[ "Apache-2.0" ]
null
null
null
from typing import List from model.device import Device from pydantic import BaseModel class Cluster(BaseModel): id: int cluster_name: str devices: List[Device]
16
31
0.75
24
176
5.458333
0.625
0
0
0
0
0
0
0
0
0
0
0
0.198864
176
10
32
17.6
0.929078
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.428571
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
dfce1ab74da223b4aebe12caa4af154b1c03e443
37
py
Python
xcopilot/config/aircraft/__init__.py
owentar/x-copilot-py
15ffa9d22fc2a906b5437d8484b4002c635085fe
[ "MIT" ]
null
null
null
xcopilot/config/aircraft/__init__.py
owentar/x-copilot-py
15ffa9d22fc2a906b5437d8484b4002c635085fe
[ "MIT" ]
null
null
null
xcopilot/config/aircraft/__init__.py
owentar/x-copilot-py
15ffa9d22fc2a906b5437d8484b4002c635085fe
[ "MIT" ]
null
null
null
__all__ = ['XP_Cessna172', 'FJS727']
18.5
36
0.675676
4
37
5
1
0
0
0
0
0
0
0
0
0
0
0.181818
0.108108
37
1
37
37
0.424242
0
0
0
0
0
0.486486
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
dffd49715656f25393b43f2ca1df54248ff10d75
277
py
Python
openpeerpower/components/acmeda/errors.py
pcaston/core
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
[ "Apache-2.0" ]
1
2021-07-08T20:09:55.000Z
2021-07-08T20:09:55.000Z
openpeerpower/components/acmeda/errors.py
pcaston/core
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
[ "Apache-2.0" ]
47
2021-02-21T23:43:07.000Z
2022-03-31T06:07:10.000Z
openpeerpower/components/acmeda/errors.py
OpenPeerPower/core
f673dfac9f2d0c48fa30af37b0a99df9dd6640ee
[ "Apache-2.0" ]
null
null
null
"""Errors for the Acmeda Pulse component.""" from openpeerpower.exceptions import OpenPeerPowerError class PulseException(OpenPeerPowerError): """Base class for Acmeda Pulse exceptions.""" class CannotConnect(PulseException): """Unable to connect to the bridge."""
25.181818
55
0.761733
29
277
7.275862
0.655172
0.104265
0
0
0
0
0
0
0
0
0
0
0.137184
277
10
56
27.7
0.882845
0.400722
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
5f22d0c5fc524e0084c054d53135fbfc88c19a50
135
py
Python
satchless/util/exceptions.py
cajun-code/satchless
068b26046c3af63268f8eecd6e33da2bbb78b8d1
[ "BSD-4-Clause" ]
1
2015-11-05T05:09:27.000Z
2015-11-05T05:09:27.000Z
satchless/util/exceptions.py
cajun-code/satchless
068b26046c3af63268f8eecd6e33da2bbb78b8d1
[ "BSD-4-Clause" ]
null
null
null
satchless/util/exceptions.py
cajun-code/satchless
068b26046c3af63268f8eecd6e33da2bbb78b8d1
[ "BSD-4-Clause" ]
null
null
null
class FinalValue(Exception): "Force a value and break the handler chain" def __init__(self, value): self.value = value
27
47
0.681481
18
135
4.888889
0.777778
0.204545
0
0
0
0
0
0
0
0
0
0
0.237037
135
4
48
33.75
0.854369
0.303704
0
0
0
0
0.303704
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
5f370c574f342d4f4ba05fd4fba8bd2cabb38282
132
py
Python
config_loader.py
denadai2/precise-mapping-human-settlements
0c3c2d10e223438a74cbe3187dc5bb75cb7d3c78
[ "MIT" ]
22
2020-06-12T15:12:08.000Z
2022-01-31T17:34:34.000Z
config_loader.py
denadai2/precise-mapping-human-settlements
0c3c2d10e223438a74cbe3187dc5bb75cb7d3c78
[ "MIT" ]
5
2020-03-24T17:36:35.000Z
2022-03-12T00:00:12.000Z
config_loader.py
denadai2/precise-mapping-human-settlements
0c3c2d10e223438a74cbe3187dc5bb75cb7d3c78
[ "MIT" ]
1
2020-08-24T16:18:03.000Z
2020-08-24T16:18:03.000Z
import json def load_config(): with open('config.json') as json_file: config = json.load(json_file) return config
16.5
42
0.666667
19
132
4.473684
0.526316
0.235294
0
0
0
0
0
0
0
0
0
0
0.234848
132
7
43
18.857143
0.841584
0
0
0
0
0
0.083333
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
a03e8be46bf3adb477e2599e431e88f8c3bfbbe6
239
py
Python
p1_navigation/src/replay/factory.py
JeffersonH44/deep-reinforcement-nd
90e403d7ba35dd001c0636695351accaa3034075
[ "MIT" ]
null
null
null
p1_navigation/src/replay/factory.py
JeffersonH44/deep-reinforcement-nd
90e403d7ba35dd001c0636695351accaa3034075
[ "MIT" ]
null
null
null
p1_navigation/src/replay/factory.py
JeffersonH44/deep-reinforcement-nd
90e403d7ba35dd001c0636695351accaa3034075
[ "MIT" ]
null
null
null
from enum import Enum from random import uniform from .base_replay import ReplayBuffer from .prioritized_replay import PrioritizedReplayBuffer class ReplayFactory(Enum): uniform = ReplayBuffer prioritized = PrioritizedReplayBuffer
29.875
55
0.83682
25
239
7.92
0.48
0.121212
0
0
0
0
0
0
0
0
0
0
0.133891
239
8
56
29.875
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.571429
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
a05d347f6decfe2e8fd809e5ff90104d52d1fbb5
96
py
Python
exercises/exc_A4.py
dataXcode/IPP
c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b
[ "MIT" ]
null
null
null
exercises/exc_A4.py
dataXcode/IPP
c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b
[ "MIT" ]
null
null
null
exercises/exc_A4.py
dataXcode/IPP
c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b
[ "MIT" ]
null
null
null
# Create variable a _____ # Create variable b _____ # Print out the sum of a and b _____________
16
30
0.770833
14
96
3.642857
0.714286
0.54902
0
0
0
0
0
0
0
0
0
0
0.197917
96
6
31
16
0.662338
0.666667
0
0.666667
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
39fc1a450398d4e1b4059597008b8c53c888e4a5
2,255
py
Python
s3prl/upstream/mos_prediction/hubconf.py
hhhaaahhhaa/s3prl
a469787f05c42196c4d989555082f5fd9dcbe8a6
[ "Apache-2.0" ]
856
2021-01-15T15:40:32.000Z
2022-03-31T07:08:17.000Z
s3prl/upstream/mos_prediction/hubconf.py
hhhaaahhhaa/s3prl
a469787f05c42196c4d989555082f5fd9dcbe8a6
[ "Apache-2.0" ]
210
2021-01-15T13:28:50.000Z
2022-03-30T06:13:51.000Z
s3prl/upstream/mos_prediction/hubconf.py
hhhaaahhhaa/s3prl
a469787f05c42196c4d989555082f5fd9dcbe8a6
[ "Apache-2.0" ]
208
2021-01-15T03:03:12.000Z
2022-03-31T08:33:27.000Z
import os from s3prl.utility.download import _urls_to_filepaths from .expert import UpstreamExpert as _UpstreamExpert def mos_wav2vec2_local(ckpt, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ assert os.path.isfile(ckpt) kwargs["upstream"] = "wav2vec2" return _UpstreamExpert(ckpt, *args, **kwargs) def mos_wav2vec2_url(ckpt, refresh=False, *args, **kwargs): """ The model from URL ckpt (str): URL """ return mos_wav2vec2_local(_urls_to_filepaths(ckpt), *args, **kwargs) def mos_wav2vec2(refresh=False, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ kwargs[ "ckpt" ] = "https://www.dropbox.com/s/s9zpouk5svu1a4l/wav2vec2-dev-SRCC-best.ckpt?dl=0" return mos_wav2vec2_url(refresh=refresh, *args, **kwargs) def mos_tera_local(ckpt, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ assert os.path.isfile(ckpt) kwargs["upstream"] = "tera" return _UpstreamExpert(ckpt, *args, **kwargs) def mos_tera_url(ckpt, refresh=False, *args, **kwargs): """ The model from URL ckpt (str): URL """ return mos_tera_local(_urls_to_filepaths(ckpt), *args, **kwargs) def mos_tera(refresh=False, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ kwargs[ "ckpt" ] = "https://www.dropbox.com/s/w4jk5bujaoosk69/tera-dev-SRCC-best.ckpt?dl=0" return mos_tera_url(refresh=refresh, *args, **kwargs) def mos_apc_local(ckpt, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ assert os.path.isfile(ckpt) kwargs["upstream"] = "apc" return _UpstreamExpert(ckpt, *args, **kwargs) def mos_apc_url(ckpt, refresh=False, *args, **kwargs): """ The model from URL ckpt (str): URL """ return mos_apc_local(_urls_to_filepaths(ckpt), *args, **kwargs) def mos_apc(refresh=False, *args, **kwargs): """ The model from local ckpt ckpt (str): PATH """ kwargs[ "ckpt" ] = "https://www.dropbox.com/s/ulng31as15hsvz1/apc-dev-SRCC-best.ckpt?dl=0" return mos_apc_url(refresh=refresh, *args, **kwargs)
24.51087
84
0.625721
292
2,255
4.684932
0.15411
0.131579
0.092105
0.118421
0.840643
0.815789
0.79386
0.657895
0.598684
0.510965
0
0.017775
0.226608
2,255
91
85
24.78022
0.766628
0.176497
0
0.333333
0
0.083333
0.156677
0
0
0
0
0
0.083333
1
0.25
false
0
0.083333
0
0.583333
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
39ff8d9d1a1f4bc0168026188f588da2e21ffcac
82
py
Python
corehq/apps/formplayer_api/const.py
kkrampa/commcare-hq
d64d7cad98b240325ad669ccc7effb07721b4d44
[ "BSD-3-Clause" ]
1
2020-05-05T13:10:01.000Z
2020-05-05T13:10:01.000Z
corehq/apps/formplayer_api/const.py
kkrampa/commcare-hq
d64d7cad98b240325ad669ccc7effb07721b4d44
[ "BSD-3-Clause" ]
1
2019-12-09T14:00:14.000Z
2019-12-09T14:00:14.000Z
corehq/apps/formplayer_api/const.py
MaciejChoromanski/commcare-hq
fd7f65362d56d73b75a2c20d2afeabbc70876867
[ "BSD-3-Clause" ]
5
2015-11-30T13:12:45.000Z
2019-07-01T19:27:07.000Z
from __future__ import unicode_literals ENDPOINT_VALIDATE_FORM = '/validate_form'
27.333333
41
0.865854
10
82
6.3
0.8
0.380952
0
0
0
0
0
0
0
0
0
0
0.085366
82
2
42
41
0.84
0
0
0
0
0
0.170732
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
2612d7348806be5626f6f94e422aac7a3a3c7f71
189
py
Python
config/config.py
LunaGao/fastapi_auth
509587967938d53255f22f3170e1fcbdf8c452fb
[ "Apache-2.0" ]
1
2021-09-07T13:24:45.000Z
2021-09-07T13:24:45.000Z
config/config.py
LunaGao/fastapi_auth
509587967938d53255f22f3170e1fcbdf8c452fb
[ "Apache-2.0" ]
null
null
null
config/config.py
LunaGao/fastapi_auth
509587967938d53255f22f3170e1fcbdf8c452fb
[ "Apache-2.0" ]
null
null
null
# to get a string like this run: # openssl rand -hex 32 SECRET_KEY = "de45991397da83e674149beb168b17fde74ebd4a8d6ca65310aedc7a082d5309" ALGORITHM = "HS256" ACCESS_TOKEN_EXPIRE_MINUTES = 30
31.5
79
0.825397
21
189
7.238095
1
0
0
0
0
0
0
0
0
0
0
0.275449
0.116402
189
5
80
37.8
0.634731
0.269841
0
0
0
0
0.511111
0.474074
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
264940551a878ec9ce1e22a67fd75e8e7f25236b
1,701
py
Python
pygics/__init__.py
HyechurnJang/pygics
3ad7a703a282c3791a287a269d2ec9cd125a9133
[ "Apache-2.0" ]
2
2021-01-13T02:33:13.000Z
2021-01-13T03:32:56.000Z
pygics/__init__.py
HyechurnJang/pygics
3ad7a703a282c3791a287a269d2ec9cd125a9133
[ "Apache-2.0" ]
null
null
null
pygics/__init__.py
HyechurnJang/pygics
3ad7a703a282c3791a287a269d2ec9cd125a9133
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- ''' ____ ___ ____________ ___ ___ ____ _________________ / __ \/ _ | / __/ _/ __/ / _ \/ _ \/ __ \__ / / __/ ___/_ __/ / /_/ / __ |_\ \_/ /_\ \ / ___/ , _/ /_/ / // / _// /__ / / \____/_/ |_/___/___/___/ /_/ /_/|_|\____/\___/___/\___/ /_/ Operational Aid Source for Infra-Structure Created on 2020. 2. 15. @author: Hye-Churn Jang, CMBU Specialist in Korea, VMware [jangh@vmware.com] ''' import gevent.monkey gevent.monkey.patch_all() #=============================================================================== # General Structure & Function #=============================================================================== from .common import LogLevel, log, logDebug, logInfo, logWarn, logError, setEnv, setEnvObject, loadJson, dumpJson, ppJson, loadYaml, dumpYaml, load from .struct import PygObj, Inventory, kill, singleton from .constant import HttpMethodType, HttpContentType, HttpResponseType, Schema #=============================================================================== # Processing #=============================================================================== from .task import Queue, Lock, Task, Burst, sleep, repose #=============================================================================== # Server #=============================================================================== from .service import File, download, rest, server import pygics.plugin #=============================================================================== # Client #=============================================================================== from .sdk import Client, Model, ModelList, RestUser, Rest, Table, Database, sdk
44.763158
147
0.418577
100
1,701
5.97
0.79
0.040201
0
0
0
0
0
0
0
0
0
0.005337
0.118754
1,701
37
148
45.972973
0.392929
0.659024
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.888889
0
0.888889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
265dc67facac8083aca0955334762f0d9829e65f
917
py
Python
opteryx/samples/__init__.py
mabel-dev/mabel-sql
1a39253333233f5181be78b9e3c45aea1eb2afe3
[ "Apache-2.0" ]
1
2022-02-05T18:36:09.000Z
2022-02-05T18:36:09.000Z
opteryx/samples/__init__.py
mabel-dev/mabel-sql
1a39253333233f5181be78b9e3c45aea1eb2afe3
[ "Apache-2.0" ]
43
2021-12-29T22:33:49.000Z
2022-03-25T20:12:07.000Z
opteryx/samples/__init__.py
mabel-dev/mabel-sql
1a39253333233f5181be78b9e3c45aea1eb2afe3
[ "Apache-2.0" ]
1
2022-01-26T21:44:15.000Z
2022-01-26T21:44:15.000Z
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def satellites(): from .satellite_data import SatelliteData return SatelliteData().get() def planets(): from .planet_data import PlanetData return PlanetData().get() def astronauts(): from .astronaut_data import AstronautData return AstronautData().get() def no_table(): from .no_table_data import NoTable return NoTable().get()
25.472222
74
0.738277
128
917
5.242188
0.601563
0.089419
0.038748
0.04769
0
0
0
0
0
0
0
0.005348
0.184297
917
35
75
26.2
0.891711
0.568157
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
26703bbdf99963c24274ae0da9a71ee9ab3c4f32
939
py
Python
backend/family_app/migrations/0003_auto_20190404_1659.py
berserg2010/family_and_history_backend
08fd5901e6e0c9cbd75a72e46d69ac53c737786a
[ "Apache-2.0" ]
null
null
null
backend/family_app/migrations/0003_auto_20190404_1659.py
berserg2010/family_and_history_backend
08fd5901e6e0c9cbd75a72e46d69ac53c737786a
[ "Apache-2.0" ]
null
null
null
backend/family_app/migrations/0003_auto_20190404_1659.py
berserg2010/family_and_history_backend
08fd5901e6e0c9cbd75a72e46d69ac53c737786a
[ "Apache-2.0" ]
null
null
null
# Generated by Django 2.1.7 on 2019-04-04 13:59 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('family_app', '0002_auto_20190404_1628'), ] operations = [ migrations.RenameField( model_name='marriage', old_name='family', new_name='_family', ), migrations.RenameField( model_name='marriage', old_name='husband', new_name='_husband', ), migrations.RenameField( model_name='marriage', old_name='husbname', new_name='_husbname', ), migrations.RenameField( model_name='marriage', old_name='wife', new_name='_wife', ), migrations.RenameField( model_name='marriage', old_name='wifename', new_name='_wifename', ), ]
24.076923
50
0.528222
84
939
5.619048
0.416667
0.222458
0.275424
0.317797
0.476695
0.476695
0.476695
0
0
0
0
0.051581
0.359957
939
38
51
24.710526
0.733777
0.047923
0
0.46875
1
0
0.161435
0.025785
0
0
0
0
0
1
0
false
0
0.03125
0
0.125
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
13fed050decabddd568052024b0cc1c9186cc31d
2,333
py
Python
forum/forms.py
Code-Institute-Submissions/patrickpulfer-Code-Institute-M4
c8d439544351f1b3ef7a872675694301f0760cf6
[ "Adobe-Glyph" ]
null
null
null
forum/forms.py
Code-Institute-Submissions/patrickpulfer-Code-Institute-M4
c8d439544351f1b3ef7a872675694301f0760cf6
[ "Adobe-Glyph" ]
null
null
null
forum/forms.py
Code-Institute-Submissions/patrickpulfer-Code-Institute-M4
c8d439544351f1b3ef7a872675694301f0760cf6
[ "Adobe-Glyph" ]
null
null
null
from django import forms from .models import Comment, Discussion class CommentForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(CommentForm, self).__init__(*args, **kwargs) self.fields['visible'].initial = True class Meta: model = Comment fields = ('body', 'visible') labels = {'body': 'Your comment', } widgets = { 'body': forms.Textarea(attrs={'class': 'form-control'}), 'visible': forms.HiddenInput, } class ForumNewTopic(forms.ModelForm): def __init__(self, *args, **kwargs): super(ForumNewTopic, self).__init__(*args, **kwargs) self.fields['visible'].initial = True self.fields['premium_only'].initial = False class Meta: model = Discussion fields = { 'picture', 'title', 'body', 'forum', 'poster', 'premium_only', 'visible', } widgets = { 'picture': forms.FileInput(), 'title': forms.TextInput(attrs={'class': 'form-control'}), 'body': forms.Textarea(attrs={'class': 'form-control'}), 'premium_only': forms.CheckboxInput( attrs={'class': 'form-check-input'} ), 'visible': forms.HiddenInput(), } class Discussion_Edit_Form(forms.ModelForm): def __init__(self, *args, **kwargs): super(Discussion_Edit_Form, self).__init__(*args, **kwargs) self.fields['visible'].initial = True self.fields['premium_only'].initial = False class Meta: model = Discussion fields = { 'picture', 'title', 'body', 'forum', 'poster', 'premium_only', 'visible', } widgets = { 'picture': forms.FileInput(), 'title': forms.TextInput(attrs={'class': 'form-control'}), 'body': forms.Textarea(attrs={'class': 'form-control'}), 'forum': forms.HiddenInput(), 'premium_only': forms.CheckboxInput( attrs={'class': 'form-check-input'} ), 'visible': forms.CheckboxInput( attrs={'class': 'form-check-input'} ), }
30.298701
70
0.513931
201
2,333
5.79602
0.223881
0.06867
0.096137
0.090129
0.764807
0.764807
0.764807
0.700429
0.597425
0.55794
0
0
0.337334
2,333
76
71
30.697368
0.753558
0
0
0.701493
0
0
0.170596
0
0
0
0
0
0
1
0.044776
false
0
0.029851
0
0.164179
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
cd0e01e8725c63f4cc105520e6878a18657888d4
108
py
Python
tkmorfo/applications/textparser/apps.py
DonAurelio/TweetAnalyzer
2789d3968aa6233087803905f5de832edcb702de
[ "MIT" ]
1
2020-09-28T13:52:23.000Z
2020-09-28T13:52:23.000Z
tkmorfo/applications/textparser/apps.py
DonAurelio/TweetAnalyzer
2789d3968aa6233087803905f5de832edcb702de
[ "MIT" ]
2
2020-06-05T17:10:40.000Z
2021-06-10T18:07:15.000Z
tkmorfo/applications/textparser/apps.py
DonAurelio/text-analyzer
2789d3968aa6233087803905f5de832edcb702de
[ "MIT" ]
1
2017-12-21T05:44:40.000Z
2017-12-21T05:44:40.000Z
from django.apps import AppConfig class TextparserConfig(AppConfig): name = 'applications.textparser'
18
36
0.787037
11
108
7.727273
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.138889
108
5
37
21.6
0.913978
0
0
0
0
0
0.212963
0.212963
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
cd2fda45c40696f16b86a111830ddb81a6205aee
148
py
Python
pitivi support/assets/js/search/gprof2dot.py
owlrana/Deep-learning-CNN-model
5f7e6bc44fb676750db8d24b976b82b899d174d5
[ "MIT" ]
null
null
null
pitivi support/assets/js/search/gprof2dot.py
owlrana/Deep-learning-CNN-model
5f7e6bc44fb676750db8d24b976b82b899d174d5
[ "MIT" ]
null
null
null
pitivi support/assets/js/search/gprof2dot.py
owlrana/Deep-learning-CNN-model
5f7e6bc44fb676750db8d24b976b82b899d174d5
[ "MIT" ]
null
null
null
urls_downloaded_cb({"token":"gprof2dot.py","urls":[{"url":"HACKING.html#page-description","node_type":"p","context":{"gi-language":["default"]}}]});
148
148
0.682432
19
148
5.157895
0.947368
0
0
0
0
0
0
0
0
0
0
0.006757
0
148
1
148
148
0.655405
0
0
0
0
0
0.590604
0.194631
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
cd8f78d661fdfcaa8424663c15ee05cc35e75662
123
py
Python
apkcli/plugins/base.py
ToxicPika/apkcli
2eb19f03ad0920e554d50d78163bacef155f1ac4
[ "MIT" ]
18
2020-01-02T22:36:23.000Z
2022-02-18T09:39:27.000Z
apkcli/plugins/base.py
ToxicPika/apkcli
2eb19f03ad0920e554d50d78163bacef155f1ac4
[ "MIT" ]
1
2020-12-05T21:13:28.000Z
2020-12-05T21:13:28.000Z
machocli/plugins/base.py
Te-k/machocli
a3e02eddc0faa3d6d259ecd65ec89332f2c415ad
[ "MIT" ]
4
2020-01-02T23:00:07.000Z
2021-06-15T01:56:59.000Z
class Plugin(object): name = "base" description = "base plugin" def add_arguments(self, parser): pass
17.571429
36
0.617886
14
123
5.357143
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.276423
123
6
37
20.5
0.842697
0
0
0
0
0
0.121951
0
0
0
0
0
0
1
0.2
false
0.2
0
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
269cc0ddafacdacbb119f32ecef5d2de28a0cd3f
74
py
Python
runserver.py
bable5/Backbone-Playground
a917338032cfe4fbf718d3d2d39584b4f99c3b18
[ "MIT" ]
null
null
null
runserver.py
bable5/Backbone-Playground
a917338032cfe4fbf718d3d2d39584b4f99c3b18
[ "MIT" ]
null
null
null
runserver.py
bable5/Backbone-Playground
a917338032cfe4fbf718d3d2d39584b4f99c3b18
[ "MIT" ]
null
null
null
#!/usr/bin/env python from flask_rest_1 import app app.run(debug=True)
10.571429
28
0.743243
14
74
3.785714
0.928571
0
0
0
0
0
0
0
0
0
0
0.015625
0.135135
74
6
29
12.333333
0.8125
0.27027
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
26a438b90c9dd884d96d58e68d9d6a67c0df3e93
272
py
Python
tests/system/histwsrv/test_smoke.py
jean-edouard-boulanger/finbot
ddc3c0e4673b1025d2352719755ff77ef445577c
[ "MIT" ]
1
2020-12-25T19:33:27.000Z
2020-12-25T19:33:27.000Z
tests/system/histwsrv/test_smoke.py
jean-edouard-boulanger/finbot
ddc3c0e4673b1025d2352719755ff77ef445577c
[ "MIT" ]
1
2021-01-18T23:19:58.000Z
2021-01-19T17:35:13.000Z
tests/system/histwsrv/test_smoke.py
jean-edouard-boulanger/finbot
ddc3c0e4673b1025d2352719755ff77ef445577c
[ "MIT" ]
1
2020-01-19T22:37:36.000Z
2020-01-19T22:37:36.000Z
from finbot.core.environment import get_histwsrv_endpoint from finbot.clients import HistoryClient import pytest @pytest.fixture def api() -> HistoryClient: return HistoryClient(get_histwsrv_endpoint()) def test_healthy(api: HistoryClient): assert api.healthy
20.923077
57
0.805147
33
272
6.484848
0.545455
0.093458
0.17757
0
0
0
0
0
0
0
0
0
0.125
272
12
58
22.666667
0.89916
0
0
0
0
0
0
0
0
0
0
0
0.125
1
0.25
false
0
0.375
0.125
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
26b4f2948491987d192e1c967c7569cf24ee1abf
325
py
Python
hdlogger/serializers/__init__.py
incognitoRepo/hdlogger
c738161ef3144469ba0f47caf89770613031e96e
[ "BSD-2-Clause" ]
null
null
null
hdlogger/serializers/__init__.py
incognitoRepo/hdlogger
c738161ef3144469ba0f47caf89770613031e96e
[ "BSD-2-Clause" ]
null
null
null
hdlogger/serializers/__init__.py
incognitoRepo/hdlogger
c738161ef3144469ba0f47caf89770613031e96e
[ "BSD-2-Clause" ]
null
null
null
# from .classes import PickleableFrame, PickleableState, PickleableGenerator, PickleableTraceback, PickleableOptparseOption from .classes import State, PickleableState from .pickle_dispatch import pickleable_dispatch, initialize_copyreg, pickleable_dict from .transformers import make_pickleable_frame, make_pickleable_state
65
123
0.883077
32
325
8.71875
0.5625
0.078853
0.121864
0
0
0
0
0
0
0
0
0
0.076923
325
4
124
81.25
0.93
0.372308
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
26b9fe6ac25cc1713813d3d28fe14922a0d6308d
5,930
py
Python
tests/unit_tests/fixtures/tasks.py
Barometre-de-la-Science-Ouverte/bso3-harvest-publication
06c729a1e44ed87e8f73b4c2bd456f5e09a73e34
[ "MIT" ]
null
null
null
tests/unit_tests/fixtures/tasks.py
Barometre-de-la-Science-Ouverte/bso3-harvest-publication
06c729a1e44ed87e8f73b4c2bd456f5e09a73e34
[ "MIT" ]
null
null
null
tests/unit_tests/fixtures/tasks.py
Barometre-de-la-Science-Ouverte/bso3-harvest-publication
06c729a1e44ed87e8f73b4c2bd456f5e09a73e34
[ "MIT" ]
null
null
null
import os from unittest import expectedFailure FIXTURES_PATH = os.path.dirname(__file__) source_metadata_file = os.path.join(FIXTURES_PATH, 'bso-publications-5k.jsonl.gz') filtered_metadata_filename = os.path.join(FIXTURES_PATH, 'filtered_' + os.path.basename(source_metadata_file)) doi_list = ['10.1111/jdv.15719'] expected_doi_filtered_content = ['{"affiliations": [{"detected_countries": ["de"], "name": "Department Dermatology and Allergy, Ludwig-Maximilian University, Munich, Germany."}, {"detected_countries": ["de"], "name": "M\\u00fcnchen Klinik Thalkirchner Stra\\u00dfe, Munich, Germany."}, {"detected_countries": ["pl"], "name": "Department of Dermatology, Venereology and Allergology, Wroclaw Medical University, Wroclaw, Poland."}, {"detected_countries": ["fr"], "name": "Department of Dermatology and Pediatric Dermatology, Hospital St. Andre, Bordeaux, France."}, {"detected_countries": ["de"], "name": "Department of Dermatology and Allergy Biederstein, Technische Universit\\u00e4t M\\u00fcnchen, Munich, Germany."}, {"detected_countries": ["ch"], "name": "Christine K\\u00fchne Center for Allergy Research and Education CK-CARE, Davos, Switzerland."}], "asjc_classification": [{"code_asjc": "2725", "field": "Infectious Diseases", "subject_area": "Health Sciences"}, {"code_asjc": "2708", "field": "Dermatology", "subject_area": "Health Sciences"}, {"code_asjc": "2725", "field": "Infectious Diseases", "subject_area": "Health Sciences"}, {"code_asjc": "2708", "field": "Dermatology", "subject_area": "Health Sciences"}], "bso_classification": "Medical research", "bsso_classification": {"field": ["Clinical Sciences"], "field_journal_title": ["Clinical Sciences"], "models": ["journal_title"], "weighted_score": 1.5}, "coi": null, "databank": [], "datasource": "pubmed_fr", "detected_countries": ["de", "fr", "ch", "pl"], "doi": "10.1111/jdv.15719", "domains": ["health"], "genre": "journal-article", "grants": null, "has_grant": false, "is_paratext": false, "issn_electronic": 
"1468-3083", "issn_list": ["0926-9959", "1468-3083"], "issn_print": null, "journal_issn_l": "0926-9959", "journal_issns": "0926-9959,1468-3083", "journal_name": "Journal of the European Academy of Dermatology and Venereology", "journal_title": "Journal of the European Academy of Dermatology and Venereology : JEADV", "keywords": [], "lang": "en", "mesh_headings": [], "pmid": 31259446, "publication_date": "2019-07-02T00:00:00", "publication_types": ["Published Erratum"], "publication_year": 2019, "published_date": "2019-07-01T00:00:00", "publisher": "Wiley", "sdg_classification": [{"sdg_code": "sdg3", "sdg_label": "3. Good health and well-being"}], "sources": ["pubmed"], "title": "Corrigendum: Consensus-based European guidelines for treatment of atopic eczema (atopic dermatitis) in adults and children: part I.", "url": "https://www.ncbi.nlm.nih.gov/pubmed/31259446", "year": 2019, "has_apc": true, "amount_apc_EUR": 2225.7061166538756, "apc_source": "openAPC_estimation_publisher_year", "amount_apc_openapc_EUR": 2225.7061166538756, "count_apc_openapc_key": 1303, "predatory_publisher": false, "predatory_journal": false, "publisher_normalized": "Wiley", "publisher_group": "Wiley", "publisher_dissemination": "Wiley", "genre_raw": "journal-article", "french_affiliations_types": ["hospital"], "author_useful_rank_fr": false, "author_useful_rank_countries": ["ch", "de"], "observation_dates": ["2021Q3", "2020", "2019"], "oa_details": {"2019": {"snapshot_date": "20191122", "observation_date": "2019", "is_oa": true, "journal_is_in_doaj": false, "journal_is_oa": false, "licence_publisher": ["no license"], "oa_locations": [{"url": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", "pmh_id": null, "is_best": true, "license": null, "updated": "2019-11-08T17:51:52.649712", "version": "publishedVersion", "evidence": "open (via free article)", "host_type": "publisher", "endpoint_id": null, "url_for_pdf": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", 
"url_for_landing_page": "https://doi.org/10.1111/jdv.15719", "repository_institution": null, "license_normalized": "no license"}], "oa_colors": ["hybrid"], "oa_colors_with_priority_to_publisher": ["hybrid"], "oa_host_type": "publisher"}, "2020": {"snapshot_date": "20201009", "observation_date": "2020", "is_oa": true, "journal_is_in_doaj": false, "journal_is_oa": false, "licence_publisher": ["no license"], "oa_locations": [{"url": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", "pmh_id": null, "is_best": true, "license": null, "oa_date": null, "updated": "2020-02-14T06:38:06.258581", "version": "publishedVersion", "evidence": "open (via free article)", "host_type": "publisher", "endpoint_id": null, "url_for_pdf": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", "url_for_landing_page": "https://doi.org/10.1111/jdv.15719", "repository_institution": null, "license_normalized": "no license"}], "oa_colors": ["hybrid"], "oa_colors_with_priority_to_publisher": ["hybrid"], "oa_host_type": "publisher"}, "2021Q3": {"snapshot_date": "20210901", "observation_date": "2021Q3", "is_oa": true, "journal_is_in_doaj": false, "journal_is_oa": false, "licence_publisher": ["no license"], "oa_locations": [{"url": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", "pmh_id": null, "is_best": true, "license": null, "oa_date": null, "updated": "2021-02-02T03:04:37.981717", "version": "publishedVersion", "evidence": "open (via free article)", "host_type": "publisher", "endpoint_id": null, "url_for_pdf": "https://onlinelibrary.wiley.com/doi/pdfdirect/10.1111/jdv.15719", "url_for_landing_page": "https://doi.org/10.1111/jdv.15719", "repository_institution": null, "license_normalized": "no license"}], "oa_colors": ["hybrid"], "oa_colors_with_priority_to_publisher": ["hybrid"], "oa_host_type": "publisher"}}, "amount_apc_doaj": null, "amount_apc_doaj_EUR": null, "has_coi": null}']
658.888889
5,612
0.724789
764
5,930
5.386126
0.328534
0.016039
0.024058
0.037424
0.469259
0.431349
0.431349
0.431349
0.431349
0.405103
0
0.075575
0.076223
5,930
9
5,612
658.888889
0.675612
0
0
0
0
0.142857
0.94925
0.183949
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
26fcae5b692616b3d1986ce5091e964bd2f8ca20
129
py
Python
dosna/support/pyclovis/__init__.py
ijjorama/DosNa
8e5322a0e1b93a377a9a443d442253b45957dac2
[ "Apache-2.0" ]
10
2017-04-06T08:01:59.000Z
2022-03-29T14:03:38.000Z
dosna/support/pyclovis/__init__.py
ijjorama/DosNa
8e5322a0e1b93a377a9a443d442253b45957dac2
[ "Apache-2.0" ]
20
2021-09-01T14:21:38.000Z
2022-02-15T12:19:27.000Z
dosna/support/pyclovis/__init__.py
ijjorama/DosNa
8e5322a0e1b93a377a9a443d442253b45957dac2
[ "Apache-2.0" ]
6
2017-10-17T13:17:25.000Z
2021-05-19T12:26:28.000Z
#!/usr/bin/env python from .pyclovis import (Clovis, ClovisConnectionNotInitialised, ClovisOptionRequired)
43
62
0.689922
10
129
8.9
1
0
0
0
0
0
0
0
0
0
0
0
0.232558
129
3
63
43
0.89899
0.155039
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f817341bc923a9a638a44b5f1af0632534970cb8
64
py
Python
ParkFinder/wish_list.py
Krause2023/CS224-ParkFinder-Web_App
ccfa2faf61d6adb300f319ae11dd983483451410
[ "MIT" ]
1
2021-12-22T16:00:36.000Z
2021-12-22T16:00:36.000Z
ParkFinder/wish_list.py
Krause2023/CS224-ParkFinder-Web_App
ccfa2faf61d6adb300f319ae11dd983483451410
[ "MIT" ]
null
null
null
ParkFinder/wish_list.py
Krause2023/CS224-ParkFinder-Web_App
ccfa2faf61d6adb300f319ae11dd983483451410
[ "MIT" ]
null
null
null
wish_list = [] def add_to_list(park): wish_list.appen(park)
16
25
0.703125
11
64
3.727273
0.636364
0.390244
0
0
0
0
0
0
0
0
0
0
0.15625
64
4
25
16
0.759259
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
f839f2e586fcde64fc11137dfaf92a19efe81a96
291
py
Python
acceptance_tests/features/steps/view_collection_exercise_details_state.py
ONSdigital/rasrm-acceptance-tests
ac31b8c2e79fec70e1475731edabc5acd54e8874
[ "MIT" ]
2
2018-03-06T12:33:55.000Z
2018-05-04T15:28:55.000Z
acceptance_tests/features/steps/view_collection_exercise_details_state.py
ONSdigital/rasrm-acceptance-tests
ac31b8c2e79fec70e1475731edabc5acd54e8874
[ "MIT" ]
99
2018-02-23T10:52:22.000Z
2021-02-03T11:35:54.000Z
acceptance_tests/features/steps/view_collection_exercise_details_state.py
ONSdigital/ras-integration-tests
ac31b8c2e79fec70e1475731edabc5acd54e8874
[ "MIT" ]
1
2021-04-11T07:55:47.000Z
2021-04-11T07:55:47.000Z
from behave import then from acceptance_tests.features.pages import collection_exercise_details @then('the user is able to view the status of the collection exercise') def ce_details_state_is_displayed(_): ce_state = collection_exercise_details.get_status() assert ce_state != ''
29.1
71
0.800687
42
291
5.238095
0.595238
0.245455
0.227273
0
0
0
0
0
0
0
0
0
0.137457
291
9
72
32.333333
0.876494
0
0
0
0
0
0.213058
0
0
0
0
0
0.166667
1
0.166667
false
0
0.333333
0
0.5
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f8508a4eda73156393853963e4f3b86b21604597
28
py
Python
test/utils/__init__.py
LaudateCorpus1/py-spiffe
71921f508315c7fcffe9304aad75a0d368d1b3ed
[ "Apache-2.0" ]
5
2021-01-06T18:15:39.000Z
2022-02-23T07:44:37.000Z
test/utils/__init__.py
dfeldman/py-spiffe
ab46e05171b9d804f2aec4c6c4f024a573047215
[ "Apache-2.0" ]
52
2020-12-17T20:08:06.000Z
2021-07-07T16:44:42.000Z
test/utils/__init__.py
dfeldman/py-spiffe
ab46e05171b9d804f2aec4c6c4f024a573047215
[ "Apache-2.0" ]
7
2020-12-11T18:42:49.000Z
2022-01-25T15:47:35.000Z
""" tests utils Module. """
7
19
0.571429
3
28
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.178571
28
3
20
9.333333
0.695652
0.678571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
f890ce3d03867953f569cfd2a1719fc82e5e1f0e
46
py
Python
easytorch/version.py
zezhishao/easytorch
d351341af770cbcc6b661d1757628660416557fe
[ "Apache-2.0" ]
1
2022-03-29T06:04:30.000Z
2022-03-29T06:04:30.000Z
easytorch/version.py
zezhishao/easytorch
d351341af770cbcc6b661d1757628660416557fe
[ "Apache-2.0" ]
null
null
null
easytorch/version.py
zezhishao/easytorch
d351341af770cbcc6b661d1757628660416557fe
[ "Apache-2.0" ]
1
2022-02-05T03:18:43.000Z
2022-02-05T03:18:43.000Z
__version__ = '1.1' __all__ = ['__version__']
15.333333
25
0.673913
5
46
3.8
0.6
0
0
0
0
0
0
0
0
0
0
0.05
0.130435
46
2
26
23
0.425
0
0
0
0
0
0.304348
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f8938fb101337a2c042140719c4cce1159b39992
119
py
Python
weldx/asdf/tags/weldx/aws/process/__init__.py
vhirtham/weldx
50d212e9755271d7299acac103f3f0a8f1390fd6
[ "BSD-3-Clause" ]
13
2020-02-20T07:45:02.000Z
2021-12-10T13:15:47.000Z
weldx/asdf/tags/weldx/aws/process/__init__.py
vhirtham/weldx
50d212e9755271d7299acac103f3f0a8f1390fd6
[ "BSD-3-Clause" ]
675
2020-02-20T07:47:00.000Z
2022-03-31T15:17:19.000Z
weldx/asdf/tags/weldx/aws/process/__init__.py
vhirtham/weldx
50d212e9755271d7299acac103f3f0a8f1390fd6
[ "BSD-3-Clause" ]
5
2020-09-02T07:19:17.000Z
2021-12-05T08:57:50.000Z
from . import ( arc_welding_process, gas_component, shielding_gas_for_procedure, shielding_gas_type, )
17
32
0.731092
14
119
5.642857
0.785714
0.303797
0
0
0
0
0
0
0
0
0
0
0.210084
119
6
33
19.833333
0.840426
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.166667
0
0.166667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
f8aefdf02c0cdf5a8b77b5b938ece4884a1fcc38
452
py
Python
src/big_torch/utils/registry.py
Denchidlo/big-torch
f5a65e6216e46e6d4fe98670c52618e4cccc8163
[ "MIT" ]
null
null
null
src/big_torch/utils/registry.py
Denchidlo/big-torch
f5a65e6216e46e6d4fe98670c52618e4cccc8163
[ "MIT" ]
1
2021-11-21T13:11:31.000Z
2021-11-22T00:18:29.000Z
src/big_torch/utils/registry.py
Denchidlo/big-torch
f5a65e6216e46e6d4fe98670c52618e4cccc8163
[ "MIT" ]
null
null
null
class ModuleAggregator: def __init__(self, registry_name) -> None: self.registry_name = registry_name self._registry = {} def __getitem__(self, key): if isinstance(key, str): return self._registry[key] return key def register(self, name=None): def wrapper(func): self._registry[name if name != None else func.__name__] = func return func return wrapper
26.588235
74
0.603982
51
452
5
0.352941
0.235294
0.188235
0
0
0
0
0
0
0
0
0
0.309735
452
16
75
28.25
0.817308
0
0
0
0
0
0
0
0
0
0
0
0
1
0.307692
false
0
0
0
0.692308
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
f8afab474c46e9caa8fc816f0c1e85f5adc732b9
356
py
Python
mesh/shapes/__init__.py
icemtel/stokes
022de2417919a18ed5b0262111e430384053137d
[ "MIT" ]
null
null
null
mesh/shapes/__init__.py
icemtel/stokes
022de2417919a18ed5b0262111e430384053137d
[ "MIT" ]
null
null
null
mesh/shapes/__init__.py
icemtel/stokes
022de2417919a18ed5b0262111e430384053137d
[ "MIT" ]
null
null
null
from .Plane import preparePlane, prepareCuboid from .Ellipsoid import Ellipsoid, prepareEllipsoid from .FlatEllipse import prepareFlatEllipse, prepare_flat_ellipse_old_style from .FlagellumVel import FlagellumVel, prepareFlagellaVel from .FlagellumVelNorm import FlagellumVelNorm, prepareFlagellaVelNorm from .Flagellum2 import Flagellum2, prepareFlagella2
59.333333
75
0.884831
34
356
9.147059
0.588235
0
0
0
0
0
0
0
0
0
0
0.009174
0.081461
356
6
76
59.333333
0.941896
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f8ce1713a8ac0eae0858de6c00afe32c59e628d5
48
py
Python
coding/__init__.py
sty16/django-educational-website
741925af23485d7c0ea9a8553646fc5b5af75258
[ "MIT" ]
null
null
null
coding/__init__.py
sty16/django-educational-website
741925af23485d7c0ea9a8553646fc5b5af75258
[ "MIT" ]
12
2020-02-12T03:22:46.000Z
2022-03-12T00:11:10.000Z
coding/__init__.py
sty16/django-educational-website
741925af23485d7c0ea9a8553646fc5b5af75258
[ "MIT" ]
null
null
null
default_app_config = 'coding.apps.CodingConfig'
24
47
0.833333
6
48
6.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.0625
48
1
48
48
0.844444
0
0
0
0
0
0.5
0.5
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6efd6e405c3bf5309f712f9b301ddb9b3b8c70a0
201
py
Python
tools/cardiac_py/pio/__init__.py
paulkefer/cardioid
59c07b714d8b066b4f84eb50487c36f6eadf634c
[ "MIT-0", "MIT" ]
33
2018-12-12T20:05:06.000Z
2021-09-26T13:30:16.000Z
tools/cardiac_py/pio/__init__.py
paulkefer/cardioid
59c07b714d8b066b4f84eb50487c36f6eadf634c
[ "MIT-0", "MIT" ]
5
2019-04-25T11:34:43.000Z
2021-11-14T04:35:37.000Z
tools/cardiac_py/pio/__init__.py
paulkefer/cardioid
59c07b714d8b066b4f84eb50487c36f6eadf634c
[ "MIT-0", "MIT" ]
15
2018-12-21T22:44:59.000Z
2021-08-29T10:30:25.000Z
import iter_read import seeker_read import check_sensor import append_complex import append_multiple __all__ = ["iter_read","seeker_read","check_sensor","append_complex", "append_multiple"]
25.125
69
0.791045
26
201
5.576923
0.384615
0.110345
0
0
0
0
0
0
0
0
0
0
0.124378
201
8
70
25.125
0.823864
0
0
0
0
0
0.30198
0
0
0
0
0
0
1
0
false
0
0.714286
0
0.714286
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3e0cc42ecd6ac1d006250ad814a603665ff80c57
208
py
Python
annotations/templatetags/annotations_extra.py
alexliyihao/auto-annotation-web
391bd2c4a8ea1d2d3aba92a13cd7c41dd77a609d
[ "MIT" ]
1
2021-11-17T15:34:33.000Z
2021-11-17T15:34:33.000Z
annotations/templatetags/annotations_extra.py
alexliyihao/auto-annotation-web
391bd2c4a8ea1d2d3aba92a13cd7c41dd77a609d
[ "MIT" ]
null
null
null
annotations/templatetags/annotations_extra.py
alexliyihao/auto-annotation-web
391bd2c4a8ea1d2d3aba92a13cd7c41dd77a609d
[ "MIT" ]
null
null
null
from django import template from django.template.defaulttags import register import json register = template.Library() @register.filter def get_value(dictionary, key): return json.loads(dictionary)[key]
23.111111
48
0.802885
27
208
6.148148
0.592593
0.120482
0
0
0
0
0
0
0
0
0
0
0.115385
208
8
49
26
0.902174
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0.142857
0.714286
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
4
3e5325887b9362da725655ca7ede72c2dd7f163a
153
py
Python
TradzQAI/tools/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
164
2017-11-24T13:07:04.000Z
2022-03-10T04:54:46.000Z
TradzQAI/tools/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
21
2018-09-29T10:27:10.000Z
2019-06-12T07:01:58.000Z
TradzQAI/tools/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
49
2018-05-09T17:28:52.000Z
2022-02-27T04:50:45.000Z
from .indicators import Indicators from .logger import Logger from .saver import Saver from .databasemanager import dataBaseManager from .utils import *
25.5
44
0.830065
19
153
6.684211
0.368421
0
0
0
0
0
0
0
0
0
0
0
0.130719
153
5
45
30.6
0.954887
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
3e8d77a23167d8b70a88b1d1e40b830abfd03ee0
121
py
Python
sharingan/__init__.py
aoii103/Sharingan
d31889d1a7c1fdd8b97f8479316b984b426cdd93
[ "MIT" ]
11
2020-07-15T03:24:45.000Z
2020-11-09T07:37:02.000Z
sharingan/__init__.py
fakegit/Sharingan
d31889d1a7c1fdd8b97f8479316b984b426cdd93
[ "MIT" ]
2
2020-07-15T03:28:59.000Z
2020-08-03T02:06:50.000Z
sharingan/__init__.py
aoii103/Sharingan
d31889d1a7c1fdd8b97f8479316b984b426cdd93
[ "MIT" ]
5
2020-07-15T03:25:28.000Z
2020-11-07T16:56:58.000Z
""" Sharingan project We will try to find your visible basic footprint from social media as much as possible """
24.2
90
0.719008
18
121
4.833333
0.944444
0
0
0
0
0
0
0
0
0
0
0
0.231405
121
4
91
30.25
0.935484
0.859504
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e45f4d77862c624ba9a1e110895a7e0e01845a83
2,949
py
Python
haystack/modeling/visual.py
OmniscienceAcademy/haystack
46fa166c36d4b2fb0f428041fb048e50613553a9
[ "Apache-2.0" ]
1
2022-03-06T02:13:15.000Z
2022-03-06T02:13:15.000Z
haystack/modeling/visual.py
OmniscienceAcademy/haystack
46fa166c36d4b2fb0f428041fb048e50613553a9
[ "Apache-2.0" ]
null
null
null
haystack/modeling/visual.py
OmniscienceAcademy/haystack
46fa166c36d4b2fb0f428041fb048e50613553a9
[ "Apache-2.0" ]
1
2022-02-17T05:08:53.000Z
2022-02-17T05:08:53.000Z
FLOWERS = r""" vVVVv vVVVv (___) vVVVv (___) vVVVv ~Y~ (___) ~Y~ (___) \| \~Y~/ \| \~Y~/ \\|// \\|// \\|// \\|// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ """ SAMPLE = r""" .--. _____ _ .'_\/_'. / ____| | | '. /\ .' | (___ __ _ _ __ ___ _ __ | | ___ "||" \___ \ / _` | '_ ` _ \| '_ \| |/ _ \ || /\ ____) | (_| | | | | | | |_) | | __/ /\ ||//\) |_____/ \__,_|_| |_| |_| .__/|_|\___| (/\\||/ |_| ______\||/___________________________________________ """ FENCE = r""" _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_ -| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |- | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | _| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_ -| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |-| |- |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, """ TRACTOR_SMALL = r""" ______ |o | ! __ |:`_|---'-. |__|______.-/ _ \-----.| (o)(o)------'\ _ / ( ) """ TRACTOR_WITH_SILO_LINE = r""" ____ /____\ ______ | | |o | ! | | __ |:`_|---'-. | | |__|______.-/ _ \-----.| |______| (o)(o)------'\ _ / ( ) | | """ ROOSTER = r""" _ m ,`.\/'> (`\<_/` `<< """ PIG = r""" .-~~~~-. |\\_ @_/ / oo\_ | \ \ _(") \ /-| ||'--' \_\ \_\\ """ SMALL_PIG = r""" @___,__ ( ^'_] //-\\' ^^ ^^ """ FENCE_SEP = r""" |---||---|---|---|---|---|---|---| """ BUSH_SEP = r"""\\|// \\|// \\|// \\|// \\|// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^""" WATERING_CAN = r""" ______ _ ,',----.`. '.`-. .-' '----. || `.`-'--------| ;; `.|--------|// \ / '--------' """ WORKER_M = r""" 0 /|\ /'\ """ WORKER_F = r""" 0 /w\ / \ """ WORKER_X = r""" 0 /w\ /'\ """
28.355769
82
0.154968
60
2,949
2.766667
0.416667
0.048193
0.180723
0.048193
0
0
0
0
0
0
0
0.002055
0.504917
2,949
103
83
28.631068
0.111644
0
0
0.232558
0
0.093023
0.900983
0.089183
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e46b9c081577512d78e6d0b2b562a5e09ee75881
245
py
Python
Pacote de Conteudo/Mundo 02/Exercicios/ex034.py
Michelle-On/ExerciciosPython_2020-21
61615237a310c15923138575ebc65f6a15b301ab
[ "MIT" ]
null
null
null
Pacote de Conteudo/Mundo 02/Exercicios/ex034.py
Michelle-On/ExerciciosPython_2020-21
61615237a310c15923138575ebc65f6a15b301ab
[ "MIT" ]
null
null
null
Pacote de Conteudo/Mundo 02/Exercicios/ex034.py
Michelle-On/ExerciciosPython_2020-21
61615237a310c15923138575ebc65f6a15b301ab
[ "MIT" ]
null
null
null
s = float(input('Digite o salario do funcionario: R$ ')) if s>1250: print('Quem ganhava R${} passa a ganhar R${:.2f} agora.'.format(s,s+(s*10/100))) else: print('Quem ganhava R${} passa a ganhar R${:.2f} agora'.format(s,s+(s*15/100)))
22.272727
84
0.620408
45
245
3.377778
0.533333
0.052632
0.210526
0.223684
0.605263
0.605263
0.605263
0.605263
0.605263
0.605263
0
0.077295
0.155102
245
10
85
24.5
0.657005
0
0
0
0
0
0.545833
0
0
0
0
0
0
1
0
false
0.4
0
0
0
0.4
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
e4730751584b5202518ac1c27300bdbcbf1dccb5
27
py
Python
data/studio21_generated/introductory/4807/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4807/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4807/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def search_names(logins):
13.5
25
0.777778
4
27
5
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
27
2
26
13.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
e4ae6be719cb9ee20403b01976e9f73346daafd0
324
py
Python
gifs/home/api/serializers.py
DTullis-Discover/TrendsAPI
714019115dba5701d7fc322c9213857fbaa98066
[ "MIT" ]
null
null
null
gifs/home/api/serializers.py
DTullis-Discover/TrendsAPI
714019115dba5701d7fc322c9213857fbaa98066
[ "MIT" ]
null
null
null
gifs/home/api/serializers.py
DTullis-Discover/TrendsAPI
714019115dba5701d7fc322c9213857fbaa98066
[ "MIT" ]
null
null
null
from rest_framework import serializers from gifs.home.models import Trend, Keyword class TrendSerializer(serializers.ModelSerializer): class Meta: model = Trend fields = "__all__" class KeywordSerializer(serializers.ModelSerializer): class Meta: model = Keyword fields = "__all__"
24.923077
53
0.712963
32
324
6.9375
0.5625
0.234234
0.279279
0.315315
0.36036
0
0
0
0
0
0
0
0.222222
324
12
54
27
0.880952
0
0
0.4
0
0
0.04321
0
0
0
0
0
0
1
0
false
0
0.2
0
0.6
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
e4bb5a16f4e6b85de1aa263d46e226eefecd58d6
29
py
Python
data/studio21_generated/introductory/3868/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/3868/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/3868/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def closest_sum(ints, num):
14.5
27
0.724138
5
29
4
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
2
28
14.5
0.8
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
9005fb93975f7ef9b349d9acc49e415549e52553
192
py
Python
main.py
Tjapkinp/vk-wordcloud
bef4916e02c71158f5cc18196b368a19f4311500
[ "MIT" ]
null
null
null
main.py
Tjapkinp/vk-wordcloud
bef4916e02c71158f5cc18196b368a19f4311500
[ "MIT" ]
null
null
null
main.py
Tjapkinp/vk-wordcloud
bef4916e02c71158f5cc18196b368a19f4311500
[ "MIT" ]
null
null
null
# vk.com data archive export messages to cloud of words from vk_wordcloud import vk_message_wordcloud,vk_music_wordcloud # vk_message_wordcloud(mask="cloud") vk_music_wordcloud(mask="cloud")
32
64
0.833333
30
192
5.033333
0.533333
0.119205
0.238411
0
0
0
0
0
0
0
0
0
0.09375
192
6
65
32
0.867816
0.458333
0
0
0
0
0.04902
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
900abd5b27ee7033c914c8cee5086e5b832cbe6a
69,366
py
Python
src/testing/TestON/tests/FUNCintentRest/Dependency/FuncIntentFunction.py
securedataplane/preacher
2f76581de47036e79cd6e1183948c88b35ce4950
[ "MIT" ]
1
2020-07-23T08:06:44.000Z
2020-07-23T08:06:44.000Z
src/testing/TestON/tests/FUNCintentRest/Dependency/FuncIntentFunction.py
securedataplane/preacher
2f76581de47036e79cd6e1183948c88b35ce4950
[ "MIT" ]
null
null
null
src/testing/TestON/tests/FUNCintentRest/Dependency/FuncIntentFunction.py
securedataplane/preacher
2f76581de47036e79cd6e1183948c88b35ce4950
[ "MIT" ]
null
null
null
""" Wrapper functions for FuncIntent This functions include Onosclidriver and Mininetclidriver driver functions Author: kelvin@onlab.us """ import time import copy import json import types def __init__( self ): self.default = '' def installHostIntent( main, name, host1, host2, onosNode=0, ethType="", bandwidth="", lambdaAlloc=False, ipProto="", ipAddresses="", tcp="", sw1="", sw2=""): """ Installs a Host Intent Description: Install a host intent using add-host-intent Steps: - Fetch host data if not given - Add host intent - Ingress device is the first sender host - Egress devices are the recipient devices - Ports if defined in senders or recipients - MAC address ethSrc loaded from Ingress device - Check intent state with retry Required: name - Type of point intent to add eg. IPV4 | VLAN | Dualstack host1 - Dictionary for host1 { "name":"h8", "id":"of:0000000000000005/8" } host2 - Dictionary for host2 { "name":"h16", "id":"of:0000000000000006/8" } Optional: onosNode - ONOS node to install the intents in main.CLIs[ ] 0 by default so that it will always use the first ONOS node ethType - Ethernet type eg. IPV4, IPV6 bandwidth - Bandwidth capacity lambdaAlloc - Allocate lambda, defaults to False ipProto - IP protocol tcp - TCP ports in the same order as the hosts in hostNames """ assert main, "There is no main variable" assert host1, "You must specify host1" assert host2, "You must specify host2" global itemName # The name of this run. Used for logs. itemName = name onosNode = int( onosNode ) main.log.info( itemName + ": Adding single point to multi point intents" ) if not host1.get( "id" ): main.log.warn( "ID not given for host1 {0}. Loading from main.hostData".format( host1.get( "name" ) ) ) main.log.debug( main.hostsData.get( host1.get( "name" ) ) ) host1[ "id" ] = main.hostsData.get( host1.get( "name" ) ).get( "id" ) if not host2.get( "id" ): main.log.warn( "ID not given for host2 {0}. 
Loading from main.hostData".format( host2.get( "name" ) ) ) host2[ "id" ] = main.hostsData.get( host2.get( "name" ) ).get( "id" ) # Adding host intents main.log.info( itemName + ": Adding host intents" ) intent1 = main.CLIs[ onosNode ].addHostIntent( hostIdOne=host1.get( "id" ), hostIdTwo=host2.get( "id" ) ) # Get all intents ID in the system, time delay right after intents are added time.sleep( main.addIntentSleep ) intentsId = main.CLIs[ 0 ].getIntentsId() if utilities.retry ( f=checkIntentState, retValue=main.FALSE, args = (main, intentsId ), sleep=main.checkIntentSleep ): return intentsId else: main.log.error( "Host Intent did not install correctly" ) return main.FALSE def testHostIntent( main, name, intentId, host1, host2, onosNode=0, sw1="s5", sw2="s2", expectedLink=0): """ Test a Host Intent Description: Test a host intent of given ID between given hosts Steps: - Fetch host data if not given - Check Intent State - Check Flow State - Check Connectivity - Check Lack of Connectivity Between Hosts not in the Intent - Reroute - Take Expected Link Down - Check Intent State - Check Flow State - Check Topology - Check Connectivity - Bring Expected Link Up - Check Intent State - Check Flow State - Check Topology - Check Connectivity - Remove Topology Required: name - Type of point intent to add eg. 
IPV4 | VLAN | Dualstack intentId - intent ID to be tested ( and removed ) host1 - Dictionary for host1 { "name":"h8", "id":"of:0000000000000005/8" } host2 - Dictionary for host2 { "name":"h16", "id":"of:0000000000000006/8" } Optional: onosNode - ONOS node to install the intents in main.CLIs[ ] 0 by default so that it will always use the first ONOS node sw1 - First switch to bring down & up for rerouting purpose sw2 - Second switch to bring down & up for rerouting purpose expectedLink - Expected link when the switches are down, it should be two links lower than the links before the two switches are down """ # Parameter Validity Check assert main, "There is no main variable" assert host1, "You must specify host1" assert host2, "You must specify host2" global itemName itemName = name tempHostsData = {} onosNode = int( onosNode ) main.log.info( itemName + ": Testing Host Intent" ) if not host1.get( "id" ): main.log.warn( "Id not given for host1 {0}. Loading from main.hostData".format( host1.get( "name" ) ) ) host1[ "id" ] = main.hostsData.get( host1.get( "name" ) ).get( "location" ) if not host2.get( "id" ): main.log.warn( "Id not given for host2 {0}. 
Loading from main.hostData".format( host2.get( "name" ) ) ) host2[ "id" ] = main.hostsData.get( host2.get( "name" ) ).get( "location" ) senderNames = [ host1.get( "name" ), host2.get( "name" ) ] recipientNames = [ host1.get( "name" ), host2.get( "name" ) ] testResult = main.TRUE main.log.info( itemName + ": Adding single point to multi point intents" ) # Check intent state if utilities.retry( f=checkIntentState, retValue=main.FALSE, args=( main, intentId ), sleep=main.checkIntentSleep ): main.assertReturnString += 'Initial Intent State Passed\n' else: main.assertReturnString += 'Initial Intent State Failed\n' testResult = main.FALSE # Check flows count in each node if utilities.retry( f=checkFlowsCount, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 ) and utilities.retry( f=checkFlowsState, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 ): main.assertReturnString += 'Initial Flow State Passed\n' else: main.assertReturnString += 'Intial Flow State Failed\n' testResult = main.FALSE # Check Connectivity if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE, args=( main, senderNames, recipientNames ) ): main.assertReturnString += 'Initial Ping Passed\n' else: main.assertReturnString += 'Initial Ping Failed\n' testResult = main.FALSE # Test rerouting if these variables exist if sw1 and sw2 and expectedLink: # Take link down if utilities.retry( f=link, retValue=main.FALSE, args=( main, sw1, sw2, "down" ) ): main.assertReturnString += 'Link Down Passed\n' else: main.assertReturnString += 'Link Down Failed\n' testResult = main.FALSE # Check intent state if utilities.retry( f=checkIntentState, retValue=main.FALSE, args=( main, intentId ), sleep=main.checkIntentSleep ): main.assertReturnString += 'Link Down Intent State Passed\n' else: main.assertReturnString += 'Link Down Intent State Failed\n' testResult = main.FALSE # Check flows count in each node if utilities.retry( f=checkFlowsCount, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 
) and utilities.retry( f=checkFlowsState, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 ): main.assertReturnString += 'Link Down Flow State Passed\n' else: main.assertReturnString += 'Link Down Flow State Failed\n' testResult = main.FALSE # Check OnosTopology if utilities.retry( f=checkTopology, retValue=main.FALSE, args=( main, expectedLink ), sleep=10 ): main.assertReturnString += 'Link Down Topology State Passed\n' else: main.assertReturnString += 'Link Down Topology State Failed\n' testResult = main.FALSE # Check Connection if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE, args=( main, senderNames, recipientNames ) ): main.assertReturnString += 'Link Down Pingall Passed\n' else: main.assertReturnString += 'Link Down Pingall Failed\n' testResult = main.FALSE # Bring link up if utilities.retry( f=link, retValue=main.FALSE, args=( main, sw1, sw2, "up" ) ): main.assertReturnString += 'Link Up Passed\n' else: main.assertReturnString += 'Link Up Failed\n' testResult = main.FALSE # Wait for reroute time.sleep( main.rerouteSleep ) # Check Intents if utilities.retry( f=checkIntentState, retValue=main.FALSE, args=( main, intentId ), sleep=main.checkIntentSleep ): main.assertReturnString += 'Link Up Intent State Passed\n' else: main.assertReturnString += 'Link Up Intent State Failed\n' testResult = main.FALSE # Check flows count in each node if utilities.retry( f=checkFlowsCount, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 ) and utilities.retry( f=checkFlowsState, retValue=main.FALSE, args=[ main ], sleep=20, attempts=3 ): main.assertReturnString += 'Link Up Flow State Passed\n' else: main.assertReturnString += 'Link Up Flow State Failed\n' testResult = main.FALSE # Check OnosTopology if utilities.retry( f=checkTopology, retValue=main.FALSE, args=( main, main.numLinks ) ): main.assertReturnString += 'Link Up Topology State Passed\n' else: main.assertReturnString += 'Link Up Topology State Failed\n' testResult = main.FALSE # Check 
def installPointIntent( main,
                        name,
                        senders,
                        recipients,
                        onosNode=0,
                        ethType="",
                        bandwidth="",
                        lambdaAlloc=False,
                        ipProto="",
                        ipSrc="",
                        ipDst="",
                        tcpSrc="",
                        tcpDst="" ):
    """
    Installs a single to single point intent.

    Steps:
        - Fetch host device data from main.hostsData if not given
        - Add the point intent: ingress is the first sender's device,
          egress is the first recipient's device (ports if defined in
          senders or recipients)
        - Check the intent state with retry

    Required:
        name - Type of point intent to add eg. IPV4 | VLAN | Dualstack
        senders - List of host dictionaries i.e.
            [ { "name":"h8", "device":"of:0000000000000005/8",
                "mac":"00:00:00:00:00:08" } ]
        recipients - List of host dictionaries i.e.
            [ { "name":"h16", "device":"of:0000000000000006/8",
                "mac":"00:00:00:00:00:10" } ]
    Optional:
        onosNode - Index into main.CLIs of the ONOS node to install the
                   intent in; 0 by default so that it always uses the
                   first ONOS node
        ethType - Ethernet type eg. IPV4, IPV6
        bandwidth - Bandwidth capacity
        lambdaAlloc - Allocate lambda, defaults to False
        ipProto - IP protocol
        ipSrc / ipDst - IP addresses; NOTE: overwritten below from the
                        hosts' "ip" fields before the intent is added
        tcpSrc / tcpDst - TCP ports

    Returns the list of intent ids on success, main.FALSE otherwise.
    """
    assert main, "There is no main variable"
    assert senders, "You must specify a sender"
    assert recipients, "You must specify a recipient"
    # Assert devices or main.hostsData, "You must specify devices"

    global itemName  # The name of this run. Used for logs.
    itemName = name
    onosNode = int( onosNode )

    main.log.info( itemName + ": Adding single to single point intents" )

    # Fill in any missing device locations from the discovered host data
    for sender in senders:
        if not sender.get( "device" ):
            main.log.warn( "Device not given for sender {0}. Loading from main.hostData".format( sender.get( "name" ) ) )
            sender[ "device" ] = main.hostsData.get( sender.get( "name" ) ).get( "location" )

    for recipient in recipients:
        if not recipient.get( "device" ):
            main.log.warn( "Device not given for recipient {0}. Loading from main.hostData".format( recipient.get( "name" ) ) )
            recipient[ "device" ] = main.hostsData.get( recipient.get( "name" ) ).get( "location" )

    ingressDevice = senders[ 0 ].get( "device" )
    egressDevice = recipients[ 0 ].get( "device" )
    portIngress = senders[ 0 ].get( "port", "" )
    portEgress = recipients[ 0 ].get( "port", "" )
    main.log.debug( ingressDevice )
    main.log.debug( egressDevice )

    srcMac = senders[ 0 ].get( "mac" )
    dstMac = recipients[ 0 ].get( "mac" )

    ipSrc = senders[ 0 ].get( "ip" )
    ipDst = recipients[ 0 ].get( "ip" )

    intent1 = main.CLIs[ onosNode ].addPointIntent(
                                        ingressDevice=ingressDevice,
                                        egressDevice=egressDevice,
                                        ingressPort=portIngress,
                                        egressPort=portEgress,
                                        ethType=ethType,
                                        ethSrc=srcMac,
                                        ethDst=dstMac,
                                        bandwidth=bandwidth,
                                        lambdaAlloc=lambdaAlloc,
                                        ipProto=ipProto,
                                        ipSrc=ipSrc,
                                        ipDst=ipDst,
                                        tcpSrc=tcpSrc,
                                        tcpDst=tcpDst )

    # Intent installation is asynchronous; give ONOS time before polling
    time.sleep( main.addIntentSleep )
    intentsId = main.CLIs[ 0 ].getIntentsId()

    # Check intent state with retry.
    # NOTE(fix): the original had a second, identical intent-state check
    # after this if/else; both branches return, so that code was
    # unreachable and has been removed.
    if utilities.retry( f=checkIntentState, retValue=main.FALSE,
                        args=( main, intentsId ),
                        sleep=main.checkIntentSleep ):
        return intentsId
    else:
        main.log.error( "Single to Single point intent did not install correctly" )
        return main.FALSE
def testPointIntent( main,
                     name,
                     intentId,
                     senders,
                     recipients,
                     badSenders={},
                     badRecipients={},
                     onosNode=0,
                     ethType="",
                     bandwidth="",
                     lambdaAlloc=False,
                     ipProto="",
                     ipAddresses="",
                     tcp="",
                     sw1="s5",
                     sw2="s2",
                     expectedLink=0 ):
    """
    Test a point intent.

    Steps:
        - Fetch host data if not given
        - Check intent state / flow state / connectivity
        - Check lack of connectivity between hosts not in the intent
        - Reroute: take expected link down, re-check intent/flow/topology/
          connectivity, bring link up and re-check everything
        - Remove all intents

    Required:
        name - Type of point intent to add eg. IPV4 | VLAN | Dualstack
        intentId - Id(s) of the intent under test
        senders - List of host dictionaries i.e.
            { "name":"h8", "device":"of:0000000000000005/8",
              "mac":"00:00:00:00:00:08" }
        recipients - List of host dictionaries i.e.
            { "name":"h16", "device":"of:0000000000000006/8",
              "mac":"00:00:00:00:00:10" }
    Optional:
        badSenders / badRecipients - hosts that must NOT be able to reach
            the other side (negative connectivity check).
            NOTE: mutable defaults are only iterated, never mutated.
        onosNode - Index into main.CLIs; 0 by default
        ethType - Ethernet type eg. IPV4, IPV6
        bandwidth - Bandwidth capacity
        lambdaAlloc - Allocate lambda, defaults to False
        ipProto - IP protocol
        tcp - TCP ports in the same order as the hosts in hostNames
        sw1 / sw2 - Switches to bring down & up for rerouting purpose
        expectedLink - Expected link count when the switches are down; it
            should be two links lower than before the switches went down

    Returns main.TRUE if every check passed, main.FALSE otherwise; the
    per-step outcome is appended to main.assertReturnString.
    """
    # Parameter Validity Check
    assert main, "There is no main variable"
    assert senders, "You must specify a sender"
    assert recipients, "You must specify a recipient"

    global itemName
    itemName = name
    tempHostsData = {}
    onosNode = int( onosNode )

    main.log.info( itemName + ": Testing Point Intent" )

    # Names for scapy
    senderNames = [ x.get( "name" ) for x in senders ]
    recipientNames = [ x.get( "name" ) for x in recipients ]
    badSenderNames = [ x.get( "name" ) for x in badSenders ]
    badRecipientNames = [ x.get( "name" ) for x in badRecipients ]

    # Fill in any missing device locations from the discovered host data
    for sender in senders:
        if not sender.get( "device" ):
            main.log.warn( "Device not given for sender {0}. Loading from main.hostData".format( sender.get( "name" ) ) )
            sender[ "device" ] = main.hostsData.get( sender.get( "name" ) ).get( "location" )

    for recipient in recipients:
        if not recipient.get( "device" ):
            main.log.warn( "Device not given for recipient {0}. Loading from main.hostData".format( recipient.get( "name" ) ) )
            recipient[ "device" ] = main.hostsData.get( recipient.get( "name" ) ).get( "location" )

    testResult = main.TRUE
    main.log.info( itemName + ": Testing point intents" )

    # Check intent state
    if utilities.retry( f=checkIntentState, retValue=main.FALSE,
                        args=( main, intentId ),
                        sleep=main.checkIntentSleep ):
        main.assertReturnString += 'Initial Intent State Passed\n'
    else:
        main.assertReturnString += 'Initial Intent State Failed\n'
        testResult = main.FALSE

    # Check flows count in each node
    if utilities.retry( f=checkFlowsCount, retValue=main.FALSE,
                        args=[ main ], sleep=20, attempts=3 ) and \
       utilities.retry( f=checkFlowsState, retValue=main.FALSE,
                        args=[ main ], sleep=20, attempts=3 ):
        main.assertReturnString += 'Initial Flow State Passed\n'
    else:
        # NOTE(fix): corrected typo "Intial" -> "Initial"
        main.assertReturnString += 'Initial Flow State Failed\n'
        testResult = main.FALSE

    # Check Connectivity
    if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE,
                        args=( main, senderNames, recipientNames ) ):
        main.assertReturnString += 'Initial Ping Passed\n'
    else:
        main.assertReturnString += 'Initial Ping Failed\n'
        testResult = main.FALSE

    # Check connections that shouldn't work
    if badSenderNames:
        main.log.info( "Checking that packets from incorrect sender do not go through" )
        if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE,
                            args=( main, badSenderNames, recipientNames ),
                            kwargs={ "expectFailure": True } ):
            main.assertReturnString += 'Bad Sender Ping Passed\n'
        else:
            main.assertReturnString += 'Bad Sender Ping Failed\n'
            testResult = main.FALSE

    if badRecipientNames:
        main.log.info( "Checking that packets to incorrect recipients do not go through" )
        if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE,
                            args=( main, senderNames, badRecipientNames ),
                            kwargs={ "expectFailure": True } ):
            main.assertReturnString += 'Bad Recipient Ping Passed\n'
        else:
            main.assertReturnString += 'Bad Recipient Ping Failed\n'
            testResult = main.FALSE

    # Test rerouting if these variables exist
    if sw1 and sw2 and expectedLink:
        # Take link down
        if utilities.retry( f=link, retValue=main.FALSE,
                            args=( main, sw1, sw2, "down" ) ):
            main.assertReturnString += 'Link Down Passed\n'
        else:
            main.assertReturnString += 'Link Down Failed\n'
            testResult = main.FALSE

        # Check intent state
        if utilities.retry( f=checkIntentState, retValue=main.FALSE,
                            args=( main, intentId ),
                            sleep=main.checkIntentSleep ):
            main.assertReturnString += 'Link Down Intent State Passed\n'
        else:
            main.assertReturnString += 'Link Down Intent State Failed\n'
            testResult = main.FALSE

        # Check flows count in each node
        if utilities.retry( f=checkFlowsCount, retValue=main.FALSE,
                            args=[ main ], sleep=20, attempts=3 ) and \
           utilities.retry( f=checkFlowsState, retValue=main.FALSE,
                            args=[ main ], sleep=20, attempts=3 ):
            main.assertReturnString += 'Link Down Flow State Passed\n'
        else:
            main.assertReturnString += 'Link Down Flow State Failed\n'
            testResult = main.FALSE

        # Check OnosTopology
        if utilities.retry( f=checkTopology, retValue=main.FALSE,
                            args=( main, expectedLink ), sleep=10 ):
            main.assertReturnString += 'Link Down Topology State Passed\n'
        else:
            main.assertReturnString += 'Link Down Topology State Failed\n'
            testResult = main.FALSE

        # Check Connection
        if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE,
                            args=( main, senderNames, recipientNames ) ):
            main.assertReturnString += 'Link Down Pingall Passed\n'
        else:
            main.assertReturnString += 'Link Down Pingall Failed\n'
            testResult = main.FALSE

        # Bring link up
        if utilities.retry( f=link, retValue=main.FALSE,
                            args=( main, sw1, sw2, "up" ) ):
            main.assertReturnString += 'Link Up Passed\n'
        else:
            main.assertReturnString += 'Link Up Failed\n'
            testResult = main.FALSE

        # Wait for reroute
        time.sleep( main.rerouteSleep )

        # Check Intents
        if utilities.retry( f=checkIntentState, retValue=main.FALSE,
                            args=( main, intentId ),
                            sleep=main.checkIntentSleep ):
            main.assertReturnString += 'Link Up Intent State Passed\n'
        else:
            main.assertReturnString += 'Link Up Intent State Failed\n'
            testResult = main.FALSE

        # Check flows count in each node
        if utilities.retry( f=checkFlowsCount, retValue=main.FALSE,
                            args=[ main ], sleep=20, attempts=3 ) and \
           utilities.retry( f=checkFlowsState, retValue=main.FALSE,
                            args=[ main ], sleep=20, attempts=3 ):
            main.assertReturnString += 'Link Up Flow State Passed\n'
        else:
            main.assertReturnString += 'Link Up Flow State Failed\n'
            testResult = main.FALSE

        # Check OnosTopology
        if utilities.retry( f=checkTopology, retValue=main.FALSE,
                            args=( main, main.numLinks ) ):
            main.assertReturnString += 'Link Up Topology State Passed\n'
        else:
            main.assertReturnString += 'Link Up Topology State Failed\n'
            testResult = main.FALSE

        # Check Connection
        if utilities.retry( f=scapyCheckConnection, retValue=main.FALSE,
                            args=( main, senderNames, recipientNames ) ):
            main.assertReturnString += 'Link Up Scapy Packet Received Passed\n'
        else:
            # NOTE(fix): corrected typo "Recieved" -> "Received"
            main.assertReturnString += 'Link Up Scapy Packet Received Failed\n'
            testResult = main.FALSE

    # Remove all intents
    if utilities.retry( f=removeAllIntents, retValue=main.FALSE,
                        args=( main, ) ):
        main.assertReturnString += 'Remove Intents Passed'
    else:
        main.assertReturnString += 'Remove Intents Failed'
        testResult = main.FALSE

    return testResult
def pointIntentTcp( main,
                    name,
                    host1,
                    host2,
                    onosNode=0,
                    deviceId1="",
                    deviceId2="",
                    port1="",
                    port2="",
                    ethType="",
                    mac1="",
                    mac2="",
                    bandwidth="",
                    lambdaAlloc=False,
                    ipProto="",
                    ip1="",
                    ip2="",
                    tcp1="",
                    tcp2="",
                    sw1="",
                    sw2="",
                    expectedLink=0 ):
    """
    Verify add-point-intent only for TCP.

    Steps:
        - Add four point intents (both directions, tcpSrc and tcpDst)
        - Check intents, verify flows, run iperf between the hosts
        - Reroute: link down, verify flows/topology/iperf;
          link up, verify flows/topology/iperf
        - Remove intents

    Required:
        name - Type of point intent to add eg. IPV4 | VLAN | Dualstack
        host1 / host2 - Names of the two hosts
    Optional:
        onosNode - Index into main.CLIs; 0 by default
        deviceId1 - ONOS device id of the first switch (same as the
            location of the first host, eg. of:0000000000000001/1)
        deviceId2 - ONOS device id of the second switch
        port1 / port2 - Ports where the hosts are attached
        ethType - Ethernet type eg. IPV4, IPV6
        mac1 / mac2 - Mac addresses of the hosts
        bandwidth - Bandwidth capacity
        lambdaAlloc - Allocate lambda, defaults to False
        ipProto - IP protocol
        ip1 / ip2 - IP addresses of the hosts
        tcp1 / tcp2 - TCP ports of the hosts
        sw1 / sw2 - Switches to bring down & up for rerouting purpose
        expectedLink - Expected link count when the switches are down; it
            should be two links lower than before the switches went down

    Returns main.TRUE when iperf, link down/up, intent and intent-removal
    checks all pass.
    """
    assert main, "There is no main variable"
    assert name, "variable name is empty"
    assert host1 and host2, "You must specify hosts"

    global itemName
    itemName = name
    # NOTE: the two self-assignments below are no-ops kept from the
    # original code
    host1 = host1
    host2 = host2
    hostNames = [ host1, host2 ]
    intentsId = []

    # Per-phase result flags, all optimistic until a check fails
    iperfResult = main.TRUE
    intentResult = main.TRUE
    removeIntentResult = main.TRUE
    flowResult = main.TRUE
    topoResult = main.TRUE
    linkDownResult = main.TRUE
    linkUpResult = main.TRUE
    onosNode = int( onosNode )

    # Adding bidirectional point intents: one pair matching on tcpSrc,
    # one pair matching on tcpDst, in each direction
    main.log.info( itemName + ": Adding point intents" )
    intent1 = main.CLIs[ onosNode ].addPointIntent( ingressDevice=deviceId1,
                                                    egressDevice=deviceId2,
                                                    ingressPort=port1,
                                                    egressPort=port2,
                                                    ethType=ethType,
                                                    ethSrc=mac1,
                                                    ethDst=mac2,
                                                    bandwidth=bandwidth,
                                                    lambdaAlloc=lambdaAlloc,
                                                    ipProto=ipProto,
                                                    ipSrc=ip1,
                                                    ipDst=ip2,
                                                    tcpSrc=tcp1,
                                                    tcpDst="" )

    intent2 = main.CLIs[ onosNode ].addPointIntent( ingressDevice=deviceId2,
                                                    egressDevice=deviceId1,
                                                    ingressPort=port2,
                                                    egressPort=port1,
                                                    ethType=ethType,
                                                    ethSrc=mac2,
                                                    ethDst=mac1,
                                                    bandwidth=bandwidth,
                                                    lambdaAlloc=lambdaAlloc,
                                                    ipProto=ipProto,
                                                    ipSrc=ip2,
                                                    ipDst=ip1,
                                                    tcpSrc=tcp2,
                                                    tcpDst="" )

    intent3 = main.CLIs[ onosNode ].addPointIntent( ingressDevice=deviceId1,
                                                    egressDevice=deviceId2,
                                                    ingressPort=port1,
                                                    egressPort=port2,
                                                    ethType=ethType,
                                                    ethSrc=mac1,
                                                    ethDst=mac2,
                                                    bandwidth=bandwidth,
                                                    lambdaAlloc=lambdaAlloc,
                                                    ipProto=ipProto,
                                                    ipSrc=ip1,
                                                    ipDst=ip2,
                                                    tcpSrc="",
                                                    tcpDst=tcp2 )

    intent4 = main.CLIs[ onosNode ].addPointIntent( ingressDevice=deviceId2,
                                                    egressDevice=deviceId1,
                                                    ingressPort=port2,
                                                    egressPort=port1,
                                                    ethType=ethType,
                                                    ethSrc=mac2,
                                                    ethDst=mac1,
                                                    bandwidth=bandwidth,
                                                    lambdaAlloc=lambdaAlloc,
                                                    ipProto=ipProto,
                                                    ipSrc=ip2,
                                                    ipDst=ip1,
                                                    tcpSrc="",
                                                    tcpDst=tcp1 )

    # Get all intents ID in the system, time delay right after intents are added
    time.sleep( main.addIntentSleep )
    intentsId = main.CLIs[ 0 ].getIntentsId()

    # Check intents state
    time.sleep( main.checkIntentSleep )
    intentResult = checkIntentState( main, intentsId )

    # Check flows count in each node
    checkFlowsCount( main )

    # Check intents state again if first check fails...
    if not intentResult:
        intentResult = checkIntentState( main, intentsId )

    # Check flows count in each node
    checkFlowsCount( main )
    # Verify flows
    checkFlowsState( main )

    # Run iperf to both host
    iperfResult = iperfResult and main.Mininet1.iperftcp( host1,
                                                          host2, 10 )

    # Test rerouting if these variables exist
    if sw1 and sw2 and expectedLink:
        # link down
        linkDownResult = link( main, sw1, sw2, "down" )
        intentResult = intentResult and checkIntentState( main, intentsId )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, expectedLink )

        # Run iperf to both host
        iperfResult = iperfResult and main.Mininet1.iperftcp( host1,
                                                              host2, 10 )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link down
        if linkDownResult and topoResult and iperfResult and intentResult:
            main.log.info( itemName + ": Successfully brought link down" )
        else:
            main.log.error( itemName + ": Failed to bring link down" )

        # link up
        linkUpResult = link( main, sw1, sw2, "up" )
        time.sleep( main.rerouteSleep )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, main.numLinks )

        # Run iperf to both host
        iperfResult = iperfResult and main.Mininet1.iperftcp( host1,
                                                              host2, 10 )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link up
        if linkUpResult and topoResult and iperfResult and intentResult:
            main.log.info( itemName + ": Successfully brought link back up" )
        else:
            main.log.error( itemName + ": Failed to bring link back up" )

    # Remove all intents
    removeIntentResult = removeAllIntents( main )

    stepResult = iperfResult and linkDownResult and linkUpResult \
                 and intentResult and removeIntentResult

    return stepResult
def singleToMultiIntent( main,
                         name,
                         hostNames,
                         onosNode=0,
                         devices="",
                         ports=None,
                         ethType="",
                         macs=None,
                         bandwidth="",
                         lambdaAlloc=False,
                         ipProto="",
                         ipAddresses="",
                         tcp="",
                         sw1="",
                         sw2="",
                         expectedLink=0 ):
    """
    Verify Single to Multi Point intents.

    NOTE: If main.hostsData is not defined, variable data should be
    passed in the same order index wise. All devices in the list should
    have the same format: either all of them carry a port or none do.
    eg. hostName = [ 'h1', 'h2', ... ]
        devices = [ 'of:0000000000000001', 'of:0000000000000002', ... ]
        ports = [ '1', '1', ... ]

    Description:
        Iterates through the list of given hosts | devices adding one
        single-to-multi-point intent per ingress device, then checks
        intents, verifies flows, pings hosts, and exercises rerouting
        (link down / link up) before removing the intents.

    Required:
        name - Type of point intent to add eg. IPV4 | VLAN | Dualstack
        hostNames - List of host names
    Optional:
        onosNode - Index into main.CLIs; 0 by default
        devices - List of device ids in the same order as the hosts
        ports - List of port numbers in the same order as the devices
        ethType - Ethernet type eg. IPV4, IPV6
        macs - List of host mac addresses in the same order as the hosts
        bandwidth - Bandwidth capacity
        lambdaAlloc - Allocate lambda, defaults to False
        ipProto - IP protocol
        ipAddresses - IP addresses in the same order as the hosts
        tcp - TCP ports in the same order as the hosts
        sw1 / sw2 - Switches to bring down & up for rerouting purpose
        expectedLink - Expected link count when the switches are down; it
            should be two links lower than before the switches went down
    """
    assert main, "There is no main variable"
    assert hostNames, "You must specify hosts"
    assert devices or main.hostsData, "You must specify devices"

    global itemName
    itemName = name
    tempHostsData = {}
    intentsId = []
    onosNode = int( onosNode )

    macsDict = {}
    ipDict = {}
    # Validate the explicit device/port lists, or fall back to the
    # discovered main.hostsData
    if hostNames and devices:
        if len( hostNames ) != len( devices ):
            main.log.debug( "hosts and devices does not have the same length" )
            #print "len hostNames = ", len( hostNames )
            #print "len devices = ", len( devices )
            return main.FALSE
        if ports:
            if len( ports ) != len( devices ):
                main.log.error( "Ports and devices does " +
                                "not have the same length" )
                #print "len devices = ", len( devices )
                #print "len ports = ", len( ports )
                return main.FALSE
        else:
            main.log.info( "Device Ports are not specified" )
        if macs:
            for i in range( len( devices ) ):
                macsDict[ devices[ i ] ] = macs[ i ]
    elif hostNames and not devices and main.hostsData:
        devices = []
        main.log.info( "singleToMultiIntent function is using main.hostsData" )
        for host in hostNames:
            devices.append( main.hostsData.get( host ).get( 'location' ) )
            macsDict[ main.hostsData.get( host ).get( 'location' ) ] = \
                main.hostsData.get( host ).get( 'mac' )
            ipDict[ main.hostsData.get( host ).get( 'location' ) ] = \
                main.hostsData.get( host ).get( 'ipAddresses' )
    #print main.hostsData
    #print 'host names = ', hostNames
    #print 'devices = ', devices
    #print "macsDict = ", macsDict

    # Per-phase result flags, all optimistic until a check fails
    pingResult = main.TRUE
    intentResult = main.TRUE
    removeIntentResult = main.TRUE
    flowResult = main.TRUE
    topoResult = main.TRUE
    linkDownResult = main.TRUE
    linkUpResult = main.TRUE

    devicesCopy = copy.copy( devices )
    if ports:
        portsCopy = copy.copy( ports )
    main.log.info( itemName + ": Adding single point to multi point intents" )

    # Check flows count in each node
    checkFlowsCount( main )

    # Adding bidirectional point intents: each device takes a turn as
    # the single ingress point, all others are the egress list
    for i in range( len( devices ) ):
        ingressDevice = devicesCopy[ i ]
        egressDeviceList = copy.copy( devicesCopy )
        egressDeviceList.remove( ingressDevice )
        if ports:
            portIngress = portsCopy[ i ]
            portEgressList = copy.copy( portsCopy )
            del portEgressList[ i ]
        else:
            portIngress = ""
            portEgressList = None
        if not macsDict:
            srcMac = ""
        else:
            srcMac = macsDict[ ingressDevice ]
            if srcMac == None:
                main.log.debug( "There is no MAC in device - " + ingressDevice )
                srcMac = ""

        intentsId.append(
            main.CLIs[ onosNode ].addSinglepointToMultipointIntent(
                ingressDevice=ingressDevice,
                egressDeviceList=egressDeviceList,
                portIngress=portIngress,
                portEgressList=portEgressList,
                ethType=ethType,
                ethSrc=srcMac,
                bandwidth=bandwidth,
                lambdaAlloc=lambdaAlloc,
                ipProto=ipProto,
                ipSrc="",
                ipDst="",
                tcpSrc="",
                tcpDst="" ) )

    # Wait some time for the flow to go through when using multi instance
    pingResult = pingallHosts( main, hostNames )

    # Check intents state
    time.sleep( main.checkIntentSleep )
    intentResult = checkIntentState( main, intentsId )

    # Check intents state again if first check fails...
    if not intentResult:
        intentResult = checkIntentState( main, intentsId )

    # Check flows count in each node
    checkFlowsCount( main )
    # Verify flows
    checkFlowsState( main )

    pingResult = pingResult and pingallHosts( main, hostNames )

    # Test rerouting if these variables exist
    if sw1 and sw2 and expectedLink:
        # link down
        linkDownResult = link( main, sw1, sw2, "down" )
        intentResult = intentResult and checkIntentState( main, intentsId )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, expectedLink )

        # Ping hosts
        pingResult = pingResult and pingallHosts( main, hostNames )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link down
        if linkDownResult and topoResult and pingResult and intentResult:
            main.log.info( itemName + ": Successfully brought link down" )
        else:
            main.log.error( itemName + ": Failed to bring link down" )

        # link up
        linkUpResult = link( main, sw1, sw2, "up" )
        time.sleep( main.rerouteSleep )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, main.numLinks )

        # Ping hosts
        pingResult = pingResult and pingallHosts( main, hostNames )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link up
        if linkUpResult and topoResult and pingResult and intentResult:
            main.log.info( itemName + ": Successfully brought link back up" )
        else:
            main.log.error( itemName + ": Failed to bring link back up" )

    # Remove all intents
    removeIntentResult = removeAllIntents( main, intentsId )

    stepResult = pingResult and linkDownResult and linkUpResult \
                 and intentResult and removeIntentResult

    return stepResult
def multiToSingleIntent( main,
                         name,
                         hostNames,
                         onosNode=0,
                         devices="",
                         ports=None,
                         ethType="",
                         macs=None,
                         bandwidth="",
                         lambdaAlloc=False,
                         ipProto="",
                         ipAddresses="",
                         tcp="",
                         sw1="",
                         sw2="",
                         expectedLink=0 ):
    """
    Verify Multi to Single Point intents.
    (Docstring fix: the original said "Single to Multi Point", but this
    function installs multi-point-to-single-point intents.)

    NOTE: If main.hostsData is not defined, variable data should be
    passed in the same order index wise. All devices in the list should
    have the same format: either all of them carry a port or none do.
    eg. hostName = [ 'h1', 'h2', ... ]
        devices = [ 'of:0000000000000001', 'of:0000000000000002', ... ]
        ports = [ '1', '1', ... ]

    Description:
        Iterates through the list of given hosts | devices adding one
        multi-to-single-point intent per egress device, then checks
        intents, verifies flows, pings hosts, and exercises rerouting
        (link down / link up) before removing the intents.

    Required:
        name - Type of point intent to add eg. IPV4 | VLAN | Dualstack
        hostNames - List of host names
    Optional:
        onosNode - Index into main.CLIs; 0 by default
        devices - List of device ids in the same order as the hosts
        ports - List of port numbers in the same order as the devices
        ethType - Ethernet type eg. IPV4, IPV6
        macs - List of host mac addresses in the same order as the hosts
        bandwidth - Bandwidth capacity
        lambdaAlloc - Allocate lambda, defaults to False
        ipProto - IP protocol
        ipAddresses - IP addresses in the same order as the hosts
        tcp - TCP ports in the same order as the hosts
        sw1 / sw2 - Switches to bring down & up for rerouting purpose
        expectedLink - Expected link count when the switches are down; it
            should be two links lower than before the switches went down
    """
    assert main, "There is no main variable"
    assert hostNames, "You must specify hosts"
    assert devices or main.hostsData, "You must specify devices"

    global itemName
    itemName = name
    tempHostsData = {}
    intentsId = []
    onosNode = int( onosNode )

    macsDict = {}
    ipDict = {}
    # Validate the explicit device/port lists, or fall back to the
    # discovered main.hostsData
    if hostNames and devices:
        if len( hostNames ) != len( devices ):
            main.log.debug( "hosts and devices does not have the same length" )
            #print "len hostNames = ", len( hostNames )
            #print "len devices = ", len( devices )
            return main.FALSE
        if ports:
            if len( ports ) != len( devices ):
                main.log.error( "Ports and devices does " +
                                "not have the same length" )
                #print "len devices = ", len( devices )
                #print "len ports = ", len( ports )
                return main.FALSE
        else:
            main.log.info( "Device Ports are not specified" )
        if macs:
            for i in range( len( devices ) ):
                macsDict[ devices[ i ] ] = macs[ i ]
    elif hostNames and not devices and main.hostsData:
        devices = []
        main.log.info( "multiToSingleIntent function is using main.hostsData" )
        for host in hostNames:
            devices.append( main.hostsData.get( host ).get( 'location' ) )
            macsDict[ main.hostsData.get( host ).get( 'location' ) ] = \
                main.hostsData.get( host ).get( 'mac' )
            ipDict[ main.hostsData.get( host ).get( 'location' ) ] = \
                main.hostsData.get( host ).get( 'ipAddresses' )
    #print main.hostsData
    #print 'host names = ', hostNames
    #print 'devices = ', devices
    #print "macsDict = ", macsDict

    # Per-phase result flags, all optimistic until a check fails
    pingResult = main.TRUE
    intentResult = main.TRUE
    removeIntentResult = main.TRUE
    flowResult = main.TRUE
    topoResult = main.TRUE
    linkDownResult = main.TRUE
    linkUpResult = main.TRUE

    devicesCopy = copy.copy( devices )
    if ports:
        portsCopy = copy.copy( ports )
    main.log.info( itemName + ": Adding multi point to single point intents" )

    # Check flows count in each node
    checkFlowsCount( main )

    # Adding bidirectional point intents: each device takes a turn as
    # the single egress point, all others are the ingress list
    for i in range( len( devices ) ):
        egressDevice = devicesCopy[ i ]
        ingressDeviceList = copy.copy( devicesCopy )
        ingressDeviceList.remove( egressDevice )
        if ports:
            portEgress = portsCopy[ i ]
            portIngressList = copy.copy( portsCopy )
            del portIngressList[ i ]
        else:
            portEgress = ""
            portIngressList = None
        if not macsDict:
            dstMac = ""
        else:
            dstMac = macsDict[ egressDevice ]
            if dstMac == None:
                main.log.debug( "There is no MAC in device - " + egressDevice )
                dstMac = ""

        intentsId.append(
            main.CLIs[ onosNode ].addMultipointToSinglepointIntent(
                ingressDeviceList=ingressDeviceList,
                egressDevice=egressDevice,
                portIngressList=portIngressList,
                portEgress=portEgress,
                ethType=ethType,
                ethDst=dstMac,
                bandwidth=bandwidth,
                lambdaAlloc=lambdaAlloc,
                ipProto=ipProto,
                ipSrc="",
                ipDst="",
                tcpSrc="",
                tcpDst="" ) )

    pingResult = pingallHosts( main, hostNames )

    # Check intents state
    time.sleep( main.checkIntentSleep )
    intentResult = checkIntentState( main, intentsId )

    # Check intents state again if first check fails...
    if not intentResult:
        intentResult = checkIntentState( main, intentsId )

    # Check flows count in each node
    checkFlowsCount( main )
    # Verify flows
    checkFlowsState( main )

    # Ping hosts
    pingResult = pingResult and pingallHosts( main, hostNames )

    # Ping hosts again...
    pingResult = pingResult and pingallHosts( main, hostNames )

    # Test rerouting if these variables exist
    if sw1 and sw2 and expectedLink:
        # link down
        linkDownResult = link( main, sw1, sw2, "down" )
        intentResult = intentResult and checkIntentState( main, intentsId )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, expectedLink )

        # Ping hosts
        pingResult = pingResult and pingallHosts( main, hostNames )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link down
        if linkDownResult and topoResult and pingResult and intentResult:
            main.log.info( itemName + ": Successfully brought link down" )
        else:
            main.log.error( itemName + ": Failed to bring link down" )

        # link up
        linkUpResult = link( main, sw1, sw2, "up" )
        time.sleep( main.rerouteSleep )

        # Check flows count in each node
        checkFlowsCount( main )
        # Verify flows
        checkFlowsState( main )

        # Check OnosTopology
        topoResult = checkTopology( main, main.numLinks )

        # Ping hosts
        pingResult = pingResult and pingallHosts( main, hostNames )

        intentResult = checkIntentState( main, intentsId )

        # Checks ONOS state in link up
        if linkUpResult and topoResult and pingResult and intentResult:
            main.log.info( itemName + ": Successfully brought link back up" )
        else:
            main.log.error( itemName + ": Failed to bring link back up" )

    # Remove all intents
    removeIntentResult = removeAllIntents( main, intentsId )

    stepResult = pingResult and linkDownResult and linkUpResult \
                 and intentResult and removeIntentResult

    return stepResult
def pingallHosts( main, hostList ):
    # Ping all host in the hosts list variable
    # Delegates to Mininet's pingallHosts; returns its TRUE/FALSE result
    print "Pinging : ", hostList
    pingResult = main.TRUE
    pingResult = main.Mininet1.pingallHosts( hostList )
    return pingResult


def getHostsData( main, hostList ):
    """
    Use fwd app and pingall to discover all the hosts.

    Activates org.onosproject.fwd, pings the given hosts (a flat list of
    names or a list of lists) so ONOS learns them, then correlates the
    Mininet hosts with ONOS's host JSON by MAC and populates
    main.hostsData with id, vlan, location, and ipAddresses per host.
    The fwd app is deactivated afterwards.

    Returns main.TRUE on success, main.FALSE otherwise.
    """
    activateResult = main.TRUE
    appCheck = main.TRUE
    getDataResult = main.TRUE
    main.log.info( "Activating reactive forwarding app " )
    activateResult = main.CLIs[ 0 ].activateApp( "org.onosproject.fwd" )

    if not activateResult:
        main.log.error( "Something went wrong installing fwd app" )
    time.sleep( main.fwdSleep )
    # hostList may be a flat list of names or a list of host-name lists
    if isinstance( hostList[ 0 ], types.StringType ):
        main.Mininet1.pingallHosts( hostList )
    elif isinstance( hostList[ 0 ], types.ListType ):
        for i in xrange( len( hostList ) ):
            main.Mininet1.pingallHosts( hostList[ i ] )

    hostsJson = json.loads( main.CLIs[ 0 ].hosts() )
    hosts = main.Mininet1.getHosts().keys()
    # TODO: Make better use of new getHosts function
    for host in hosts:
        main.hostsData[ host ] = {}
        main.hostsData[ host ][ 'mac' ] = \
            main.Mininet1.getMacAddress( host ).upper()
        # Match the Mininet host to its ONOS host entry by MAC address
        for hostj in hostsJson:
            if main.hostsData[ host ][ 'mac' ] == hostj[ 'mac' ]:
                main.hostsData[ host ][ 'id' ] = hostj[ 'id' ]
                main.hostsData[ host ][ 'vlan' ] = hostj[ 'vlan' ]
                main.hostsData[ host ][ 'location' ] = \
                    hostj[ 'location' ][ 'elementId' ] + '/' + \
                    hostj[ 'location' ][ 'port' ]
                main.hostsData[ host ][ 'ipAddresses' ] = hostj[ 'ipAddresses' ]

    main.log.info( "Deactivating reactive forwarding app " )
    deactivateResult = main.CLIs[ 0 ].deactivateApp( "org.onosproject.fwd" )
    if activateResult and deactivateResult and main.hostsData:
        main.log.info( "Successfully used fwd app to discover hosts " )
        getDataResult = main.TRUE
    else:
        main.log.info( "Failed to use fwd app to discover hosts " )
        getDataResult = main.FALSE
    print main.hostsData
    return getDataResult


def checkTopology( main, expectedLink ):
    # Compare every ONOS node's topology view against the expected
    # switch and link counts; all nodes must match for TRUE
    statusResult = main.TRUE
    # Check onos topology
    main.log.info( itemName + ": Checking ONOS topology " )

    for i in range( main.numCtrls ):
        topologyResult = main.CLIs[ i ].topology()
        statusResult = main.ONOSbench.checkStatus( topologyResult,
                                                   main.numSwitch,
                                                   expectedLink )\
                       and statusResult

    if not statusResult:
        main.log.error( itemName + ": Topology mismatch" )
    else:
        main.log.info( itemName + ": Topology match" )
    return statusResult
to make sure all the intents are in INSTALLED state """ intentResult = main.TRUE results = [] main.log.info( itemName + ": Checking intents state" ) # First check of intents for i in range( main.numCtrls ): tempResult = main.CLIs[ i ].checkIntentState( intentsId=intentsId ) results.append( tempResult ) expectedState = [ 'INSTALLED', 'INSTALLING' ] if all( result == main.TRUE for result in results ): main.log.info( itemName + ": Intents are installed correctly" ) else: # Wait for at least 5 second before checking the intents again main.log.error( "Intents are not installed correctly. Waiting 5 sec" ) time.sleep( 5 ) results = [] # Second check of intents since some of the intents may be in # INSTALLING state, they should be in INSTALLED at this time for i in range( main.numCtrls ): tempResult = main.CLIs[ i ].checkIntentState( intentsId=intentsId ) results.append( tempResult ) if all( result == main.TRUE for result in results ): main.log.info( itemName + ": Intents are installed correctly" ) intentResult = main.TRUE else: main.log.error( itemName + ": Intents are NOT installed correctly" ) intentResult = main.FALSE return intentResult def checkFlowsState( main ): main.log.info( itemName + ": Check flows state" ) checkFlowsResult = main.CLIs[ 0 ].checkFlowsState() return checkFlowsResult def link( main, sw1, sw2, option): # link down main.log.info( itemName + ": Bring link " + option + "between " + sw1 + " and " + sw2 ) linkResult = main.Mininet1.link( end1=sw1, end2=sw2, option=option ) return linkResult def removeAllIntents( main ): """ Remove all intents in the intentsId """ onosSummary = [] removeIntentResult = main.TRUE # Remove intents removeIntentResult = main.CLIs[ 0 ].removeAllIntents( ) if removeIntentResult: main.log.info( itemName + ": There are no more intents remaining, " + "successfully removed all the intents." 
) return removeIntentResult def checkFlowsCount( main ): """ Check flows count in each node """ flowsCount = [] main.log.info( itemName + ": Checking flows count in each ONOS node" ) for i in range( main.numCtrls ): flowsCount.append( len( json.loads( main.CLIs[ i ].flows() ) ) ) if flowsCount: if all( flows==flowsCount[ 0 ] for flows in flowsCount ): main.log.info( itemName + ": There are " + str( flowsCount[ 0 ] ) + " flows in all ONOS node" ) else: for i in range( main.numCtrls ): main.log.debug( itemName + ": ONOS node " + str( i + 1 ) + " has " + str( flowsCount[ i ] ) + " flows" ) else: main.log.error( "Checking flows count failed, check summary command" ) return main.FALSE return main.TRUE def sendDiscoveryArp( main, hosts=None ): """ Sends Discovery ARP packets from each host provided Defaults to each host in main.scapyHosts """ # Send an arp ping from each host if not hosts: hosts = main.scapyHosts for host in hosts: pkt = 'Ether( src="{0}")/ARP( psrc="{1}")'.format( host.hostMac ,host.hostIp ) # Send from the VLAN interface if there is one so ONOS discovers the VLAN correctly iface = None for interface in host.getIfList(): if '.' in interface: main.log.debug( "Detected VLAN interface {0}. 
Sending ARP packet from {0}".format( interface ) ) iface = interface break host.sendPacket( packet=pkt, iface=iface ) main.log.info( "Sending ARP packet from {0}".format( host.name ) ) def confirmHostDiscovery( main ): """ Confirms that all ONOS nodes have discovered all scapy hosts """ import collections scapyHostCount = len( main.scapyHosts ) hosts = main.topo.getAllHosts( main ) # Get host data from each ONOS node hostFails = [] # Reset for each failed attempt # Check for matching hosts on each node scapyHostIPs = [ x.hostIp for x in main.scapyHosts if x.hostIp != "0.0.0.0" ] for controller in range( main.numCtrls ): controllerStr = str( controller + 1 ) # ONOS node number # Compare Hosts # Load hosts data for controller node if hosts[ controller ] and "Error" not in hosts[ controller ]: try: hostData = json.loads( hosts[ controller ] ) except ( TypeError, ValueError ): main.log.error( "Could not load json:" + str( hosts[ controller ] ) ) hostFails.append( controllerStr ) else: onosHostIPs = [ x.get( "ipAddresses" )[ 0 ] for x in hostData if len( x.get( "ipAddresses" ) ) > 0 ] if not set( collections.Counter( scapyHostIPs ) ).issubset( set ( collections.Counter( onosHostIPs ) ) ): main.log.warn( "Controller {0} only sees nodes with {1} IPs. It should see all of the following: {2}".format( controllerStr, onosHostIPs, scapyHostIPs ) ) hostFails.append( controllerStr ) else: main.log.error( "Hosts returned nothing or an error." 
) hostFails.append( controllerStr ) if hostFails: main.log.error( "List of failed ONOS Nodes:" + ', '.join(map(str, hostFails )) ) return main.FALSE else: return main.TRUE def populateHostData( main ): """ Populates hostsData """ import json try: hostsJson = json.loads( main.CLIs[ 0 ].hosts() ) hosts = main.Mininet1.getHosts().keys() # TODO: Make better use of new getHosts function for host in hosts: main.hostsData[ host ] = {} main.hostsData[ host ][ 'mac' ] = \ main.Mininet1.getMacAddress( host ).upper() for hostj in hostsJson: if main.hostsData[ host ][ 'mac' ] == hostj[ 'mac' ]: main.hostsData[ host ][ 'id' ] = hostj[ 'id' ] main.hostsData[ host ][ 'vlan' ] = hostj[ 'vlan' ] main.hostsData[ host ][ 'location' ] = \ hostj[ 'location' ][ 'elementId' ] + '/' + \ hostj[ 'location' ][ 'port' ] main.hostsData[ host ][ 'ipAddresses' ] = hostj[ 'ipAddresses' ] return main.TRUE except KeyError: main.log.error( "KeyError while populating hostsData") return main.FALSE def scapyCheckConnection( main, senders, recipients, packet=None, packetFilter=None, expectFailure=False ): """ Checks the connectivity between all given sender hosts and all given recipient hosts Packet may be specified. Defaults to Ether/IP packet Packet Filter may be specified. 
Defaults to Ether/IP from current sender MAC Todo: Optional packet and packet filter attributes for sender and recipients Expect Failure when the sender and recipient are not supposed to have connectivity Timeout of 1 second, returns main.TRUE if the filter is not triggered and kills the filter """ connectionsFunctional = main.TRUE if not packetFilter: packetFilter = 'ether host {}' if expectFailure: timeout = 1 else: timeout = 10 for sender in senders: try: senderComp = getattr( main, sender ) except AttributeError: main.log.error( "main has no attribute {}".format( sender ) ) connectionsFunctional = main.FALSE continue for recipient in recipients: # Do not send packets to self since recipient CLI will already be busy if recipient == sender: continue try: recipientComp = getattr( main, recipient ) except AttributeError: main.log.error( "main has no attribute {}".format( recipient ) ) connectionsFunctional = main.FALSE continue recipientComp.startFilter( pktFilter = packetFilter.format( senderComp.hostMac ) ) if not packet: pkt = 'Ether( src="{0}", dst="{2}" )/IP( src="{1}", dst="{3}" )'.format( senderComp.hostMac, senderComp.hostIp, recipientComp.hostMac, recipientComp.hostIp ) else: pkt = packet senderComp.sendPacket( packet = pkt ) if recipientComp.checkFilter( timeout ): if expectFailure: main.log.error( "Packet from {0} successfully received by {1} when it should not have been".format( sender , recipient ) ) connectionsFunctional = main.FALSE else: main.log.info( "Packet from {0} successfully received by {1}".format( sender , recipient ) ) else: recipientComp.killFilter() if expectFailure: main.log.info( "As expected, packet from {0} was not received by {1}".format( sender , recipient ) ) else: main.log.error( "Packet from {0} was not received by {1}".format( sender , recipient ) ) connectionsFunctional = main.FALSE return connectionsFunctional def checkLeaderChange( leaders1, leaders2 ): """ Checks for a change in intent partition leadership. 
Takes the output of leaders -c in json string format before and after a potential change as input Returns main.TRUE if no mismatches are detected Returns main.FALSE if there is a mismatch or on error loading the input """ try: leaders1 = json.loads( leaders1 ) leaders2 = json.loads( leaders2 ) except ( AttributeError, TypeError): main.log.exception( self.name + ": Object not as expected" ) return main.FALSE except Exception: main.log.exception( self.name + ": Uncaught exception!" ) main.cleanup() main.exit() main.log.info( "Checking Intent Paritions for Change in Leadership" ) mismatch = False for dict1 in leaders1: if "intent" in dict1.get( "topic", [] ): for dict2 in leaders2: if dict1.get( "topic", 0 ) == dict2.get( "topic", 0 ) and \ dict1.get( "leader", 0 ) != dict2.get( "leader", 0 ): mismatch = True main.log.error( "{0} changed leader from {1} to {2}".\ format( dict1.get( "topic", "no-topic" ),\ dict1.get( "leader", "no-leader" ),\ dict2.get( "leader", "no-leader" ) ) ) if mismatch: return main.FALSE else: return main.TRUE def report( main ): """ Report errors/warnings/exceptions """ main.ONOSbench.logReport( main.ONOSip[ 0 ], [ "INFO", "FOLLOWER", "WARN", "flow", "ERROR", "Except" ], "s" ) main.log.info( "ERROR report: \n" ) for i in range( main.numCtrls ): main.ONOSbench.logReport( main.ONOSip[ i ], [ "ERROR" ], "d" ) main.log.info( "EXCEPTIONS report: \n" ) for i in range( main.numCtrls ): main.ONOSbench.logReport( main.ONOSip[ i ], [ "Except" ], "d" ) main.log.info( "WARNING report: \n" ) for i in range( main.numCtrls ): main.ONOSbench.logReport( main.ONOSip[ i ], [ "WARN" ], "d" )
40.305636
205
0.545166
6,864
69,366
5.508741
0.083333
0.016291
0.012509
0.02166
0.749498
0.733815
0.708373
0.692505
0.687612
0.679202
0
0.015318
0.37416
69,366
1,720
206
40.32907
0.855685
0.056368
0
0.696429
0
0.001984
0.121815
0
0
0
0
0.001744
0.080357
0
null
null
0.029762
0.005952
null
null
0.001984
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
901d19950c990c5727bb906a07a2fbe584e7524f
178
py
Python
verpy/tests/importtest.py
bynoud/verpy
33a7bfa42a306f0fd8c2845a2b3fc390bc4d3d2b
[ "MIT" ]
null
null
null
verpy/tests/importtest.py
bynoud/verpy
33a7bfa42a306f0fd8c2845a2b3fc390bc4d3d2b
[ "MIT" ]
null
null
null
verpy/tests/importtest.py
bynoud/verpy
33a7bfa42a306f0fd8c2845a2b3fc390bc4d3d2b
[ "MIT" ]
null
null
null
from __future__ import print_function print("outside " + __name__) class tt(object): def __init__(self, i): print("inside tt : %s = %s %s" % (__name__, i, type(self)))
17.8
63
0.651685
25
178
3.96
0.68
0.040404
0
0
0
0
0
0
0
0
0
0
0.191011
178
9
64
19.777778
0.6875
0
0
0
0
0
0.170455
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0.6
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
1
0
4
9031db654bb37bff138a922e61c1e4a41ac06523
226
py
Python
daemon/parse/__init__.py
BehindLoader/bandcamp-parser
bb1d2278d8275bd29888ce9a4fd5627400543cd0
[ "MIT" ]
null
null
null
daemon/parse/__init__.py
BehindLoader/bandcamp-parser
bb1d2278d8275bd29888ce9a4fd5627400543cd0
[ "MIT" ]
null
null
null
daemon/parse/__init__.py
BehindLoader/bandcamp-parser
bb1d2278d8275bd29888ce9a4fd5627400543cd0
[ "MIT" ]
null
null
null
""" Модуль распределения парсеров """ from daemon.parse.parsers import PARSERS from daemon.parse.bandcamp import BandcampParser PARSERS.add(BandcampParser) print( PARSERS.get_parser('BandcampParser').FUNC['Tag']['parse']() )
25.111111
68
0.783186
26
226
6.769231
0.615385
0.113636
0.170455
0
0
0
0
0
0
0
0
0
0.079646
226
9
68
25.111111
0.846154
0.128319
0
0
0
0
0.115789
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5f43c5dd641d7471e0d2df8c0685f39558524d53
208
py
Python
caluma/form/jexl.py
sliverc/caluma
34497b97a4219900c551d311e1e4a183821c5c5d
[ "MIT" ]
null
null
null
caluma/form/jexl.py
sliverc/caluma
34497b97a4219900c551d311e1e4a183821c5c5d
[ "MIT" ]
null
null
null
caluma/form/jexl.py
sliverc/caluma
34497b97a4219900c551d311e1e4a183821c5c5d
[ "MIT" ]
null
null
null
from pyjexl import JEXL class QuestionJexl(JEXL): def __init__(self, **kwargs): super().__init__(**kwargs) # TODO: add transforms # self.add_transform("task", lambda spec: spec)
23.111111
55
0.639423
24
208
5.166667
0.75
0
0
0
0
0
0
0
0
0
0
0
0.235577
208
8
56
26
0.779874
0.317308
0
0
0
0
0
0
0
0
0
0.125
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
4
5f966672c2a49e3889a4419968929a26bfebe144
368
py
Python
src/xrd/core/p2p/p2pObservable.py
jack3343/xrd-core
48a6d890d62485c627060b017eadf85602268caf
[ "MIT" ]
null
null
null
src/xrd/core/p2p/p2pObservable.py
jack3343/xrd-core
48a6d890d62485c627060b017eadf85602268caf
[ "MIT" ]
null
null
null
src/xrd/core/p2p/p2pObservable.py
jack3343/xrd-core
48a6d890d62485c627060b017eadf85602268caf
[ "MIT" ]
null
null
null
from xrd.core.notification.Observable import Observable from xrd.generated import xrdlegacy_pb2 class P2PObservable(Observable): def __init__(self, source): # FIXME: Add mutexes super().__init__(source) def notify(self, message: xrdlegacy_pb2.LegacyMessage): # TODO: Add some p2p specific validation? super().notify(message)
28.307692
59
0.714674
42
368
6.02381
0.642857
0.055336
0
0
0
0
0
0
0
0
0
0.013559
0.19837
368
12
60
30.666667
0.844068
0.157609
0
0
1
0
0
0
0
0
0
0.083333
0
1
0.285714
false
0
0.285714
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
4
5fb1e58e7a120a31c6a02d923977f6af1a1499c3
213
py
Python
nyxbot/__init__.py
Cappycot/nyx
cb08e6b4148195ec5ad06382cdcd0e6ffd1676d5
[ "MIT" ]
3
2017-03-01T06:31:08.000Z
2017-10-11T03:46:06.000Z
nyxbot/__init__.py
Cappycot/nyx
cb08e6b4148195ec5ad06382cdcd0e6ffd1676d5
[ "MIT" ]
null
null
null
nyxbot/__init__.py
Cappycot/nyx
cb08e6b4148195ec5ad06382cdcd0e6ffd1676d5
[ "MIT" ]
null
null
null
from .nyxbot import NyxBot, CommandHasDisambiguation from .nyxcommands import * from .nyxguild import NyxGuild from .nyxhelp import DefaultNyxHelpCommand from .nyxsplash import splash from .nyxuser import NyxUser
30.428571
52
0.84507
24
213
7.5
0.458333
0
0
0
0
0
0
0
0
0
0
0
0.117371
213
6
53
35.5
0.957447
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5fb7fb5739d031aa3e448c35b4be3cc9fc124c23
113
py
Python
django_docs/User_model_extension/apps.py
djangojeng-e/django_tutorial
78a5f8e17253a32f43079b2c17ffe4cecbd3c3f0
[ "MIT" ]
null
null
null
django_docs/User_model_extension/apps.py
djangojeng-e/django_tutorial
78a5f8e17253a32f43079b2c17ffe4cecbd3c3f0
[ "MIT" ]
9
2021-03-19T10:01:27.000Z
2022-01-13T03:05:42.000Z
django_docs/User_model_extension/apps.py
djangojeng-e/django_tutorial
78a5f8e17253a32f43079b2c17ffe4cecbd3c3f0
[ "MIT" ]
1
2020-05-01T12:55:48.000Z
2020-05-01T12:55:48.000Z
from django.apps import AppConfig class UserModelExtensionConfig(AppConfig): name = 'User_model_extension'
18.833333
42
0.80531
12
113
7.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.132743
113
5
43
22.6
0.908163
0
0
0
0
0
0.176991
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5fd1c4345987b6847ee519218f9c02c4776605c7
182
py
Python
allhub/interactions/util.py
srinivasreddy/allhub
ff20858c9984da5c4edd5043c39eed3b6d5d693d
[ "Apache-2.0" ]
2
2019-10-07T15:46:33.000Z
2019-11-26T04:30:39.000Z
allhub/interactions/util.py
srinivasreddy/allhub
ff20858c9984da5c4edd5043c39eed3b6d5d693d
[ "Apache-2.0" ]
1
2020-03-09T14:44:04.000Z
2020-03-09T14:44:04.000Z
allhub/interactions/util.py
srinivasreddy/allhub
ff20858c9984da5c4edd5043c39eed3b6d5d693d
[ "Apache-2.0" ]
2
2019-10-08T05:22:37.000Z
2019-10-08T06:20:47.000Z
from enum import Enum class InteractionLimit(Enum): EXISTING_USERS = "existing_users" CONTRIBUTORS_ONLY = "contributors_only" COLLABORATORS_ONLY = "collaborators_only"
22.75
45
0.774725
19
182
7.105263
0.526316
0.192593
0.311111
0
0
0
0
0
0
0
0
0
0.159341
182
7
46
26
0.882353
0
0
0
0
0
0.269231
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
3956e054ac54d31e57d91b54d9ea835bd494c03e
57
py
Python
utils/scripts/OOOlevelGen/src/level_backup/johan_test.py
fullscreennl/bullettime
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
[ "MIT" ]
null
null
null
utils/scripts/OOOlevelGen/src/level_backup/johan_test.py
fullscreennl/bullettime
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
[ "MIT" ]
null
null
null
utils/scripts/OOOlevelGen/src/level_backup/johan_test.py
fullscreennl/bullettime
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
[ "MIT" ]
null
null
null
''' Created on Nov 22, 2010 @author: johantenbroeke '''
9.5
23
0.666667
7
57
5.428571
1
0
0
0
0
0
0
0
0
0
0
0.12766
0.175439
57
5
24
11.4
0.680851
0.842105
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
39755dc05382a776b09afd08e4d20b9eb8777658
152
py
Python
config.py
Horatio12345/lasdghjskldjaslhiuiosl8913616462
bbd7b585860d8a5d3f1b2938855da8609d0d2d4c
[ "MIT" ]
null
null
null
config.py
Horatio12345/lasdghjskldjaslhiuiosl8913616462
bbd7b585860d8a5d3f1b2938855da8609d0d2d4c
[ "MIT" ]
null
null
null
config.py
Horatio12345/lasdghjskldjaslhiuiosl8913616462
bbd7b585860d8a5d3f1b2938855da8609d0d2d4c
[ "MIT" ]
null
null
null
# The channel to send the log messages to CHANNEL_NAME = "voice-log" # The bot token BOT_TOKEN = "NDI3NTIyNzMxNjgxsasdsagsadshvbn,.fgsfgjfkLHEGROoAxA"
25.333333
65
0.796053
19
152
6.263158
0.631579
0.134454
0
0
0
0
0
0
0
0
0
0.007576
0.131579
152
5
66
30.4
0.893939
0.348684
0
0
0
0
0.625
0.53125
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
397570fa79e26347c5ed0a2a7a1b8f572e5bbd0b
160
py
Python
megaman/relaxation/__init__.py
ffancheng/megaman
0ccdf27c26d88685c71a8845796b03fdcf1054c4
[ "BSD-2-Clause" ]
303
2016-03-03T00:44:37.000Z
2022-03-14T03:43:38.000Z
megaman/relaxation/__init__.py
YifuLiuL/megaman
faccaf267aad0a8b18ec8a705735fd9dd838ca1e
[ "BSD-2-Clause" ]
52
2016-02-26T21:41:31.000Z
2021-06-27T08:33:51.000Z
megaman/relaxation/__init__.py
YifuLiuL/megaman
faccaf267aad0a8b18ec8a705735fd9dd838ca1e
[ "BSD-2-Clause" ]
67
2016-03-03T22:38:35.000Z
2022-01-12T08:03:47.000Z
# LICENSE: Simplified BSD https://github.com/mmp2/megaman/blob/master/LICENSE from .riemannian_relaxation import * from .trace_variable import TracingVariable
32
77
0.825
20
160
6.5
0.85
0
0
0
0
0
0
0
0
0
0
0.006849
0.0875
160
4
78
40
0.883562
0.46875
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
39aa96e1009af2c3d46c3dd1fc51e39167f2ed84
27
py
Python
gemini/version.py
burkej1/gemini_mirror
b44db1a4c65e23de4871f48272a20859c669476a
[ "MIT" ]
null
null
null
gemini/version.py
burkej1/gemini_mirror
b44db1a4c65e23de4871f48272a20859c669476a
[ "MIT" ]
null
null
null
gemini/version.py
burkej1/gemini_mirror
b44db1a4c65e23de4871f48272a20859c669476a
[ "MIT" ]
null
null
null
__version__ = "0.20.2-dev"
13.5
26
0.666667
5
27
2.8
1
0
0
0
0
0
0
0
0
0
0
0.166667
0.111111
27
1
27
27
0.416667
0
0
0
0
0
0.37037
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
39b3ca47b2843e7c7f064a00ac009b4b365f2731
119
py
Python
config/settings/components/authenticate.py
depromeet/octo-palm-server
95028764cda0dbecdda2d24f4e38de853e67e634
[ "MIT" ]
null
null
null
config/settings/components/authenticate.py
depromeet/octo-palm-server
95028764cda0dbecdda2d24f4e38de853e67e634
[ "MIT" ]
null
null
null
config/settings/components/authenticate.py
depromeet/octo-palm-server
95028764cda0dbecdda2d24f4e38de853e67e634
[ "MIT" ]
2
2019-01-12T07:14:34.000Z
2019-08-29T02:55:23.000Z
AUTHENTICATION_BACKENDS = [ 'django.contrib.auth.backends.ModelBackend', 'utils.authenticate.GoogleBackend', ]
23.8
48
0.756303
10
119
8.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.117647
119
4
49
29.75
0.847619
0
0
0
0
0
0.613445
0.613445
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f2dd91c1a7ee3547d06559688e85caf818666495
157
py
Python
datasource/__init__.py
YAmikep/datasource
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
[ "MIT" ]
1
2018-06-16T11:33:56.000Z
2018-06-16T11:33:56.000Z
datasource/__init__.py
YAmikep/datasource
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
[ "MIT" ]
1
2020-03-24T17:32:45.000Z
2020-03-24T17:32:45.000Z
datasource/__init__.py
YAmikep/datasource
6c8d72bd299aa0a9e2880228f0f39d2b8721b146
[ "MIT" ]
2
2018-06-16T11:37:34.000Z
2020-07-30T17:56:54.000Z
""" See README.rst for further information """ from .api import * __version_info__ = (0, 1, 0) __version__ = '.'.join((str(i) for i in __version_info__))
15.7
58
0.675159
23
157
4
0.73913
0.23913
0
0
0
0
0
0
0
0
0
0.022901
0.165605
157
9
59
17.444444
0.679389
0.242038
0
0
0
0
0.009091
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f2f1ff348107e6a9418b1cfc15128e9ccd923fc1
487
py
Python
manma/box.py
orshe4/manma
ad85d30471f5753cc1db8eeaed3e089782b1ec39
[ "MIT" ]
null
null
null
manma/box.py
orshe4/manma
ad85d30471f5753cc1db8eeaed3e089782b1ec39
[ "MIT" ]
null
null
null
manma/box.py
orshe4/manma
ad85d30471f5753cc1db8eeaed3e089782b1ec39
[ "MIT" ]
null
null
null
class Box(object): def __init__(self, side, height): self._side = side self._height = height def volume(self): return self._side * self._side * self._height def __lt__(self, other): return self.volume() < other def __eq__(self, other): return self.volume() == other def __gt__(self, other): return self.volume() > other def __repr__(self): return "Box(side=%s, height=%s)" % (self._side, self._height)
24.35
69
0.595483
61
487
4.311475
0.262295
0.152091
0.159696
0.21673
0.376426
0.376426
0.376426
0
0
0
0
0
0.275154
487
19
70
25.631579
0.745042
0
0
0
0
0
0.047228
0
0
0
0
0
0
1
0.428571
false
0
0
0.357143
0.857143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
840613f2903ed2ab19e83ee89f304c5926ac0d89
646
py
Python
card.py
KVinyl/war-simulator
c64096ffbb2f3e6175838995e39d07c6d56881f0
[ "MIT" ]
null
null
null
card.py
KVinyl/war-simulator
c64096ffbb2f3e6175838995e39d07c6d56881f0
[ "MIT" ]
null
null
null
card.py
KVinyl/war-simulator
c64096ffbb2f3e6175838995e39d07c6d56881f0
[ "MIT" ]
null
null
null
class Card(): ranks = [str(n) for n in range(2, 10)] + list('TJQKA') rank_tran = {rank: n for n, rank in enumerate(ranks, 2)} def __init__(self, rank, suit): self.rank = rank self.suit = suit self._numrank = self.rank_tran[rank] def __eq__(self, other): return self._numrank == other._numrank def __ne__(self, other): return self._numrank != other._numrank def __lt__(self, other): return self._numrank < other._numrank def __gt__(self, other): return self._numrank > other._numrank def __repr__(self): return f'Card({self.rank}, {self.suit})'
26.916667
60
0.609907
87
646
4.126437
0.321839
0.153203
0.167131
0.211699
0.456825
0.456825
0.456825
0.456825
0
0
0
0.008403
0.263158
646
23
61
28.086957
0.745798
0
0
0
0
0
0.05418
0
0
0
0
0
0
1
0.352941
false
0
0
0.294118
0.823529
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
842cbc8820acf4bf4136bf07966f56ae3c7b6192
10,581
py
Python
app/models.py
mross982/flask_template
80ab1a0ad64bd15245ee3a29a138534f4d4f3cc0
[ "MIT" ]
null
null
null
app/models.py
mross982/flask_template
80ab1a0ad64bd15245ee3a29a138534f4d4f3cc0
[ "MIT" ]
null
null
null
app/models.py
mross982/flask_template
80ab1a0ad64bd15245ee3a29a138534f4d4f3cc0
[ "MIT" ]
null
null
null
from datetime import datetime
from hashlib import md5
from time import time
from flask import current_app
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
import jwt
from app import db, login


@login.user_loader
def load_user(id):
    """Flask-Login user loader: resolve a session user id to a User row."""
    return User.query.get(int(id))


class User(UserMixin, db.Model):
    """An application user with credential helpers and reset-token support."""

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), index=True, unique=True)
    email = db.Column(db.String(120), index=True, unique=True)
    password_hash = db.Column(db.String(128))
    measures = db.relationship('Measure', backref='user', lazy='dynamic')

    def __repr__(self):
        return '<User {}>'.format(self.username)

    def set_password(self, password):
        """Store only a salted hash of *password*, never the plain text."""
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def avatar(self, size):
        """Return a Gravatar URL for this user's email at *size* pixels."""
        digest = md5(self.email.lower().encode('utf-8')).hexdigest()
        return 'https://www.gravatar.com/avatar/{}?d=identicon&s={}'.format(
            digest, size)

    def get_reset_password_token(self, expires_in=600):
        """Issue a signed JWT authorizing a password reset for *expires_in* seconds."""
        return jwt.encode(
            {'reset_password': self.id, 'exp': time() + expires_in},
            current_app.config['SECRET_KEY'],
            algorithm='HS256').decode('utf-8')

    @staticmethod
    def verify_reset_password_token(token):
        """Return the User a valid reset *token* refers to, or None."""
        try:
            id = jwt.decode(token, current_app.config['SECRET_KEY'],
                            algorithms=['HS256'])['reset_password']
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; any jwt/decoding failure simply yields None.
        except Exception:
            return None
        return User.query.get(id)


class Measure(db.Model):
    """A tracked quality measure.

    Also think about historical data (bool), number of years (int),
    table of numerator / denominator for each year.
    """

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(140), unique=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    unit = db.Column(db.String(15))  # individuals or encounters
    start_date = db.Column(db.DateTime)  # for the overall measure
    end_date = db.Column(db.DateTime)
    direction = db.Column(db.String(15))  # positive or negative
    benchmarks = db.relationship('Benchmark',
                                 backref='measure', lazy='dynamic')
    data = db.relationship('Data', backref='measure', lazy='dynamic')

    def __repr__(self):
        return '<Measure: {}>'.format(self.name)


class Benchmark(db.Model):
    """A benchmark value attached to a measure."""

    id = db.Column(db.Integer, primary_key=True)
    measure_id = db.Column(db.Integer, db.ForeignKey('measure.id'))
    benchmark = db.Column(db.Integer)
    value = db.Column(db.Integer)

    def __repr__(self):
        # FIX: the format string was missing its closing '>'.
        return '<Benchmark {}: {} {}>'.format(self.id, self.benchmark,
                                              self.value)


class Data(db.Model):
    """A single encounter/patient data point belonging to a measure."""

    id = db.Column(db.Integer, primary_key=True)
    measure_id = db.Column(db.Integer, db.ForeignKey('measure.id'))
    encounter_id = db.Column(db.Integer)
    encounter_date = db.Column(db.DateTime)
    patient_id = db.Column(db.Integer)
    unique_patient = db.Column(db.Boolean)

    def __repr__(self):
        return '<Data for {}>'.format(self.measure_id)


'''
Troubleshooting

interact with app database by starting shell
(venv) $ flask shell

print list of tables in the database
>>> print(db.metadata.tables.keys())
>>> for table in db.metadata.tables.keys():
...     tb = db.table(table)
...     pprint(tb.__dict__.keys())

to see the def __repr__ results type the following into the interpreter:
>>> flask shell
>>> from app.models import User
>>> u = User(username='susan', email='susan@example.com')
>>> u
<User susan>

The first time you create a new app, you will need to enter the following
into the interpreter:
(venv) $ flask db init

With the migration repository in place, it is time to create the first
database migration, which will include the users table that maps to the
User database model. There are two ways to create a database migration:
manually or automatically. To generate a migration automatically, Alembic
compares the database schema as defined by the database models, against
the actual database schema currently used in the database. It then
populates the migration script with the changes necessary to make the
database schema match the application models.

In this case, since there is no previous database, the automatic migration
will add the entire User model to the migration script. The flask db
migrate sub-command generates these automatic migrations:
(venv) $ flask db migrate -m "users table"

If you were to add new fields to the database, you would need to perform
this same step
(venv) $ flask db migrate -m "new fields in user model"

The generated migration script has two functions called upgrade() and
downgrade(). The upgrade() function applies the migration, and the
downgrade() function removes it. The flask db migrate command does not
make any changes to the database, it just generates the migration script.
To apply the changes to the database, the flask db upgrade command must
be used.
(venv) $ flask db upgrade

Because this application uses SQLite, the upgrade command will detect that
a database does not exist and will create it (you will notice a file named
app.db is added after this command finishes, that is the SQLite database).
When working with database servers such as MySQL and PostgreSQL, you have
to create the database in the database server before running upgrade.

Note that Flask-SQLAlchemy uses a "snake case" naming convention for
database tables by default. For the User model above, the corresponding
table in the database will be named user. For a AddressAndPhone model
class, the table would be named address_and_phone. If you prefer to choose
your own table names, you can add an attribute named __tablename__ to the
model class, set to the desired name as a string.

From the interpreter:
>>> from app import db
>>> from app.models import User, Post
>>> u = User(username='susan', email='susan@example.com')
>>> db.session.add(u)
>>> db.session.commit()
>>> users = User.query.all()
>>> users
[<User john>, <User susan>]
>>> for u in users:
...     print(u.id, u.username)
...
1 john
2 susan
>>> u = User.query.get(1)
>>> p = Post(body='my first post!', author=u)
>>> db.session.add(p)
>>> db.session.commit()

Additional database queries
>>> # get all posts written by a user
>>> u = User.query.get(1)
>>> u
<User john>
>>> posts = u.posts.all()
>>> posts
[<Post my first post!>]
>>> # same, but with a user that has no posts
>>> u = User.query.get(2)
>>> u
<User susan>
>>> u.posts.all()
[]
>>> # print post author and body for all posts
>>> posts = Post.query.all()
>>> for p in posts:
...     print(p.id, p.author.username, p.body)
...
1 john my first post!

# get all users in reverse alphabetical order
>>> User.query.order_by(User.username.desc()).all()
[<User susan>, <User john>]

Final Clean up
>>> users = User.query.all()
>>> for u in users:
...     db.session.delete(u)
...
>>> posts = Post.query.all()
>>> for p in posts:
...     db.session.delete(p)
...
>>> db.session.commit()

******************************** FOLLOWERS *******************************

The representation of a many-to-many relationship requires the use of an
auxiliary table called an association table. While it may not seem obvious
at first, the association table with its two foreign keys is able to
efficiently answer all the queries about the relationship.

Looking at the summary of all the relationship types, it is easy to
determine that the proper data model to track followers is the
many-to-many relationship, because a user follows many users, and a user
has many followers. But there is a twist. In the students and teachers
example I had two entities that were related through the many-to-many
relationship. But in the case of followers, I have users following other
users, so there is just users. The second entity of the relationship is
also the users. A relationship in which instances of a class are linked to
other instances of the same class is called a self-referential
relationship.

After creating the new relationship table, the changes to the database
need to be recorded in a new database migration:
(venv) $ flask db migrate -m "followers"
(venv) $ flask db upgrade

the is_following() supporting method to make sure the requested action
makes sense. For example, if I ask user1 to follow user2, but it turns out
that this following relationship already exists in the database, I do not
want to add a duplicate. The same logic can be applied to unfollowing.

Obtaining the Posts from Followed Users
The most obvious solution is to run a query that returns the list of
followed users, which as you already know, it would be
user.followed.all(). Then for each of these returned users I can run a
query to get the posts. Once I have all the posts I can merge them into a
single list and sort them by date. Sounds good? Well, not really. This is
actually an awful solution that does not scale well. There is really no
way to avoid this merging and sorting of blog posts, but doing it in the
application results in a very inefficient process. This kind of work is
what relational databases excel at. The database has indexes that allow it
to perform the queries and the sorting in a much more efficient way that I
can possibly do from my side. So what I really want is to come up with a
single database query that defines the information that I want to get, and
then let the database figure out how to extract that information in the
most efficient way.

Joins
Post.query.join(followers, (followers.c.followed_id == Post.user_id))
imagine the table structure required to retrieve the posts of a user's
followed users.

JOIN
First query identifies the user ID from the UserName. Second query matches
the user IDs of the users followed accounts in relation table. Third query
returns the post IDs of the followed users

FILTER
Fourth filter query results for the posts followed by a single user

ORDER BY
Fifth sort query results by (Post.timestamp.desc()) for example

Even though adding and removing followers is fairly easy, I want to
promote reusability in my code, so I'm not going to sprinkle "appends" and
"removes" through the code. Instead, I'm going to implement the "follow"
and "unfollow" functionality as methods in the User model. It is always
best to move the application logic away from view functions and into
models or other auxiliary classes or modules, because as you will see
later in this chapter, that makes unit testing much easier.
'''
43.72314
114
0.727436
1,653
10,581
4.60859
0.275257
0.022053
0.027566
0.024547
0.123786
0.061171
0.05382
0.049751
0.028091
0.028091
0
0.004137
0.177582
10,581
242
115
43.723141
0.871294
0.017295
0
0.147059
1
0
0.088821
0
0
0
0
0
0
1
0.147059
false
0.147059
0.117647
0.102941
0.808824
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
1
1
0
0
4
844cedf2d5b41342ba2c1feb72db7dc7382fb386
101
py
Python
backend/accommodation/apps.py
lianfengluo/Accommodating
c1c8bf0229e7459fb85dce31ce067ac2aa941bbb
[ "MIT", "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
backend/accommodation/apps.py
lianfengluo/Accommodating
c1c8bf0229e7459fb85dce31ce067ac2aa941bbb
[ "MIT", "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
4
2019-11-25T04:06:27.000Z
2022-02-13T01:20:02.000Z
backend/accommodation/apps.py
lianfengluo/Accommodating
c1c8bf0229e7459fb85dce31ce067ac2aa941bbb
[ "MIT", "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
from django.apps import AppConfig


class AccommodationConfig(AppConfig):
    """Django application configuration for the accommodation app."""

    name = 'accommodation'
16.833333
37
0.782178
10
101
7.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.148515
101
5
38
20.2
0.918605
0
0
0
0
0
0.128713
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
8458e5391b5b6af26cbb034bc89be629f0e7133e
206
py
Python
src/backup_info.py
spensireli/S3Backup
6030bb21e32fb493970a66139e85f67702d31fae
[ "Apache-2.0" ]
null
null
null
src/backup_info.py
spensireli/S3Backup
6030bb21e32fb493970a66139e85f67702d31fae
[ "Apache-2.0" ]
null
null
null
src/backup_info.py
spensireli/S3Backup
6030bb21e32fb493970a66139e85f67702d31fae
[ "Apache-2.0" ]
null
null
null
import os
import sys


class Backups():
    """Helpers for inspecting files staged for backup."""

    def file_size(self, directory, file_name):
        """Return the size in bytes of *file_name* inside *directory*.

        Raises OSError if the file does not exist or is inaccessible.
        """
        # FIX: build the path portably instead of hard-coding '/' as the
        # separator (the old `directory + '/' + file_name` broke on Windows).
        full_path = os.path.join(directory, file_name)
        return os.path.getsize(full_path)
22.888889
47
0.660194
28
206
4.607143
0.535714
0.186047
0.263566
0
0
0
0
0
0
0
0
0
0.247573
206
9
48
22.888889
0.832258
0
0
0
0
0
0.004831
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0
0.714286
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
ffbdcf1b99c775af52af9d4369e7bf7eb8468583
182
py
Python
base_user/apps.py
munisisazade/twitter_app
2ca4f3fecdce18ffc82a2734e38a47b4e224fcee
[ "MIT" ]
null
null
null
base_user/apps.py
munisisazade/twitter_app
2ca4f3fecdce18ffc82a2734e38a47b4e224fcee
[ "MIT" ]
7
2019-01-11T08:22:01.000Z
2019-01-25T06:59:33.000Z
base_user/apps.py
munisisazade/twitter_app
2ca4f3fecdce18ffc82a2734e38a47b4e224fcee
[ "MIT" ]
5
2019-01-15T07:13:47.000Z
2019-01-15T07:47:45.000Z
from django.apps import AppConfig


class BaseUserConfig(AppConfig):
    """Django application configuration for the base_user app."""

    name = 'base_user'
    verbose_name = "Istifadəçilər"

    def ready(self):
        # Import for its side effect: registers the app's signal handlers.
        import base_user.signals
18.2
34
0.708791
21
182
6
0.761905
0.126984
0
0
0
0
0
0
0
0
0
0
0.214286
182
9
35
20.222222
0.881119
0
0
0
0
0
0.120879
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
ffc0184ce6efdc07da9ed1ebabb8e25eed1277e2
164
py
Python
drnalpha/tracker/apps.py
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
[ "MIT" ]
null
null
null
drnalpha/tracker/apps.py
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
[ "MIT" ]
null
null
null
drnalpha/tracker/apps.py
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
[ "MIT" ]
1
2021-04-21T09:41:43.000Z
2021-04-21T09:41:43.000Z
from django.apps import AppConfig


class TrackerConfig(AppConfig):
    """Django application configuration for drnalpha.tracker."""

    name = "drnalpha.tracker"

    def ready(self):
        # Imported for its side effect of connecting signal handlers.
        from . import signals  # noqa: F401
18.222222
43
0.682927
19
164
5.894737
0.842105
0
0
0
0
0
0
0
0
0
0
0.02381
0.231707
164
8
44
20.5
0.865079
0.060976
0
0
0
0
0.105263
0
0
0
0
0
0
1
0.2
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
ffc169c084bb78a48b79c0fb32121f8e72e21e2e
185
py
Python
kcliutils/utils/texts/core_texts/enum_.py
kkristof200/py_cli_utils
8c18ea37e84be5e7df1f5fcf7cdc10ae70ecf7c6
[ "MIT" ]
null
null
null
kcliutils/utils/texts/core_texts/enum_.py
kkristof200/py_cli_utils
8c18ea37e84be5e7df1f5fcf7cdc10ae70ecf7c6
[ "MIT" ]
null
null
null
kcliutils/utils/texts/core_texts/enum_.py
kkristof200/py_cli_utils
8c18ea37e84be5e7df1f5fcf7cdc10ae70ecf7c6
[ "MIT" ]
null
null
null
# Source template for scaffolding a new Enum module. The bracketed tokens
# ([COMMENT_LINE_IMPORTS], [COMMENT_LINE], [CLASS_NAME], [TAB], ...) are
# presumably substituted by the CLI templating code before the generated
# file is written out — TODO confirm against the tool that consumes this.
enum_ = ''' [COMMENT_LINE_IMPORTS] # System from enum import Enum [COMMENT_LINE] [COMMENT_LINE_CLASS_NAME] class [CLASS_NAME](Enum): [TAB]Template = 0 [COMMENT_LINE] '''.strip()
10.277778
25
0.713514
25
185
4.92
0.52
0.357724
0.243902
0
0
0
0
0
0
0
0
0.006289
0.140541
185
18
26
10.277778
0.767296
0
0
0.2
0
0
0.876344
0.252688
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
ffce5f0c10251c8cce0e1983c58b55759057a21b
1,012
py
Python
app/main/routes.py
lcfyi/ubcgrades
a00ee195eab320a9b976222e9c6d457c81d7872c
[ "MIT" ]
13
2020-10-21T04:44:52.000Z
2022-03-03T07:41:21.000Z
app/main/routes.py
lcfyi/ubcgrades
a00ee195eab320a9b976222e9c6d457c81d7872c
[ "MIT" ]
4
2020-11-26T07:40:49.000Z
2021-05-19T07:04:42.000Z
app/main/routes.py
lcfyi/ubcgrades
a00ee195eab320a9b976222e9c6d457c81d7872c
[ "MIT" ]
2
2021-05-05T06:40:19.000Z
2022-02-26T00:14:22.000Z
from app.main import bp
from flask import render_template


@bp.route('/', methods=['GET'])
def index():
    """Serve the landing page."""
    return render_template('index.html')


@bp.route('/statistics-by-course', methods=['GET'])
def statistics_by_course():
    """Serve the per-course statistics page."""
    return render_template('statistics_by_course.html')


# @bp.route('/statistics-by-subject', methods=['GET'])
# def statistics_by_subject():
#     return render_template('statistics_by_subject.html')
#
#
# @bp.route('/statistics-by-faculty', methods=['GET'])
# def statistics_by_faculty():
#     return render_template('statistics_by_faculty.html')


@bp.route('/about-help', methods=['GET'])
def about_help():
    """Serve the combined about/help page."""
    return render_template('about_help.html')


@bp.route('/api-reference', methods=['GET'])
def api_index():
    """Serve the API reference index."""
    return render_template('api/api_index.html')


@bp.route('/api-reference/v1', methods=['GET'])
def api_v1():
    """Serve the v1 API reference."""
    return render_template('api/v1.html')


@bp.route('/api-reference/v2', methods=['GET'])
def api_v2():
    """Serve the v2 API reference."""
    return render_template('api/v2.html')
24.095238
58
0.702569
138
1,012
4.956522
0.188406
0.184211
0.152047
0.092105
0.451754
0
0
0
0
0
0
0.006645
0.107708
1,012
41
59
24.682927
0.750831
0.273715
0
0
0
0
0.260331
0.063361
0
0
0
0
0
1
0.3
true
0
0.1
0.3
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
4
ffff24e1eb4618aec7947c5da340145d650e2322
84
py
Python
networks/__init__.py
TorbenSDJohansen/HANA
41e67a40c30209a01155288aca53e85e29982d64
[ "MIT" ]
1
2022-03-13T09:42:22.000Z
2022-03-13T09:42:22.000Z
networks/util/__init__.py
TorbenSDJohansen/HANA
41e67a40c30209a01155288aca53e85e29982d64
[ "MIT" ]
null
null
null
networks/util/__init__.py
TorbenSDJohansen/HANA
41e67a40c30209a01155288aca53e85e29982d64
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Sun Feb 16 12:03:24 2020 @author: tsdj """
10.5
35
0.559524
14
84
3.357143
1
0
0
0
0
0
0
0
0
0
0
0.19697
0.214286
84
7
36
12
0.515152
0.869048
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
0803e00112af4ebcc19ef58e9e208b2d189787ac
23
py
Python
env/lib/python3.8/site-packages/pip/__init__.py
acrucetta/Chicago_COVI_WebApp
a37c9f492a20dcd625f8647067394617988de913
[ "MIT", "Unlicense" ]
120
2019-11-12T19:22:44.000Z
2020-05-17T12:17:25.000Z
env/lib/python3.8/site-packages/pip/__init__.py
acrucetta/Chicago_COVI_WebApp
a37c9f492a20dcd625f8647067394617988de913
[ "MIT", "Unlicense" ]
123
2019-09-10T14:48:01.000Z
2019-11-28T21:24:06.000Z
env/lib/python3.8/site-packages/pip/__init__.py
acrucetta/Chicago_COVI_WebApp
a37c9f492a20dcd625f8647067394617988de913
[ "MIT", "Unlicense" ]
98
2019-10-17T14:48:28.000Z
2022-01-21T03:33:38.000Z
# Package version string (PEP 396-style module attribute).
__version__ = "19.2.3"
11.5
22
0.652174
4
23
2.75
1
0
0
0
0
0
0
0
0
0
0
0.2
0.130435
23
1
23
23
0.35
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
083982f9c79cb39289d40316e5221c9fee425bd7
135
py
Python
write.py
wanzew3074/systemInfoExtractor
bc9521c0ea45af5fd97a7642b581aa6c94a32c89
[ "MIT" ]
null
null
null
write.py
wanzew3074/systemInfoExtractor
bc9521c0ea45af5fd97a7642b581aa6c94a32c89
[ "MIT" ]
null
null
null
write.py
wanzew3074/systemInfoExtractor
bc9521c0ea45af5fd97a7642b581aa6c94a32c89
[ "MIT" ]
null
null
null
def write():
    """Append the test marker line to writeTest.txt in the current directory.

    Opens the file in append mode, so repeated calls accumulate lines.
    """
    # FIX: use a context manager so the handle is closed even if the
    # write raises (the original open/write/close leaked it on error).
    with open("writeTest.txt", "a") as systemInfo_file:
        systemInfo_file.write("\nTEST DATA2")
16.875
48
0.666667
16
135
5.4375
0.6875
0.482759
0
0
0
0
0
0
0
0
0
0.009009
0.177778
135
7
49
19.285714
0.774775
0
0
0
0
0
0.19403
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
f234f9974fb25ed8d25a9e16d8caca10065c7018
733
py
Python
crawler/exceptions.py
priyakdey/github-api-crawler
4621abb39ef6c901ffe21b0b2ce3e3c5c4aa3531
[ "MIT" ]
1
2021-11-25T18:52:01.000Z
2021-11-25T18:52:01.000Z
crawler/exceptions.py
priyakdey/github-api-crawler
4621abb39ef6c901ffe21b0b2ce3e3c5c4aa3531
[ "MIT" ]
16
2021-10-02T20:44:51.000Z
2021-10-11T12:22:45.000Z
crawler/exceptions.py
priyakdey/github-api-crawler
4621abb39ef6c901ffe21b0b2ce3e3c5c4aa3531
[ "MIT" ]
1
2021-11-23T23:44:45.000Z
2021-11-23T23:44:45.000Z
class ApplicationException(Exception):
    """Root of the application's exception hierarchy.

    Carries a numeric error code and a human-readable message; both are
    also forwarded to ``Exception.__init__`` so they appear in ``args``.
    """

    def __init__(self, err_code: int, err_msg: str) -> None:
        super().__init__(err_code, err_msg)
        self.err_code = err_code
        self.err_msg = err_msg

    def __str__(self):  # pragma: no cover
        return f"Error Code: {self.err_code}. Error Message: {self.err_msg}"

    def __repr__(self):  # pragma: no cover
        return str(self)


class APIException(ApplicationException):
    """Raised when an external API call returns a non-200 status code."""
    pass


class DatabaseException(ApplicationException):
    """Raised when there is an issue with the database connection."""
    pass
30.541667
76
0.695771
91
733
5.274725
0.483516
0.072917
0.06875
0.070833
0.095833
0
0
0
0
0
0
0.005164
0.207367
733
23
77
31.869565
0.820998
0.26603
0
0.153846
0
0
0.111324
0
0
0
0
0
0
1
0.230769
false
0.153846
0
0.153846
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
f2a416127362e65a7bc383a56e204b9e906eb3ed
7,721
py
Python
jmetal/problem/multiobjective/test/test_unconstrained.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
335
2017-03-16T19:44:50.000Z
2022-03-30T08:50:46.000Z
jmetal/problem/multiobjective/test/test_unconstrained.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
85
2017-05-16T06:40:51.000Z
2022-02-05T23:43:49.000Z
jmetal/problem/multiobjective/test/test_unconstrained.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
130
2017-02-08T01:19:15.000Z
2022-03-25T08:32:08.000Z
import unittest from jmetal.problem.multiobjective.unconstrained import Kursawe, Fonseca, Schaffer, Viennet2 class KursaweTestCases(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self) -> None: problem = Kursawe(3) self.assertIsNotNone(problem) def test_should_constructor_create_a_valid_problem_with_default_settings(self) -> None: problem = Kursawe() self.assertEqual(3, problem.number_of_variables) self.assertEqual(2, problem.number_of_objectives) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-5.0, -5.0, -5.0], problem.lower_bound) self.assertEqual([5.0, 5.0, 5.0], problem.upper_bound) def test_should_constructor_create_a_valid_problem_with_5_variables(self) -> None: problem = Kursawe(5) self.assertEqual(5, problem.number_of_variables) self.assertEqual(2, problem.number_of_objectives) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-5.0, -5.0, -5.0, -5.0, -5.0], problem.lower_bound) self.assertEqual([5.0, 5.0, 5.0, 5.0, 5.0], problem.upper_bound) def test_should_create_solution_create_a_valid_float_solution(self) -> None: problem = Kursawe(3) solution = problem.create_solution() self.assertEqual(3, solution.number_of_variables) self.assertEqual(3, len(solution.variables)) self.assertEqual(2, solution.number_of_objectives) self.assertEqual(2, len(solution.objectives)) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-5.0, -5.0, -5.0], problem.lower_bound) self.assertEqual([5.0, 5.0, 5.0], problem.upper_bound) self.assertTrue(all(variable >= -5.0 for variable in solution.variables)) self.assertTrue(all(variable <= 5.0 for variable in solution.variables)) def test_should_get_name_return_the_right_name(self): problem = Kursawe() self.assertEqual("Kursawe", problem.get_name()) class FonsecaTestCases(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self): problem = Fonseca() self.assertIsNotNone(problem) def 
test_should_constructor_create_a_valid_problem_with_default_settings(self): problem = Fonseca() self.assertEqual(3, problem.number_of_variables) self.assertEqual(2, problem.number_of_objectives) self.assertEqual(0, problem.number_of_constraints) self.assertEqual(3 * [-4], problem.lower_bound) self.assertEqual(3 * [4], problem.upper_bound) def test_should_create_solution_create_a_valid_float_solution(self): problem = Fonseca() solution = problem.create_solution() self.assertEqual(3, solution.number_of_variables) self.assertEqual(3, len(solution.variables)) self.assertEqual(2, solution.number_of_objectives) self.assertEqual(2, len(solution.objectives)) self.assertEqual(0, problem.number_of_constraints) self.assertEqual(3 * [-4], problem.lower_bound) self.assertEqual(3 * [4], problem.upper_bound) self.assertTrue(solution.variables[0] >= -4) self.assertTrue(solution.variables[0] <= 4) def test_should_create_solution_return_right_evaluation_values(self): problem = Fonseca() solution1 = problem.create_solution() solution1.variables[0] = -1.3 solution1.variables[1] = 1.5 solution1.variables[2] = 1.21 problem.evaluate(solution1) self.assertAlmostEqual(solution1.objectives[0], 0.991563628, 4) self.assertAlmostEqual(solution1.objectives[1], 0.999663388, 4) def test_should_get_name_return_the_right_name(self): problem = Fonseca() self.assertEqual("Fonseca", problem.get_name()) class SchafferTestCases(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self): problem = Schaffer() self.assertIsNotNone(problem) def test_should_constructor_create_a_valid_problem_with_default_settings(self): problem = Schaffer() self.assertEqual(1, problem.number_of_variables) self.assertEqual(2, problem.number_of_objectives) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-100000], problem.lower_bound) self.assertEqual([100000], problem.upper_bound) def test_should_create_solution_create_a_valid_float_solution(self): problem = Schaffer() solution = 
problem.create_solution() self.assertEqual(1, solution.number_of_variables) self.assertEqual(1, len(solution.variables)) self.assertEqual(2, solution.number_of_objectives) self.assertEqual(2, len(solution.objectives)) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-100000], problem.lower_bound) self.assertEqual([100000], problem.upper_bound) self.assertTrue(solution.variables[0] >= -100000) self.assertTrue(solution.variables[0] <= 100000) def test_should_create_solution_return_right_evaluation_values(self): problem = Schaffer() solution1 = problem.create_solution() solution2 = problem.create_solution() solution1.variables[0] = 3 solution2.variables[0] = -2.6 problem.evaluate(solution1) problem.evaluate(solution2) self.assertAlmostEqual(solution1.objectives[0], 9) self.assertAlmostEqual(solution1.objectives[1], 1) self.assertAlmostEqual(solution2.objectives[0], 6.76) self.assertAlmostEqual(solution2.objectives[1], 21.16) def test_should_get_name_return_the_right_name(self): problem = Schaffer() self.assertEqual("Schaffer", problem.get_name()) class Viennet2TestCases(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self): problem = Viennet2() self.assertIsNotNone(problem) def test_should_constructor_create_a_valid_problem_with_default_settings(self): problem = Viennet2() self.assertEqual(2, problem.number_of_variables) self.assertEqual(3, problem.number_of_objectives) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-4, -4], problem.lower_bound) self.assertEqual([4, 4], problem.upper_bound) def test_should_create_solution_create_a_valid_float_solution(self): problem = Viennet2() solution = problem.create_solution() self.assertEqual(2, solution.number_of_variables) self.assertEqual(2, len(solution.variables)) self.assertEqual(3, solution.number_of_objectives) self.assertEqual(3, len(solution.objectives)) self.assertEqual(0, problem.number_of_constraints) self.assertEqual([-4, -4], problem.lower_bound) 
self.assertEqual([4, 4], problem.upper_bound) self.assertTrue(solution.variables[0] >= -4) self.assertTrue(solution.variables[0] <= 4) def test_should_create_solution_return_right_evaluation_values(self): problem = Viennet2() solution2 = problem.create_solution() solution2.variables[0] = -2.6 solution2.variables[1] = 1.5 problem.evaluate(solution2) self.assertAlmostEqual(solution2.objectives[0], 14.0607692307) self.assertAlmostEqual(solution2.objectives[1], -11.8818055555) self.assertAlmostEqual(solution2.objectives[2], -11.1532369747) def test_should_get_name_return_the_right_name(self): problem = Viennet2() self.assertEqual("Viennet2", problem.get_name()) if __name__ == '__main__': unittest.main()
38.798995
92
0.706903
920
7,721
5.665217
0.08913
0.164045
0.049885
0.012279
0.856101
0.744244
0.666155
0.666155
0.660015
0.653492
0
0.044331
0.1878
7,721
198
93
38.994949
0.786796
0
0
0.675862
0
0
0.004922
0
0
0
0
0
0.537931
1
0.137931
false
0
0.013793
0
0.17931
0
0
0
0
null
0
0
0
1
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
4b391421c37d4d1b329e6b4ca11d972afb7bee76
14,521
py
Python
tests/test_graphOps.py
seyedb/tree-ops
787640df0bfeb0f33ca81dbca91dead5ffeb6f85
[ "MIT" ]
null
null
null
tests/test_graphOps.py
seyedb/tree-ops
787640df0bfeb0f33ca81dbca91dead5ffeb6f85
[ "MIT" ]
null
null
null
tests/test_graphOps.py
seyedb/tree-ops
787640df0bfeb0f33ca81dbca91dead5ffeb6f85
[ "MIT" ]
null
null
null
# these tests are designed for pytest framework import pytest import graph as Graph import graphOps as go from collections import Counter @pytest.fixture def generic_graph(): """Returns a sample graph with five vertices and seven edges (weights in between parentheses): AB (3.0), AC (1.0), BC (7.0), BD (5.0), BE (1.0), CD (1.0), DE (7.0) """ g = Graph.graph() na = Graph.graph().graphNode('A') nb = Graph.graph().graphNode('B') nc = Graph.graph().graphNode('C') nd = Graph.graph().graphNode('D') ne = Graph.graph().graphNode('E') na.children[nb], nb.children[na] = [3.0], [3.0] na.children[nc], nc.children[na] = [1.0], [1.0] nb.children[nc], nc.children[nb] = [7.0], [7.0] nb.children[nd], nd.children[nb] = [5.0], [5.0] nb.children[ne], ne.children[nb] = [1.0], [1.0] nc.children[nd], nd.children[nc] = [1.0], [1.0] nd.children[ne], ne.children[nd] = [7.0], [7.0] g.vertices[na][nb], g.vertices[nb][na] = [3.0], [3.0] g.vertices[na][nc], g.vertices[nc][na] = [1.0], [1.0] g.vertices[nb][nc], g.vertices[nc][nb] = [7.0], [7.0] g.vertices[nb][nd], g.vertices[nd][nb] = [5.0], [5.0] g.vertices[nb][ne], g.vertices[ne][nb] = [1.0], [1.0] g.vertices[nc][nd], g.vertices[nd][nc] = [1.0], [1.0] g.vertices[nd][ne], g.vertices[ne][nd] = [7.0], [7.0] vx_tuple = (na, nb, nc, nd, ne) return g, vx_tuple @pytest.fixture def petersen(): """Returns a Petersen graph - GP(5,2).""" GP = Graph.graph() na = Graph.graph().graphNode('a') nb = Graph.graph().graphNode('b') nc = Graph.graph().graphNode('c') nd = Graph.graph().graphNode('d') ne = Graph.graph().graphNode('e') nf = Graph.graph().graphNode('f') ng = Graph.graph().graphNode('g') nh = Graph.graph().graphNode('h') ni = Graph.graph().graphNode('i') nj = Graph.graph().graphNode('j') na.children[nb], nb.children[na] = [0.0], [0.0] na.children[nf], nf.children[na] = [0.0], [0.0] na.children[ne], ne.children[na] = [0.0], [0.0] nb.children[nc], nc.children[nb] = [0.0], [0.0] nb.children[ng], ng.children[nb] = [0.0], [0.0] nc.children[nd], 
nd.children[nc] = [0.0], [0.0] nc.children[nh], nh.children[nc] = [0.0], [0.0] nd.children[ne], ne.children[nd] = [0.0], [0.0] nd.children[ni], ni.children[nd] = [0.0], [0.0] ne.children[nj], nj.children[ne] = [0.0], [0.0] nf.children[nh], nh.children[nf] = [0.0], [0.0] nf.children[ni], ni.children[nf] = [0.0], [0.0] ng.children[ni], ni.children[ng] = [0.0], [0.0] ng.children[nj], nj.children[ng] = [0.0], [0.0] nh.children[nj], nj.children[nh] = [0.0], [0.0] GP.vertices[na][nb], GP.vertices[nb][na] = [0.0], [0.0] GP.vertices[nb][nc], GP.vertices[nc][nb] = [0.0], [0.0] GP.vertices[nc][nd], GP.vertices[nd][nc] = [0.0], [0.0] GP.vertices[na][ne], GP.vertices[ne][na] = [0.0], [0.0] GP.vertices[na][nf], GP.vertices[nf][na] = [0.0], [0.0] GP.vertices[nb][ng], GP.vertices[ng][nb] = [0.0], [0.0] GP.vertices[nc][nh], GP.vertices[nh][nc] = [0.0], [0.0] GP.vertices[nd][ne], GP.vertices[ne][nd] = [0.0], [0.0] GP.vertices[nd][ni], GP.vertices[ni][nd] = [0.0], [0.0] GP.vertices[ne][nj], GP.vertices[nj][ne] = [0.0], [0.0] GP.vertices[nf][nh], GP.vertices[nh][nf] = [0.0], [0.0] GP.vertices[nf][ni], GP.vertices[ni][nf] = [0.0], [0.0] GP.vertices[ng][ni], GP.vertices[ni][ng] = [0.0], [0.0] GP.vertices[ng][nj], GP.vertices[nj][ng] = [0.0], [0.0] GP.vertices[nh][nj], GP.vertices[nj][nh] = [0.0], [0.0] vx_tuple = (na, nb, nc, nd, ne, nf, ng, nh, ni, nj) return GP, vx_tuple def _compare(GP, g): petersen_vertices = GP._getVerticesDict() petersen_vxs = [] for vx in petersen_vertices.keys(): petersen_vxs.append(vx) petersen_nvx = len(petersen_vxs) petersen_vxlist = [] petersen_edgelist = [] petersen_adjlist = [] for i in range(petersen_nvx): v = petersen_vxs[i] petersen_vxlist.append(v.data) for child, weight in v.children.items(): petersen_edgelist.append((v.data, child.data, *weight)) for j in range(petersen_nvx): w = petersen_vxs[j] petersen_adjlist.append((v.data, w.data, *petersen_vertices[v][w])) vertices = g._getVerticesDict() g_vxs = [] for vx in vertices.keys(): g_vxs.append(vx) 
g_nvx = len(g_vxs) g_vxlist = [] g_edgelist = [] g_adjlist = [] for i in range(g_nvx): v = g_vxs[i] g_vxlist.append(v.data) for child, weight in v.children.items(): g_edgelist.append((v.data, child.data, *weight)) for j in range(g_nvx): w = g_vxs[j] g_adjlist.append((v.data, w.data, *vertices[v][w])) return Counter(g_vxlist) == Counter(petersen_vxlist) and \ Counter(g_edgelist) == Counter(petersen_edgelist) and \ Counter(g_adjlist) == Counter(petersen_adjlist) def test_graph_to_adjmat(petersen): g,_ = petersen adjMat, adjMatw = go.graph_to_adjmat(g) ref_adjMat = [ [0, 1, 0, 0, 1, 1, 0, 0, 0, 0], [1, 0, 1, 0, 0, 0, 1, 0, 0, 0], [0, 1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 1, 0], [1, 0, 0, 1, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 1, 0, 0, 1, 0, 0, 0, 1], [0, 0, 0, 1, 0, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 1, 0, 0]] ref_adjMatw = [ [[], [0.0], [], [], [0.0], [0.0], [], [], [], []], [[0.0], [], [0.0], [], [], [], [0.0], [], [], []], [[], [0.0], [], [0.0], [], [], [], [0.0], [], []], [[], [], [0.0], [], [0.0], [], [], [], [0.0], []], [[0.0], [], [], [0.0], [], [], [], [], [], [0.0]], [[0.0], [], [], [], [], [], [], [0.0], [0.0], []], [[], [0.0], [], [], [], [], [], [], [0.0], [0.0]], [[], [], [0.0], [], [], [0.0], [], [], [], [0.0]], [[], [], [], [0.0], [], [0.0], [0.0], [], [], []], [[], [], [], [], [0.0], [], [0.0], [0.0], [], []]] assert adjMat == ref_adjMat and adjMatw == ref_adjMatw def test_adjmat_to_graph(petersen): GP,_ = petersen adjMat = [ [0, 1, 0, 0, 1, 1, 0, 0, 0, 0], [1, 0, 1, 0, 0, 0, 1, 0, 0, 0], [0, 1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 1, 0], [1, 0, 0, 1, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 1, 0, 0, 1, 0, 0, 0, 1], [0, 0, 0, 1, 0, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 1, 0, 0]] vxdatalist = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] g = go.adjmat_to_graph(adjMat, vxdatalist=vxdatalist, directed=False) assert 
_compare(GP, g) def test_adjmat_to_graph_adjMatw(petersen): GP,_ = petersen adjMat = [ [[], [0.0], [], [], [0.0], [0.0], [], [], [], []], [[0.0], [], [0.0], [], [], [], [0.0], [], [], []], [[], [0.0], [], [0.0], [], [], [], [0.0], [], []], [[], [], [0.0], [], [0.0], [], [], [], [0.0], []], [[0.0], [], [], [0.0], [], [], [], [], [], [0.0]], [[0.0], [], [], [], [], [], [], [0.0], [0.0], []], [[], [0.0], [], [], [], [], [], [], [0.0], [0.0]], [[], [], [0.0], [], [], [0.0], [], [], [], [0.0]], [[], [], [], [0.0], [], [0.0], [0.0], [], [], []], [[], [], [], [], [0.0], [], [0.0], [0.0], [], []]] vxdatalist = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] g = go.adjmat_to_graph(adjMat, vxdatalist=vxdatalist, directed=False) assert _compare(GP, g) def test_edges_to_graph(petersen): GP,_ = petersen edges = {('a', 'b', 0.0), ('a', 'e', 0.0), ('a', 'f', 0.0), ('b', 'a', 0.0), ('b', 'c', 0.0), ('b', 'g', 0.0), ('c', 'b', 0.0), ('c', 'd', 0.0), ('c', 'h', 0.0), ('d', 'c', 0.0), ('d', 'e', 0.0), ('d', 'i', 0.0), ('e', 'a', 0.0), ('e', 'd', 0.0), ('e', 'j', 0.0), ('f', 'a', 0.0), ('f', 'h', 0.0), ('f', 'i', 0.0), ('g', 'b', 0.0), ('g', 'i', 0.0), ('g', 'j', 0.0), ('h', 'c', 0.0), ('h', 'f', 0.0), ('h', 'j', 0.0), ('i', 'd', 0.0), ('i', 'f', 0.0), ('i', 'g', 0.0), ('j', 'e', 0.0), ('j', 'g', 0.0), ('j', 'h', 0.0)} g = go.edges_to_graph(edges, directed=False) assert _compare(GP, g) def test_file_to_graph(petersen): GP,_ = petersen fin = "../data/petersen.dat" g = go.file_to_graph(fin, directed=False) assert _compare(GP, g) def test_remap_vertex_data(petersen): GP,_ = petersen vxdata = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] go.remap_vertex_data(GP, vxdata) graph_str = GP.__str__() ref_str = ( "'0': [[1, [0.0]], [5, [0.0]], [4, [0.0]]]\n" "'1': [[0, [0.0]], [2, [0.0]], [6, [0.0]]]\n" "'2': [[1, [0.0]], [3, [0.0]], [7, [0.0]]]\n" "'3': [[2, [0.0]], [4, [0.0]], [8, [0.0]]]\n" "'4': [[0, [0.0]], [3, [0.0]], [9, [0.0]]]\n" "'5': [[0, [0.0]], [7, [0.0]], [8, [0.0]]]\n" "'6': [[1, [0.0]], [8, 
[0.0]], [9, [0.0]]]\n" "'7': [[2, [0.0]], [5, [0.0]], [9, [0.0]]]\n" "'8': [[3, [0.0]], [5, [0.0]], [6, [0.0]]]\n" "'9': [[4, [0.0]], [6, [0.0]], [7, [0.0]]]\n") assert graph_str == ref_str def test_adjmatType(): A = [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]] B = [[[], [0.0], [0.0], [0.0]], [[0,0], [], [], [] ], [[0.0], [], [], [] ], [[0.0], [], [], [] ]] typeofA = go._adjmatType(A) typeofB = go._adjmatType(B) assert typeofA == int and typeofB == list def test_dfs_in_adjmat(): adjMat = [ [0, 1, 0, 0, 1, 1, 0, 0, 0, 0], [1, 0, 1, 0, 0, 0, 1, 0, 0, 0], [0, 1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 1, 0], [1, 0, 0, 1, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 1, 0, 0, 1, 0, 0, 0, 1], [0, 0, 0, 1, 0, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 1, 0, 0]] # the following corresponds to ['a', 'b', 'c', 'd', 'e', 'j', 'g', 'i', 'f', 'h'] ref_dfs = [0, 1, 2, 3, 4, 9, 6, 8, 5, 7] nvx = len(adjMat) visited = [False for i in range(nvx)] start = 0 dfs = [start] go.dfs_in_adjmat(adjMat, start, visited, dfs) assert dfs == ref_dfs def test_dfs_in_adjmatw(): adjMat = [ [[], [0.0], [], [], [0.0], [0.0], [], [], [], []], [[0.0], [], [0.0], [], [], [], [0.0], [], [], []], [[], [0.0], [], [0.0], [], [], [], [0.0], [], []], [[], [], [0.0], [], [0.0], [], [], [], [0.0], []], [[0.0], [], [], [0.0], [], [], [], [], [], [0.0]], [[0.0], [], [], [], [], [], [], [0.0], [0.0], []], [[], [0.0], [], [], [], [], [], [], [0.0], [0.0]], [[], [], [0.0], [], [], [0.0], [], [], [], [0.0]], [[], [], [], [0.0], [], [0.0], [0.0], [], [], []], [[], [], [], [], [0.0], [], [0.0], [0.0], [], []]] # the following corresponds to ['a', 'b', 'c', 'd', 'e', 'j', 'g', 'i', 'f', 'h'] ref_dfs = [0, 1, 2, 3, 4, 9, 6, 8, 5, 7] nvx = len(adjMat) visited = [False for i in range(nvx)] start = 0 dfs = [start] go.dfs_in_adjmatw(adjMat, start, visited, dfs) assert dfs == ref_dfs def test_bfs_in_adjmat(): adjMat = [ [0, 1, 0, 0, 1, 1, 0, 0, 0, 0], [1, 0, 1, 0, 
0, 0, 1, 0, 0, 0], [0, 1, 0, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 1, 0], [1, 0, 0, 1, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 1, 1, 0], [0, 1, 0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 1, 0, 0, 1, 0, 0, 0, 1], [0, 0, 0, 1, 0, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 1, 0, 0]] # the following corresponds to ['a', 'b', 'e', 'f', 'c', 'g', 'd', 'j', 'h', 'i'] ref_bfs = [0, 1, 4, 5, 2, 6, 3, 9, 7, 8] nvx = len(adjMat) visited = [False for i in range(nvx)] start = 0 bfs = go.bfs_in_adjmat(adjMat, start, visited) assert bfs == ref_bfs def test_bfs_in_adjmatw(): adjMat = [ [[], [0.0], [], [], [0.0], [0.0], [], [], [], []], [[0.0], [], [0.0], [], [], [], [0.0], [], [], []], [[], [0.0], [], [0.0], [], [], [], [0.0], [], []], [[], [], [0.0], [], [0.0], [], [], [], [0.0], []], [[0.0], [], [], [0.0], [], [], [], [], [], [0.0]], [[0.0], [], [], [], [], [], [], [0.0], [0.0], []], [[], [0.0], [], [], [], [], [], [], [0.0], [0.0]], [[], [], [0.0], [], [], [0.0], [], [], [], [0.0]], [[], [], [], [0.0], [], [0.0], [0.0], [], [], []], [[], [], [], [], [0.0], [], [0.0], [0.0], [], []]] # the following corresponds to ['a', 'b', 'e', 'f', 'c', 'g', 'd', 'j', 'h', 'i'] ref_bfs = [0, 1, 4, 5, 2, 6, 3, 9, 7, 8] nvx = len(adjMat) visited = [False for i in range(nvx)] start = 0 bfs = go.bfs_in_adjmatw(adjMat, start, visited) assert bfs == ref_bfs def test_connected_components(): adjMat = [[0, 1, 1, 1, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 1, 0, 1], [0, 0, 0, 0, 1, 1, 0]] adjMatw = [[[], [0.0], [0.0], [0.0], [], [], []], [[0,0], [], [], [], [], [], []], [[0.0], [], [], [], [], [], []], [[0.0], [], [], [], [], [], []], [[], [], [], [], [], [0.0], [0.0]], [[], [], [], [], [0.0], [], [0.0]], [[], [], [], [], [0.0], [0.0], []]] components_adjmat = go.connected_components(adjMat) components_adjmatw = go.connected_components(adjMatw) ref = [[0, 1, 2, 3], [4, 5, 6]] assert components_adjmat == components_adjmatw == ref def 
test_Dijkstra(generic_graph): g,_ = generic_graph ref_spt = ['B', 'A', 'C'] ref_dist = 4.0 spt, dist = go.Dijkstra(g, 'B', 'C') assert spt == ref_spt and dist == ref_dist def test_Dijkstra_shortest_path(generic_graph): g,_ = generic_graph ref_sp = [['A', 'A', 0], ['A', 'B', 3.0], ['A', 'C', 1.0], ['A', 'D', 2.0], ['A', 'E', 4.0]] sp = go.Dijkstra_shortest_path(g, 'A') assert sp == ref_sp def test_Bellman_Ford(generic_graph): g,_ = generic_graph ref_spt = ['B', 'A', 'C'] ref_dist = 4.0 spt, dist = go.Bellman_Ford(g, 'B', 'C') assert spt == ref_spt and dist == ref_dist def test_BF_shortest_path(generic_graph): g,_ = generic_graph ref_sp = [['A', 'A', 0], ['A', 'B', 3.0], ['A', 'C', 1.0], ['A', 'D', 2.0], ['A', 'E', 4.0]] sp = go.BF_shortest_path(g, 'A') assert sp == ref_sp
35.3309
96
0.416638
2,419
14,521
2.42869
0.054155
0.216851
0.234894
0.24783
0.624
0.577021
0.509277
0.44834
0.40034
0.386723
0
0.110185
0.264996
14,521
410
97
35.417073
0.44027
0.03884
0
0.442424
0
0.030303
0.041424
0
0
0
0
0
0.048485
1
0.057576
false
0
0.012121
0
0.078788
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4bab4534fe5356ac7ebd1d65999fcf5764b4df45
3,722
py
Python
relancer-exp/original_notebooks/nsrose7224_crowdedness-at-the-campus-gym/forecasting-with-facebook-prophet.py
Chenguang-Zhu/relancer
bf1a175b77b7da4cff12fbc5de17dd55246d264d
[ "Apache-2.0" ]
1
2022-03-05T22:27:49.000Z
2022-03-05T22:27:49.000Z
relancer-exp/original_notebooks/nsrose7224_crowdedness-at-the-campus-gym/forecasting-with-facebook-prophet.py
Chenguang-Zhu/relancer
bf1a175b77b7da4cff12fbc5de17dd55246d264d
[ "Apache-2.0" ]
null
null
null
relancer-exp/original_notebooks/nsrose7224_crowdedness-at-the-campus-gym/forecasting-with-facebook-prophet.py
Chenguang-Zhu/relancer
bf1a175b77b7da4cff12fbc5de17dd55246d264d
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 # # I have decided to interpretate the results using Facebook recently realeased Prophet, for TimeSeries forecast # In[ ]: import numpy as np # linear algebra import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv) import matplotlib.pyplot as plt import seaborn as sns print() df = pd.read_csv("../../../input/nsrose7224_crowdedness-at-the-campus-gym/data.csv") # In[ ]: from fbprophet import Prophet # In[ ]: df.head() # In[ ]: df.drop('timestamp', axis = 1, inplace = True) # In[ ]: df.head() # In[ ]: df['date'] = pd.to_datetime(df['date']) # In[ ]: df.info() # ### Let's gonna group every hour , just to see what comes up # In[ ]: df = pd.DataFrame(df.set_index('date').groupby(pd.TimeGrouper('1H')).sum()) # In[ ]: df.head() # ### We have to check the number of 0s, because of the log problem # In[ ]: (df['number_people'] < 1).count() # ## Ok, we have "13971 hours " when there was nobody at the gym . Maybe we can overcome this placing the inverse of log ? We'll see it. # In[ ]: #calcula o lucro da hora df['Total'] = (df['number_people']) # In[ ]: df = df[['number_people']] # In[ ]: df.head() # In[ ]: df['ds'] = df.index df.head() # In[ ]: df = df.rename(columns={'number_people': 'y'}) # In[ ]: df['y'] = np.exp(df['y']) # # Using Facebook Prophet # ## Facebook Prophet is a tool for producing high quality forecasts for time series data that has multiple seasonality with linear or non-linear growth. 
# # In[ ]: from fbprophet import Prophet m = Prophet(changepoint_prior_scale=0.001, mcmc_samples=500) m.fit(df); # In[ ]: future = m.make_future_dataframe(periods=12000, freq='H') future.tail() # In[ ]: forecast = m.predict(future) forecast[['ds', 'yhat', 'yhat_lower', 'yhat_upper']].tail() # In[ ]: import matplotlib.pyplot as plt print() m.plot(forecast); # ### Finaly, we can see the best months, weeks and weekdays to attend the gym : # In[ ]: m.plot_components(forecast); # In[ ]: # In[ ]: # In[ ]: import pandas as pd import numpy as np # linear algebra import matplotlib.pyplot as plt import seaborn as sns print() df = pd.read_csv("../../../input/nsrose7224_crowdedness-at-the-campus-gym/data.csv") # [My code is on my github actually , as Kaggle does not accept some of the newest libraries I am using (Facebook Prophet) ][1] # # # [1]: https://github.com/andersonamaral/my_machine_learning_studies/blob/master/Crowdness_At_Gym_With_Prophet_Forecast.ipynb # In[ ]: ## My code is on my github actually , as Kaggle does not accept some of the newest libraries I am using (Facebook Prophet) : #https://github.com/andersonamaral/my_machine_learning_studies/blob/master/Crowdness_At_Gym_With_Prophet_Forecast.ipynb # In[ ]: df.head() # In[ ]: df.drop('timestamp', axis = 1, inplace = True) # In[ ]: df['date'] = pd.to_datetime(df['date']) # df = pd.DataFrame(df.set_index('date').groupby(pd.TimeGrouper('1H')).sum()) # In[ ]: df = pd.DataFrame(df.set_index('date').groupby(pd.TimeGrouper('1H')).sum()) # In[ ]: df['Total'] = (df['number_people']) # In[ ]: df = df[['number_people']] df.head() # In[ ]: df['ds'] = df.index df.head() # In[ ]: df = df.rename(columns={'number_people': 'y'}) # In[ ]: df['y'] = np.exp(df['y']) # In[ ]: from fbprophet import Prophet m = Prophet(changepoint_prior_scale=0.001, mcmc_samples=500) m.fit(df); # In[ ]: future = m.make_future_dataframe(periods=12, freq='M') future.tail() # In[ ]: forecast = m.predict(future) forecast[['ds', 'yhat', 'yhat_lower', 
'yhat_upper']].tail() # In[ ]: import matplotlib.pyplot as plt print() m.plot(forecast);
13.245552
154
0.648307
559
3,722
4.230769
0.336315
0.037209
0.023679
0.029598
0.714588
0.700211
0.700211
0.673996
0.6537
0.6537
0
0.014388
0.178399
3,722
280
155
13.292857
0.758993
0.427727
0
0.916667
0
0
0.165044
0.062317
0
0
0
0
0
1
0
false
0
0.216667
0
0.216667
0.066667
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
2996de1b871cae10380a923294963a1434138a3e
235
py
Python
commands/joke.py
IdiotechID/Happy-Bot-V2.0
bf3f2bc56fe9f8eaf127e614a3a3974697cc94a3
[ "MIT" ]
null
null
null
commands/joke.py
IdiotechID/Happy-Bot-V2.0
bf3f2bc56fe9f8eaf127e614a3a3974697cc94a3
[ "MIT" ]
null
null
null
commands/joke.py
IdiotechID/Happy-Bot-V2.0
bf3f2bc56fe9f8eaf127e614a3a3974697cc94a3
[ "MIT" ]
null
null
null
import urllib.request import json import html def getJoke(): with urllib.request.urlopen("http://api.icndb.com/jokes/random") as response: return html.unescape(json.loads(response.read().decode("utf-8"))["value"]["joke"])
29.375
90
0.710638
33
235
5.060606
0.787879
0.155689
0
0
0
0
0
0
0
0
0
0.004808
0.114894
235
7
91
33.571429
0.798077
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0.166667
true
0
0.5
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
29a60f72ab6aa7cc22e2c0df2ce65c0500ea7951
1,986
py
Python
test/model/acompanhanteatendimento.py
robertons/dbmodel
628c4d6a2d0b23d9137015e5b0ea4551b20943bd
[ "MIT" ]
null
null
null
test/model/acompanhanteatendimento.py
robertons/dbmodel
628c4d6a2d0b23d9137015e5b0ea4551b20943bd
[ "MIT" ]
null
null
null
test/model/acompanhanteatendimento.py
robertons/dbmodel
628c4d6a2d0b23d9137015e5b0ea4551b20943bd
[ "MIT" ]
null
null
null
#-*- coding: utf-8 -*- from dbmodel.entity import * class AcompanhanteAtendimento(Entity): __primary_key__ = ['id'] # FIELDS @Int(pk=True, auto_increment=True, not_null=True, precision = 10, scale=0) def id(self): pass @Decimal(precision = 19, scale=2) def atd_valor(self): pass @Int(precision = 3, scale=0) def atd_massagem(self): pass @Int(precision = 3, scale=0) def atd_vaginal(self): pass @Int(precision = 3, scale=0) def atd_oral(self): pass @Int(precision = 3, scale=0) def atd_anal(self): pass @Int(precision = 3, scale=0) def atd_grupal(self): pass @Int(precision = 3, scale=0) def atd_beijo_na_boca(self): pass @Int(precision = 3, scale=0) def atd_fetiches(self): pass @Int(precision = 3, scale=0) def atd_dominacao(self): pass @Int(precision = 3, scale=0) def atd_inversao_de_papeis(self): pass @Int(precision = 3, scale=0) def atd_beijo_grego(self): pass @Int(precision = 3, scale=0) def atd_striptease(self): pass @Int(precision = 3, scale=0) def atd_chuva_dourada(self): pass @Int(precision = 3, scale=0) def atd_chuva_negra(self): pass @Int(precision = 3, scale=0) def atd_bdsm(self): pass @Int(precision = 3, scale=0) def atd_festas_eventos(self): pass @Int(precision = 3, scale=0) def atd_local_atendimento(self): pass @Int(fk=True, not_null=True, precision = 10, scale=0) def id_acompanhante(self): pass @Int(fk=True, not_null=True, precision = 10, scale=0) def id_tipo_atendimento(self): pass # One-to-One @Object(name="AcompanhanteAtendimentoTipo", key="id", reference="id_tipo_atendimento", table="acompanhantes_atendimentos_tipos") def acompanhantes_atendimentos_tipos(self):pass @Object(name="Acompanhante", key="id", reference="id_acompanhante", table="acompanhantes") def acompanhantes(self):pass # One-to-many @ObjectList(name="AcompanhanteNotificacao", key="id_atendimento_acompanhante", reference="id", table="acompanhantes_notificacoes") def acompanhantes_notificacoes(self):pass
24.518519
131
0.726083
297
1,986
4.690236
0.245791
0.132089
0.122757
0.22972
0.491744
0.491744
0.491744
0.491744
0.491744
0.207466
0
0.026087
0.13142
1,986
81
132
24.518519
0.781449
0.02568
0
0.367347
0
0
0.104555
0.069876
0
0
0
0
0
1
0.469388
false
0.469388
0.020408
0
0.530612
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
4b10ea5b30389c47b37b0f446e2e7ab6ac4d444a
215
py
Python
workflow/json.py
Aiwizo/ml-workflow
88e104fce571dd3b76914626a52f9001342c07cc
[ "Apache-2.0" ]
4
2020-09-23T15:39:24.000Z
2021-09-12T22:11:00.000Z
workflow/json.py
Aiwizo/ml-workflow
88e104fce571dd3b76914626a52f9001342c07cc
[ "Apache-2.0" ]
4
2020-09-23T15:07:39.000Z
2020-10-30T10:26:24.000Z
workflow/json.py
Aiwizo/ml-workflow
88e104fce571dd3b76914626a52f9001342c07cc
[ "Apache-2.0" ]
null
null
null
import json def read(filepath): with open(filepath, 'r') as file: return json.load(file) def write(obj, filepath): with open(filepath, 'w') as file: return json.dump(obj, file, indent=4)
17.916667
45
0.632558
32
215
4.25
0.5625
0.176471
0.235294
0.352941
0
0
0
0
0
0
0
0.006098
0.237209
215
11
46
19.545455
0.823171
0
0
0
0
0
0.009302
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.714286
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
4b1832846836f9cae0b29a7ff3ace05d8e5d7c50
109
py
Python
PythonCodeBase/client/main.py
FreeMotion45/python-rpc-generator
fd68ebc9c521dee572fe3501929d3001d2af8d8f
[ "MIT" ]
2
2021-06-05T15:10:09.000Z
2021-06-05T15:10:15.000Z
PythonCodeBase/client/main.py
FreeMotion45/python-rpc-generator
fd68ebc9c521dee572fe3501929d3001d2af8d8f
[ "MIT" ]
null
null
null
PythonCodeBase/client/main.py
FreeMotion45/python-rpc-generator
fd68ebc9c521dee572fe3501929d3001d2af8d8f
[ "MIT" ]
null
null
null
from client.functions.echo import EchoCommand


def main():
    """Send a single echo RPC ("Hey") to the server and print the reply."""
    server_echo = EchoCommand("Hey").execute()
    print(server_echo)


# Guard the entry point: the original executed the RPC at import time,
# so merely importing this module fired a network call as a side effect.
if __name__ == '__main__':
    main()
21.8
45
0.807339
14
109
6.142857
0.714286
0.232558
0
0
0
0
0
0
0
0
0
0
0.082569
109
4
46
27.25
0.86
0
0
0
0
0
0.027523
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
d9f2ff0aeca975dd1e71c83436607932dcf8a1a6
2,346
py
Python
iriusrisk-python-client-lib/test/test_threats_api.py
iriusrisk/iriusrisk-python-client-lib
4912706cd1e5c0bc555dbc7da02fb64cbeab3b18
[ "Apache-2.0" ]
null
null
null
iriusrisk-python-client-lib/test/test_threats_api.py
iriusrisk/iriusrisk-python-client-lib
4912706cd1e5c0bc555dbc7da02fb64cbeab3b18
[ "Apache-2.0" ]
null
null
null
iriusrisk-python-client-lib/test/test_threats_api.py
iriusrisk/iriusrisk-python-client-lib
4912706cd1e5c0bc555dbc7da02fb64cbeab3b18
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ IriusRisk API Products API # noqa: E501 OpenAPI spec version: 1 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import iriusrisk_python_client_lib from iriusrisk_python_client_lib.api.threats_api import ThreatsApi # noqa: E501 from iriusrisk_python_client_lib.rest import ApiException class TestThreatsApi(unittest.TestCase): """ThreatsApi unit test stubs""" def setUp(self): self.api = iriusrisk_python_client_lib.api.threats_api.ThreatsApi() # noqa: E501 def tearDown(self): pass def test_libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_post(self): """Test case for libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_post Creates a new threat in a library. # noqa: E501 """ pass def test_libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_countermeasures_put(self): """Test case for libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_countermeasures_put Associates a countermeasure to a threat in a risk pattern. # noqa: E501 """ pass def test_libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_weaknesses_put(self): """Test case for libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_weaknesses_put Associates weakness to a threat in a risk pattern. # noqa: E501 """ pass def test_libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_weaknesses_weakness_ref_countermeasures_put(self): """Test case for libraries_library_ref_riskpatterns_risk_pattern_ref_usecases_use_case_ref_threats_threat_ref_weaknesses_weakness_ref_countermeasures_put Associates a countermeasure to a weakness in a risk pattern. 
# noqa: E501 """ pass def test_products_ref_threats_get(self): """Test case for products_ref_threats_get Gets a list of all threats of a product # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
33.514286
161
0.768542
313
2,346
5.255591
0.239617
0.073556
0.092401
0.15076
0.685714
0.666261
0.666261
0.621277
0.589058
0.568997
0
0.013528
0.180733
2,346
69
162
34
0.842352
0.450554
0
0.272727
1
0
0.006993
0
0
0
0
0
0
1
0.318182
false
0.272727
0.227273
0
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
8a13d47cd8fc874fd23e2412cb5c152dc3776626
328
py
Python
src/exporter.py
tjdevries/sam.py
d71befe934c6a1f584b344368a3014e6552f0289
[ "MIT" ]
1
2022-03-16T13:48:44.000Z
2022-03-16T13:48:44.000Z
src/exporter.py
tjdevries/sam.py
d71befe934c6a1f584b344368a3014e6552f0289
[ "MIT" ]
null
null
null
src/exporter.py
tjdevries/sam.py
d71befe934c6a1f584b344368a3014e6552f0289
[ "MIT" ]
null
null
null
""" This file is just for exporting stuffs :) """ def my_decorator(f): return f @my_decorator def exported_func(abc: str, b: str = ", World!") -> str: """This is the docstring from an exported function :param abc: Describing cool parameter `abc` """ return abc + b class ExportedClass: pass
15.619048
56
0.631098
44
328
4.636364
0.681818
0.107843
0
0
0
0
0
0
0
0
0
0
0.256098
328
20
57
16.4
0.836066
0.408537
0
0
0
0
0.04878
0
0
0
0
0
0
1
0.285714
false
0.142857
0
0.142857
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4