| column | dtype |
| --- | --- |
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
hexsha: 138ad53bc75698fb0a04af0266ae508da388a981
size: 6,057
ext: py
lang: Python
max_stars_repo_path: nevergrad/parametrization/utils.py
max_stars_repo_name: mehrdad-shokri/nevergrad
max_stars_repo_head_hexsha: 7b68b00c158bf60544bc45997560edf733fb5812
max_stars_repo_licenses: ["MIT"]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2021-04-13T12:14:46.000Z
max_stars_repo_stars_event_max_datetime: 2021-07-07T14:37:50.000Z
max_issues_repo_path: nevergrad/parametrization/utils.py
max_issues_repo_name: OregonWebSells/nevergrad
max_issues_repo_head_hexsha: c2b2a0efdca29830ccc9182d8a7ba4d8695f698d
max_issues_repo_licenses: ["MIT"]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2020-09-25T10:45:06.000Z
max_issues_repo_issues_event_max_datetime: 2020-09-25T11:51:13.000Z
max_forks_repo_path: nevergrad/parametrization/utils.py
max_forks_repo_name: OregonWebSells/nevergrad
max_forks_repo_head_hexsha: c2b2a0efdca29830ccc9182d8a7ba4d8695f698d
max_forks_repo_licenses: ["MIT"]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2021-04-07T10:34:20.000Z
max_forks_repo_forks_event_max_datetime: 2021-04-07T10:34:20.000Z
content:
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import sys
import shutil
import tempfile
import subprocess
import typing as tp
from pathlib import Path
from nevergrad.common import tools as ngtools
class Descriptors:
"""Provides access to a set of descriptors for the parametrization
This can be used within optimizers.
""" # TODO add repr
# pylint: disable=too-many-arguments
def __init__(
self,
deterministic: bool = True,
deterministic_function: bool = True,
monoobjective: bool = True,
not_manyobjective: bool = True,
continuous: bool = True,
metrizable: bool = True,
ordered: bool = True,
) -> None:
self.deterministic = deterministic
self.deterministic_function = deterministic_function
self.continuous = continuous
self.metrizable = metrizable
self.ordered = ordered
self.monoobjective = monoobjective
self.not_manyobjective = not_manyobjective
def __and__(self, other: "Descriptors") -> "Descriptors":
values = {field: getattr(self, field) & getattr(other, field) for field in self.__dict__}
return Descriptors(**values)
def __repr__(self) -> str:
diff = ",".join(f"{x}={y}" for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items()))
return f"{self.__class__.__name__}({diff})"
class NotSupportedError(RuntimeError):
"""This type of operation is not supported by the parameter.
"""
class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore
"""Creates a full copy of a directory inside a temporary directory
This class can be used as TemporaryDirectory but:
- the created copy path is available through the copyname attribute
- the contextmanager returns the clean copy path
- the directory where the temporary directory will be created
can be controlled through the CLEAN_COPY_DIRECTORY environment
variable
"""
key = "CLEAN_COPY_DIRECTORY"
@classmethod
def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None:
"""Sets the CLEAN_COPY_DIRECTORY environment variable in
order for subsequent calls to use this directory as base for the
copies.
"""
assert Path(directory).exists(), "Directory does not exist"
os.environ[cls.key] = str(directory)
# pylint: disable=redefined-builtin
def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None) -> None:
if dir is None:
dir = os.environ.get(self.key, None)
super().__init__(prefix="tmp_clean_copy_", dir=dir)
self.copyname = Path(self.name) / Path(source).name
shutil.copytree(str(source), str(self.copyname))
def __enter__(self) -> Path:
super().__enter__()
return self.copyname
class FailedJobError(RuntimeError):
"""Job failed during processing
"""
class CommandFunction:
"""Wraps a command as a function in order to make sure it goes through the
pipeline and notify when it is finished.
The output is a string containing everything that has been sent to stdout
Parameters
----------
command: list
command to run, as a list
verbose: bool
prints the command and stdout at runtime
cwd: Path/str
path to the location where the command must run from
Returns
-------
str
Everything that has been sent to stdout
"""
def __init__(self, command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]] = None,
env: tp.Optional[tp.Dict[str, str]] = None) -> None:
if not isinstance(command, list):
raise TypeError("The command must be provided as a list")
self.command = command
self.verbose = verbose
self.cwd = None if cwd is None else str(cwd)
self.env = env
def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str:
"""Call the cammand line with addidional arguments
The keyword arguments will be sent as --{key}={val}
The logs are bufferized. They will be printed if the job fails, or sent as output of the function
Errors are provided with the internal stderr
"""
# TODO make the following command more robust (probably fails in multiple cases)
full_command = self.command + [str(x) for x in args] + ["--{}={}".format(x, y) for x, y in kwargs.items()]
if self.verbose:
print(f"The following command is sent: {full_command}")
outlines: tp.List[str] = []
with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=False, cwd=self.cwd, env=self.env) as process:
try:
assert process.stdout is not None
for line in iter(process.stdout.readline, b''):
if not line:
break
outlines.append(line.decode().strip())
if self.verbose:
print(outlines[-1], flush=True)
except Exception: # pylint: disable=broad-except
process.kill()
process.wait()
raise FailedJobError("Job got killed for an unknown reason.")
stderr = process.communicate()[1] # we already got stdout
stdout = "\n".join(outlines)
retcode = process.poll()
if stderr and (retcode or self.verbose):
print(stderr.decode(), file=sys.stderr)
if retcode:
subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr)
raise FailedJobError(stderr.decode()) from subprocess_error
return stdout
avg_line_length through hits (remaining table columns, in schema order):
| 38.826923 | 133 | 0.639591 | 744 | 6,057 | 5.106183 | 0.34543 | 0.014741 | 0.008687 | 0.011056 | 0.052645 | 0.043169 | 0.017373 | 0 | 0 | 0 | 0 | 0.000453 | 0.270596 | 6,057 | 155 | 134 | 39.077419 | 0.859439 | 0.297177 | 0 | 0.023256 | 0 | 0 | 0.061914 | 0.00814 | 0 | 0 | 0 | 0.012903 | 0.023256 | 1 | 0.093023 | false | 0 | 0.093023 | 0 | 0.302326 | 0.034884 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 138b01aa9774bbead45a8dac1264c5149cf9f912
size: 568
ext: py
lang: Python
max_stars_repo_path: Section 20/2.Document-transfer_files.py
max_stars_repo_name: airbornum/-Complete-Python-Scripting-for-Automation
max_stars_repo_head_hexsha: bc053444f8786259086269ca1713bdb10144dd74
max_stars_repo_licenses: ["MIT"]
max_stars_count: 18
max_stars_repo_stars_event_min_datetime: 2020-04-13T03:14:06.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-09T18:54:41.000Z
max_issues_repo_path: Section 20/2.Document-transfer_files.py
max_issues_repo_name: airbornum/-Complete-Python-Scripting-for-Automation
max_issues_repo_head_hexsha: bc053444f8786259086269ca1713bdb10144dd74
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Section 20/2.Document-transfer_files.py
max_forks_repo_name: airbornum/-Complete-Python-Scripting-for-Automation
max_forks_repo_head_hexsha: bc053444f8786259086269ca1713bdb10144dd74
max_forks_repo_licenses: ["MIT"]
max_forks_count: 22
max_forks_repo_forks_event_min_datetime: 2020-04-29T21:12:42.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-17T18:19:54.000Z
content:
import paramiko
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname='54.165.97.91',username='ec2-user',password='paramiko123',port=22)
sftp_client=ssh.open_sftp()
#sftp_client.get('/home/ec2-user/paramiko_download.txt','paramiko_downloaded_file.txt')
#sftp_client.chdir("/home/ec2-user")
#print(sftp_client.getcwd())
#sftp_client.get('demo.txt','C:\\Users\\Automation\\Desktop\\download_file.txt')
sftp_client.put("transfer_files.py",'/home/ec2-user/transfer_files.py')
sftp_client.close()
ssh.close()
avg_line_length through hits (remaining table columns, in schema order):
| 43.692308 | 88 | 0.769366 | 84 | 568 | 4.988095 | 0.535714 | 0.167064 | 0.078759 | 0.081146 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033272 | 0.047535 | 568 | 13 | 89 | 43.692308 | 0.74122 | 0.399648 | 0 | 0 | 0 | 0 | 0.245399 | 0.09816 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.125 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 139af14f3890b6a5fdebd9bc833f815258ac26c3
size: 1,433
ext: py
lang: Python
max_stars_repo_path: tests/adv/test_pop_sfrd.py
max_stars_repo_name: jlashner/ares
max_stars_repo_head_hexsha: 6df2b676ded6bd59082a531641cb1dadd475c8a8
max_stars_repo_licenses: ["MIT"]
max_stars_count: 10
max_stars_repo_stars_event_min_datetime: 2020-03-26T01:08:10.000Z
max_stars_repo_stars_event_max_datetime: 2021-12-04T13:02:10.000Z
max_issues_repo_path: tests/adv/test_pop_sfrd.py
max_issues_repo_name: jlashner/ares
max_issues_repo_head_hexsha: 6df2b676ded6bd59082a531641cb1dadd475c8a8
max_issues_repo_licenses: ["MIT"]
max_issues_count: 25
max_issues_repo_issues_event_min_datetime: 2020-06-08T14:52:28.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-08T02:30:54.000Z
max_forks_repo_path: tests/adv/test_pop_sfrd.py
max_forks_repo_name: jlashner/ares
max_forks_repo_head_hexsha: 6df2b676ded6bd59082a531641cb1dadd475c8a8
max_forks_repo_licenses: ["MIT"]
max_forks_count: 8
max_forks_repo_forks_event_min_datetime: 2020-03-24T14:11:25.000Z
max_forks_repo_forks_event_max_datetime: 2021-11-06T06:32:59.000Z
content:
"""
test_pop_models.py
Author: Jordan Mirocha
Affiliation: UCLA
Created on: Fri Jul 15 15:23:11 PDT 2016
Description:
"""
import ares
import matplotlib.pyplot as pl
PB = ares.util.ParameterBundle
def test():
# Create a simple population
pars_1 = PB('pop:fcoll') + PB('sed:bpass')
pop_fcoll = ares.populations.GalaxyPopulation(**pars_1)
#pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1)
# Mimic the above population to check our different SFRD/SED techniques
sfrd_pars = {'pop_sfr_model': 'sfrd-func'}
sfrd_pars['pop_sfrd'] = pop_fcoll.SFRD
sfrd_pars['pop_sfrd_units'] = 'internal'
sed = PB('sed:toy')
sed['pop_Nion'] = pop_fcoll.src.Nion
sed['pop_Nlw'] = pop_fcoll.src.Nlw
# pop_Ex?
sed['pop_ion_src_igm'] = False
sed['pop_heat_src_igm'] = False
pars_2 = sed + sfrd_pars
pop_sfrd = ares.populations.GalaxyPopulation(**pars_2)
assert pop_fcoll.SFRD(20.) == pop_sfrd.SFRD(20.), "Error in SFRD."
# Check the emissivities too
#print(pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
#print(pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
#assert pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6) \
# == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \
# "Error in photon luminosity density."
if __name__ == '__main__':
test()
avg_line_length through hits (remaining table columns, in schema order):
| 25.589286 | 75 | 0.669923 | 203 | 1,433 | 4.502463 | 0.413793 | 0.078775 | 0.04814 | 0.135667 | 0.282276 | 0.203501 | 0.203501 | 0.203501 | 0.203501 | 0.203501 | 0 | 0.046007 | 0.196092 | 1,433 | 55 | 76 | 26.054545 | 0.747396 | 0.432659 | 0 | 0 | 0 | 0 | 0.183081 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.052632 | false | 0.052632 | 0.105263 | 0 | 0.157895 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 139b92054f917712ecbfacdc663b9fc7eea6103f
size: 6,010
ext: py
lang: Python
max_stars_repo_path: core/self6dpp/tools/ycbv/ycbv_pbr_so_mlBCE_Double_3_merge_train_real_uw_init_results_with_refined_poses_to_json.py
max_stars_repo_name: THU-DA-6D-Pose-Group/self6dpp
max_stars_repo_head_hexsha: c267cfa55e440e212136a5e9940598720fa21d16
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 33
max_stars_repo_stars_event_min_datetime: 2021-12-15T07:11:47.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-29T08:58:32.000Z
max_issues_repo_path: core/self6dpp/tools/ycbv/ycbv_pbr_so_mlBCE_Double_3_merge_train_real_uw_init_results_with_refined_poses_to_json.py
max_issues_repo_name: THU-DA-6D-Pose-Group/self6dpp
max_issues_repo_head_hexsha: c267cfa55e440e212136a5e9940598720fa21d16
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: 3
max_issues_repo_issues_event_min_datetime: 2021-12-15T11:39:54.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-29T07:24:23.000Z
max_forks_repo_path: core/self6dpp/tools/ycbv/ycbv_pbr_so_mlBCE_Double_3_merge_train_real_uw_init_results_with_refined_poses_to_json.py
max_forks_repo_name: THU-DA-6D-Pose-Group/self6dpp
max_forks_repo_head_hexsha: c267cfa55e440e212136a5e9940598720fa21d16
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import os.path as osp
import sys
import numpy as np
import mmcv
from tqdm import tqdm
from functools import cmp_to_key
cur_dir = osp.dirname(osp.abspath(__file__))
PROJ_ROOT = osp.normpath(osp.join(cur_dir, "../../../../"))
sys.path.insert(0, PROJ_ROOT)
from lib.pysixd import inout, misc
from lib.utils.bbox_utils import xyxy_to_xywh
from lib.utils.utils import iprint, wprint
id2obj = {
1: "002_master_chef_can", # [1.3360, -0.5000, 3.5105]
2: "003_cracker_box", # [0.5575, 1.7005, 4.8050]
3: "004_sugar_box", # [-0.9520, 1.4670, 4.3645]
4: "005_tomato_soup_can", # [-0.0240, -1.5270, 8.4035]
5: "006_mustard_bottle", # [1.2995, 2.4870, -11.8290]
6: "007_tuna_fish_can", # [-0.1565, 0.1150, 4.2625]
7: "008_pudding_box", # [1.1645, -4.2015, 3.1190]
8: "009_gelatin_box", # [1.4460, -0.5915, 3.6085]
9: "010_potted_meat_can", # [2.4195, 0.3075, 8.0715]
10: "011_banana", # [-18.6730, 12.1915, -1.4635]
11: "019_pitcher_base", # [5.3370, 5.8855, 25.6115]
12: "021_bleach_cleanser", # [4.9290, -2.4800, -13.2920]
13: "024_bowl", # [-0.2270, 0.7950, -2.9675]
14: "025_mug", # [-8.4675, -0.6995, -1.6145]
15: "035_power_drill", # [9.0710, 20.9360, -2.1190]
16: "036_wood_block", # [1.4265, -2.5305, 17.1890]
17: "037_scissors", # [7.0535, -28.1320, 0.0420]
18: "040_large_marker", # [0.0460, -2.1040, 0.3500]
19: "051_large_clamp", # [10.5180, -1.9640, -0.4745]
20: "052_extra_large_clamp", # [-0.3950, -10.4130, 0.1620]
21: "061_foam_brick", # [-0.0805, 0.0805, -8.2435]
}
obj_num = len(id2obj)
obj2id = {_name: _id for _id, _name in id2obj.items()}
if __name__ == "__main__":
new_res_path = osp.join(
PROJ_ROOT,
"datasets/BOP_DATASETS/ycbv/test/init_poses/",
"resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json",
)
if osp.exists(new_res_path):
wprint("{} already exists! overriding!".format(new_res_path))
res_root = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/"
iter_num_test = 4
pkl_paths = [
"01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl",
"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl",
"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl",
"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl",
"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl",
"06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl",
"07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl",
"08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl",
"09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl",
"10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl",
"11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl",
"12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl",
"13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl",
"14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl",
"15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl",
"16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl",
"17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl",
"18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl",
"19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl",
"20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl",
"21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl",
]
obj_names = [obj for obj in obj2id]
new_res_dict = {}
for obj_name, pred_name in zip(obj_names, pkl_paths):
assert obj_name in pred_name, "{} not in {}".format(obj_name, pred_name)
pred_path = osp.join(res_root, pred_name)
assert osp.exists(pred_path), pred_path
iprint(obj_name, pred_path)
# pkl scene_im_id key, list of preds
preds = mmcv.load(pred_path)
for scene_im_id, pred_list in preds.items():
for pred in pred_list:
obj_id = pred["obj_id"]
score = pred["score"]
bbox_est = pred["bbox_det_xyxy"] # xyxy
bbox_est_xywh = xyxy_to_xywh(bbox_est)
refined_pose = pred["pose_{}".format(iter_num_test)]
pose_est = pred["pose_0"]
cur_new_res = {
"obj_id": obj_id,
"score": float(score),
"bbox_est": bbox_est_xywh.tolist(),
"pose_est": pose_est.tolist(),
"pose_refine": refined_pose.tolist(),
}
if scene_im_id not in new_res_dict:
new_res_dict[scene_im_id] = []
new_res_dict[scene_im_id].append(cur_new_res)
inout.save_json(new_res_path, new_res_dict)
iprint()
iprint("new result path: {}".format(new_res_path))
avg_line_length through hits (remaining table columns, in schema order):
| 52.719298 | 146 | 0.708985 | 906 | 6,010 | 4.248344 | 0.342163 | 0.051442 | 0.062873 | 0.114575 | 0.277734 | 0.075864 | 0.016108 | 0 | 0 | 0 | 0 | 0.141502 | 0.175707 | 6,010 | 113 | 147 | 53.185841 | 0.635446 | 0.100166 | 0 | 0 | 0 | 0 | 0.540305 | 0.456352 | 0 | 0 | 0 | 0 | 0.020202 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.050505 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 139bcb633d3c2b224334dad0ddfc97013f3a8ff8
size: 918
ext: py
lang: Python
max_stars_repo_path: tests/test_app/rest_app/rest_app/services/account_service.py
max_stars_repo_name: jadbin/guniflask
max_stars_repo_head_hexsha: 36253a962c056abf34884263c6919b02b921ad9c
max_stars_repo_licenses: ["MIT"]
max_stars_count: 12
max_stars_repo_stars_event_min_datetime: 2018-09-06T06:14:59.000Z
max_stars_repo_stars_event_max_datetime: 2021-04-18T06:30:44.000Z
max_issues_repo_path: tests/test_app/rest_app/rest_app/services/account_service.py
max_issues_repo_name: jadbin/guniflask
max_issues_repo_head_hexsha: 36253a962c056abf34884263c6919b02b921ad9c
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/test_app/rest_app/rest_app/services/account_service.py
max_forks_repo_name: jadbin/guniflask
max_forks_repo_head_hexsha: 36253a962c056abf34884263c6919b02b921ad9c
max_forks_repo_licenses: ["MIT"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2019-09-08T22:01:26.000Z
max_forks_repo_forks_event_max_datetime: 2020-08-03T07:23:29.000Z
content:
from flask import abort
from guniflask.context import service
from ..config.jwt_config import jwt_manager
@service
class AccountService:
accounts = {
'root': {
'authorities': ['role_admin'],
'password': '123456',
}
}
def login(self, username: str, password: str):
if username not in self.accounts or self.accounts[username]['password'] != password:
return abort(403)
account = self.accounts[username]
token = jwt_manager.create_access_token(authorities=account['authorities'], username=username)
return {
'username': username,
'access_token': token,
}
def get(self, username: str):
if username not in self.accounts:
return abort(404)
return {
'username': username,
'authorities': self.accounts[username]['authorities']
}
avg_line_length through hits (remaining table columns, in schema order):
| 27.818182 | 102 | 0.59695 | 91 | 918 | 5.945055 | 0.395604 | 0.110906 | 0.110906 | 0.05915 | 0.110906 | 0.110906 | 0.110906 | 0 | 0 | 0 | 0 | 0.018576 | 0.296296 | 918 | 32 | 103 | 28.6875 | 0.818885 | 0 | 0 | 0.148148 | 0 | 0 | 0.117647 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0.111111 | 0.111111 | 0 | 0.407407 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 139ed5391c8324e35fd54e409887ff876db4d1d0
size: 239
ext: py
lang: Python
max_stars_repo_path: noo/impl/utils/__init__.py
max_stars_repo_name: nooproject/noo
max_stars_repo_head_hexsha: 238711c55faeb1226a4e5339cd587a312c4babac
max_stars_repo_licenses: ["MIT"]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2022-02-03T07:35:46.000Z
max_stars_repo_stars_event_max_datetime: 2022-02-03T16:12:25.000Z
max_issues_repo_path: noo/impl/utils/__init__.py
max_issues_repo_name: nooproject/noo
max_issues_repo_head_hexsha: 238711c55faeb1226a4e5339cd587a312c4babac
max_issues_repo_licenses: ["MIT"]
max_issues_count: 2
max_issues_repo_issues_event_min_datetime: 2022-03-05T02:31:38.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-05T21:26:42.000Z
max_forks_repo_path: noo/impl/utils/__init__.py
max_forks_repo_name: nooproject/noo
max_forks_repo_head_hexsha: 238711c55faeb1226a4e5339cd587a312c4babac
max_forks_repo_licenses: ["MIT"]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2022-03-05T01:40:29.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-05T01:40:29.000Z
content:
from .echo import echo, set_quiet
from .errors import NooException, cancel
from .store import STORE, FileStore, Store
__all__ = (
"FileStore",
"NooException",
"Store",
"STORE",
"cancel",
"echo",
"set_quiet",
)
avg_line_length through hits (remaining table columns, in schema order):
| 17.071429 | 42 | 0.635983 | 26 | 239 | 5.615385 | 0.423077 | 0.09589 | 0.164384 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.23431 | 239 | 13 | 43 | 18.384615 | 0.797814 | 0 | 0 | 0 | 0 | 0 | 0.209205 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 139f98ba0220830de5e89cabcde17bead64e5fb5
size: 625
ext: py
lang: Python
max_stars_repo_path: setup.py
max_stars_repo_name: ooreilly/mydocstring
max_stars_repo_head_hexsha: 077cebfb86575914d343bd3291b9e6c5e8beef94
max_stars_repo_licenses: ["MIT"]
max_stars_count: 13
max_stars_repo_stars_event_min_datetime: 2018-12-11T00:34:09.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-22T20:41:04.000Z
max_issues_repo_path: setup.py
max_issues_repo_name: ooreilly/mydocstring
max_issues_repo_head_hexsha: 077cebfb86575914d343bd3291b9e6c5e8beef94
max_issues_repo_licenses: ["MIT"]
max_issues_count: 13
max_issues_repo_issues_event_min_datetime: 2018-06-15T19:42:06.000Z
max_issues_repo_issues_event_max_datetime: 2020-12-18T22:20:02.000Z
max_forks_repo_path: setup.py
max_forks_repo_name: ooreilly/mydocstring
max_forks_repo_head_hexsha: 077cebfb86575914d343bd3291b9e6c5e8beef94
max_forks_repo_licenses: ["MIT"]
max_forks_count: 5
max_forks_repo_forks_event_min_datetime: 2018-06-16T07:45:49.000Z
max_forks_repo_forks_event_max_datetime: 2020-12-12T07:12:00.000Z
content:
from setuptools import setup
setup(name='mydocstring',
version='0.2.7',
description="""A tool for extracting and converting Google-style docstrings to
plain-text, markdown, and JSON.""",
url='http://github.com/ooreilly/mydocstring',
author="Ossian O'Reilly",
license='MIT',
packages=['mydocstring'],
install_requires=['mako', 'docopt'],
entry_points = {
'console_scripts': [
'mydocstring=mydocstring.docstring:main',
],},
package_data={'mydocstring': ['templates/google_docstring.md']},
zip_safe=False)
avg_line_length through hits (remaining table columns, in schema order):
| 32.894737 | 84 | 0.6048 | 63 | 625 | 5.904762 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006424 | 0.2528 | 625 | 18 | 85 | 34.722222 | 0.79015 | 0 | 0 | 0 | 0 | 0 | 0.4592 | 0.1072 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.0625 | 0 | 0.0625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 13a92427a8cdec440aec42402a7483f2303b73a6
size: 10,075
ext: py
lang: Python
max_stars_repo_path: json_to_relation/mysqldb.py
max_stars_repo_name: paepcke/json_to_relation
max_stars_repo_head_hexsha: acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
max_stars_repo_licenses: ["BSD-3-Clause"]
max_stars_count: 4
max_stars_repo_stars_event_min_datetime: 2015-10-10T19:09:49.000Z
max_stars_repo_stars_event_max_datetime: 2021-09-02T00:58:06.000Z
max_issues_repo_path: json_to_relation/mysqldb.py
max_issues_repo_name: paepcke/json_to_relation
max_issues_repo_head_hexsha: acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
max_issues_repo_licenses: ["BSD-3-Clause"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: json_to_relation/mysqldb.py
max_forks_repo_name: paepcke/json_to_relation
max_forks_repo_head_hexsha: acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
max_forks_repo_licenses: ["BSD-3-Clause"]
max_forks_count: 8
max_forks_repo_forks_event_min_datetime: 2015-05-16T14:33:33.000Z
max_forks_repo_forks_event_max_datetime: 2019-10-24T08:56:25.000Z
content:
# Copyright (c) 2014, Stanford University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Created on Sep 24, 2013
@author: paepcke
Modifications:
- Dec 30, 2013: Added closing of connection to close() method
'''
import re
import subprocess
import tempfile
import pymysql
#import MySQLdb
class MySQLDB(object):
'''
Shallow interface to MySQL databases. Some niceties nonetheless.
The query() method is an iterator. So::
for result in mySqlObj.query('SELECT * FROM foo'):
print result
'''
def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'):
'''
:param host: MySQL host
:type host: string
:param port: MySQL host's port
:type port: int
:param user: user to log in as
:type user: string
:param passwd: password to use for given user
:type passwd: string
:param db: database to connect to within server
:type db: string
'''
# If all arguments are set to None, we are unittesting:
if all(arg is None for arg in (host,port,user,passwd,db)):
return
self.user = user
self.pwd = passwd
self.db = db
self.cursors = []
try:
self.connection = pymysql.connect(host=host, port=port, user=user, passwd=passwd, db=db)
#self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1)
#except MySQLdb.OperationalError:
except pymysql.OperationalError:
pwd = '...............' if len(passwd) > 0 else '<no password>'
raise ValueError('Cannot reach MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s' %
(host, port, user, pwd, db))
def close(self):
'''
Close all cursors that are currently still open.
'''
for cursor in self.cursors:
try:
cursor.close()
except:
pass
try:
self.connection.close()
except:
pass
def createTable(self, tableName, schema):
'''
Create new table, given its name, and schema.
The schema is a dict mapping column names to
column types. Example: {'col1' : 'INT', 'col2' : 'TEXT'}
:param tableName: name of new table
:type tableName: String
:param schema: dictionary mapping column names to column types
:type schema: Dict<String,String>
'''
colSpec = ''
for colName, colVal in schema.items():
colSpec += str(colName) + ' ' + str(colVal) + ','
cmd = 'CREATE TABLE IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1])
cursor = self.connection.cursor()
try:
cursor.execute(cmd)
self.connection.commit()
finally:
cursor.close()
def dropTable(self, tableName):
'''
Delete table safely. No errors
:param tableName: name of table
:type tableName: String
'''
cursor = self.connection.cursor()
try:
cursor.execute('DROP TABLE IF EXISTS %s' % tableName)
self.connection.commit()
finally:
cursor.close()
def truncateTable(self, tableName):
'''
Delete all table rows. No errors
:param tableName: name of table
:type tableName: String
'''
cursor = self.connection.cursor()
try:
cursor.execute('TRUNCATE TABLE %s' % tableName)
self.connection.commit()
finally:
cursor.close()
def insert(self, tblName, colnameValueDict):
'''
Given a dictionary mapping column names to column values,
insert the data into a specified table
:param tblName: name of table to insert into
:type tblName: String
:param colnameValueDict: mapping of column name to column value
:type colnameValueDict: Dict<String,Any>
'''
colNames, colValues = zip(*colnameValueDict.items())
cursor = self.connection.cursor()
try:
cmd = 'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues))
cursor.execute(cmd)
self.connection.commit()
finally:
cursor.close()
def bulkInsert(self, tblName, colNameTuple, valueTupleArray):
'''
Inserts large number of rows into given table. Strategy: write
the values to a temp file, then generate a LOAD INFILE LOCAL
MySQL command. Execute that command via subprocess.call().
Using a cursor.execute() fails with error 'LOAD DATA LOCAL
is not supported in this MySQL version...' even though MySQL
is set up to allow the op (load-infile=1 for both mysql and
mysqld in my.cnf).
:param tblName: table into which to insert
:type tblName: string
:param colNameTuple: tuple containing column names in proper order, i.e. \
corresponding to valueTupleArray orders.
:type colNameTuple: (str[,str[...]])
:param valueTupleArray: array of n-tuples, which hold the values. Order of\
values must corresond to order of column names in colNameTuple.
:type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]])
'''
tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv')
for valueTuple in valueTupleArray:
tmpCSVFile.write(','.join(valueTuple) + '\n')
try:
# Remove quotes from the values inside the colNameTuple's:
mySQLColNameList = re.sub("'","",str(colNameTuple))
mySQLCmd = "USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n' %s" %\
(self.db, tmpCSVFile.name, tblName, mySQLColNameList)
subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd])
finally:
tmpCSVFile.close()
def update(self, tblName, colName, newVal, fromCondition=None):
'''
Update one column with a new value.
:param tblName: name of table in which update is to occur
:type tblName: String
:param colName: column whose value is to be changed
:type colName: String
:param newVal: value acceptable to MySQL for the given column
:type newVal: type acceptable to MySQL for the given column
:param fromCondition: optionally condition that selects which rows to update.\
if None, the named column in all rows are updated to\
the given value. Syntax must conform to what may be in\
a MySQL FROM clause (don't include the 'FROM' keyword)
:type fromCondition: String
'''
cursor = self.connection.cursor()
try:
if fromCondition is None:
cmd = "UPDATE %s SET %s = '%s';" % (tblName,colName,newVal)
else:
cmd = "UPDATE %s SET %s = '%s' WHERE %s;" % (tblName,colName,newVal,fromCondition)
cursor.execute(cmd)
self.connection.commit()
finally:
cursor.close()
def ensureSQLTyping(self, colVals):
'''
Given a list of items, return a string that preserves
MySQL typing. Example: (10, 'My Poem') ---> '10, "My Poem"'
Note that ','.join(map(str,myList)) won't work:
(10, 'My Poem') ---> '10, My Poem'
:param colVals: list of column values destined for a MySQL table
:type colVals: <any>
'''
resList = []
for el in colVals:
if isinstance(el, basestring):
resList.append('"%s"' % el)
else:
resList.append(el)
return ','.join(map(str,resList))
def query(self, queryStr):
'''
Query iterator. Given a query, return one result for each
subsequent call.
:param queryStr: query
:type queryStr: String
'''
cursor = self.connection.cursor()
# For if caller never exhausts the results by repeated calls:
self.cursors.append(cursor)
cursor.execute(queryStr)
while True:
nextRes = cursor.fetchone()
if nextRes is None:
cursor.close()
return
yield nextRes
avg_line_length through hits (remaining table columns, in schema order):
| 40.461847 | 757 | 0.60794 | 1,195 | 10,075 | 5.121339 | 0.314644 | 0.032026 | 0.019608 | 0.02549 | 0.186438 | 0.156373 | 0.128758 | 0.112909 | 0.112909 | 0.096242 | 0 | 0.006105 | 0.300943 | 10,075 | 248 | 758 | 40.625 | 0.862843 | 0.504218 | 0 | 0.431373 | 0 | 0.019608 | 0.104526 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.098039 | false | 0.068627 | 0.039216 | 0 | 0.176471 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
hexsha: 13a95b957fe3881893fa91e63fed84b8224215f9
size: 611
ext: py
lang: Python
max_stars_repo_path: tools/xkeydump.py
max_stars_repo_name: treys/crypto-key-derivation
max_stars_repo_head_hexsha: 789900bd73160db9a0d406c7c7f00f5f299aff73
max_stars_repo_licenses: ["MIT"]
max_stars_count: 29
max_stars_repo_stars_event_min_datetime: 2017-11-12T08:54:03.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-04T21:12:00.000Z
max_issues_repo_path: tools/xkeydump.py
max_issues_repo_name: treys/crypto-key-derivation
max_issues_repo_head_hexsha: 789900bd73160db9a0d406c7c7f00f5f299aff73
max_issues_repo_licenses: ["MIT"]
max_issues_count: 2
max_issues_repo_issues_event_min_datetime: 2019-03-01T05:56:52.000Z
max_issues_repo_issues_event_max_datetime: 2021-05-17T00:18:01.000Z
max_forks_repo_path: tools/xkeydump.py
max_forks_repo_name: treys/crypto-key-derivation
max_forks_repo_head_hexsha: 789900bd73160db9a0d406c7c7f00f5f299aff73
max_forks_repo_licenses: ["MIT"]
max_forks_count: 9
max_forks_repo_forks_event_min_datetime: 2018-04-10T08:40:25.000Z
max_forks_repo_forks_event_max_datetime: 2021-12-29T16:04:48.000Z
content:
#!./venv/bin/python
from lib.mbp32 import XKey
from lib.utils import one_line_from_stdin
xkey = XKey.from_xkey(one_line_from_stdin())
print(xkey)
print("Version:", xkey.version)
print("Depth:", xkey.depth)
print("Parent FP:", xkey.parent_fp.hex())
print("Child number:", xkey.child_number_with_tick())
print("Chain code:", xkey.chain_code.hex())
print("Key:", xkey.key)
if xkey.key.get_private_bytes():
print("Private bytes:", xkey.key.get_private_bytes().hex())
print("Public bytes:", xkey.key.get_public_bytes().hex())
print("Key ID:", xkey.keyid().hex())
print("XKey:", xkey.to_xkey().decode('ascii'))
avg_line_length through hits (remaining table columns, in schema order):
| 32.157895 | 63 | 0.721768 | 97 | 611 | 4.350515 | 0.360825 | 0.094787 | 0.07109 | 0.075829 | 0.104265 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003546 | 0.076923 | 611 | 18 | 64 | 33.944444 | 0.744681 | 0.02946 | 0 | 0 | 0 | 0 | 0.162162 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.133333 | 0.733333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
hexsha: 13aa5a46812a4881dac4a4f78ba8019d3b73841d
size: 616
ext: py
lang: Python
max_stars_repo_path: examples/compute_angular_resolution.py
max_stars_repo_name: meder411/Tangent-Images
max_stars_repo_head_hexsha: 6def4d7b8797110e54f7faa2435973771d9e9722
max_stars_repo_licenses: ["BSD-3-Clause"]
max_stars_count: 57
max_stars_repo_stars_event_min_datetime: 2019-12-20T09:28:29.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-28T02:38:48.000Z
max_issues_repo_path: examples/compute_angular_resolution.py
max_issues_repo_name: meder411/Tangent-Images
max_issues_repo_head_hexsha: 6def4d7b8797110e54f7faa2435973771d9e9722
max_issues_repo_licenses: ["BSD-3-Clause"]
max_issues_count: 6
max_issues_repo_issues_event_min_datetime: 2020-06-06T16:39:35.000Z
max_issues_repo_issues_event_max_datetime: 2021-01-21T01:19:52.000Z
max_forks_repo_path: examples/compute_angular_resolution.py
max_forks_repo_name: meder411/Tangent-Images
max_forks_repo_head_hexsha: 6def4d7b8797110e54f7faa2435973771d9e9722
max_forks_repo_licenses: ["BSD-3-Clause"]
max_forks_count: 16
max_forks_repo_forks_event_min_datetime: 2019-12-21T08:19:33.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-28T02:38:49.000Z
content:
from spherical_distortion.util import *
sample_order = 9 # Input resolution to examine
def ang_fov(s):
print('Spherical Resolution:', s)
for b in range(s):
dim = tangent_image_dim(b, s) # Pixel dimension of tangent image
corners = tangent_image_corners(b, s) # Corners of each tangent image
fov_x, fov_y = compute_tangent_image_angular_resolution(corners)
print(' At base level', b)
print(' FOV (x) =', fov_x)
print(' FOV (y) =', fov_y)
print(' deg/pix (x) =', fov_x/dim)
print(' deg/pix (y) =', fov_y/dim)
ang_fov(sample_order)
avg_line_length through hits (remaining table columns, in schema order):
| 36.235294 | 77 | 0.625 | 90 | 616 | 4.066667 | 0.422222 | 0.163934 | 0.103825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002183 | 0.256494 | 616 | 17 | 78 | 36.235294 | 0.796943 | 0.146104 | 0 | 0 | 0 | 0 | 0.183556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.071429 | 0 | 0.142857 | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
hexsha: 13abd087d6a8034a9d1a9da08f2dab574fb7be66
size: 51,194
ext: py
lang: Python
max_stars_repo_path: polymath/srdfg/base.py
max_stars_repo_name: he-actlab/polymath
max_stars_repo_head_hexsha: 9b7937d0ddf7452f6cc74ee90d05f8c6acef737e
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 15
max_stars_repo_stars_event_min_datetime: 2021-05-09T05:46:04.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-06T20:46:32.000Z
max_issues_repo_path: polymath/srdfg/base.py
max_issues_repo_name: he-actlab/polymath
max_issues_repo_head_hexsha: 9b7937d0ddf7452f6cc74ee90d05f8c6acef737e
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: polymath/srdfg/base.py
max_forks_repo_name: he-actlab/polymath
max_forks_repo_head_hexsha: 9b7937d0ddf7452f6cc74ee90d05f8c6acef737e
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: 4
max_forks_repo_forks_event_min_datetime: 2021-08-24T07:46:29.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-05T18:23:07.000Z
content:
from polymath import UNSET_SHAPE, DEFAULT_SHAPES
import builtins
import operator
from collections import OrderedDict, Mapping, Sequence, deque
import functools
from numbers import Integral, Rational, Real
import contextlib
import traceback
import uuid
import numpy as np
import importlib
from .graph import Graph
from .domain import Domain
from .util import _noop_callback, _flatten_iterable, node_hash, \
_is_node_type_instance, is_iterable
class Node(object):
"""
Base class for nodes.
Parameters
----------
args : tuple
Positional arguments passed to the `_evaluate` method.
name : str or None
Name of the node or `None` to use a random, unique identifier.
shape : tuple or None
Shape of the output for a node. This can be a tuple of integers or parameter node names.
graph : Node or None
Parent graph of this node. If graph is `None`, this is the top-level graph.
op_name : str
Operation name which describes the node functionality.
value : Any or None
If a node has a default value to use for execution, it can be set using `value`.
kwargs : dict
Keyword arguments passed to the `_evaluate` method.
"""
_graph_stack = deque([None])
_eval_stack = []
stack_size = 5
evaluated_nodes = 0
def __init__(self, *args,
name=None,
shape=None,
graph=None,
dependencies=None,
op_name=None,
value=None,
**kwargs):
self.nodes = Graph()
self.value = value
self.dependencies = []
self._args = []
self._predeecessors = []
self._succesors = []
self.args = args
if "name" in kwargs:
kwargs.pop("name")
self.added_attrs = []
# TODO: CHange this to underscore private variable
self.kwargs = kwargs
self.graph = graph
self._shape = OrderedDict()
self.shape = shape or tuple([])
# Get a list of all dependencies relevant to this node
self.dependencies = [] if dependencies is None else dependencies
if self.graph:
self.dependencies.extend(self.graph.dependencies)
# Choose a name for the node and add the node to the graph
self._name = None
self.name = name or uuid.uuid4().hex
self._op_name = None
self.op_name = op_name
# Get the stack context so we can report where the node was defined
self._stack = traceback.extract_stack(limit=1)
@property
def graph(self):
"""
polymath.srdfg.graph.Graph : Parent graph of this node. If graph is `None`, this is the top-level graph.
"""
return self._graph
def preds(self):
return self._preds
def succs(self):
return self._preds
def add_predecessor(self, pred):
if isinstance(pred, Node):
self._predecessors.append(pred.gname)
else:
self._predecessors.append(pred)
def add_successor(self, succ):
if isinstance(succ, Node):
self._succesors.append(succ.gname)
else:
self._succesors.append(succ)
def set_edges(self):
for e in self.args:
self.add_predecessor(e)
if isinstance(e, Node):
e.add_successor(self)
@property
def domain(self):
return Domain(tuple([]))
@property
def args(self):
"""
tuple : Positional arguments which are used for executing this node.
"""
return tuple(self._args)
@property
def argnames(self):
return [a.name if isinstance(a, Node) else a for a in self.args]
@property
def shape(self):
"""
tuple : Shape of the output for a node. This can be a tuple of integers or parameter node names.
"""
return self._shape
@property
def var(self):
return self
@property
def name(self):
"""str : Unique name of the node"""
return self._name
@property
def op_name(self):
"""
str : Operation name which describes the node functionality.
"""
return self._op_name
@op_name.setter
def op_name(self, op_name):
if op_name:
self._op_name = op_name
elif self.__class__.__name__ == "Node":
self._op_name = self.name
else:
self._op_name = self.__class__.__name__
@name.setter
def name(self, name):
self.set_name(name)
@args.setter
def args(self, args):
new_args = []
for arg in args:
if isinstance(arg, Node):
if self.__class__.__name__ == "Node":
self.nodes[arg.name] = self.graph[arg.name]
new_args.append(arg)
self._args = tuple(new_args)
@shape.setter
def shape(self, shape):
self.set_shape(shape, init=True)
@graph.setter
def graph(self, graph):
self._graph = Node.get_active_graph(graph)
@property
def gname(self):
scope_names = [self.name]
cgraph = self.graph
while cgraph:
scope_names.append(cgraph.name)
cgraph = cgraph.graph
return "/".join(list(reversed(scope_names)))
def __enter__(self):
Node._graph_stack.append(self)
return self
def __exit__(self, *args):
assert self == Node._graph_stack.pop()
def __repr__(self):
return "<node '%s'>" % self.name
def add_attribute(self, key, value):
self.added_attrs.append(key)
self.kwargs[key] = value
def is_shape_finalized(self):
if self.shape == UNSET_SHAPE:
return False
for s in self.shape:
if not isinstance(s, Integral):
return False
return True
def set_shape(self, shape=None, init=False):
if isinstance(shape, float):
self._shape = tuple([np.int(shape)])
elif isinstance(shape, Integral):
self._shape = tuple([shape])
elif isinstance(shape, Node):
self._shape = tuple([shape])
elif not shape or len(shape) == 0:
# TODO: Change in order to enable "is shape finalized" to work
self._shape = UNSET_SHAPE
else:
shapes = []
for dim in shape:
if isinstance(dim, (Node, Integral)):
shapes.append(dim)
elif isinstance(dim, float):
shapes.append(int(dim))
else:
raise TypeError(f"Shape value must be placeholder or integer value for {self.name}\n"
f"\tDim: {dim}"
f"\n\t{self.kwargs} ")
self._shape = tuple(shapes)
@staticmethod
def get_active_graph(graph=None):
"""
Obtain the currently active graph instance by returning the explicitly given graph or using
the default graph.
Parameters
----------
graph : Node or None
Graph to return or `None` to use the default graph.
Raises
------
ValueError
If no `Graph` instance can be obtained.
"""
graph = graph or Node._graph_stack[-1]
return graph
def instantiate_node(self, node): # pylint:disable=W0621
"""
Instantiate nodes by retrieving the node object associated with the node name.
Parameters
----------
node : Node or str
Node instance or name of an node.
Returns
-------
instantiated_node : Node
Node instance.
Raises
------
ValueError
If `node` is not an `Node` instance or an node name.
RuntimeError
If `node` is an `Node` instance but does not belong to this graph.
"""
if isinstance(node, str):
return self.nodes[node]
if isinstance(node, Node):
if node.name not in self.nodes and (node.graph != self):
raise RuntimeError(f"node '{node}' does not belong to {self} graph, instead belongs to"
f" {node.graph}")
return node
raise ValueError(f"'{node}' is not an `Node` instance or node name")
def instantiate_graph(self, context, **kwargs):
"""
Instantiate a graph by replacing all node names with node instances.
.. note::
This function modifies the context in place. Use :code:`context=context.copy()` to avoid
the context being modified.
Parameters
----------
context : dict[Node or str, object]
Context whose keys are node instances or names.
kwargs : dict[str, object]
Additional context information keyed by variable name.
Returns
-------
normalized_context : dict[Node, object]
Normalized context whose keys are node instances.
Raises
------
ValueError
If the context specifies more than one value for any node.
ValueError
If `context` is not a mapping.
"""
if context is None:
context = {}
elif not isinstance(context, Mapping):
raise ValueError("`context` must be a mapping.")
nodes = list(context)
# Add the keyword arguments
for node in nodes: # pylint:disable=W0621
value = context.pop(node)
node = self.instantiate_node(node)
if node in context:
raise ValueError(f"duplicate unequal value for node '{node}'")
context[node] = value
if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized():
context[node] = node.evaluate(context)
for name, value in kwargs.items():
node = self.nodes[name]
if node in context:
raise ValueError(f"duplicate value for node '{node}'")
context[node] = value
if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized():
context[node] = node.evaluate(context)
return context
def run(self, fetches, context=None, *, callback=None, **kwargs):
"""
Evaluate one or more nodes given a dictionary of node names with their values.
.. note::
This function modifies the context in place. Use :code:`context=context.copy()` to avoid
the context being modified.
Parameters
----------
fetches : list[str or Node] or str or Node
One or more `Node` instances or names to evaluate.
context : dict or None
Context in which to evaluate the nodes.
callback : callable or None
Callback to be evaluated when an node is evaluated.
kwargs : dict
Additional context information keyed by variable name.
Returns
-------
values : Node or tuple[object]
Output of the nodes given the context.
Raises
------
ValueError
If `fetches` is not an `Node` instance, node name, or a sequence thereof.
"""
if isinstance(fetches, (str, Node)):
fetches = [fetches]
single = True
elif isinstance(fetches, Sequence):
single = False
else:
raise ValueError("`fetches` must be an `Node` instance, node name, or a "
"sequence thereof.")
fetches = [self.instantiate_node(node) for node in fetches]
context = self.instantiate_graph(context, **kwargs)
for c in context:
if c in fetches and c.op_name in ["output", "state", "temp"]:
write_name = "/".join([f"{i}{c.write_count-1}" for i in c.name.split("/")]) if c.write_count > 0 else c.name
fetches[fetches.index(c)] = c.graph.nodes[write_name]
values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches]
return values[0] if single else tuple(values)
def __getstate__(self):
return self.__dict__
def __setstate__(self, data):
self.__dict__.update(data)
def set_name(self, name):
"""
Set the name of the node and update the graph.
Parameters
----------
value : str
Unique name of the node.
Returns
-------
self : Node
This node.
Raises
------
ValueError
If an node with `value` already exists in the associated graph.
KeyError
If the current name of the node cannot be found in the associated graph.
"""
name = name or uuid.uuid4().hex
# TODO: Need a way to check if the existing node is not equal to the current node as well
if self.graph and name in self.graph.nodes:
raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t"
f"Existing: {self.graph.nodes[name].args}\n\t"
f"New: {self.args}")
if self.graph:
graph = self.graph
if self._name and self._name in graph.nodes:
graph.update_graph_key(self._name, name)
else:
graph.nodes[name] = self
self._name = name
return self
def evaluate_dependencies(self, context, callback=None):
"""
Evaluate the dependencies of this node and discard the values.
Parameters
----------
context : dict
Normalised context in which to evaluate the node.
callback : callable or None
Callback to be evaluated when an node is evaluated.
"""
for node in self.dependencies:
node.evaluate(context, callback)
def evaluate(self, context, callback=None):
"""
Evaluate the node given a context.
Parameters
----------
context : dict
Normalised context in which to evaluate the node.
callback : callable or None
Callback to be evaluated when an node is evaluated.
Returns
-------
value : object
Output of the node given the context.
"""
# Evaluate all explicit dependencies first
self.evaluate_dependencies(context, callback)
if self in context:
return context[self]
# Evaluate the parents
partial = functools.partial(self.evaluate_node, context=context, callback=callback)
args = [partial(arg) for arg in self.args]
kwargs = {key: partial(value) for key, value in self.kwargs.items() if key not in self.added_attrs}
# Evaluate the node
callback = callback or _noop_callback
with callback(self, context):
if self.__class__.__name__ == "Node":
context[self] = self.value = self._evaluate(*args, context=context, **kwargs)
else:
context[self] = self.value = self._evaluate(*args, **kwargs)
return self.value
def _evaluate(self, *args, context=None, **kwargs):
"""
Inheriting nodes should implement this function to evaluate the node.
"""
return self(*args, context, **kwargs)
@classmethod
def evaluate_node(cls, node, context, **kwargs):
"""
Evaluate an node or constant given a context.
"""
Node.evaluated_nodes += 1
try:
if isinstance(node, Node):
Node._eval_stack.append(node.name)
return node.evaluate(context, **kwargs)
partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
if isinstance(node, tuple):
return tuple(partial(element) for element in node)
if isinstance(node, list):
return [partial(element) for element in node]
if isinstance(node, dict):
return {partial(key): partial(value) for key, value in node.items()}
if isinstance(node, slice):
return slice(*[partial(getattr(node, attr))
for attr in ['start', 'stop', 'step']])
return node
except Exception as ex: # pragma: no cover
messages = []
interactive = False
if isinstance(node, Node) or not is_iterable(node):
node = [node]
for n in node:
stack = []
if isinstance(n, Node):
for frame in reversed(n._stack): # pylint: disable=protected-access
# Do not capture any internal stack traces
fname = frame.filename
if 'polymath' in fname:
continue
# Stop tracing at the last interactive cell
if interactive and not fname.startswith('<'):
break # pragma: no cover
interactive = fname.startswith('<')
stack.append(frame)
stack = "".join(traceback.format_list(reversed(stack)))
message = "Failed to evaluate node `%s` defined at:\n\n%s" % (n, stack)
messages.append(message)
raise ex from EvaluationError("".join(messages))
@classmethod
def init_from_args(cls, *args,
name=None,
shape=None,
graph=None,
dependencies=None,
op_name=None,
value=None,
**kwargs):
if len(args) == 0:
n = cls(name=name,
shape=shape,
graph=graph,
op_name=op_name,
dependencies=dependencies,
value=value,
**kwargs)
else:
n = cls(*args,
name=name,
shape=shape,
graph=graph,
op_name=op_name,
dependencies=dependencies,
value=value,
**kwargs)
return n
def __bool__(self):
return True
def __hash__(self):
return id(self)
def func_hash(self):
"""
This returns the functional hash of a particular node. The default hash returns an object id, whereas this function
returns a hash of all attributes and subgraphs of a node.
"""
return node_hash(self)
def find_node(self, name):
g = self.graph
while g is not None and name not in g.nodes:
g = g.graph
if name in g.nodes:
return g.nodes[name]
raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}")
def __len__(self):
#TODO: Update this to check for finalized shape
if self.shape == UNSET_SHAPE:
raise TypeError(f'`shape` must be specified explicitly for nodes {self}')
return self.shape[0]
def __iter__(self):
num = len(self)
for i in range(num):
yield self[i]
def __eq__(self, other):
return hash(self) == hash(other)
def __getattr__(self, name):
return getattr_(self, name, graph=self.graph)
def __getitem__(self, key):
if self.__class__.__name__ != "Node":
if isinstance(key, (slice, Integral)):
return getitem(self, key, graph=self.graph)
else:
if isinstance(key, (list)):
return var_index(self, key, graph=self)
elif isinstance(key, tuple):
return var_index(self, list(key), graph=self)
else:
return var_index(self, [key], graph=self)
else:
return self.nodes[key]
def __add__(self, other):
return add(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__radd__(self)
def __radd__(self, other):
return add(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__add__(self)
def __sub__(self, other):
return sub(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rsub__(self)
def __rsub__(self, other):
return sub(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__sub__(self)
def __pow__(self, other):
return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rpow__(self)
def __rpow__(self, other):
return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rpow__(self)
def __matmul__(self, other):
return matmul(self, other, graph=self.graph)
def __rmatmul__(self, other):
return matmul(other, self, graph=self.graph)
def __mul__(self, other):
return mul(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmul__(self)
def __rmul__(self, other):
return mul(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mul__(self)
def __truediv__(self, other):
return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__truediv__(self)
def __rtruediv__(self, other):
return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rtruediv__(self)
def __floordiv__(self, other):
return floordiv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rfloordiv__(self)
def __rfloordiv__(self, other):
return floordiv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__floordiv__(self)
def __mod__(self, other):
return mod(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmod__(self)
def __rmod__(self, other):
return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mod__(self)
def __lshift__(self, other):
return lshift(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rlshift__(self)
def __rlshift__(self, other):
return lshift(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__lshift__(self)
def __rshift__(self, other):
return rshift(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rrshift__(self)
def __rrshift__(self, other):
return rshift(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rshift__(self)
def __and__(self, other):
return and_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rand__(self)
def __rand__(self, other):
return and_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__and__(self)
def __or__(self, other):
return or_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ror__(self)
def __ror__(self, other):
return or_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__or__(self)
def __xor__(self, other):
return xor(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rxor__(self)
def __rxor__(self, other):
return xor(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__xor__(self)
def __lt__(self, other):
return lt(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__gt__(self)
def __le__(self, other):
return le(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ge__(self)
def __ne__(self, other):
return ne(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ne__(self)
def __gt__(self, other):
return gt(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__lt__(self)
def __ge__(self, other):
return ge(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__le__(self)
def __invert__(self):
return inv(self, graph=self.graph)
def __neg__(self):
return neg(self, graph=self.graph)
def __abs__(self):
return abs_(self, graph=self.graph)
def __pos__(self):
return pos(self, graph=self.graph)
def __reversed__(self):
return reversed_(self, graph=self.graph)
def update_graph_key(self, old_key, new_key):
n = list(map(lambda k: (new_key, self.nodes[k]) if k == old_key else (k, self.nodes[k]), self.nodes.keys()))
self.nodes = Graph(n)
def insert_node(self, node, idx):
node_list = list(self.nodes.items())
node_list.insert(idx, (node.name, node))
self.nodes = Graph(node_list)
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
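# A minimal usage sketch of the Node API above (illustrative only; the graph
# context-manager protocol and the exact constructor arguments are assumptions,
# since the top of this module is not shown here):
#
#     with Node(name="main") as graph:       # hypothetical graph context
#         a = Node(name="a", shape=(1,))
#         b = Node(name="b", shape=(1,))
#         c = a + b                          # builds an `add` func_op node, not a value
#
#     # `hash(c)` is identity-based (id(c)), so it differs between otherwise
#     # identical graphs; `c.func_hash()` hashes the node's attributes and
#     # subgraphs, so structurally identical nodes share it.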
class EvaluationError(RuntimeError):
"""
Failed to evaluate a node.
"""
class var_index(Node): # pylint: disable=C0103,W0223
"""
Node representing values of a variable corresponding to input index values.
Parameters
----------
var : Node
The multi-dimensional variable being indexed into.
idx : tuple
Tuple of either integer values or index/index_op nodes.
"""
def __init__(self, var, idx, name=None, **kwargs): # pylint: disable=W0235
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
else:
domain = Domain(idx)
super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs)
@property
def domain(self):
return self.kwargs["domain"]
@property
def var(self):
var, index_list = self.args
return var
def set_name(self, name):
"""
Set the name for a variable index, making sure to register the new name with
a unique string which corresponds to the variable/index combination.
Parameters
----------
name : str
Unique name of the node.
Returns
-------
self : Node
This node.
Raises
------
ValueError
If a node with `name` already exists in the associated graph.
KeyError
If the current name of the node cannot be found in the associated graph.
"""
# TODO: Need a way to check if the existing node is not equal to the current node as well
if self.graph and name in self.graph.nodes:
raise ValueError(f"duplicate name '{name}' in {self.graph.name}:"
f"Existing: {self.graph.nodes[name].args}\n"
f"New: {self.args}")
if self.graph:
graph = self.graph
if self._name is not None and self._name in graph.nodes:
graph.update_graph_key(self._name, name)
else:
graph.nodes[name] = self
self._name = name
return self
def __getitem__(self, key):
if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape):
if isinstance(key, Integral):
key = tuple([key])
idx = np.ravel_multi_index(key, dims=self.shape, order='C')
ret = self.nodes.item_by_index(idx)
return ret
else:
if isinstance(key, (list)):
ret = var_index(self.var, tuple(key), graph=self)
elif isinstance(key, tuple):
ret = var_index(self.var, key, graph=self)
else:
ret = var_index(self.var, tuple([key]), graph=self)
return ret
def is_scalar(self, val=None):
if val is not None and (not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)):
if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not isinstance(self.var.shape[0], Node)):
raise ValueError(f"Invalid shape var for var index {self} with variable shape {self.var.shape}")
return True
else:
return self.var.shape == DEFAULT_SHAPES[0]
def _evaluate(self, var, indices, **kwargs):
if self.is_scalar(var):
out_shape = (1,)
indices = (0,)
single = True
else:
out_shape = self.domain.shape_from_indices(indices)
indices = self.domain.compute_pairs()
single = False
if isinstance(var, (Integral, Real, str)):
var = np.asarray([var])
elif not isinstance(var, (np.ndarray, list)):
raise TypeError(f"Variable {var} with type {type(var)} is not a list or numpy array, and cannot be sliced for {self.name}")
elif isinstance(var, list):
var = np.asarray(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
if len(out_shape) > len(var.shape):
for i in range(len(out_shape)):
if out_shape[i] == 1:
var = np.expand_dims(var, axis=i)
else:
var = np.squeeze(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
raise ValueError(f"Index list does not match {var.shape} in {self.var.name} - {self.var.op_name}"
f"dimensions for slice {self.args[0].name} with {out_shape}.\n"
f"Domain: {self.domain}\n"
f"Eval Stack: {Node._eval_stack}")
if not single and not all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]):
raise ValueError(f"var_index {self.name} has indices which are greater than the variable shape:\n"
f"\tArgs: {self.args}\n"
f"\tVar shape: {var.shape}\n"
f"\tNode shape: {self.var.shape}\n"
f"\tIndex Upper bounds: {indices[-1]}")
indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices))
res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
if out_shape == (1,) and len(indices) == 1:
res = res[0]
self.domain.set_computed(out_shape, indices)
return res
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
def __le__(self, other):
return slice_op(operator.le, self, other, graph=self.graph)
def __ne__(self, other):
return slice_op(operator.ne, self, other, graph=self.graph)
def __gt__(self, other):
return slice_op(operator.gt, self, other, graph=self.graph)
def __ge__(self, other):
return slice_op(operator.ge, self, other, graph=self.graph)
def __repr__(self):
return "<var_index name=%s, index=%s>" % (self.name, self.args)
class slice_op(Node):
"""
Node representing multi-dimensional operations performed on a node.
Parameters
----------
target : callable
The operation applied across the multi-dimensional domain of the node.
args : tuple
The operand nodes or values the target operation is applied to.
"""
def __init__(self, target, *args, **kwargs):
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
else:
all_args = _flatten_iterable(args)
slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
domain = slice1_idx.combine_set_domains(slice2_idx)
if "op_name" in kwargs:
kwargs.pop("op_name")
target_name = f"{target.__module__}.{target.__name__}"
super(slice_op, self).__init__(*args, target=target_name, domain=domain, op_name=f"slice_{target.__name__}", **kwargs)
self.target = target
@property
def domain(self):
return self.kwargs["domain"]
def __getitem__(self, key):
if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
return self
elif self.is_shape_finalized() and len(self.nodes) > 0:
if isinstance(key, (int, Node)):
key = tuple([key])
if len(key) != len(self.shape):
raise KeyError(f"Invalid key shape for {self.name}:\n"
f"Shape: {self.shape}\n"
f"Key: {key}")
name = f"{self.name}{key}"
if name not in self.nodes.keys():
raise KeyError(f"{name} not in {self.name} keys:\n"
f"Node keys: {list(self.nodes.keys())}")
ret = self.nodes[name]
return ret
else:
name = []
if isinstance(key, Node):
name.append(key.name)
elif hasattr(key, "__len__") and not isinstance(key, str):
for k in key:
if isinstance(k, Node):
name.append(k.name)
else:
name.append(str(k))
else:
name.append(key)
name = self.var.name + "[" + "][".join(name) + "]"
if name in self.graph.nodes:
return self.graph.nodes[name]
elif isinstance(key, (list)):
return var_index(self, key, name=name, graph=self.graph)
elif isinstance(key, tuple):
return var_index(self, list(key), name=name, graph=self.graph)
else:
return var_index(self, [key], name=name, graph=self.graph)
def set_shape(self, shape=None, init=False):
s = []
assert isinstance(shape, (tuple, list))
if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) == np.prod(shape) and len(shape) > 0:
self._shape = shape if isinstance(shape, tuple) else tuple(shape)
else:
for idx, d in enumerate(self.domain.dom_set):
if shape and isinstance(shape[idx], (func_op, Integral)):
s.append(shape[idx])
elif shape and isinstance(shape[idx], float):
s.append(int(shape[idx]))
elif isinstance(d, float):
s.append(int(d))
elif isinstance(d, var_index):
s.append(d.domain)
else:
s.append(d)
self._shape = tuple(s)
def is_scalar(self, val):
return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)
def _evaluate(self, op1, op2, context=None, **kwargs):
if self.is_scalar(op1) or self.is_scalar(op2):
value = self.target(op1, op2)
else:
arg0_dom = self.args[0].domain
arg1_dom = self.args[1].domain
op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])
op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])
op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)
op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)
value = self.target(op1, op2)
return value
def get_index_nodes(self, slice1_var=None, slice2_var=None):
if slice1_var is None and slice2_var is None:
slice1_var, slice2_var = self.args
if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
slice1_idx = slice1_var.domain
elif _is_node_type_instance(slice1_var, "index"):
slice1_idx = slice1_var.domain
else:
slice1_idx = Domain(tuple([]))
if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
slice2_idx = slice2_var.domain
elif _is_node_type_instance(slice2_var, "index"):
slice2_idx = slice2_var.domain
else:
slice2_idx = Domain(tuple([]))
return slice1_var, slice1_idx, slice2_var, slice2_idx
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
def __le__(self, other):
return slice_op(operator.le, self, other, graph=self.graph)
def __ne__(self, other):
return slice_op(operator.ne, self, other, graph=self.graph)
def __gt__(self, other):
return slice_op(operator.gt, self, other, graph=self.graph)
def __ge__(self, other):
return slice_op(operator.ge, self, other, graph=self.graph)
def __repr__(self):
return "<slice_%s '%s'>" % (self.target.__name__, self.name)
class func_op(Node): # pylint: disable=C0103,R0903
"""
Node wrapper for stateless functions.
Parameters
----------
target : callable
function to evaluate the node
args : tuple
positional arguments passed to the target
kwargs : dict
keyword arguments passed to the target
"""
def __init__(self, target, *args, **kwargs):
kwargs["op_name"] = kwargs["op_name"] if "op_name" in kwargs \
else f"{target.__name__}"
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
elif len(args) == 2:
all_args = _flatten_iterable(args)
slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
domain = slice1_idx.combine_set_domains(slice2_idx)
else:
domain = Domain(tuple([]))
self._target = None
super(func_op, self).__init__(*args, target=f"{target.__module__}.{target.__name__}", domain=domain, **kwargs)
self.target = target
self.added_attrs += ["domain", "target"]
@property
def target(self):
return self._target
@target.setter
def target(self, fnc):
self._target = fnc
self.op_name = f"{fnc.__name__}"
self.kwargs["target"] = f"{fnc.__module__}.{fnc.__name__}"
def __getitem__(self, key):
return self
@property
def domain(self):
return self.kwargs["domain"]
def get_index_nodes(self, slice1_var=None, slice2_var=None):
if slice1_var is None and slice2_var is None:
slice1_var, slice2_var = self.args
if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
slice1_idx = slice1_var.domain
else:
slice1_idx = Domain(tuple([]))
if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
slice2_idx = slice2_var.domain
else:
slice2_idx = Domain(tuple([]))
return slice1_var, slice1_idx, slice2_var, slice2_idx
def _evaluate(self, *args, **kwargs):
for aa in list(kwargs.keys()):
if aa in self.added_attrs:
kwargs.pop(aa)
return self.target(*args, **kwargs)
def __call__(self, *args, **kwargs):
return call(self, *args, **kwargs)
def __repr__(self):
return "<func_op '%s' target=%s args=<%d items>>" % \
(self.name, self.kwargs["target"], len(self.args))
def nodeop(target=None, **kwargs):
"""
Decorator for creating nodes from functions.
"""
# This is called when the decorator is used with arguments
if target is None:
return functools.partial(nodeop, **kwargs)
# Otherwise wrap `target` directly (decorator used without arguments, or re-entered via the partial above)
@functools.wraps(target)
def _wrapper(*args, **kwargs_inner):
return func_op(target, *args, **kwargs_inner, **kwargs)
return _wrapper
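# Example of the decorator in use (a sketch; the function name and arguments
# are hypothetical):
#
#     @nodeop
#     def scale(x, factor=2):
#         return x * factor
#
#     n = scale(3, factor=4)   # returns a func_op wrapping `scale`, not 12;
#                              # the value 12 is produced when the graph runs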
@nodeop
def call(func, *args, **kwargs):
"""
Call `func` with positional arguments `args` and keyword arguments `kwargs`.
Parameters
----------
func : callable
Function to call when the node is executed.
args : list
Sequence of positional arguments passed to `func`.
kwargs : dict
Mapping of keyword arguments passed to `func`.
"""
return func(*args, **kwargs)
@contextlib.contextmanager
def control_dependencies(dependencies, graph=None):
"""
Ensure that all `dependencies` are executed before any nodes in this scope.
Parameters
----------
dependencies : list
Sequence of nodes to be evaluated before evaluating any nodes defined in this
scope.
"""
# Add dependencies to the graph
graph = Node.get_active_graph(graph)
graph.dependencies.extend(dependencies)
yield
# Remove dependencies from the graph
del graph.dependencies[-len(dependencies):]
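# Usage sketch (assumes `setup_node`, `a` and `b` are nodes in the active graph):
#
#     with control_dependencies([setup_node]):
#         c = a + b    # `setup_node` is evaluated before any node defined here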
#pylint: disable=C0103
abs_ = nodeop(builtins.abs)
dict_ = nodeop(builtins.dict)
help_ = nodeop(builtins.help)
min_ = nodeop(builtins.min)
setattr_ = nodeop(builtins.setattr)
all_ = nodeop(builtins.all)
dir_ = nodeop(builtins.dir)
hex_ = nodeop(builtins.hex)
next_ = nodeop(builtins.next)
slice_ = nodeop(builtins.slice)
any_ = nodeop(builtins.any)
divmod_ = nodeop(builtins.divmod)
id_ = nodeop(builtins.id)
object_ = nodeop(builtins.object)
sorted_ = nodeop(builtins.sorted)
ascii_ = nodeop(builtins.ascii)
enumerate_ = nodeop(builtins.enumerate)
input_ = nodeop(builtins.input)
oct_ = nodeop(builtins.oct)
staticmethod_ = nodeop(builtins.staticmethod)
bin_ = nodeop(builtins.bin)
eval_ = nodeop(builtins.eval)
int_ = nodeop(builtins.int)
open_ = nodeop(builtins.open)
str_ = nodeop(builtins.str)
bool_ = nodeop(builtins.bool)
exec_ = nodeop(builtins.exec)
isinstance_ = nodeop(builtins.isinstance)
ord_ = nodeop(builtins.ord)
sum_ = nodeop(builtins.sum)
bytearray_ = nodeop(builtins.bytearray)
filter_ = nodeop(builtins.filter)
issubclass_ = nodeop(builtins.issubclass)
pow_ = nodeop(builtins.pow)
super_ = nodeop(builtins.super)
bytes_ = nodeop(builtins.bytes)
float_ = nodeop(builtins.float)
iter_ = nodeop(builtins.iter)
print_ = nodeop(builtins.print)
tuple_ = nodeop(builtins.tuple)
callable_ = nodeop(builtins.callable)
format_ = nodeop(builtins.format)
len_ = nodeop(builtins.len)
property_ = nodeop(builtins.property)
type_ = nodeop(builtins.type)
chr_ = nodeop(builtins.chr)
frozenset_ = nodeop(builtins.frozenset)
list_ = nodeop(builtins.list)
range_ = nodeop(builtins.range)
vars_ = nodeop(builtins.vars)
classmethod_ = nodeop(builtins.classmethod)
getattr_ = nodeop(builtins.getattr)
locals_ = nodeop(builtins.locals)
repr_ = nodeop(builtins.repr)
zip_ = nodeop(builtins.zip)
compile_ = nodeop(builtins.compile)
globals_ = nodeop(builtins.globals)
map_ = nodeop(builtins.map)
reversed_ = nodeop(builtins.reversed)
complex_ = nodeop(builtins.complex)
hasattr_ = nodeop(builtins.hasattr)
max_ = nodeop(builtins.max)
round_ = nodeop(builtins.round)
delattr_ = nodeop(builtins.delattr)
hash_ = nodeop(builtins.hash)
memoryview_ = nodeop(builtins.memoryview)
set_ = nodeop(builtins.set)
add = nodeop(operator.add)
and_ = nodeop(operator.and_)
attrgetter = nodeop(operator.attrgetter)
concat = nodeop(operator.concat)
contains = nodeop(operator.contains)
countOf = nodeop(operator.countOf)
delitem = nodeop(operator.delitem)
eq = nodeop(operator.eq)
floordiv = nodeop(operator.floordiv)
ge = nodeop(operator.ge)
getitem = nodeop(operator.getitem)
gt = nodeop(operator.gt)
index = nodeop(operator.index)
indexOf = nodeop(operator.indexOf)
inv = nodeop(operator.inv)
invert = nodeop(operator.invert)
ior = nodeop(operator.ior)
ipow = nodeop(operator.ipow)
irshift = nodeop(operator.irshift)
is_ = nodeop(operator.is_)
is_not = nodeop(operator.is_not)
itemgetter = nodeop(operator.itemgetter)
le = nodeop(operator.le)
length_hint = nodeop(operator.length_hint)
lshift = nodeop(operator.lshift)
lt = nodeop(operator.lt)
matmul = nodeop(operator.matmul)
methodcaller = nodeop(operator.methodcaller)
mod = nodeop(operator.mod)
mul = nodeop(operator.mul)
ne = nodeop(operator.ne)
neg = nodeop(operator.neg)
not_ = nodeop(operator.not_)
or_ = nodeop(operator.or_)
pos = nodeop(operator.pos)
rshift = nodeop(operator.rshift)
setitem = nodeop(operator.setitem)
sub = nodeop(operator.sub)
truediv = nodeop(operator.truediv)
truth = nodeop(operator.truth)
xor = nodeop(operator.xor)
import_ = nodeop(importlib.import_module)
| 35.725052
| 158
| 0.607239
| 6,433
| 51,194
| 4.613866
| 0.0813
| 0.050942
| 0.049055
| 0.038947
| 0.487214
| 0.429905
| 0.399717
| 0.387554
| 0.363633
| 0.352481
| 0
| 0.004223
| 0.282963
| 51,194
| 1,432
| 159
| 35.75
| 0.804348
| 0.13777
| 0
| 0.37013
| 0
| 0.002165
| 0.063779
| 0.005614
| 0
| 0
| 0
| 0.003492
| 0.002165
| 1
| 0.187229
| false
| 0
| 0.016234
| 0.126623
| 0.398268
| 0.001082
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
13bc25bc6434cc017d92bbc47c055999ff8c038c
| 3,181
|
py
|
Python
|
tests/stack_test.py
|
arthurlogilab/py_zipkin
|
8e733506c399967ea74c56b99a9a421e1bb1736a
|
[
"Apache-2.0"
] | 225
|
2016-09-16T17:57:51.000Z
|
2022-02-12T22:15:32.000Z
|
tests/stack_test.py
|
arthurlogilab/py_zipkin
|
8e733506c399967ea74c56b99a9a421e1bb1736a
|
[
"Apache-2.0"
] | 156
|
2016-09-17T03:50:04.000Z
|
2021-03-17T23:19:40.000Z
|
tests/stack_test.py
|
arthurlogilab/py_zipkin
|
8e733506c399967ea74c56b99a9a421e1bb1736a
|
[
"Apache-2.0"
] | 53
|
2016-09-20T18:34:08.000Z
|
2021-08-31T06:14:03.000Z
|
import mock
import pytest
import py_zipkin.storage
@pytest.fixture(autouse=True, scope="module")
def create_zipkin_attrs():
# The following tests all expect _thread_local.zipkin_attrs to exist: if it
# doesn't, mock.patch will fail.
py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", []):
assert not py_zipkin.storage.ThreadLocalStack().get()
assert not py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs():
with mock.patch.object(py_zipkin.storage.log, "warning", autospec=True) as log:
assert not py_zipkin.storage.Stack([]).get()
assert log.call_count == 1
def test_storage_stack_still_works_if_you_dont_pass_in_storage():
# Let's make sure this still works if we don't pass in a custom storage.
assert not py_zipkin.storage.Stack().get()
def test_get_zipkin_attrs_returns_the_last_of_the_list():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo"]):
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list():
assert "foo" == py_zipkin.storage.Stack(["bar", "foo"]).get()
def test_pop_zipkin_attrs_does_nothing_if_no_requests():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", []):
assert not py_zipkin.storage.ThreadLocalStack().pop()
def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests():
assert not py_zipkin.storage.Stack([]).pop()
def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo", "bar"]):
assert "bar" == py_zipkin.storage.ThreadLocalStack().pop()
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs():
context_stack = py_zipkin.storage.Stack(["foo", "bar"])
assert "bar" == context_stack.pop()
assert "foo" == context_stack.get()
def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo"]):
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
py_zipkin.storage.ThreadLocalStack().push("bar")
assert "bar" == py_zipkin.storage.ThreadLocalStack().get()
def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list():
stack = py_zipkin.storage.Stack(["foo"])
assert "foo" == stack.get()
stack.push("bar")
assert "bar" == stack.get()
def test_stack_copy():
stack = py_zipkin.storage.Stack()
stack.push("a")
stack.push("b")
the_copy = stack.copy()
the_copy.push("c")
stack.push("d")
assert ["a", "b", "c"] == the_copy._storage
assert ["a", "b", "d"] == stack._storage
| 34.576087
| 83
| 0.727759
| 449
| 3,181
| 4.775056
| 0.184855
| 0.089552
| 0.16791
| 0.14459
| 0.704291
| 0.640392
| 0.508862
| 0.385728
| 0.385728
| 0.362407
| 0
| 0.000366
| 0.142094
| 3,181
| 91
| 84
| 34.956044
| 0.785269
| 0.055014
| 0
| 0.241379
| 0
| 0
| 0.041972
| 0
| 0
| 0
| 0
| 0
| 0.327586
| 1
| 0.224138
| false
| 0.017241
| 0.051724
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13c5d0054209f9afb389d03f1764cab446c01a96
| 742
|
py
|
Python
|
src/messages.py
|
Ewpratten/chat
|
4cc8461e442b6530b7874f234b1a2261f3db8456
|
[
"MIT"
] | null | null | null |
src/messages.py
|
Ewpratten/chat
|
4cc8461e442b6530b7874f234b1a2261f3db8456
|
[
"MIT"
] | null | null | null |
src/messages.py
|
Ewpratten/chat
|
4cc8461e442b6530b7874f234b1a2261f3db8456
|
[
"MIT"
] | null | null | null |
greeting = """
--------------- BEGIN SESSION ---------------
You have connected to a chat server. Welcome!
:: About
Chat is a small piece of server software
written by Evan Pratten to allow people to
talk to each other from any computer as long
as it has an internet connection. (Even an
arduino!). Check out the project at:
https://github.com/Ewpratten/chat
:: Disclaimer
While chatting, keep in mind that, if there
is a rule or regulation about privacy, this
server does not follow it. All data is sent
to and from this server over a raw TCP socket
and data is temporarily stored in plaintext
while the server handles message broadcasting
Now that's out of the way, so happy chatting!
---------------------------------------------
"""
| 32.26087
| 45
| 0.690027
| 114
| 742
| 4.491228
| 0.719298
| 0.011719
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165768
| 742
| 23
| 46
| 32.26087
| 0.827141
| 0
| 0
| 0
| 0
| 0
| 0.975774
| 0.060565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13c974d988a5a072e9adfbe93d6a9ef5022a8ab3
| 1,712
|
py
|
Python
|
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
#!/usr/local/bin/python
import os
import mysql.connector as mysql
metrics_mysql_password = os.environ['METRICS_MYSQL_PWD']
sql_host = os.environ['SQL_HOST']
metrics = os.environ['QUERY_ON']
def dump_query_results():
"""
This is a simple SQL table dump of a given query so we can supply users with custom tables.
Note that the SQL query itself and column headers portion need to be changed if you want to change
the query/results. Otherwise it is good to go.
It can be called simply with the bin shell script.
Read the README at the top level for an example.
"""
#connect to mysql
db_connection = mysql.connect(
host = sql_host,#"mysql1", #"localhost",
user = "metrics", #"root",
passwd = metrics_mysql_password,
database = "metrics" #"datacamp"
)
cursor = db_connection.cursor()
query = "use "+metrics
cursor.execute(query)
#CHANGE QUERY HERE
query = "select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by signup_date"
#CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS
print("username\tdisplay_name\temail\torcid\tkb_internal_user\tinstitution\tcountry\tsignup_date\tlast_signin_date")
cursor.execute(query)
row_values = list()
for (row_values) in cursor:
temp_string = ""
for i in range(len(row_values) - 1):
if row_values[i] is not None:
temp_string += str(row_values[i])
temp_string += "\t"
if row_values[-1] is not None:
temp_string += str(row_values[-1])
print(temp_string)
return 1
dump_query_results()
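# Example invocation (a sketch; the output filename is hypothetical, and the
# repository README documents the supported bin/ wrapper script):
#
#     export METRICS_MYSQL_PWD=...
#     export SQL_HOST=mysql1
#     export QUERY_ON=metrics
#     python source/dump_query_results.py > user_info_dump.tsv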
| 33.568627
| 164
| 0.675234
| 242
| 1,712
| 4.603306
| 0.512397
| 0.056553
| 0.02693
| 0.023339
| 0.055655
| 0.055655
| 0.055655
| 0.055655
| 0
| 0
| 0
| 0.003817
| 0.234813
| 1,712
| 50
| 165
| 34.24
| 0.846565
| 0.28271
| 0
| 0.066667
| 0
| 0.033333
| 0.261163
| 0.090143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0.066667
| 0.066667
| 0
| 0.133333
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13cbb884947e5c5ee43f164c1fde11e81811776b
| 4,399
|
py
|
Python
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 2
|
2018-05-08T03:13:49.000Z
|
2022-02-09T08:48:06.000Z
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 6
|
2019-02-06T19:12:09.000Z
|
2022-02-08T04:29:49.000Z
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 12
|
2018-04-08T12:58:29.000Z
|
2022-03-31T18:35:53.000Z
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
import os
import os.path
import stat
import urllib.parse
import paramiko
import traceback
import osaka.utils
"""
A backend used to handle sftp using paramiko
@author starchmd
"""
class SFTP(object):
"""
SFTP handling for Osaka
"""
def __init__(self, params={}):
"""
Constructor
"""
self.keyfile = params["keyfile"] if "keyfile" in params else None
def connect(self, host=None, port=None, user=None, password=None, secure=False):
"""
Connect to this storage medium. All data is parsed out of the url and may be None
scheme:
@param host - may be None, host to connect to
implementor must handle defaulting
@param port - may be None, port to connect to
implementor must handle a None port
@param user - may be None, user to connect as
implementor must handle a None user
@param password - may be None, password to connect with
implementor must handle a None password
"""
self.client = paramiko.client.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(
host,
port=22 if port is None else int(port),
username=user,
password=password,
key_filename=self.keyfile,
timeout=15,
)
self.sftp = self.client.open_sftp()
@classmethod
def getSchemes(clazz):
"""
Returns a list of schemes this handler handles
Note: handling the scheme of another handler produces unknown results
@returns list of handled schemes
"""
return ["sftp"]
def put(self, path, url):
"""
Put the given path to the given url
@param path - local path of file/folder to put
@param url - url to put file/folder to
"""
rpath = urllib.parse.urlparse(url).path.lstrip("/")
print("\n\n\n\nUploading:", path)
if not os.path.isdir(path):
print("As file")
try:
self.sftp.mkdir(os.path.dirname(rpath))
except IOError:
pass
dest = rpath
try:
if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0:
dest = os.path.join(rpath, os.path.basename(path))
except:
pass
return self.upload(path, dest)
print("As Dir")
try:
self.sftp.mkdir(rpath)
except IOError:
pass
for dirpath, dirname, filenames in os.walk(path):
extra = os.path.relpath(dirpath, os.path.dirname(path))
try:
self.sftp.mkdir(os.path.join(rpath, extra))
except IOError:
pass
for filename in filenames:
self.upload(
os.path.join(dirpath, filename),
os.path.join(rpath, extra, filename),
)
def upload(self, path, rpath):
"""
Uploads a file to remote path
@param path - path to upload
@param rpath - remote path to upload to
"""
self.sftp.put(path, rpath)
return True
def get(self, url, path):
"""
Get the url (file/folder) to local path
@param url - url to get file/folder from
@param path - path to place fetched files
"""
rpath = urllib.parse.urlparse(url).path
try:
self.sftp.get(rpath, path)
except Exception as e:
osaka.utils.LOGGER.warning(
"Encountered exception: {}\n{}".format(e, traceback.format_exc())
)
raise osaka.utils.OsakaFileNotFound("File {} doesn't exist.".format(url))
def rm(self, url):
"""
Remove the item
@param url - url to remove
"""
rpath = urllib.parse.urlparse(url).path
self.sftp.remove(rpath)
def close(self):
"""
Close this connection
"""
self.client.close()
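# Usage sketch (hypothetical host, key file and paths; within Osaka these
# handlers are normally driven through osaka's own front end rather than
# called directly):
#
#     handler = SFTP(params={"keyfile": "/home/user/.ssh/id_rsa"})
#     handler.connect(host="example.com", port=22, user="osaka", password=None)
#     handler.put("/tmp/report.txt", "sftp://example.com/incoming/report.txt")
#     handler.get("sftp://example.com/incoming/report.txt", "/tmp/copy.txt")
#     handler.close()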
| 30.130137
| 90
| 0.562855
| 519
| 4,399
| 4.703276
| 0.310212
| 0.02458
| 0.032773
| 0.027038
| 0.119623
| 0.082343
| 0
| 0
| 0
| 0
| 0
| 0.001745
| 0.348716
| 4,399
| 145
| 91
| 30.337931
| 0.850262
| 0.246874
| 0
| 0.17284
| 0
| 0
| 0.034852
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098765
| false
| 0.074074
| 0.160494
| 0
| 0.308642
| 0.049383
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13ce074cf333bc82bde9c49d1dbfefb77ad96d57
| 715
|
py
|
Python
|
kindler/solver/optimizer.py
|
mingruimingrui/kindler
|
8a9c2278b607a167b0ce827b218e54949a1120e7
|
[
"MIT"
] | null | null | null |
kindler/solver/optimizer.py
|
mingruimingrui/kindler
|
8a9c2278b607a167b0ce827b218e54949a1120e7
|
[
"MIT"
] | null | null | null |
kindler/solver/optimizer.py
|
mingruimingrui/kindler
|
8a9c2278b607a167b0ce827b218e54949a1120e7
|
[
"MIT"
] | null | null | null |
import torch
def make_sgd_optimizer(
model,
base_lr=0.001,
bias_lr_factor=2.0,
momentum=0.9,
weight_decay=0.0005,
weight_decay_bias=0.0,
):
params = []
for key, value in model.named_parameters():
if not value.requires_grad:
continue
param_lr = base_lr
param_weight_decay = weight_decay
if "bias" in key:
param_lr = base_lr * bias_lr_factor
param_weight_decay = weight_decay_bias
params.append({
'params': [value],
'lr': param_lr,
'weight_decay': param_weight_decay
})
optimizer = torch.optim.SGD(params, base_lr, momentum=momentum)
return optimizer
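# Usage sketch (any torch.nn.Module works; the small sequential model here is
# hypothetical):
#
#     import torch.nn as nn
#     model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 2))
#     optimizer = make_sgd_optimizer(model, base_lr=0.01, bias_lr_factor=2.0)
#     # bias parameters get lr=0.02 and weight_decay_bias; all other parameters
#     # get lr=0.01 and weight_decay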
| 22.34375
| 67
| 0.601399
| 91
| 715
| 4.417582
| 0.384615
| 0.218905
| 0.119403
| 0.064677
| 0.134328
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030426
| 0.31049
| 715
| 31
| 68
| 23.064516
| 0.78499
| 0
| 0
| 0
| 0
| 0
| 0.033566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.04
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13d25057738843cced8f3d82852dabf41375fb9a
| 754
|
py
|
Python
|
redshift_upload/base_utilities.py
|
douglassimonsen/redshift_upload
|
e549c770538f022c0b90a983ca056f3e9c16c643
|
[
"MIT"
] | null | null | null |
redshift_upload/base_utilities.py
|
douglassimonsen/redshift_upload
|
e549c770538f022c0b90a983ca056f3e9c16c643
|
[
"MIT"
] | 1
|
2022-03-12T03:50:55.000Z
|
2022-03-12T03:50:55.000Z
|
redshift_upload/base_utilities.py
|
douglassimonsen/redshift_upload
|
e549c770538f022c0b90a983ca056f3e9c16c643
|
[
"MIT"
] | null | null | null |
import inspect
import os
from pathlib import Path
class change_directory:
"""
A class for changing the working directory using a "with" statement.
It takes the directory to change to as an argument. If no directory is given,
it takes the directory of the file from which this function was called.
"""
def __init__(self, directory: str = None) -> None:
self.old_dir = os.getcwd()
if directory is None:
self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore
else:
self.new_dir = directory
def __enter__(self, *_) -> None:
os.chdir(self.new_dir)
def __exit__(self, *_) -> None:
os.chdir(self.old_dir)
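# Usage sketch (the paths are hypothetical):
#
#     with change_directory("/tmp"):
#         ...  # os.getcwd() == "/tmp" inside the block
#     # the previous working directory is restored on exit
#
#     with change_directory():
#         ...  # cwd is the directory of the file that made this call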
| 30.16
| 98
| 0.624668
| 102
| 754
| 4.421569
| 0.529412
| 0.046563
| 0.066519
| 0.084257
| 0.084257
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003704
| 0.28382
| 754
| 24
| 99
| 31.416667
| 0.831481
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.214286
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13d760267b20f874fc4b087de72759e81f401445
| 6,123
|
py
|
Python
|
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
'''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
from unittest import TestCase
from mock import MagicMock, patch
from commons.json_schema_validator.schema_reader import SchemaField
from commons.json_schema_validator.schema_reader import SchemaReader
from users.serializers import UserCollectionSerializer
class UserSerializerTests(TestCase):
def setUp(self):
super(UserSerializerTests, self).setUp()
mock_schema_instance = MagicMock(name='mock_schema_instance')
mock_schema_instance.return_value = [
SchemaField(name='username', field_type='string', required=True),
SchemaField(name='password', field_type='string', required=True),
SchemaField(name='is_admin', field_type='boolean', required=True, default=False)
]
mock_get_schema_fields = MagicMock(name='mock_get_schema')
mock_get_schema_fields.return_value = mock_schema_instance
# mock schema instance
schema_reader = SchemaReader()
self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable
self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable
'get_schema_fields', mock_schema_instance)
self.patcher_schema.start()
self.patcher_validate.start()
def tearDown(self):
self.patcher_schema.stop()
self.patcher_validate.stop()
def test_deserialize_user_should_work(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass'})
self.assertEquals(True, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_invalid_is_admin_should_work(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'is_admin': 'si'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_empty_user_should_give_error_invalid(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': '', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_null_user_should_give_required_error(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"required",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_large_user_ne_should_give_invalid_error(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_origins_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': [" tugo"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_classes_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': [" sms"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
def test_deserialize_user_invalid_username_should_give_error(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_invalid_is_admin_should_give_error(self):
# We need to do the import here in order for generic patches to work
serializer = UserCollectionSerializer(data={'username': 'usera', 'password': 'pass', 'is_admin': 0})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['is_admin'][0],
'Invalid error message')
| 52.784483
| 114
| 0.6634
| 654
| 6,123
| 6.022936
| 0.19419
| 0.081239
| 0.106118
| 0.116781
| 0.693069
| 0.687992
| 0.669713
| 0.648388
| 0.606245
| 0.567149
| 0
| 0.003606
| 0.230116
| 6,123
| 115
| 115
| 53.243478
| 0.83199
| 0.136534
| 0
| 0.421687
| 0
| 0
| 0.16926
| 0
| 0
| 0
| 0
| 0
| 0.240964
| 1
| 0.13253
| false
| 0.144578
| 0.060241
| 0
| 0.204819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13d7896d6d799cba6c0e766504d5f3eea5f2e531
| 3,124
|
py
|
Python
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 14
|
2019-10-13T07:38:04.000Z
|
2022-02-13T09:03:50.000Z
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 1
|
2019-10-13T07:35:13.000Z
|
2019-10-13T08:22:48.000Z
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 4
|
2019-10-13T08:21:43.000Z
|
2022-01-09T16:39:33.000Z
|
''' User views '''
from datetime import timedelta
from flask import request, jsonify, make_response, redirect, json, render_template
from flask_jwt_extended import (create_access_token, jwt_required)
from flask_restful import Resource
from flask_login import login_user, current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from src import db, api
from .models import User
from .schemas import UserSchema
class UserLoginResource(Resource):
model = User
schema = UserSchema
def get(self):
return make_response(render_template('login.html'))
def post(self):
if request.json:
data = request.json
user = self.model.query.filter(self.model.email == data['email']).first()
if user and self.model.check_password(user, data['password']):
expires = timedelta(days=365)
user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data
return make_response(
jsonify({'id': user,
'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200)
else:
return make_response(jsonify({"error": {"code": 400, "msg": "No such user/wrong password."}}), 400)
else:
data = request.form
user = self.model.query.filter(self.model.email == data['email']).first()
if user and self.model.check_password(user, data['password']) and login_user(user):
return make_response(redirect('/admin/', 302))
else:
return make_response(redirect('/api/v1/login', 403))
class UserRegisterResource(Resource):
model = User
schema = UserSchema
def post(self):
data = request.json
if not data:
return make_response(jsonify({'error': 'No data'}), 400)
user = User.query.filter(User.email == data['email']).first()
if user:
return make_response(jsonify({'error': 'User already exists'}), 403)
user, errors = self.schema().load(data)
if errors:
return make_response(jsonify(errors), 400)
try:
user.set_password(data['password'])
db.session.add(user)
db.session.commit()
except (IntegrityError, InvalidRequestError) as e:
print(e)
db.session.rollback()
return make_response(jsonify(error={'code': 400 }), 400)
expires = timedelta(days=365)
return make_response(
jsonify(created_user={'id': user.id,
'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data,
'authentication_token': create_access_token(identity=user.id,
expires_delta=expires)}), 200)
api.add_resource(UserLoginResource, '/login/', endpoint='login')
api.add_resource(UserRegisterResource, '/register/', endpoint='register')
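# Example requests (a sketch; the base URL is hypothetical and the /api/v1
# prefix is only inferred from the login redirect above):
#
#     import requests
#     base = "http://localhost:5000/api/v1"
#     creds = {"email": "user@example.com", "password": "secret"}
#     requests.post(f"{base}/register/", json=creds)
#     token = requests.post(f"{base}/login/", json=creds).json()["authentication_token"]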
| 40.571429
| 124
| 0.588348
| 336
| 3,124
| 5.354167
| 0.28869
| 0.073374
| 0.100056
| 0.097276
| 0.367982
| 0.31851
| 0.264591
| 0.223457
| 0.190106
| 0.190106
| 0
| 0.018165
| 0.295134
| 3,124
| 77
| 125
| 40.571429
| 0.798819
| 0.003201
| 0
| 0.274194
| 0
| 0
| 0.08336
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048387
| false
| 0.064516
| 0.145161
| 0.016129
| 0.451613
| 0.016129
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13ded3828a8c037ea4aa78b91386fb78512809eb
| 326
|
py
|
Python
|
tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py
|
mbargull/conda-build
|
ebc56f48196774301863fecbe98a32a7ded6eb7e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py
|
mbargull/conda-build
|
ebc56f48196774301863fecbe98a32a7ded6eb7e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py
|
mbargull/conda-build
|
ebc56f48196774301863fecbe98a32a7ded6eb7e
|
[
"BSD-3-Clause"
] | null | null | null |
import os
pkgs = os.path.join(os.environ["ROOT"], "pkgs")
info_dir = os.path.join(pkgs, "conda-build-test-ignore-some-prefix-files-1.0-0", "info")
has_prefix_file = os.path.join(info_dir, "has_prefix")
print(info_dir)
assert os.path.isfile(has_prefix_file)
with open(has_prefix_file) as f:
assert "test2" not in f.read()
| 32.6
| 88
| 0.733129
| 60
| 326
| 3.816667
| 0.516667
| 0.104803
| 0.131004
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 0.09816
| 326
| 9
| 89
| 36.222222
| 0.765306
| 0
| 0
| 0
| 0
| 0
| 0.226994
| 0.144172
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13e067146d5c409e953e8fe9a97ca674f7b0976f
| 2,217
|
py
|
Python
|
ymir/backend/src/ymir_controller/controller/utils/invoker_mapping.py
|
phoenix-xhuang/ymir
|
537d3ac389c4a365ce4daef431c95b42ddcd5b1b
|
[
"Apache-2.0"
] | 64
|
2021-11-15T03:48:00.000Z
|
2022-03-25T07:08:46.000Z
|
ymir/backend/src/ymir_controller/controller/utils/invoker_mapping.py
|
phoenix-xhuang/ymir
|
537d3ac389c4a365ce4daef431c95b42ddcd5b1b
|
[
"Apache-2.0"
] | 35
|
2021-11-23T04:14:35.000Z
|
2022-03-26T09:03:43.000Z
|
ymir/backend/src/ymir_controller/controller/utils/invoker_mapping.py
|
phoenix-xhuang/ymir
|
537d3ac389c4a365ce4daef431c95b42ddcd5b1b
|
[
"Apache-2.0"
] | 57
|
2021-11-11T10:15:40.000Z
|
2022-03-29T07:27:54.000Z
|
from controller.invoker import (
invoker_cmd_branch_checkout,
invoker_cmd_branch_commit,
invoker_cmd_branch_create,
invoker_cmd_branch_delete,
invoker_cmd_branch_list,
invoker_cmd_evaluate,
invoker_cmd_filter,
invoker_cmd_gpu_info,
invoker_cmd_inference,
invoker_cmd_init,
invoker_cmd_label_add,
invoker_cmd_label_get,
invoker_cmd_log,
invoker_cmd_merge,
invoker_cmd_pull_image,
invoker_cmd_repo_check,
invoker_cmd_repo_clear,
invoker_cmd_sampling,
invoker_cmd_terminate,
invoker_cmd_user_create,
invoker_task_factory,
)
from proto import backend_pb2
RequestTypeToInvoker = {
backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker,
backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker,
backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker,
backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker,
backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker,
backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker,
backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker,
backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker,
backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker,
backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker,
backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker,
backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker,
backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker,
backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker,
backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler,
backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker,
backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker,
backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker,
backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker,
backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory,
backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker,
backend_pb2.CMD_SAMPLING: invoker_cmd_sampling.SamplingInvoker,
}
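# Dispatch sketch (illustrative; the real wiring lives in the controller's gRPC
# service code, and `req.req_type` is assumed to be the request-type field of a
# backend_pb2 request message):
#
#     invoker_cls = RequestTypeToInvoker[req.req_type]
#     invoker = invoker_cls(...)   # constructor arguments depend on the invoker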
| 43.470588
| 87
| 0.834461
| 284
| 2,217
| 5.929577
| 0.204225
| 0.243468
| 0.146675
| 0.045131
| 0.273159
| 0.179335
| 0
| 0
| 0
| 0
| 0
| 0.011705
| 0.113667
| 2,217
| 50
| 88
| 44.34
| 0.845293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13e10f247a53a809b100dc05b97804f51f30b05a
| 463
|
py
|
Python
|
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | 6
|
2021-07-04T07:59:17.000Z
|
2021-07-04T14:41:00.000Z
|
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | null | null | null |
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | 1
|
2022-02-15T13:31:46.000Z
|
2022-02-15T13:31:46.000Z
|
import os
from pymongo import MongoClient
from dotenv import load_dotenv
def database_entry(data):
try:
load_dotenv()
mongo_string = os.getenv('MONGODB_AUTH_URI')
client = MongoClient(mongo_string)
database = client[os.getenv('MONGODB_DB')]
col = database['users']
col.insert_one(data)
return True
except Exception as e:
print(e)
return False
if __name__ == "__main__":
pass
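# Usage sketch (requires MONGODB_AUTH_URI and MONGODB_DB to be set in the
# environment or a .env file; the document fields are hypothetical):
#
#     ok = database_entry({"name": "Ada", "email": "ada@example.com"})
#     print(ok)   # True if the insert succeeded, False otherwise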
| 21.045455
| 52
| 0.637149
| 56
| 463
| 4.964286
| 0.642857
| 0.071942
| 0.107914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278618
| 463
| 21
| 53
| 22.047619
| 0.832335
| 0
| 0
| 0
| 0
| 0
| 0.084233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0.058824
| 0.176471
| 0
| 0.352941
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13e74a7f98e6571f4fc714e2743e38c7eafbf58e
| 2,607
|
py
|
Python
|
orthoexon/tests/test_util.py
|
jessicalettes/orthoexon
|
463ad1908364c602cf75dbddb0b16a42f4100a36
|
[
"BSD-3-Clause"
] | null | null | null |
orthoexon/tests/test_util.py
|
jessicalettes/orthoexon
|
463ad1908364c602cf75dbddb0b16a42f4100a36
|
[
"BSD-3-Clause"
] | null | null | null |
orthoexon/tests/test_util.py
|
jessicalettes/orthoexon
|
463ad1908364c602cf75dbddb0b16a42f4100a36
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_orthoexon
----------------------------------
Tests for `orthoexon` module.
"""
import os
import pytest
@pytest.fixture
def exon_id_with_quotes():
return "'ENSE00001229068.1'"
@pytest.fixture
def exon_id():
return "ENSE00001229068.1"
def test_separate_with_quotes(exon_id_with_quotes):
from orthoexon.util import separate
test = separate(exon_id_with_quotes)
true = "ENSE00001229068"
assert test == true
def test_separate(exon_id):
from orthoexon.util import separate
test = separate(exon_id)
true = "ENSE00001229068"
assert test == true
@pytest.fixture
def location():
return "chr20:10256140-10256211:+:0"
def test_splitstart(location):
from orthoexon.util import splitstart
test = splitstart(location)
true = '10256140'
assert test == true
def test_splitend(location):
from orthoexon.util import splitend
test = splitend(location)
true = '10256211'
assert test == true
@pytest.fixture
def human_gtf_filename(table_folder):
return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf')
@pytest.fixture
def human_gtf_database(table_folder):
return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf.db')
@pytest.fixture
def human_fasta(table_folder):
return os.path.join(table_folder, 'GRCm38.p3.genome.fa')
def test_translate(exon_id, human_fasta, human_gtf_database):
from orthoexon.util import translate
from orthoexon.util import separate
for index, species1gene in enumerate(human_gtf_database.features_of_type('gene')):
species1gffutilsgeneid = str(species1gene['gene_id'])
species1geneid = separate(species1gffutilsgeneid)
for exon in human_gtf_database.children(species1geneid,
featuretype='CDS',
order_by='start'):
if exon_id == exon:
test = translate(exon, human_fasta)
break
break
true = 'MAEDADMRNELEEMQRRADQLADE'
assert test == true
# def test_getsequence(exon, human_gtf_database):
# from orthoexon.util import getsequence
#
# test = getsequence(exon, human_gtf_database)
# true = 'ATGGCCGAAGACGCAGACATGCGCAATGAGCTGGAGGAGATGCAGCGAAGGGCTGACCAGTT' \
# 'GGCTGATGAG'
#
# assert test == true
# def test_make_sequence_array(finalsequencedf):
# from orthoexon.util import make_sequence_array
#
# test = make_sequence_array(finalsequencedf)
# true = ......
#
# assert test == true
| 23.917431
| 86
| 0.678558
| 287
| 2,607
| 5.965157
| 0.275261
| 0.028037
| 0.079439
| 0.107477
| 0.478388
| 0.271028
| 0.204439
| 0.158879
| 0.136682
| 0.079439
| 0
| 0.048434
| 0.215957
| 2,607
| 109
| 87
| 23.917431
| 0.789139
| 0.221327
| 0
| 0.327273
| 0
| 0
| 0.117588
| 0.057798
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.2
| false
| 0
| 0.145455
| 0.109091
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
13efdb45818b7da3afae845201256a86d37c940d
| 4,302
|
py
|
Python
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 5
|
2017-08-25T04:31:30.000Z
|
2022-03-22T15:01:56.000Z
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 20
|
2021-03-25T12:52:42.000Z
|
2022-03-01T02:02:03.000Z
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 3
|
2020-04-13T14:41:31.000Z
|
2022-03-02T18:56:32.000Z
|
import math
import os.path
import sys
import textwrap
from test import support
def format_duration(seconds):
ms = math.ceil(seconds * 1e3)
seconds, ms = divmod(ms, 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
parts = []
if hours:
parts.append('%s hour' % hours)
if minutes:
parts.append('%s min' % minutes)
if seconds:
if parts:
# 2 min 1 sec
parts.append('%s sec' % seconds)
else:
# 1.0 sec
parts.append('%.1f sec' % (seconds + ms / 1000))
if not parts:
return '%s ms' % ms
parts = parts[:2]
return ' '.join(parts)
def removepy(names):
if not names:
return
for idx, name in enumerate(names):
basename, ext = os.path.splitext(name)
if ext == '.py':
names[idx] = basename
def count(n, word):
if n == 1:
return "%d %s" % (n, word)
else:
return "%d %ss" % (n, word)
def printlist(x, width=70, indent=4, file=None):
"""Print the elements of iterable x to stdout.
Optional arg width (default 70) is the maximum line length.
Optional arg indent (default 4) is the number of blanks with which to
begin each line.
"""
blanks = ' ' * indent
# Print the sorted list: 'x' may be a '--random' list or a set()
print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks),
file=file)
def print_warning(msg):
support.print_warning(msg)
orig_unraisablehook = None
def regrtest_unraisable_hook(unraisable):
global orig_unraisablehook
support.environment_altered = True
print_warning("Unraisable exception")
old_stderr = sys.stderr
try:
sys.stderr = sys.__stderr__
orig_unraisablehook(unraisable)
finally:
sys.stderr = old_stderr
def setup_unraisable_hook():
global orig_unraisablehook
orig_unraisablehook = sys.unraisablehook
sys.unraisablehook = regrtest_unraisable_hook
def clear_caches():
# Clear the warnings registry, so they can be displayed again
for mod in sys.modules.values():
if hasattr(mod, '__warningregistry__'):
del mod.__warningregistry__
# Flush standard output, so that buffered data is sent to the OS and
# associated Python objects are reclaimed.
for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
if stream is not None:
stream.flush()
# Clear assorted module caches.
# Don't worry about resetting the cache if the module is not loaded
try:
distutils_dir_util = sys.modules['distutils.dir_util']
except KeyError:
pass
else:
distutils_dir_util._path_created.clear()
try:
re = sys.modules['re']
except KeyError:
pass
else:
re.purge()
try:
_strptime = sys.modules['_strptime']
except KeyError:
pass
else:
_strptime._regex_cache.clear()
try:
urllib_parse = sys.modules['urllib.parse']
except KeyError:
pass
else:
urllib_parse.clear_cache()
try:
urllib_request = sys.modules['urllib.request']
except KeyError:
pass
else:
urllib_request.urlcleanup()
try:
linecache = sys.modules['linecache']
except KeyError:
pass
else:
linecache.clearcache()
try:
mimetypes = sys.modules['mimetypes']
except KeyError:
pass
else:
mimetypes._default_mime_types()
try:
filecmp = sys.modules['filecmp']
except KeyError:
pass
else:
filecmp._cache.clear()
try:
struct = sys.modules['struct']
except KeyError:
pass
else:
struct._clearcache()
try:
doctest = sys.modules['doctest']
except KeyError:
pass
else:
doctest.master = None
try:
ctypes = sys.modules['ctypes']
except KeyError:
pass
else:
ctypes._reset_cache()
try:
typing = sys.modules['typing']
except KeyError:
pass
else:
for f in typing._cleanups:
f()
support.gc_collect()
| 22.761905
| 75
| 0.600418
| 510
| 4,302
| 4.933333
| 0.35098
| 0.051669
| 0.085851
| 0.104928
| 0.022258
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009015
| 0.303812
| 4,302
| 188
| 76
| 22.882979
| 0.831052
| 0.125291
| 0
| 0.381295
| 0
| 0
| 0.051701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0.086331
| 0.035971
| 0
| 0.129496
| 0.035971
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13f954a55ebaa879400311cfe5c32a3993b29137
| 12,933
|
py
|
Python
|
test/test_rimuhosting.py
|
shenoyn/libcloud
|
bd902992a658b6a99193d69323e051ffa7388253
|
[
"Apache-2.0"
] | 1
|
2015-11-08T12:59:27.000Z
|
2015-11-08T12:59:27.000Z
|
test/test_rimuhosting.py
|
shenoyn/libcloud
|
bd902992a658b6a99193d69323e051ffa7388253
|
[
"Apache-2.0"
] | null | null | null |
test/test_rimuhosting.py
|
shenoyn/libcloud
|
bd902992a658b6a99193d69323e051ffa7388253
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# libcloud.org licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2009 RedRata Ltd
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from test import MockHttp, TestCaseMixin
import unittest
import httplib
class RimuHostingTest(unittest.TestCase, TestCaseMixin):
def setUp(self):
RimuHostingNodeDriver.connectionCls.conn_classes = (None,
RimuHostingMockHttp)
self.driver = RimuHostingNodeDriver('foo')
def test_list_nodes(self):
nodes = self.driver.list_nodes()
self.assertEqual(len(nodes),1)
node = nodes[0]
self.assertEqual(node.public_ip[0], "1.2.3.4")
self.assertEqual(node.public_ip[1], "1.2.3.5")
self.assertEqual(node.extra['order_oid'], 88833465)
self.assertEqual(node.id, "order-88833465-api-ivan-net-nz")
def test_list_sizes(self):
sizes = self.driver.list_sizes()
self.assertEqual(len(sizes),1)
size = sizes[0]
self.assertEqual(size.ram,950)
self.assertEqual(size.disk,20)
self.assertEqual(size.bandwidth,75)
self.assertEqual(size.price,32.54)
def test_list_images(self):
images = self.driver.list_images()
self.assertEqual(len(images),6)
image = images[0]
self.assertEqual(image.name,"Debian 5.0 (aka Lenny, RimuHosting"\
" recommended distro)")
self.assertEqual(image.id, "lenny")
def test_reboot_node(self):
# Raises exception on failure
node = self.driver.list_nodes()[0]
self.driver.reboot_node(node)
def test_destroy_node(self):
# Raises exception on failure
node = self.driver.list_nodes()[0]
self.driver.destroy_node(node)
def test_create_node(self):
# Raises exception on failure
size = self.driver.list_sizes()[0]
image = self.driver.list_images()[0]
self.driver.create_node(name="api.ivan.net.nz", image=image, size=size)
class RimuHostingMockHttp(MockHttp):
def _r_orders(self,method,url,body,headers):
body = """
{ "get_orders_response" :
{ "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : "Found 15 orders"
, "response_display_duration_type" : "REGULAR",
"about_orders" :
[{ "order_oid" : 88833465
, "domain_name" : "api.ivan.net.nz"
, "slug" : "order-88833465-api-ivan-net-nz"
, "billing_oid" : 96122465
, "is_on_customers_own_physical_server" : false
, "vps_parameters" : { "memory_mb" : 160
, "disk_space_mb" : 4096
, "disk_space_2_mb" : 0}
, "host_server_oid" : "764"
, "server_type" : "VPS"
, "data_transfer_allowance" : { "data_transfer_gb" : 30
, "data_transfer" : "30"}
, "billing_info" : { }
, "allocated_ips" : { "primary_ip" : "1.2.3.4"
, "secondary_ips" : ["1.2.3.5","1.2.3.6"]}
, "running_state" : "RUNNING"}]}}"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _r_pricing_plans(self,method,url,body,headers):
body = """
{"get_pricing_plans_response" :
{ "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : "Here some pricing plans we are offering on new orders. Note we offer most disk and memory sizes. So if you setup a new server feel free to vary these (e.g. different memory, disk, etc) and we will just adjust the pricing to suit. Pricing is in USD. If you are an NZ-based customer then we would need to add GST."
, "response_display_duration_type" : "REGULAR"
, "pricing_plan_infos" :
[{ "pricing_plan_code" : "MiroVPSLowContention"
, "pricing_plan_description" : "MiroVPS Semi-Dedicated Server (Dallas)"
, "monthly_recurring_fee" : 32.54
, "monthly_recurring_amt" : { "amt" : 35.0
, "currency" : "CUR_AUD"
,"amt_usd" : 32.54}
, "minimum_memory_mb" : 950
, "minimum_disk_gb" : 20
, "minimum_data_transfer_allowance_gb" : 75
, "see_also_url" : "http://rimuhosting.com/order/serverdetails.jsp?plan=MiroVPSLowContention"
, "server_type" : "VPS"
, "offered_at_data_center" :
{ "data_center_location_code" : "DCDALLAS"
, "data_center_location_name" : "Dallas"}}
]}}
"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _r_distributions(self, method, url, body, headers):
body = """
{ "get_distros_response" : { "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : "Here are the distros we are offering on new orders."
, "response_display_duration_type" : "REGULAR"
, "distro_infos" : [{ "distro_code" : "lenny"
, "distro_description" : "Debian 5.0 (aka Lenny, RimuHosting recommended distro)"}
, { "distro_code" : "centos5"
, "distro_description" : "Centos5"}
, { "distro_code" : "ubuntu904"
, "distro_description" : "Ubuntu 9.04 (Jaunty Jackalope, from 2009-04)"}
, { "distro_code" : "ubuntu804"
, "distro_description" : "Ubuntu 8.04 (Hardy Heron, 5 yr long term support (LTS))"}
, { "distro_code" : "ubuntu810"
, "distro_description" : "Ubuntu 8.10 (Intrepid Ibex, from 2008-10)"}
, { "distro_code" : "fedora10"
, "distro_description" : "Fedora 10"}]}}
"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _r_orders_new_vps(self, method, url, body, headers):
body = """
{ "post_new_vps_response" :
{ "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : null
, "response_display_duration_type" : "REGULAR"
, "setup_messages" :
["Using user-specified billing data: Wire Transfer" , "Selected user as the owner of the billing details: Ivan Meredith"
, "No VPS paramters provided, using default values."]
, "about_order" :
{ "order_oid" : 52255865
, "domain_name" : "api.ivan.net.nz"
, "slug" : "order-52255865-api-ivan-net-nz"
, "billing_oid" : 96122465
, "is_on_customers_own_physical_server" : false
, "vps_parameters" :
{ "memory_mb" : 160
, "disk_space_mb" : 4096
, "disk_space_2_mb" : 0}
, "host_server_oid" : "764"
, "server_type" : "VPS"
, "data_transfer_allowance" :
{ "data_transfer_gb" : 30 , "data_transfer" : "30"}
, "billing_info" : { }
, "allocated_ips" :
{ "primary_ip" : "74.50.57.80", "secondary_ips" : []}
, "running_state" : "RUNNING"}
, "new_order_request" :
{ "billing_oid" : 96122465
, "user_oid" : 0
, "host_server_oid" : null
, "vps_order_oid_to_clone" : 0
, "ip_request" :
{ "num_ips" : 1, "extra_ip_reason" : ""}
, "vps_parameters" :
{ "memory_mb" : 160
, "disk_space_mb" : 4096
, "disk_space_2_mb" : 0}
, "pricing_plan_code" : "MIRO1B"
, "instantiation_options" :
{ "control_panel" : "webmin"
, "domain_name" : "api.ivan.net.nz"
, "password" : "aruxauce27"
, "distro" : "lenny"}}
, "running_vps_info" :
{ "pings_ok" : true
, "current_kernel" : "default"
, "current_kernel_canonical" : "2.6.30.5-xenU.i386"
, "last_backup_message" : ""
, "is_console_login_enabled" : false
, "console_public_authorized_keys" : null
, "is_backup_running" : false
, "is_backups_enabled" : true
, "next_backup_time" :
{ "ms_since_epoch": 1256446800000, "iso_format" : "2009-10-25T05:00:00Z", "users_tz_offset_ms" : 46800000}
, "vps_uptime_s" : 31
, "vps_cpu_time_s" : 6
, "running_state" : "RUNNING"
, "is_suspended" : false}}}
"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _r_orders_order_88833465_api_ivan_net_nz_vps(self, method, url, body, headers):
body = """
{ "delete_server_response" :
{ "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : "Server removed"
, "response_display_duration_type" : "REGULAR"
, "cancel_messages" :
["api.ivan.net.nz is being shut down."
, "A $7.98 credit has been added to your account."
, "If you need to un-cancel the server please contact our support team."]
}
}
"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _r_orders_order_88833465_api_ivan_net_nz_vps_running_state(self, method,
url, body,
headers):
body = """
{ "put_running_state_response" :
{ "status_message" : null
, "status_code" : 200
, "error_info" : null
, "response_type" : "OK"
, "human_readable_message" : "api.ivan.net.nz restarted. After the reboot api.ivan.net.nz is pinging OK."
, "response_display_duration_type" : "REGULAR"
, "is_restarted" : true
, "is_pinging" : true
, "running_vps_info" :
{ "pings_ok" : true
, "current_kernel" : "default"
, "current_kernel_canonical" : "2.6.30.5-xenU.i386"
, "last_backup_message" : ""
, "is_console_login_enabled" : false
, "console_public_authorized_keys" : null
, "is_backup_running" : false
, "is_backups_enabled" : true
, "next_backup_time" :
{ "ms_since_epoch": 1256446800000, "iso_format" : "2009-10-25T05:00:00Z", "users_tz_offset_ms" : 46800000}
, "vps_uptime_s" : 19
, "vps_cpu_time_s" : 5
, "running_state" : "RUNNING"
, "is_suspended" : false}
, "host_server_info" : { "is_host64_bit_capable" : true
, "default_kernel_i386" : "2.6.30.5-xenU.i386"
, "default_kernel_x86_64" : "2.6.30.5-xenU.x86_64"
, "cpu_model_name" : "Intel(R) Xeon(R) CPU E5506 @ 2.13GHz"
, "host_num_cores" : 1
, "host_xen_version" : "3.4.1"
, "hostload" : [1.45
, 0.56
, 0.28]
, "host_uptime_s" : 3378276
, "host_mem_mb_free" : 51825
, "host_mem_mb_total" : 73719
, "running_vpss" : 34}
, "running_state_messages" : null}}
"""
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
| 45.22028
| 380
| 0.540478
| 1,377
| 12,933
| 4.819898
| 0.28976
| 0.029381
| 0.01808
| 0.021697
| 0.446738
| 0.410125
| 0.361911
| 0.339009
| 0.309779
| 0.309779
| 0
| 0.050881
| 0.341993
| 12,933
| 285
| 381
| 45.378947
| 0.729025
| 0.067115
| 0
| 0.387755
| 0
| 0.036735
| 0.757244
| 0.111831
| 0
| 0
| 0
| 0
| 0.053061
| 1
| 0.053061
| false
| 0.004082
| 0.020408
| 0
| 0.106122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b913259774170b0ae117752589cf379fac40286c
| 4,139
|
py
|
Python
|
easyidp/core/tests/test_class_reconsproject.py
|
HowcanoeWang/EasyIDP
|
0d0a0df1287e3c15cda17e8e4cdcbe05f21f7272
|
[
"MIT"
] | null | null | null |
easyidp/core/tests/test_class_reconsproject.py
|
HowcanoeWang/EasyIDP
|
0d0a0df1287e3c15cda17e8e4cdcbe05f21f7272
|
[
"MIT"
] | null | null | null |
easyidp/core/tests/test_class_reconsproject.py
|
HowcanoeWang/EasyIDP
|
0d0a0df1287e3c15cda17e8e4cdcbe05f21f7272
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import pytest
import easyidp
from easyidp.core.objects import ReconsProject, Points
from easyidp.io import metashape
module_path = os.path.join(easyidp.__path__[0], "io/tests")
def test_init_reconsproject():
attempt1 = ReconsProject("agisoft")
assert attempt1.software == "metashape"
attempt2 = ReconsProject("Metashape")
assert attempt2.software == "metashape"
with pytest.raises(LookupError):
attempt3 = ReconsProject("not_supported_sfm")
def test_local2world2local():
attempt1 = ReconsProject("agisoft")
attempt1.transform.matrix = np.asarray([[-0.86573098, -0.01489186, 0.08977677, 7.65034123],
[0.06972335, 0.44334391, 0.74589315, 1.85910928],
[-0.05848325, 0.74899678, -0.43972184, -0.1835615],
[0., 0., 0., 1.]], dtype=np.float)
w_pos = Points([0.5, 1, 1.5])
l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965])
w_pos_ans = Points([0.4999999999999978, 0.9999999999999993, 1.5])
world_pos = attempt1.local2world(l_pos)
np.testing.assert_array_almost_equal(w_pos_ans.values, world_pos.values, decimal=6)
local_pos = attempt1.world2local(w_pos)
np.testing.assert_array_almost_equal(l_pos.values, local_pos.values, decimal=6)
def test_metashape_project_local_points_on_raw():
test_project_folder = easyidp.test_full_path("data/metashape/goya_test.psx")
chunks = metashape.open_project(test_project_folder)
chunk = chunks[0]
# test for single point
l_pos = Points([7.960064093299587, 1.3019528769064523, -2.6697181763370965])
p_dis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=False)
p_undis_out = chunk.project_local_points_on_raw(l_pos, 0, distortion_correct=True)
# pro_api_out = np.asarray([2218.883386793118, 1991.4709388015149])
my_undistort_out = Points([2220.854889556147, 1992.6933680261686])
my_distort_out = Points([2218.47960556, 1992.46356322])
np.testing.assert_array_almost_equal(p_dis_out.values, my_distort_out.values)
np.testing.assert_array_almost_equal(p_undis_out.values, my_undistort_out.values)
# test for multiple points
l_pos_points = Points([[7.960064093299587, 1.3019528769064523, -2.6697181763370965],
[7.960064093299587, 1.3019528769064523, -2.6697181763370965]])
p_dis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=False)
p_undis_outs = chunk.project_local_points_on_raw(l_pos_points, 0, distortion_correct=True)
my_undistort_outs = Points([[2220.854889556147, 1992.6933680261686],
[2220.854889556147, 1992.6933680261686]])
my_distort_outs = Points([[2218.47960556, 1992.46356322],
[2218.47960556, 1992.46356322]])
np.testing.assert_array_almost_equal(p_dis_outs.values, my_distort_outs.values)
np.testing.assert_array_almost_equal(p_undis_outs.values, my_undistort_outs.values)
def test_world2crs_and_on_raw_images():
test_project_folder = easyidp.test_full_path("data/metashape/wheat_tanashi.psx")
chunks = metashape.open_project(test_project_folder)
chunk = chunks[0]
local = Points([11.870130675203006, 0.858098777517136, -12.987136541275])
geocentric = Points([-3943658.7087006606, 3363404.124223561, 3704651.3067566575])
geodetic = Points([139.54033578028609, 35.73756358928734, 96.87827569602781], columns=['lon', 'lat', 'alt'])
idp_world = chunk.local2world(local)
np.testing.assert_array_almost_equal(idp_world.values, geocentric.values, decimal=1)
idp_crs = chunk.world2crs(idp_world)
np.testing.assert_array_almost_equal(idp_crs.values, geodetic.values)
camera_id = 56 # camera_label = 'DJI_0057'
camera_pix_ans = Points([2391.7104647010146, 1481.8987733175165])
idp_cam_pix = chunk.project_local_points_on_raw(local, camera_id, distortion_correct=True)
np.testing.assert_array_almost_equal(camera_pix_ans.values, idp_cam_pix.values)
| 42.234694
| 112
| 0.723846
| 535
| 4,139
| 5.297196
| 0.293458
| 0.012703
| 0.047636
| 0.063514
| 0.452717
| 0.41602
| 0.357798
| 0.290049
| 0.269584
| 0.162315
| 0
| 0.222932
| 0.167673
| 4,139
| 97
| 113
| 42.670103
| 0.59971
| 0.033341
| 0
| 0.126984
| 0
| 0
| 0.033801
| 0.015023
| 0
| 0
| 0
| 0
| 0.174603
| 1
| 0.063492
| false
| 0
| 0.095238
| 0
| 0.15873
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b915eeed88fbfbe46318454fd21bc9db43d6d639
| 6,023
|
py
|
Python
|
utils/utils_bbox.py
|
MasoonZhang/FasterRConvMixer
|
a7a17d00f716a28a5b301088053e00840c222524
|
[
"MIT"
] | null | null | null |
utils/utils_bbox.py
|
MasoonZhang/FasterRConvMixer
|
a7a17d00f716a28a5b301088053e00840c222524
|
[
"MIT"
] | null | null | null |
utils/utils_bbox.py
|
MasoonZhang/FasterRConvMixer
|
a7a17d00f716a28a5b301088053e00840c222524
|
[
"MIT"
] | 1
|
2022-03-14T05:29:42.000Z
|
2022-03-14T05:29:42.000Z
|
import numpy as np
import torch
from torch.nn import functional as F
from torchvision.ops import nms
def loc2bbox(src_bbox, loc):
if src_bbox.size()[0] == 0:
return torch.zeros((0, 4), dtype=loc.dtype)
src_width = torch.unsqueeze(src_bbox[:, 2] - src_bbox[:, 0], -1)
src_height = torch.unsqueeze(src_bbox[:, 3] - src_bbox[:, 1], -1)
src_ctr_x = torch.unsqueeze(src_bbox[:, 0], -1) + 0.5 * src_width
src_ctr_y = torch.unsqueeze(src_bbox[:, 1], -1) + 0.5 * src_height
dx = loc[:, 0::4]
dy = loc[:, 1::4]
dw = loc[:, 2::4]
dh = loc[:, 3::4]
ctr_x = dx * src_width + src_ctr_x
ctr_y = dy * src_height + src_ctr_y
w = torch.exp(dw) * src_width
h = torch.exp(dh) * src_height
dst_bbox = torch.zeros_like(loc)
dst_bbox[:, 0::4] = ctr_x - 0.5 * w
dst_bbox[:, 1::4] = ctr_y - 0.5 * h
dst_bbox[:, 2::4] = ctr_x + 0.5 * w
dst_bbox[:, 3::4] = ctr_y + 0.5 * h
return dst_bbox
class DecodeBox():
def __init__(self, std, num_classes):
self.std = std
self.num_classes = num_classes + 1
def frcnn_correct_boxes(self, box_xy, box_wh, input_shape, image_shape):
#-----------------------------------------------------------------#
#   Put the y axis first so the box and the image width/height can be multiplied conveniently
#-----------------------------------------------------------------#
box_yx = box_xy[..., ::-1]
box_hw = box_wh[..., ::-1]
input_shape = np.array(input_shape)
image_shape = np.array(image_shape)
box_mins = box_yx - (box_hw / 2.)
box_maxes = box_yx + (box_hw / 2.)
boxes = np.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]], axis=-1)
boxes *= np.concatenate([image_shape, image_shape], axis=-1)
return boxes
def forward(self, roi_cls_locs, roi_scores, rois, image_shape, input_shape, nms_iou = 0.3, confidence = 0.5):
results = []
bs = len(roi_cls_locs)
#--------------------------------#
# batch_size, num_rois, 4
#--------------------------------#
rois = rois.view((bs, -1, 4))
#----------------------------------------------------------------------------------------------------------------#
#   Process each image; since predict.py only feeds in a single image, for i in range(len(mbox_loc)) runs only once
#----------------------------------------------------------------------------------------------------------------#
for i in range(bs):
#----------------------------------------------------------#
#   Reshape the regression parameters
#----------------------------------------------------------#
roi_cls_loc = roi_cls_locs[i] * self.std
#----------------------------------------------------------#
#   The first dimension is the number of proposal boxes, the second is the class
#   The third dimension is the adjustment parameters for that class
#----------------------------------------------------------#
roi_cls_loc = roi_cls_loc.view([-1, self.num_classes, 4])
#-------------------------------------------------------------#
#   Use the classifier network's predictions to adjust the proposals into predicted boxes
# num_rois, 4 -> num_rois, 1, 4 -> num_rois, num_classes, 4
#-------------------------------------------------------------#
roi = rois[i].view((-1, 1, 4)).expand_as(roi_cls_loc)
cls_bbox = loc2bbox(roi.contiguous().view((-1, 4)), roi_cls_loc.contiguous().view((-1, 4)))
cls_bbox = cls_bbox.view([-1, (self.num_classes), 4])
#-------------------------------------------------------------#
#   Normalize the predicted boxes into the 0-1 range
#-------------------------------------------------------------#
cls_bbox[..., [0, 2]] = (cls_bbox[..., [0, 2]]) / input_shape[1]
cls_bbox[..., [1, 3]] = (cls_bbox[..., [1, 3]]) / input_shape[0]
roi_score = roi_scores[i]
prob = F.softmax(roi_score, dim=-1)
results.append([])
for c in range(1, self.num_classes):
#--------------------------------#
#   Take the confidences of all boxes belonging to this class
#   Check whether they exceed the threshold
#--------------------------------#
c_confs = prob[:, c]
c_confs_m = c_confs > confidence
if len(c_confs[c_confs_m]) > 0:
#-----------------------------------------#
#   Keep the boxes whose score is above confidence
#-----------------------------------------#
boxes_to_process = cls_bbox[c_confs_m, c]
confs_to_process = c_confs[c_confs_m]
keep = nms(
boxes_to_process,
confs_to_process,
nms_iou
)
#-----------------------------------------#
#   Keep the boxes retained by non-maximum suppression
#-----------------------------------------#
good_boxes = boxes_to_process[keep]
confs = confs_to_process[keep][:, None]
labels = (c - 1) * torch.ones((len(keep), 1)).cuda() if confs.is_cuda else (c - 1) * torch.ones((len(keep), 1))
#-----------------------------------------#
#   Stack the label, confidence, and box coordinates.
#-----------------------------------------#
c_pred = torch.cat((good_boxes, confs, labels), dim=1).cpu().numpy()
# Append to results
results[-1].extend(c_pred)
if len(results[-1]) > 0:
results[-1] = np.array(results[-1])
box_xy, box_wh = (results[-1][:, 0:2] + results[-1][:, 2:4])/2, results[-1][:, 2:4] - results[-1][:, 0:2]
results[-1][:, :4] = self.frcnn_correct_boxes(box_xy, box_wh, input_shape, image_shape)
return results
| 45.628788
| 136
| 0.381205
| 601
| 6,023
| 3.567388
| 0.21797
| 0.025187
| 0.020989
| 0.039179
| 0.157183
| 0.102612
| 0.059701
| 0.041978
| 0
| 0
| 0
| 0.028913
| 0.305164
| 6,023
| 131
| 137
| 45.977099
| 0.483393
| 0.267309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053333
| false
| 0
| 0.053333
| 0
| 0.173333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b923cd998b5a122c2fa8e86b09305b2b291d6507
| 3,873
|
py
|
Python
|
platformio/commands/home/run.py
|
Granjow/platformio-core
|
71ae579bc07b2e11fec16acda482dea04bc3a359
|
[
"Apache-2.0"
] | 4,744
|
2016-11-28T14:37:47.000Z
|
2022-03-31T12:35:56.000Z
|
platformio/commands/home/run.py
|
Granjow/platformio-core
|
71ae579bc07b2e11fec16acda482dea04bc3a359
|
[
"Apache-2.0"
] | 3,424
|
2016-11-27T22:45:41.000Z
|
2022-03-31T21:40:03.000Z
|
platformio/commands/home/run.py
|
Granjow/platformio-core
|
71ae579bc07b2e11fec16acda482dea04bc3a359
|
[
"Apache-2.0"
] | 576
|
2016-12-01T18:48:22.000Z
|
2022-03-30T02:27:35.000Z
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from urllib.parse import urlparse
import click
import uvicorn
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.responses import PlainTextResponse
from starlette.routing import Mount, Route, WebSocketRoute
from starlette.staticfiles import StaticFiles
from starlette.status import HTTP_403_FORBIDDEN
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
from platformio.compat import aio_get_running_loop
from platformio.exception import PlatformioException
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import force_exit
class ShutdownMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", {}):
await shutdown_server()
await self.app(scope, receive, send)
async def shutdown_server(_=None):
aio_get_running_loop().call_later(0.5, force_exit)
return PlainTextResponse("Server has been shutdown!")
async def protected_page(_):
return PlainTextResponse(
"Protected PlatformIO Home session", status_code=HTTP_403_FORBIDDEN
)
def run_server(host, port, no_open, shutdown_timeout, home_url):
contrib_dir = get_core_package_dir("contrib-piohome")
if not os.path.isdir(contrib_dir):
raise PlatformioException("Invalid path to PIO Home Contrib")
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
ws_rpc_factory.addObjectHandler(AccountRPC(), namespace="account")
ws_rpc_factory.addObjectHandler(AppRPC(), namespace="app")
ws_rpc_factory.addObjectHandler(IDERPC(), namespace="ide")
ws_rpc_factory.addObjectHandler(MiscRPC(), namespace="misc")
ws_rpc_factory.addObjectHandler(OSRPC(), namespace="os")
ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace="core")
ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace="project")
path = urlparse(home_url).path
routes = [
WebSocketRoute(path + "wsrpc", ws_rpc_factory, name="wsrpc"),
Route(path + "__shutdown__", shutdown_server, methods=["POST"]),
Mount(path, StaticFiles(directory=contrib_dir, html=True), name="static"),
]
if path != "/":
routes.append(Route("/", protected_page))
uvicorn.run(
Starlette(
middleware=[Middleware(ShutdownMiddleware)],
routes=routes,
on_startup=[
lambda: click.echo(
"PIO Home has been started. Press Ctrl+C to shutdown."
),
lambda: None if no_open else click.launch(home_url),
],
),
host=host,
port=port,
log_level="warning",
)
| 38.73
| 88
| 0.737155
| 479
| 3,873
| 5.803758
| 0.386221
| 0.060432
| 0.038849
| 0.07482
| 0.103597
| 0.093165
| 0
| 0
| 0
| 0
| 0
| 0.004992
| 0.172476
| 3,873
| 99
| 89
| 39.121212
| 0.862403
| 0.150529
| 0
| 0.028169
| 0
| 0
| 0.079365
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0
| 0.309859
| 0
| 0.380282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
b925f7b3126896a3611797c97e1fa8d0eee2234c
| 564
|
py
|
Python
|
webscraping.py
|
carvalho-fdec/DesafioDSA
|
fec9742bd77ddc3923ed616b6511cce87de48968
|
[
"MIT"
] | null | null | null |
webscraping.py
|
carvalho-fdec/DesafioDSA
|
fec9742bd77ddc3923ed616b6511cce87de48968
|
[
"MIT"
] | null | null | null |
webscraping.py
|
carvalho-fdec/DesafioDSA
|
fec9742bd77ddc3923ed616b6511cce87de48968
|
[
"MIT"
] | null | null | null |
# webscraping test
import urllib.request
from bs4 import BeautifulSoup
with urllib.request.urlopen('http://www.netvasco.com.br') as url:
page = url.read()
#print(page)
print(url.geturl())
print(url.info())
print(url.getcode())
# Parse the HTML in the 'page' variable and store it as a Beautiful Soup object
soup = BeautifulSoup(page, 'html.parser')
#print(soup.prettify())
print(soup.title)
print(soup.title.string)
print(soup.title.name)
soup_a = soup.find_all('a')[:10]
for a in soup_a:
print(a.get('href'))
print(a.get_text())
| 18.193548
| 74
| 0.687943
| 86
| 564
| 4.465116
| 0.55814
| 0.09375
| 0.109375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006316
| 0.157801
| 564
| 30
| 75
| 18.8
| 0.802105
| 0.216312
| 0
| 0
| 0
| 0
| 0.097448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.133333
| 0
| 0.133333
| 0.533333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
b9312660991c249b5bd6faf4ead63f4150e99b7e
| 4,915
|
py
|
Python
|
pysnmp/EXTREME-RTSTATS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/EXTREME-RTSTATS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/EXTREME-RTSTATS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module EXTREME-RTSTATS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/EXTREME-BASE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:53:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection")
extremeAgent, = mibBuilder.importSymbols("EXTREME-BASE-MIB", "extremeAgent")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Unsigned32, iso, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Bits, MibIdentifier, ModuleIdentity, Counter64, Counter32, NotificationType, Integer32, IpAddress, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "iso", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Bits", "MibIdentifier", "ModuleIdentity", "Counter64", "Counter32", "NotificationType", "Integer32", "IpAddress", "TimeTicks")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
extremeRtStats = ModuleIdentity((1, 3, 6, 1, 4, 1, 1916, 1, 11))
if mibBuilder.loadTexts: extremeRtStats.setLastUpdated('9906240000Z')
if mibBuilder.loadTexts: extremeRtStats.setOrganization('Extreme Networks, Inc.')
extremeRtStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1), )
if mibBuilder.loadTexts: extremeRtStatsTable.setStatus('current')
extremeRtStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1), ).setIndexNames((0, "EXTREME-RTSTATS-MIB", "extremeRtStatsIndex"))
if mibBuilder.loadTexts: extremeRtStatsEntry.setStatus('current')
extremeRtStatsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsIndex.setStatus('current')
extremeRtStatsIntervalStart = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsIntervalStart.setStatus('current')
extremeRtStatsCRCAlignErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsCRCAlignErrors.setStatus('current')
extremeRtStatsUndersizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsUndersizePkts.setStatus('current')
extremeRtStatsOversizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsOversizePkts.setStatus('current')
extremeRtStatsFragments = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsFragments.setStatus('current')
extremeRtStatsJabbers = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsJabbers.setStatus('current')
extremeRtStatsCollisions = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsCollisions.setStatus('current')
extremeRtStatsTotalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsTotalErrors.setStatus('current')
extremeRtStatsUtilization = MibTableColumn((1, 3, 6, 1, 4, 1, 1916, 1, 11, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extremeRtStatsUtilization.setStatus('current')
mibBuilder.exportSymbols("EXTREME-RTSTATS-MIB", extremeRtStatsEntry=extremeRtStatsEntry, extremeRtStatsOversizePkts=extremeRtStatsOversizePkts, extremeRtStatsUndersizePkts=extremeRtStatsUndersizePkts, extremeRtStatsTable=extremeRtStatsTable, extremeRtStatsTotalErrors=extremeRtStatsTotalErrors, extremeRtStats=extremeRtStats, PYSNMP_MODULE_ID=extremeRtStats, extremeRtStatsCollisions=extremeRtStatsCollisions, extremeRtStatsCRCAlignErrors=extremeRtStatsCRCAlignErrors, extremeRtStatsJabbers=extremeRtStatsJabbers, extremeRtStatsIndex=extremeRtStatsIndex, extremeRtStatsUtilization=extremeRtStatsUtilization, extremeRtStatsIntervalStart=extremeRtStatsIntervalStart, extremeRtStatsFragments=extremeRtStatsFragments)
| 114.302326
| 713
| 0.790031
| 498
| 4,915
| 7.793173
| 0.25502
| 0.007215
| 0.075754
| 0.013399
| 0.345014
| 0.262304
| 0.172121
| 0.172121
| 0.172121
| 0.168771
| 0
| 0.068882
| 0.075483
| 4,915
| 42
| 714
| 117.02381
| 0.785211
| 0.066938
| 0
| 0
| 0
| 0
| 0.157745
| 0.009613
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b936e2da1dfb0c50e0a4123e54c302664e300cf0
| 4,454
|
py
|
Python
|
tests/core_ptl/check_for_ranks.py
|
PatrykNeubauer/NeMo
|
3ada744b884dba5f233f22c6991fc6092c6ca8d0
|
[
"Apache-2.0"
] | 2
|
2021-09-21T07:36:20.000Z
|
2022-02-05T15:29:04.000Z
|
tests/core_ptl/check_for_ranks.py
|
PatrykNeubauer/NeMo
|
3ada744b884dba5f233f22c6991fc6092c6ca8d0
|
[
"Apache-2.0"
] | null | null | null |
tests/core_ptl/check_for_ranks.py
|
PatrykNeubauer/NeMo
|
3ada744b884dba5f233f22c6991fc6092c6ca8d0
|
[
"Apache-2.0"
] | 12
|
2021-06-20T08:56:10.000Z
|
2022-03-16T19:07:10.000Z
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import torch
from omegaconf import OmegaConf
from pytorch_lightning import Trainer
from pytorch_lightning.utilities.distributed import rank_zero_only
from nemo.core import ModelPT
from nemo.utils import logging
from nemo.utils.exp_manager import ExpManagerConfig, exp_manager
class OnesDataset(torch.utils.data.Dataset):
def __init__(self, dataset_len):
super().__init__()
self.__dataset_len = dataset_len
def __getitem__(self, *args):
return torch.ones(2)
def __len__(self):
return self.__dataset_len
class ExampleModel(ModelPT):
def __init__(self, *args, **kwargs):
cfg = OmegaConf.structured({})
super().__init__(cfg, trainer=kwargs.get('trainer', None))
# dummy parameter in order to allow DDP to execute
self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1)
def train_dataloader(self):
return None
def val_dataloader(self):
return None
def predict_dataloader(self):
dataset = OnesDataset(2)
return torch.utils.data.DataLoader(dataset, batch_size=2)
def forward(self, batch):
return batch.mean()
def validation_step(self, batch, batch_idx):
return self(batch)
def training_step(self, batch, batch_idx):
return self(batch)
def list_available_models(self):
pass
def setup_training_data(self):
pass
def setup_validation_data(self):
pass
def validation_epoch_end(self, loss):
self.log("val_loss", torch.stack(loss).mean())
def instantiate_multinode_ddp_if_possible():
num_gpus = torch.cuda.device_count()
trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None)
exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version="")
exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg))
return trainer
def setup_model(trainer: Trainer):
model = ExampleModel(trainer=trainer)
logging.info(f"M.Global Rank:{model.global_rank}")
logging.info(f"M.Local Rank:{model.local_rank}")
logging.info(f"M.World Size:{model.trainer.world_size}")
trainer.predict(model)
return model
def get_rank_info(texts: list, rank_key: str) -> int:
for line in texts:
if rank_key in line:
rank_value = line.split(":")[-1]
rank_value = int(rank_value)
return rank_value
print("Could not find the correct rank key !")
exit(1)
@rank_zero_only
def check_model_ranks(model: ExampleModel):
basedir = os.path.join('./ddp_check/', 'default', 'version_0')
file_template = "nemo_log_globalrank-{rank}_localrank-{rank}.txt"
world_size = torch.cuda.device_count()
for rank in range(world_size):
filename = file_template.format(rank=rank)
filepath = os.path.join(basedir, filename)
with open(filepath, 'r') as f:
texts = f.readlines()
texts = [t.replace("\n", "") for t in texts]
log_global_rank = get_rank_info(texts, rank_key='M.Global Rank')
log_world_size = get_rank_info(texts, rank_key='M.World Size')
if log_global_rank != rank:
print("Logged global rank is not equal to trainer.global_rank !")
exit(1)
if log_world_size != world_size:
print("Logged world size if not equal to trainer.world_size !")
exit(1)
@rank_zero_only
def cleanup():
if os.path.exists('./ddp_check'):
shutil.rmtree('./ddp_check', ignore_errors=True)
def run_checks():
cleanup()
trainer = instantiate_multinode_ddp_if_possible()
model = setup_model(trainer)
check_model_ranks(model)
print("DDP checks passed !")
cleanup()
if __name__ == '__main__':
run_checks()
| 28.551282
| 102
| 0.687023
| 604
| 4,454
| 4.836093
| 0.347682
| 0.030811
| 0.012325
| 0.013352
| 0.109552
| 0.05683
| 0.043136
| 0.026703
| 0.026703
| 0
| 0
| 0.005402
| 0.210373
| 4,454
| 155
| 103
| 28.735484
| 0.825135
| 0.141895
| 0
| 0.145833
| 0
| 0
| 0.113738
| 0.032834
| 0
| 0
| 0
| 0
| 0
| 1
| 0.208333
| false
| 0.041667
| 0.09375
| 0.072917
| 0.4375
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b93839299c30aa23ab066b85969c7c27e043c202
| 1,143
|
py
|
Python
|
helpers/json_manager.py
|
Lofi-Lemonade/Python-Discord-Bot-Template
|
4cb79197c751c88100ad396adb38e88bf2a4d1ed
|
[
"Apache-2.0"
] | null | null | null |
helpers/json_manager.py
|
Lofi-Lemonade/Python-Discord-Bot-Template
|
4cb79197c751c88100ad396adb38e88bf2a4d1ed
|
[
"Apache-2.0"
] | null | null | null |
helpers/json_manager.py
|
Lofi-Lemonade/Python-Discord-Bot-Template
|
4cb79197c751c88100ad396adb38e88bf2a4d1ed
|
[
"Apache-2.0"
] | null | null | null |
""""
Copyright © Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja)
Description:
This is a template to create your own discord bot in python.
Version: 4.1
"""
import json
def add_user_to_blacklist(user_id: int) -> None:
"""
This function will add a user based on its ID in the blacklist.json file.
:param user_id: The ID of the user that should be added into the blacklist.json file.
"""
with open("blacklist.json", "r+") as file:
file_data = json.load(file)
file_data["ids"].append(user_id)
with open("blacklist.json", "w") as file:
file.seek(0)
json.dump(file_data, file, indent=4)
def remove_user_from_blacklist(user_id: int) -> None:
"""
This function will remove a user based on its ID from the blacklist.json file.
:param user_id: The ID of the user that should be removed from the blacklist.json file.
"""
with open("blacklist.json", "r") as file:
file_data = json.load(file)
file_data["ids"].remove(user_id)
with open("blacklist.json", "w") as file:
file.seek(0)
json.dump(file_data, file, indent=4)
| 31.75
| 91
| 0.659668
| 181
| 1,143
| 4.071823
| 0.353591
| 0.141113
| 0.086839
| 0.108548
| 0.719132
| 0.708277
| 0.662144
| 0.662144
| 0.559023
| 0.559023
| 0
| 0.012346
| 0.220472
| 1,143
| 35
| 92
| 32.657143
| 0.813692
| 0.433946
| 0
| 0.533333
| 0
| 0
| 0.111296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b93a3daf85b033d7039d8c3747eadb457802db6b
| 2,814
|
py
|
Python
|
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | 2
|
2015-01-19T14:50:55.000Z
|
2015-01-28T12:45:59.000Z
|
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | null | null | null |
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | null | null | null |
import os, sys
import random
import string
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
letters = string.ascii_letters
numbers = string.digits
punctuation = string.punctuation
def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation):
"""Generate a password by include enough random
characters to meet the password length restriction.
In addition, the user can specify that at least one
of each of the classes of character be used.
"""
#
# Any combination of characters is valid
#
valid_characters = ""
if at_least_one_letter:
valid_characters += letters
if at_least_one_number:
valid_characters += numbers
if at_least_one_punctuation:
valid_characters += punctuation
#
# Start with a blank password and then go round enough
# times to make a password of the required length.
#
password = ""
for i in range(password_length):
#
# Each time around, ensure that one of each of the selected
# groups is chosen, and then just choose randomly from all
# groups.
#
if at_least_one_letter:
character = random.choice(letters)
at_least_one_letter = False
elif at_least_one_number:
character = random.choice(numbers)
at_least_one_number = False
elif at_least_one_punctuation:
character = random.choice(punctuation)
at_least_one_punctuation = False
else:
character = random.choice(valid_characters)
password += character
#
# Finally, shuffle the password so we don't always get a
# letter at the beginning, with a number after and some
# punctuation.
#
characters = list(password)
#
# random.shuffle shuffles a list *in place*
#
random.shuffle(characters)
#
# X.join(...) means: return all the strings in (...) joined by X
# ", ".join(['Eggs', 'Bacon', 'Beans']) => "Eggs, Bacon, Beans"
# But if you want to generate *real* .csv files, use the csv module
# because there are lots of corner-cases.
#
password = "".join(characters)
return password
if __name__ == '__main__':
password_length = int(input("How many letters? "))
at_least_one_letter = "Y" == (input("At least one letter [Y/n]? ").upper() or "Y")
at_least_one_number = "Y" == (input("At least one number [Y/n]? ").upper() or "Y")
at_least_one_punctuation = "Y" == (input("At least one punctuation [Y/n]? ").upper() or "Y")
password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation)
print("Your password is: {}".format(password))
| 33.5
| 108
| 0.658138
| 369
| 2,814
| 4.821138
| 0.365854
| 0.086565
| 0.123665
| 0.062957
| 0.229342
| 0.106802
| 0.106802
| 0.106802
| 0.084317
| 0.084317
| 0
| 0.001428
| 0.253376
| 2,814
| 83
| 109
| 33.903614
| 0.845312
| 0.327292
| 0
| 0.046512
| 0
| 0
| 0.074878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0.255814
| 0.069767
| 0
| 0.116279
| 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b93b21d31a5eecb527d2b3ad7f00cf5d4683d661
| 1,535
|
py
|
Python
|
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | 1
|
2018-04-15T19:35:54.000Z
|
2018-04-15T19:35:54.000Z
|
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | null | null | null |
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | null | null | null |
# forms are not just about display, instead they are more of validation
# wtf forms protect our site against csrf attacks
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, TextAreaField
from wtforms.validators import (DataRequired, Regexp, ValidationError, Email,
Length, EqualTo)
from models import User
def name_exists(form, field):
if User.select().where(User.username == field.data).exists():
raise ValidationError('User with this name already exists.')
def email_exists(form, field):
if User.select().where(User.email == field.data).exists():
raise ValidationError('User with this email already exists.')
class RegisterForm(FlaskForm):
username = StringField(
'Username', # is the label
validators=[
DataRequired(),
Regexp(
r'^[a-zA-Z0-9_]+$',
message = ("Username should be one word, letters, numbers and underscores only.")
),
name_exists
])
email = StringField(
'Email',
validators=[
DataRequired(),
Email(),
email_exists
])
password = PasswordField(
'Password',
validators=[
DataRequired(),
Length(min=8),
EqualTo('password2', message = 'Passwords must match')
])
password2 = PasswordField(
'Confirm Password',
validators=[DataRequired()
])
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
class PostForm(FlaskForm):
content = TextAreaField("What's Up?", validators = [DataRequired()])
| 25.583333
| 85
| 0.712704
| 171
| 1,535
| 6.362573
| 0.48538
| 0.141544
| 0.082721
| 0.03125
| 0.334559
| 0.240809
| 0.152574
| 0.152574
| 0
| 0
| 0
| 0.003891
| 0.162866
| 1,535
| 59
| 86
| 26.016949
| 0.842802
| 0.084691
| 0
| 0.222222
| 0
| 0
| 0.172734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0.155556
| 0.088889
| 0
| 0.355556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9421dbb7e263a5a3de9a9e29e270b09ceba630c
| 1,004
|
py
|
Python
|
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
class Command(BaseCommand):
help = "Creates a default super user if one doesn't already exist. " \
"This is designed to be used in the docker-compose.yml to create an initial super user on deployment."
def handle(self, *args, **kwargs):
"""
Checks whether any super users exist and creates a default one if not
:param args: Unused
:param kwargs: Unused
"""
super_users = get_user_model().objects.filter(is_superuser=True)
if super_users.exists():
self.stdout.write('A superuser already exists, not creating one')
else:
get_user_model().objects.create_superuser(email="admin@events.com", password="EventsEvents")
self.stdout.write('Created default superuser "admin@events.com"')
self.stdout.write('Make sure you change the password immediately!')
| 41.833333
| 114
| 0.661355
| 129
| 1,004
| 5.069767
| 0.581395
| 0.03211
| 0.055046
| 0.058104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1,004
| 23
| 115
| 43.652174
| 0.868526
| 0.111554
| 0
| 0
| 0
| 0.076923
| 0.387214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.153846
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b942ff3dafb5c886434a478e8bfb0592e83afd1c
| 6,215
|
bzl
|
Python
|
antlir/bzl/image_layer.bzl
|
zeroxoneb/antlir
|
811d88965610d16a5c85d831d317f087797ca732
|
[
"MIT"
] | 28
|
2020-08-11T16:22:46.000Z
|
2022-03-04T15:41:52.000Z
|
antlir/bzl/image_layer.bzl
|
zeroxoneb/antlir
|
811d88965610d16a5c85d831d317f087797ca732
|
[
"MIT"
] | 137
|
2020-08-11T16:07:49.000Z
|
2022-02-27T10:59:05.000Z
|
antlir/bzl/image_layer.bzl
|
zeroxoneb/antlir
|
811d88965610d16a5c85d831d317f087797ca732
|
[
"MIT"
] | 10
|
2020-09-10T00:01:28.000Z
|
2022-03-08T18:00:28.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
An `image.layer` is a set of `feature` with some additional parameters. Its
purpose is to materialize those `feature`s as a btrfs subvolume in the
per-repo `buck-image/out/volume/targets`.
We call the subvolume a "layer" because it can be built on top of a snapshot
of its `parent_layer`, and thus can be represented as a btrfs send-stream for
more efficient storage & distribution.
The Buck output of an `image.layer` target is a JSON file with information
on how to find the resulting layer in the per-repo
`buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`.
## Implementation notes
The implementation of this converter deliberately minimizes the amount of
business logic in its command. The converter must include **only** our
interactions with the buck target graph. Everything else should be
delegated to subcommands.
### Command
In composing the `bash` command, our core maxim is: make it a hermetic
function of the converter's inputs -- do not read data from disk, do not
insert disk paths into the command, do not do anything that might cause the
bytes of the command to vary between machines or between runs. To achieve
this, we use Buck macros to resolve all paths, including those to helper
scripts. We rely on environment variables or pipes to pass data between the
helper scripts.
Another reason to keep this converter minimal is that `buck test` cannot
make assertions about targets that fail to build. Since we only have the
ability to test the "good" targets, it behooves us to put most logic in
external scripts, so that we can unit-test its successes **and** failures
thoroughly.
### Output
We mark `image.layer` uncacheable, because there's no easy way to teach Buck
to serialize a btrfs subvolume (for that, we have `package.new`).
That said, we should still follow best practices to avoid problems if e.g.
the user renames their repo, or similar. These practices include:
- The output JSON must store no absolute paths.
- Store Buck target paths instead of paths into the output directory.
### Dependency resolution
An `image.layer` consumes a set of `feature` outputs to decide what to put into
the btrfs subvolume. These outputs are actually just JSON files that
reference other targets, and do not contain the data to be written into the
image.
Therefore, `image.layer` has to explicitly tell buck that it needs all
direct dependencies of its `feature`s to be present on disk -- see our
`attrfilter` queries below. Without this, Buck would merrily fetch just
the `feature` JSONs from its cache, and not provide us with any of the
build artifacts that comprise the image.
We do NOT need the direct dependencies of the parent layer's features,
because we treat the parent layer as a black box -- whatever it has laid
down in the image, that's what it provides (and we don't care about how).
The consequences of this information hiding are:
- Better Buck cache efficiency -- we don't have to download
the dependencies of the ancestor layers' features. Doing that would be
wasteful, since those bits are redundant with what's in the parent.
- Ability to use genrule image layers / apply non-pure post-processing to
a layer. In terms of engineering, both of these non-pure approaches are
a terrible idea and a maintainability headache, but they do provide a
useful bridge for transitioning to Buck image builds from legacy
imperative systems.
- The image compiler needs a little extra code to walk the parent layer and
determine what it provides.
- We cannot have "unobservable" dependencies between features. Since
feature dependencies are expected to routinely cross layer boundaries,
feature implementations are forced only to depend on data that can be
inferred from the filesystem -- since this is all that the parent layer
implementation can do. NB: This is easy to relax in the future by
writing a manifest with additional metadata into each layer, and using
that metadata during compilation.
"""
load(":compile_image_features.bzl", "compile_image_features")
load(":image_layer_utils.bzl", "image_layer_utils")
load(":image_utils.bzl", "image_utils")
def image_layer(
name,
parent_layer = None,
features = None,
flavor = None,
flavor_config_override = None,
antlir_rule = "user-internal",
**image_layer_kwargs):
"""
Arguments
- `parent_layer`: The name of another `image_layer` target, on
top of which the current layer will install its features.
- `features`: List of `feature` target paths and/or
nameless structs from `feature.new`.
- `flavor`: Picks default build options for the layer, including
`build_appliance`, RPM installer, and others. See `flavor_helpers.bzl`
for details.
- `flavor_config_override`: A struct that can override the default
values fetched from `REPO_CFG[flavor].flavor_to_config`.
- `mount_config`: Specifies how this layer is mounted in the
`mounts` field of a `feature` of a parent layer. See
the field in `_image_layer_impl` in `image_layer_utils.bzl`
- `runtime`: A list of desired helper buck targets to be emitted.
`container` is always included in the list by default.
See the field in `_image_layer_impl` in `image_layer_utils.bzl` and the
[docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of
possible helpers, their respective behaviours, and how to invoke them.
"""
image_layer_utils.image_layer_impl(
_rule_type = "image_layer",
_layer_name = name,
# Build a new layer. It may be empty.
_make_subvol_cmd = compile_image_features(
name = name,
current_target = image_utils.current_target(name),
parent_layer = parent_layer,
features = features,
flavor = flavor,
flavor_config_override = flavor_config_override,
),
antlir_rule = antlir_rule,
**image_layer_kwargs
)
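A hypothetical BUCK-file sketch of calling this macro; the load label, all target names, and the flavor key below are illustrative, not taken from the repo:

    load("//antlir/bzl:image_layer.bzl", "image_layer")

    image_layer(
        name = "web-server",            # the layer target
        parent_layer = ":base-os",      # hypothetical parent layer target
        features = [":install-nginx"],  # hypothetical feature target
        flavor = "centos8",             # hypothetical key into REPO_CFG[flavor]
    )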
| 44.078014
| 79
| 0.740628
| 950
| 6,215
| 4.774737
| 0.374737
| 0.039683
| 0.016534
| 0.011905
| 0.037037
| 0.037037
| 0.037037
| 0.037037
| 0.037037
| 0.020723
| 0
| 0
| 0.20177
| 6,215
| 140
| 80
| 44.392857
| 0.914332
| 0.8428
| 0
| 0
| 0
| 0
| 0.157596
| 0.080499
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9458ab72f55b4db845f6d76e44dba3b00e000ed
| 6,265
|
py
|
Python
|
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
import os
import sys
import pandas as pd
from datetime import datetime
from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION
from src.features.helpers.processing import add_missing_timestamp_values
from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \
normalize_according_to_play_direction, check_group_event
from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation
week_num = int(sys.argv[1])
data_v3 = DataV3(DATA_V3_SUBVERSION)
save_file_path = data_v3.get_step1_checkpoint_path(week_num)
try:
clean_df = pd.read_csv(save_file_path)
save_file_exists = True
except FileNotFoundError:
save_file_exists = False
if not save_file_exists:
print("Started loading data")
play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv'))
games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv'))
week_and_games = games_df[games_df.week == week_num]
tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv'))
print("Data loaded. Start processing timestamps")
tracking_df = add_missing_timestamp_values(tracking_df)
games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId')
m_grouped = games_n_plays_df.groupby(['gameId', 'playId'])
df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId'])
# Remove all events without 'pass_forward'
df_t_grouped = df_t.groupby(['gameId', 'playId'])
df_t_v3 = df_t.copy().sort_index()
for name, group in df_t_grouped:
game_id, play_id = name
# if group does not contain pass forward, drop it
if all(group.event != 'pass_forward'):
df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)]
df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event'])
df_t_v3_s = df_t_v3_s.reset_index(drop=True)
df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId'])
# remove all values before 'pass_forward'
print("Removing all values before pass forward event...")
for name, group in df_t_grouped:
game_id, play_id = name
pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1
remove_start = group.index.min()
df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index)
pd.options.mode.chained_assignment = None
gb = df_t_v3_s.groupby(['gameId', 'playId'])
print('Getting closest players...')
keep_indices = []
for name, group in gb:
game_id, play_id = name
try:
event_3rd = group.event.unique()[2]
except IndexError:
print('Number of events is < 3, skipping...')
continue
situation_df = group[group.event == event_3rd]
# convert dataframe into series
ball_row = situation_df[situation_df.team == 'football'].head(1)
# remove ball
player_situation_df = situation_df[situation_df.team != 'football']
try:
p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item())
except ValueError:
print('Value Error raised. This group will be skipped.')
continue
p_n_b_indices = get_players_and_ball_indices(group, p1, p2)
if p_n_b_indices:
keep_indices.extend(p_n_b_indices)
clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)]
clean_df.to_csv(
save_file_path,
index=False
)
print('Normalize...')
clean_df = normalize_according_to_play_direction(clean_df)
clean_df['homeHasPossession'] = clean_df.apply(
lambda row: home_has_possession(row), axis=1
)
clean_df['teamSituation'] = clean_df.apply(
lambda row: calculate_team_sitation(row), axis=1
)
print('Creating features...')
min_df = clean_df[[
'time', 'x', 'y', 's', 'o', 'dir', 'event', 'team',
'gameId', 'playId', 'frameId', 'isDefensivePI'
]]
gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId'])
# ball direction and orientation are NaN
calc_df = pd.DataFrame(
columns=[
'time',
'att_def_d', 'att_ball_d', 'def_ball_d',
'att_s', 'def_s', 'ball_s',
'att_o', 'def_o',
'att_dir', 'def_dir',
'event', 'gameId', 'playId', 'frameId', 'isDefensivePI'
]
)
GROUP_SIZE_MINIMUM = 3
for name, group in gb_2:
game_id, play_id, frameId = name
if len(group) < GROUP_SIZE_MINIMUM:
continue
ball = group[group.teamSituation == 'football'].head(1).squeeze()
p_att = group[group.teamSituation == 'attacking'].head(1).squeeze()
p_def = group[group.teamSituation == 'defending'].head(1).squeeze()
group_row = group.head(1).squeeze()
group_events = group.event.unique().tolist()
dict_to_append = {
'time': group_row.time,
'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y),
'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y),
'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y),
'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s,
'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a,
'att_o': p_att.o, 'def_o': p_def.o,
'att_dir': p_att.dir, 'def_dir': p_def.dir,
'event': group_row.event,
'pass_arrived': check_group_event(group_events, 'pass_arrived'),
'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'),
'tackle': check_group_event(group_events, 'tackle'),
'first_contact': check_group_event(group_events, 'first_contact'),
'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'),
'out_of_bounds': check_group_event(group_events, 'out_of_bounds'),
'week': week_num,
'gameId': group_row.gameId,
'playId': group_row.playId,
'frameId': group_row.frameId,
'isDefensivePI': group_row.isDefensivePI
}
calc_df = calc_df.append(
dict_to_append,
ignore_index=True
)
print("Saving data...")
calc_df.to_csv(
data_v3.get_step1_end_path(week_num),
index=False
)
print(f'End time: {datetime.now().strftime("%H:%M:%S")}')
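A run sketch: the script takes the week number as its only argument, so assuming the raw CSVs are in place under RAW_DATA_DIR:

    import subprocess
    # process week 1; expects plays.csv, games.csv and week1.csv under RAW_DATA_DIR
    subprocess.run(
        ["python", "src/features/v3/proc_v3_n1_calc_distance.py", "1"],
        check=True,
    )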
| 35.596591
| 119
| 0.675499
| 934
| 6,265
| 4.17666
| 0.217345
| 0.017688
| 0.020508
| 0.015381
| 0.277108
| 0.136632
| 0.102538
| 0.067931
| 0.058703
| 0.044348
| 0
| 0.009312
| 0.194413
| 6,265
| 175
| 120
| 35.8
| 0.763622
| 0.03336
| 0
| 0.095588
| 0
| 0
| 0.168788
| 0.013721
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.051471
| 0.058824
| 0
| 0.058824
| 0.073529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9475ee1123a7f8c87eb161ddf2246d4b5a64a79
| 1,847
|
py
|
Python
|
fst_web/demo_settings.py
|
kamidev/autobuild_fst
|
6baffa955075ffe3c5f197789e9fd065fa74058e
|
[
"BSD-3-Clause"
] | null | null | null |
fst_web/demo_settings.py
|
kamidev/autobuild_fst
|
6baffa955075ffe3c5f197789e9fd065fa74058e
|
[
"BSD-3-Clause"
] | null | null | null |
fst_web/demo_settings.py
|
kamidev/autobuild_fst
|
6baffa955075ffe3c5f197789e9fd065fa74058e
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
ROOT = os.path.abspath(os.path.dirname(__file__))
path = lambda *args: os.path.join(ROOT, *args)
""" Template for local settings of the FST webservice (fst_web)
Please edit this file and replace all generic values with values suitable to
your particular installation.
"""
# NOTE! Always set this to False before deploying
DEBUG = True
# NOTE! Before deploying on a public server, uncomment ALLOWED_HOSTS
# and add IP address and/or domain of your site
ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu']
# Look for instance-specific settings
try:
from .instance_settings import *
except ImportError:
from .default_instance_settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': path('database/fst_demo.db')
}
}
LOG_LEVEL = "DEBUG"
# Enable this to override global DB Debug setting
# DB_DEBUG_LEVEL = "DEBUG"
# Setup mail server for sending email notifications.
# You can use any mail server you want.
# But a very simple way to get started is to use a gmail account.
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
# EMAIL_HOST_USER = 'your email'
# EMAIL_HOST_PASSWORD = 'your password'
# Admins specified here receive email notifications on critical errors.
ADMINS = ()
MANAGERS = ADMINS
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = os.path.join("/dokument/")
# Site and port for hosting FST service (do not add ending '/').
FST_SITE_URL = "http://127.0.0.1:8000"
# TODO - Check if FST_INSTANCE_PREFIX can be removed
# Site and port of specific FST instance (do not add ending '/').
FST_INSTANCE_URL = os.path.join(
"http://127.0.0.1:8000",
FST_INSTANCE_PREFIX)
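A hypothetical instance_settings.py that the try/except above would pick up; every value here is illustrative:

    # instance_settings.py -- the wildcard import in demo_settings.py exposes
    # any name defined here, overriding the defaults.
    FST_INSTANCE_PREFIX = "demo"               # consumed by FST_INSTANCE_URL above
    EMAIL_HOST_USER = "fst-demo@example.com"   # hypothetical
    EMAIL_HOST_PASSWORD = "change-me"          # hypothetical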
| 28.415385
| 76
| 0.721711
| 281
| 1,847
| 4.629893
| 0.519573
| 0.023059
| 0.023059
| 0.013836
| 0.047656
| 0.021522
| 0
| 0
| 0
| 0
| 0
| 0.020182
| 0.168381
| 1,847
| 64
| 77
| 28.859375
| 0.826823
| 0.494857
| 0
| 0
| 0
| 0
| 0.226287
| 0.03523
| 0
| 0
| 0
| 0.015625
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b94890b4860019fd993040c0790c0701fc24a0c5
| 2,919
|
py
|
Python
|
main.py
|
valurhrafn/chromium-sync
|
df5e3299d179fc47ff34d1a95409383f46aac4d4
|
[
"MIT"
] | 4
|
2017-03-27T02:25:07.000Z
|
2021-03-07T21:40:58.000Z
|
main.py
|
valurhrafn/chromium-sync
|
df5e3299d179fc47ff34d1a95409383f46aac4d4
|
[
"MIT"
] | null | null | null |
main.py
|
valurhrafn/chromium-sync
|
df5e3299d179fc47ff34d1a95409383f46aac4d4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.api import users
import webapp2
# For datastore
import cgi
import urllib
from google.appengine.ext import ndb
class UserId(ndb.Model):
content = ndb.StringProperty()
date = ndb.DateTimeProperty(auto_now_add=True)
@classmethod
def query_user(cls, ancestor_key):
return cls.query(ancestor=ancestor_key).order(-cls.date)
# ************** MainHandler ************* #
class MainHandler(webapp2.RequestHandler):
def get(self):
self.response.write('Hello world!')
# ************** GetUser ************* #
class GetUser(webapp2.RequestHandler):
def get(self):
self.response.out.write('<html><body>')
client_id = self.request.get('client_id')
ancestor_key = ndb.Key("ID", client_id or "*no_id*")
userids = UserId.query_user(ancestor_key).fetch(20)
self.response.out.write('here is something')
for userid in userids:
self.response.out.write('<blockquote>%s</blockquote>' %
cgi.escape(userid.content))
# Checks for active Google account session
# user = users.get_current_user()
# if user:
# self.response.headers['Content-Type'] = 'text/plain'
# self.response.write('Hello, ' + user.nickname())
# else:
# self.redirect(users.create_login_url(self.request.uri))
self.response.out.write('</body></html>')
def post(self):
pass
# ************** HasData ************* #
class HasData(webapp2.RequestHandler):
def get(self):
pass
#TODO does user have data
class PostData(webapp2.RequestHandler):
def post(self):
client_id = self.request.get('client_id')
chrome_user = UserId(parent=ndb.Key("ID", client_id or "*no_id*"),
content = self.request.get('client_id'))
chrome_user.put()
#TODO recieve data from client
class GetSyncData(webapp2.RequestHandler):
"""docstring for GetSyncData"""
def get(self):
pass
#implement get data for user
# property user.email() or user.user_id()
app = webapp2.WSGIApplication([
('/', MainHandler),
('/GetUser/', GetUser),
('/HasData/', HasData),
('/chrome-sync/command/', PostData),
('/GetSyncData/', GetSyncData)
], debug=True)
| 30.40625
| 74
| 0.647825
| 363
| 2,919
| 5.121212
| 0.4573
| 0.045186
| 0.051641
| 0.043034
| 0.141474
| 0.124798
| 0.124798
| 0.023669
| 0
| 0
| 0
| 0.006891
| 0.204522
| 2,919
| 95
| 75
| 30.726316
| 0.793712
| 0.378554
| 0
| 0.191489
| 0
| 0
| 0.100338
| 0.027058
| 0
| 0
| 0
| 0.010526
| 0
| 1
| 0.148936
| false
| 0.042553
| 0.106383
| 0.021277
| 0.446809
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b95403252db42b0394653a122fd73b2b596e194d
| 400
|
py
|
Python
|
app/main.py
|
meysam81/sheypoor
|
aa67e20646ebc4143b83968f60c0b28c2ad340a1
|
[
"MIT"
] | null | null | null |
app/main.py
|
meysam81/sheypoor
|
aa67e20646ebc4143b83968f60c0b28c2ad340a1
|
[
"MIT"
] | null | null | null |
app/main.py
|
meysam81/sheypoor
|
aa67e20646ebc4143b83968f60c0b28c2ad340a1
|
[
"MIT"
] | null | null | null |
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app import api
from app.core.config import config
app = FastAPI(title="Sheypoor")
# Set all CORS enabled origins
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(api.router, prefix=config.API_URI)
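A launch sketch, assuming the package layout matches the app.main import path and uvicorn is installed:

    # run the API locally; equivalent to `uvicorn app.main:app --reload`
    import uvicorn

    if __name__ == "__main__":
        uvicorn.run("app.main:app", host="127.0.0.1", port=8000, reload=True)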
| 21.052632
| 53
| 0.7425
| 51
| 400
| 5.686275
| 0.509804
| 0.075862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145
| 400
| 18
| 54
| 22.222222
| 0.847953
| 0.07
| 0
| 0
| 0
| 0
| 0.02973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.307692
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
b965c021bcb2dac479172708e85ad9ed89f09ef2
| 5,427
|
py
|
Python
|
View/View.py
|
MoriokaReimen/ConfigHeaderGenerator
|
73ba5d3bd5269d7e6881ec79b6fc0121ff2fb03e
|
[
"MIT"
] | null | null | null |
View/View.py
|
MoriokaReimen/ConfigHeaderGenerator
|
73ba5d3bd5269d7e6881ec79b6fc0121ff2fb03e
|
[
"MIT"
] | null | null | null |
View/View.py
|
MoriokaReimen/ConfigHeaderGenerator
|
73ba5d3bd5269d7e6881ec79b6fc0121ff2fb03e
|
[
"MIT"
] | null | null | null |
import tkinter as tk
import tkinter.messagebox
from Control import Control
class View:
def __init__(self, control : Control.Control):
self.control = control
# Init Window
self.root = tk.Tk()
self.root.title(u"Header File Generator")
self.root.geometry("700x800")
self.config_frame = tk.Frame(self.root)
# Config Table
lb_symbol = tk.Label(self.config_frame, width = 20)
lb_symbol["text"] = "Symbol"
lb_symbol.grid(row = 0, column = 0)
lb_description = tk.Label(self.config_frame, width = 40)
lb_description["text"] = "Detail"
lb_description.grid(row = 0, column = 1)
lb_enable = tk.Label(self.config_frame, width = 10)
lb_enable["text"] = "Enable"
lb_enable.grid(row = 0, column = 2)
for i, config in enumerate(self.control.getConfigs()):
symbol_entry = tk.Entry(self.config_frame, width=20)
symbol_entry.insert(tk.END, config.symbol)
symbol_entry.config(state = tk.DISABLED)
symbol_entry.config(disabledforeground = "black", disabledbackground = "white")
symbol_entry.grid(row= i + 1, column = 0)
detail_entry = tk.Entry(self.config_frame, width=40)
detail_entry.insert(tk.END, config.detail)
detail_entry.config(state = tk.DISABLED)
detail_entry.config(disabledforeground = "black", disabledbackground = "white")
detail_entry.grid(row= i + 1, column = 1)
bt_enable = tk.Button(self.config_frame, text="ON", width= 5)
bt_enable["text"] = "ON" if config.enable else "OFF"
color = "green" if config.enable else "red"
bt_enable.config(bg=color, activebackground = color)
bt_enable["command"] = lambda id = i, button = bt_enable : self.toggle_config_enable(id, button)
bt_enable.grid(row = i + 1, column = 2)
self.config_frame.pack(side=tk.TOP, anchor=tk.NW)
self.value_config_frame = tk.Frame(self.root)
# Config Table
lb_symbol = tk.Label(self.value_config_frame, width = 20)
lb_symbol["text"] = "Symbol"
lb_symbol.grid(row = 0, column = 0)
lb_description = tk.Label(self.value_config_frame, width = 40)
lb_description["text"] = "Detail"
lb_description.grid(row = 0, column = 1)
lb_value = tk.Label(self.value_config_frame, width = 10)
lb_value["text"] = "Value"
lb_value.grid(row = 0, column = 2)
lb_enable = tk.Label(self.value_config_frame, width = 10)
lb_enable["text"] = "Enable"
lb_enable.grid(row = 0, column = 3)
for i, val_config in enumerate(self.control.getValConfigs()):
symbol_entry = tk.Entry(self.value_config_frame, width=20)
symbol_entry.insert(tk.END, val_config.symbol)
symbol_entry.config(state = tk.DISABLED)
symbol_entry.config(disabledforeground = "black", disabledbackground = "white")
symbol_entry.grid(row= i + 1, column = 0)
detail_entry = tk.Entry(self.value_config_frame, width=40)
detail_entry.insert(tk.END, val_config.detail)
detail_entry.config(state = tk.DISABLED)
detail_entry.config(disabledforeground = "black", disabledbackground = "white")
detail_entry.grid(row= i + 1, column = 1)
value_entry = tk.Entry(self.value_config_frame, width=10)
value_entry.insert(tk.END, val_config.value)
value_entry.config(state = tk.DISABLED)
value_entry.config(disabledforeground = "black", disabledbackground = "white")
value_entry.grid(row= i + 1, column = 2)
bt_enable = tk.Button(self.value_config_frame, text="ON", width= 5)
bt_enable["text"] = "ON" if val_config.enable else "OFF"
color = "green" if val_config.enable else "red"
bt_enable.config(bg=color, activebackground = color)
bt_enable["command"] = lambda id = i, button = bt_enable : self.toggle_val_config_enable(id, button)
bt_enable.grid(row = i + 1, column = 3)
self.value_config_frame.pack(side=tk.TOP, anchor=tk.W)
# Generator Button
self.bt_generate = tk.Button(self.root)
self.bt_generate["text"] = "Generate Header"
self.bt_generate["command"] = self.generateHeader
self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE)
def start(self):
self.root.mainloop()
def generateHeader(self):
self.control.generateHeader()
tk.messagebox.showinfo("Header Generator Info", "Generated:{0}".format(self.control.header_config.path))
def update(self):
pass
def toggle_config_enable(self, id, button : tk.Button):
config = self.control.getConfigs()[id]
config.enable = not config.enable
button["text"] = "ON" if config.enable else "OFF"
color = "green" if config.enable else "red"
button.config(bg=color, activebackground = color)
def toggle_val_config_enable(self, id, button : tk.Button):
val_config = self.control.getValConfigs()[id]
val_config.enable = not val_config.enable
button["text"] = "ON" if val_config.enable else "OFF"
color = "green" if val_config.enable else "red"
button.config(bg=color, activebackground = color)
| 43.071429
| 112
| 0.629445
| 696
| 5,427
| 4.741379
| 0.136494
| 0.06
| 0.058182
| 0.060606
| 0.763636
| 0.713939
| 0.665758
| 0.638485
| 0.6
| 0.535758
| 0
| 0.015036
| 0.252442
| 5,427
| 125
| 113
| 43.416
| 0.798373
| 0.00995
| 0
| 0.3125
| 0
| 0
| 0.054583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.010417
| 0.03125
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b967ba0197b144171458b230c2dfe31844ba0b72
| 5,231
|
py
|
Python
|
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
"""
@author: anilkdegala
"""
import os
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from datetime import date, timedelta, datetime
from collections import OrderedDict
from scripts.dag_pebbles import DagPebbles
from airflow.configuration import conf
from scripts.configurations import *
from airflow.operators.dummy_operator import DummyOperator
default_args = {
"owner": "anilkdegala",
"depends_on_past": True,
"max_active_runs": 1,
"start_date": datetime(2015, 6, 1),
"is_active": True,
"is_paused_upon_creation": False,
}
def begin_pipeline(**kwargs):
print("begin_pipeline:")
files = kwargs['dag_run'].conf.get('files')
download_decrypt_arguments = ''
transfer_arguments_list = []
for f in files:
print("download_decrypt_transfer_files: file: ", f['name'], ', location: ', f['path'])
output = f['name']+','+f['path']+','+f['final_name']
download_decrypt_arguments = download_decrypt_arguments + " " + output
transfer_arguments_list.append(DATA_LOCATION + "/"+f['final_name'])
transfer_arguments = ",".join(transfer_arguments_list)
print("final download_decrypt_arguments: ",download_decrypt_arguments)
print("final transfer_arguments: ",transfer_arguments)
kwargs["ti"].xcom_push(key="download_decrypt_arguments", value=download_decrypt_arguments)
kwargs["ti"].xcom_push(key="transfer_arguments", value=transfer_arguments)
def pipeline_enable_check(**kwargs):
dp = DagPebbles()
if dp.pipeline_enable_check('DATA_LOAD'):
return "pipeline_check_passed"
else:
return "pipeline_check_skipped"
def pipeline_check_passed(**kwargs):
print("pipeline_check_passed:")
def end_pipeline(**kwargs):
print("end_pipeline:")
def pipeline_check_skipped(**kwargs):
print("pipeline_check_skipped:")
def cleanup(**kwargs):
dp = DagPebbles()
print("cleanup")
def notify(**kwargs):
dp = DagPebbles()
print("notify")
def end(**kwargs):
dp = DagPebbles()
print("end")
with DAG( "DOWNLOAD_DECRYPT_TRANSFER",
description="Download, Decrypt, Transfer files (Source: S3, Staging: EC2: Target: RDS Oracle)",
default_args=default_args,
schedule_interval=None,
catchup=False,
orientation="TB",
tags=['Utils'],
dagrun_timeout=timedelta(hours=240)
) as dag:
t_pipeline_begin = PythonOperator(
task_id="begin_pipeline",
python_callable=begin_pipeline,
provide_context=True,
dag=dag,
)
t_check_pipeline = BranchPythonOperator(
task_id="check_pipeline",
python_callable=pipeline_enable_check,
provide_context=True,
dag=dag,
)
t_pipeline_check_passed = PythonOperator(
task_id="pipeline_check_passed",
python_callable=pipeline_check_passed,
provide_context=True,
dag=dag,
)
t_pipeline_check_skipped = PythonOperator(
task_id="pipeline_check_skipped",
python_callable=pipeline_check_skipped,
provide_context=True,
dag=dag,
)
download_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/download_files.sh "+"{{ ti.xcom_pull(key='download_decrypt_arguments')}}"
t_download_files = BashOperator(
task_id='download_files',
bash_command=download_files_cmd,
dag=dag)
decrypt_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh "+"{{ ti.xcom_pull(key='download_decrypt_arguments')}} "
t_decrypt_files = BashOperator(
task_id='decrypt_files',
bash_command=decrypt_files_cmd,
dag=dag)
transfer_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl "+"{{ ti.xcom_pull(key='transfer_arguments')}} "
t_transfer_files = BashOperator(
task_id='transfer_files',
bash_command=transfer_files_cmd,
dag=dag)
t_end_pipeline = PythonOperator(
task_id="end_pipeline",
python_callable=end_pipeline,
provide_context=True,
trigger_rule="none_failed",
dag=dag,
)
t_notify = PythonOperator(
task_id="send_notifications",
python_callable=notify,
provide_context=True,
trigger_rule="none_failed",
dag=dag,
)
t_cleanup = PythonOperator(
task_id="cleanup",
python_callable=cleanup,
provide_context=True,
trigger_rule="none_failed",
dag=dag,
)
t_end = PythonOperator(
task_id="end",
python_callable=end,
provide_context=True,
trigger_rule="none_failed",
dag=dag,
)
t_pipeline_begin >> t_check_pipeline
t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline
t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline
t_end_pipeline >> t_cleanup >> t_notify >> t_end
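begin_pipeline reads a `files` list from the dag_run conf, so a trigger needs entries with name/path/final_name keys. A sketch with illustrative values (Airflow 1.x CLI):

    # airflow trigger_dag DOWNLOAD_DECRYPT_TRANSFER \
    #   -c '{"files": [{"name": "orders.csv.enc", "path": "s3://bucket/in", "final_name": "orders.csv"}]}'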
| 30.770588
| 171
| 0.664118
| 585
| 5,231
| 5.586325
| 0.218803
| 0.055692
| 0.066095
| 0.025704
| 0.250306
| 0.222766
| 0.168605
| 0.146573
| 0.083843
| 0.083843
| 0
| 0.002979
| 0.229975
| 5,231
| 169
| 172
| 30.952663
| 0.808342
| 0.003823
| 0
| 0.201493
| 0
| 0
| 0.208149
| 0.111474
| 0.022388
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0.052239
| 0.074627
| 0
| 0.149254
| 0.074627
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b96b280416f0d557826ffa670a7914f2d45e5fc5
| 526
|
py
|
Python
|
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
'''Test feet admittance control'''
from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient
try:
# Python 2
input = raw_input # noqa
except NameError:
pass
run_test('appli_feet_admittance.py')
run_ft_calibration('robot.ftc')
input("Wait before running the test")
print('Set saturation value')
runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]')
input("Wait before dumping the data")
runCommandClient('dump_tracer(robot.tracer)')
| 25.047619
| 97
| 0.752852
| 79
| 526
| 4.822785
| 0.556962
| 0.047244
| 0.055118
| 0.052493
| 0.028871
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030369
| 0.123574
| 526
| 20
| 98
| 26.3
| 0.796095
| 0.081749
| 0
| 0
| 0
| 0.083333
| 0.42437
| 0.17437
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.083333
| 0.083333
| 0
| 0.083333
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b96d766a7c5eab27eb3785b1277b6beccda7c9ed
| 1,446
|
py
|
Python
|
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.test import Client
class RegisterTestCase(TestCase):
def test_register(self):
c = Client()
# on success redirects to /
response = c.post('/accounts/register/', {
'username': 'asdas',
'password1': 'asdasdasd12',
'password2': 'asdasdasd12'
})
self.assertRedirects(response, '/')
# passwords don't match
response = c.post('/accounts/register/', {
'username': 'asdasdasd1',
'password1': 'asdasdasd1',
'password2': 'asdasdasd2'
})
self.assertEquals(response.status_code, 200)
# username is empty
response = c.post('/accounts/register/', {
'username': '',
'password1': 'asdasdasd12',
'password2': 'asdasdasd12'
})
self.assertEquals(response.status_code, 200)
# no password
response = c.post('/accounts/register/', {
'username': 'asdasdasd',
'password1': '',
'password2': ''
})
self.assertEquals(response.status_code, 200)
# username and password are similar
response = c.post('/accounts/register/', {
'username': 'asdasdasd0',
'password1': 'asdasdasd1',
'password2': 'asdasdasd1'
})
self.assertEquals(response.status_code, 200)
| 30.125
| 52
| 0.53527
| 116
| 1,446
| 6.62931
| 0.37931
| 0.058518
| 0.084525
| 0.136541
| 0.563069
| 0.453836
| 0.117035
| 0
| 0
| 0
| 0
| 0.037344
| 0.333333
| 1,446
| 47
| 53
| 30.765957
| 0.760373
| 0.076763
| 0
| 0.571429
| 0
| 0
| 0.258841
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.028571
| false
| 0.285714
| 0.057143
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b974d5d1bd35654f50415a8f7c66f3fb9a0316ab
| 704
|
py
|
Python
|
tests/test_formatter.py
|
hbraux/kafkacli
|
5f7ed23150932b66b484fb43dd6210b6c0968776
|
[
"MIT"
] | null | null | null |
tests/test_formatter.py
|
hbraux/kafkacli
|
5f7ed23150932b66b484fb43dd6210b6c0968776
|
[
"MIT"
] | null | null | null |
tests/test_formatter.py
|
hbraux/kafkacli
|
5f7ed23150932b66b484fb43dd6210b6c0968776
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import pytest
import json
from kafkacli.formatter import Formatter
sampleJson = json.loads('{"a":"s", "b":1}')
def test_print_default(capsys):
Formatter().print(sampleJson)
captured = capsys.readouterr()
assert captured.out == '{"a": "s", "b": 1}\n'
def test_print_idents(capsys):
Formatter(indents=True).print(sampleJson)
captured = capsys.readouterr()
assert captured.out == '{\n "a": "s",\n "b": 1\n}\n'
def test_print_colors(capsys):
Formatter(colors=True).print(sampleJson)
captured = capsys.readouterr()
assert captured.out == \
'{"a": \x1b[34m"s"\x1b[39m, "b": \x1b[31m1\x1b[39m}\n'
| 24.275862
| 62
| 0.640625
| 96
| 704
| 4.635417
| 0.395833
| 0.013483
| 0.080899
| 0.195506
| 0.4
| 0.4
| 0.4
| 0.4
| 0.4
| 0
| 0
| 0.02911
| 0.170455
| 704
| 28
| 63
| 25.142857
| 0.732877
| 0.059659
| 0
| 0.166667
| 0
| 0.055556
| 0.183333
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.222222
| 0
| 0.388889
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b975e6fb7fb3fa8849afb4e4ce41618c2ce94c1b
| 451
|
py
|
Python
|
src/test/tests/unit/protocol.py
|
ylee88/visit
|
8e0920996d84fef70a7014b0d770360918d849d5
|
[
"BSD-3-Clause"
] | 1
|
2022-01-27T23:52:04.000Z
|
2022-01-27T23:52:04.000Z
|
src/test/tests/unit/protocol.py
|
ylee88/visit
|
8e0920996d84fef70a7014b0d770360918d849d5
|
[
"BSD-3-Clause"
] | null | null | null |
src/test/tests/unit/protocol.py
|
ylee88/visit
|
8e0920996d84fef70a7014b0d770360918d849d5
|
[
"BSD-3-Clause"
] | null | null | null |
# ----------------------------------------------------------------------------
# CLASSES: nightly
#
# Test Case: protocol.py
#
# Tests: visitprotocol unit test
#
# Mark C. Miller, Tue Jan 11 10:19:23 PST 2011
# ----------------------------------------------------------------------------
tapp = visit_bin_path("visitprotocol")
res = sexe(tapp,ret_output=True)
if res["return_code"] == 0:
excode = 111
else:
excode = 113
Exit(excode)
| 26.529412
| 78
| 0.432373
| 44
| 451
| 4.340909
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049223
| 0.144124
| 451
| 16
| 79
| 28.1875
| 0.445596
| 0.618625
| 0
| 0
| 0
| 0
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b97d4675d330154e0b12b91fbd601affd888ea29
| 1,901
|
py
|
Python
|
examples/airflow/dags/etl_orders_7_days.py
|
phixMe/marquez
|
06d71635369893b371a8a9c9e7023f11d7cbb1f8
|
[
"Apache-2.0"
] | null | null | null |
examples/airflow/dags/etl_orders_7_days.py
|
phixMe/marquez
|
06d71635369893b371a8a9c9e7023f11d7cbb1f8
|
[
"Apache-2.0"
] | null | null | null |
examples/airflow/dags/etl_orders_7_days.py
|
phixMe/marquez
|
06d71635369893b371a8a9c9e7023f11d7cbb1f8
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from marquez_airflow import DAG
from airflow.operators.postgres_operator import PostgresOperator
from airflow.utils.dates import days_ago
default_args = {
'owner': 'datascience',
'depends_on_past': False,
'start_date': days_ago(1),
'email_on_failure': False,
'email_on_retry': False,
'email': ['datascience@example.com']
}
dag = DAG(
'etl_orders_7_days',
schedule_interval='@hourly',
catchup=False,
default_args=default_args,
description='Loads newly placed orders weekly.'
)
t1 = PostgresOperator(
task_id='if_not_exists',
postgres_conn_id='food_delivery_db',
sql='''
CREATE TABLE IF NOT EXISTS orders_7_days (
order_id INTEGER REFERENCES orders(id),
placed_on TIMESTAMP NOT NULL,
discount_id INTEGER REFERENCES discounts(id),
menu_id INTEGER REFERENCES menus(id),
restaurant_id INTEGER REFERENCES restaurants(id),
menu_item_id INTEGER REFERENCES menu_items(id),
category_id INTEGER REFERENCES categories(id)
);''',
dag=dag
)
t2 = PostgresOperator(
task_id='truncate',
postgres_conn_id='food_delivery_db',
sql='TRUNCATE TABLE orders_7_days;',
dag=dag
)
t3 = PostgresOperator(
task_id='insert',
postgres_conn_id='food_delivery_db',
sql='''
INSERT INTO orders_7_days (order_id, placed_on, discount_id, menu_id, restaurant_id, menu_item_id, category_id)
SELECT o.id AS order_id, o.placed_on, o.discount_id, m.id AS menu_id, m.restaurant_id, mi.id AS menu_item_id, c.id AS category_id
FROM orders AS o
INNER JOIN menu_items AS mi
ON mi.id = o.menu_item_id
INNER JOIN categories AS c
ON c.id = mi.category_id
INNER JOIN menus AS m
ON m.id = c.menu_id
WHERE o.placed_on >= NOW() - interval '7 days'
''',
dag=dag
)
t1 >> t2 >> t3
| 29.246154
| 135
| 0.681746
| 270
| 1,901
| 4.533333
| 0.318519
| 0.044118
| 0.093137
| 0.044118
| 0.105392
| 0.07598
| 0.07598
| 0
| 0
| 0
| 0
| 0.008136
| 0.224093
| 1,901
| 64
| 136
| 29.703125
| 0.821695
| 0
| 0
| 0.137931
| 0
| 0.017241
| 0.635455
| 0.012099
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.068966
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b980be1e0d2b8db749e25a4f49c35cdddbdca9d9
| 1,650
|
py
|
Python
|
tt/urls.py
|
samiksha-patil/Knowledge-Sharing-Platform
|
22e61a659d5ad63fe656fa639dc897cbdebad4fe
|
[
"bzip2-1.0.6"
] | 1
|
2021-05-09T08:18:49.000Z
|
2021-05-09T08:18:49.000Z
|
tt/urls.py
|
samiksha-patil/Knowledge-Sharing-Platform
|
22e61a659d5ad63fe656fa639dc897cbdebad4fe
|
[
"bzip2-1.0.6"
] | 9
|
2021-03-19T01:11:35.000Z
|
2022-03-12T00:20:13.000Z
|
tt/urls.py
|
samiksha-patil/Knowledge-Sharing-Platform
|
22e61a659d5ad63fe656fa639dc897cbdebad4fe
|
[
"bzip2-1.0.6"
] | null | null | null |
"""
tt URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# Uncomment next two lines to enable admin:
from django.contrib import admin
from django.urls import path, include
from users import views as user_views
from django.contrib.auth import views as auth_views
from upload import views as upload_views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
# Uncomment the next line to enable the admin:
path('admin/', admin.site.urls),
path('', include('blog.urls')),
path('register/', user_views.register, name='register'),
path('login/', auth_views.LoginView.as_view(template_name='users/login.html'), name='login'),
path('logout/', auth_views.LogoutView.as_view(template_name='users/logout.html'), name='logout'),
path('profile/', user_views.profile, name='profile'),
path('book/', upload_views.book_list, name='book_list'),
path('book/upload', upload_views.upload_book, name='upload_book'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 35.869565
| 100
| 0.726061
| 243
| 1,650
| 4.835391
| 0.316872
| 0.051064
| 0.012766
| 0.020426
| 0.138723
| 0.099574
| 0.06383
| 0
| 0
| 0
| 0
| 0.00569
| 0.147879
| 1,650
| 46
| 101
| 35.869565
| 0.830014
| 0.428485
| 0
| 0
| 0
| 0
| 0.149733
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.368421
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
b997c70668ace413cc27502883f737e007e56239
| 1,006
|
py
|
Python
|
Doc/includes/sqlite3/load_extension.py
|
livioso/cpython
|
077061a7b24917aaf31057885c69919c5a553c88
|
[
"PSF-2.0"
] | 36
|
2019-06-07T20:44:06.000Z
|
2022-03-23T06:19:43.000Z
|
Doc/includes/sqlite3/load_extension.py
|
livioso/cpython
|
077061a7b24917aaf31057885c69919c5a553c88
|
[
"PSF-2.0"
] | 49
|
2016-02-29T17:59:52.000Z
|
2019-05-05T04:59:26.000Z
|
Doc/includes/sqlite3/load_extension.py
|
livioso/cpython
|
077061a7b24917aaf31057885c69919c5a553c88
|
[
"PSF-2.0"
] | 28
|
2019-06-27T04:11:27.000Z
|
2022-03-11T06:27:44.000Z
|
import sqlite3
con = sqlite3.connect(":memory:")
# enable extension loading
con.enable_load_extension(True)
# Load the fulltext search extension
con.execute("select load_extension('./fts3.so')")
# alternatively you can load the extension using an API call:
# con.load_extension("./fts3.so")
# disable extension loading again
con.enable_load_extension(False)
# example from SQLite wiki
con.execute("create virtual table recipe using fts3(name, ingredients)")
con.executescript("""
insert into recipe (name, ingredients) values ('broccoli stew', 'broccoli peppers cheese tomatoes');
insert into recipe (name, ingredients) values ('pumpkin stew', 'pumpkin onions garlic celery');
insert into recipe (name, ingredients) values ('broccoli pie', 'broccoli cheese onions flour');
insert into recipe (name, ingredients) values ('pumpkin pie', 'pumpkin sugar flour butter');
""")
for row in con.execute("select rowid, name, ingredients from recipe where name match 'pie'"):
print(row)
| 37.259259
| 104
| 0.744533
| 131
| 1,006
| 5.671756
| 0.465649
| 0.121131
| 0.086137
| 0.107672
| 0.239569
| 0.239569
| 0.239569
| 0
| 0
| 0
| 0
| 0.005794
| 0.142147
| 1,006
| 26
| 105
| 38.692308
| 0.855156
| 0.206759
| 0
| 0
| 0
| 0
| 0.723135
| 0.034134
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b999aec7c34874ef90e0f30812ac97217ce90cca
| 3,145
|
py
|
Python
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | 1
|
2020-03-19T20:09:00.000Z
|
2020-03-19T20:09:00.000Z
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
import requests
import urllib.request
import os.path
import shutil
import csv
def main():
with open("data.csv") as i: #Open the data.csv file
instances = i.readlines() #Write them into memory
instances = [x.strip() for x in instances] #Strip any weird issues from writing
instances.sort() #Sort them alphabetically
setup(instances) #Run setup to create all the necessary files and subfolders
count = len(instances) #Get the count just for fun
i = 0
try:
for name in instances:
try:
i += 1
print("-----!"+name+"!-----")
print(str(i) +" of " + str(count) + " remaining!")
fetch(name) #Run the fetching code
except Exception as e:
print(e) #Print the error. We catch errors here for pleroma instances, weirdly encoded urls, etc
pass #Don't stop the beat
except Exception as e:
print("Instance Error")
print(e)
pass
clone(instances) #Clone all of them into one big folder for ease of access
def fetch(name):
r = requests.get('https://%s/api/v1/custom_emojis'% name, allow_redirects=True) #Throw the instance name into the standard url for fetching data
path = "emoji/%s/" % name #Because of the clone function we know all of these folders will exist
try:
for emoji in r.json(): #Emoji = the json code from the request
try:
if os.path.isfile(path+emoji['shortcode']+".png"): #Check to see if it exists.
pass
else:
if "ms_" not in emoji['shortcode']: #Cut out Mutant Standard Emojis (Or at least most of them). #Mutant standard is huge and common
#print(emoji['shortcode'] + " found!")
emojiimage = requests.get(emoji['static_url'],allow_redirects=True) #Get the image from the json
open(path + emoji['shortcode']+".png",'wb').write(emojiimage.content) #Now save it as an image in the filesystem
except Exception as e:
print("Did not get: " + emoji['url']) #If somethings fucky throw a nice error then keep going.
print(e)
pass
except Exception as e:
print(e)
def setup(instances):
if (os.path.isdir("emoji/")): #Check to see if emoji/ exists
pass
else:
os.mkdir("emoji/") #make it if it doesnt
for name in instances:
if (os.path.isdir("emoji/%s/"%name)):
pass
else: os.mkdir("emoji/%s/"%name)
if (os.path.isdir("emoji/all")):
pass
else:
os.mkdir("emoji/all")
def clone(instances):
for name in instances:
print("Copying emoji for: %s"% name)
path = "emoji/%s/" % name
files = os.listdir(path)
for name in files: #This gets alll files
try:
shutil.copyfile(path+name,"emoji/all/"+name) #Then copies them into the all folder
except Exception as e:
print(e)
pass
if __name__ == '__main__':
main()
| 37.440476
| 151
| 0.574245
| 418
| 3,145
| 4.289474
| 0.373206
| 0.020078
| 0.047407
| 0.050195
| 0.139431
| 0.070273
| 0
| 0
| 0
| 0
| 0
| 0.001403
| 0.320191
| 3,145
| 83
| 152
| 37.891566
| 0.837231
| 0.294118
| 0
| 0.430556
| 0
| 0
| 0.114299
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.111111
| 0.069444
| 0
| 0.125
| 0.138889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b99add86778172fa08bc930ed29f8f26a88ec4d3
| 943
|
py
|
Python
|
String/640.One Edit Distance/Solution_DP.py
|
Zhenye-Na/LxxxCode
|
afd79d790d0a7495d75e6650f80adaa99bd0ff07
|
[
"MIT"
] | 12
|
2019-05-04T04:21:27.000Z
|
2022-03-02T07:06:57.000Z
|
String/640.One Edit Distance/Solution_DP.py
|
Zhenye-Na/LxxxCode
|
afd79d790d0a7495d75e6650f80adaa99bd0ff07
|
[
"MIT"
] | 1
|
2019-07-24T18:43:53.000Z
|
2019-07-24T18:43:53.000Z
|
String/640.One Edit Distance/Solution_DP.py
|
Zhenye-Na/LxxxCode
|
afd79d790d0a7495d75e6650f80adaa99bd0ff07
|
[
"MIT"
] | 10
|
2019-07-01T04:03:04.000Z
|
2022-03-09T03:57:37.000Z
|
class Solution:
"""
@param s: a string
@param t: a string
@return: true if they are both one edit distance apart or false
"""
def isOneEditDistance(self, s, t):
# write your code here
if s == t:
return False
if abs(len(s) - len(t)) > 1:
return False
n, m = len(s), len(t)
f = [[0] * (m + 1) for _ in range(2)]
for j in range(m + 1):
f[0][j] = j
for i in range(1, n + 1):
f[i % 2][0] = i
for j in range(1, m + 1):
if s[i - 1] == t[j - 1]:
f[i % 2][j] = min(f[(i - 1) % 2][j - 1],
f[(i - 1) % 2][j] + 1, f[i % 2][j - 1] + 1)
else:
f[i % 2][j] = min(f[(i - 1) % 2][j - 1] + 1,
f[(i - 1) % 2][j] + 1, f[i % 2][j - 1] + 1)
return f[n % 2][m] == 1
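A quick sanity check of the class above:

    sol = Solution()
    print(sol.isOneEditDistance("aDb", "adb"))   # True: one substitution apart
    print(sol.isOneEditDistance("ab", "ab"))     # False: identical strings
    print(sol.isOneEditDistance("ab", "abcd"))   # False: lengths differ by 2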
| 29.46875
| 81
| 0.341463
| 149
| 943
| 2.154362
| 0.275168
| 0.056075
| 0.056075
| 0.049844
| 0.165109
| 0.165109
| 0.158879
| 0.158879
| 0.158879
| 0.158879
| 0
| 0.076446
| 0.486744
| 943
| 31
| 82
| 30.419355
| 0.586777
| 0.130435
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0
| 1
| 0.05
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9a767c55418efb8b98d12205d59e512ca419081
| 1,860
|
py
|
Python
|
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Author: Emmanuel Odeke <odeke@ualberta.ca>
# This example steps you through using resty & restAssured to save pickled/serialized
# data as a blob and then later re-using it in after deserialization.
# Sample usage might be in collaborative computing ie publish results from an expensive
# computation on one machine so that other machines can load it as live data.
def testSerializer():
import Serializer
bs = Serializer.BinarySerializer()
js = Serializer.JSONSerializer()
data = dict((i, i) for i in range(10))
bserial = bs.serialize(data)
jserial = js.serialize(data)
bdserial = bs.deserialize(bserial)
jdserial = js.deserialize(jserial)
print('bdserial', bdserial)
ioS = bs.ioStream(bserial)
ioR = ioS.read()
print('ioS data from the stream', ioR)
def testCloudPassagePickledVersion():
from entrails.cloudPassage import CloudPassageHandler
cc = CloudPassageHandler()
data = dict((i, i*10) for i in range(9))
title = 'Dict of items 0-8999, keys i*10'
res = cc.push(data, title=title, asPickle=True)
pulledObj = cc.pull(metaData='pickle')
print('PulledObj', pulledObj, data)
assert(pulledObj == data)
rmTry = cc.removeTrace(data, asPickle=True)
print(rmTry)
def testCloudPassageJSONVersion():
from entrails.cloudPassage import CloudPassageHandler
cc = CloudPassageHandler()
data = dict((str(i), i*10) for i in range(9))
title = 'Dict of items 0-8999, keys i*10'
res = cc.push(data, title=title, asPickle=False)
pulledObj = cc.pull(metaData='json')
print('PulledObj', pulledObj, data)
assert(pulledObj == data)
rmTry = cc.removeTrace(data)
print(rmTry)
def main():
testSerializer()
testCloudPassageJSONVersion()
testCloudPassagePickledVersion()
if __name__ == '__main__':
main()
| 31
| 87
| 0.7
| 234
| 1,860
| 5.529915
| 0.478632
| 0.009274
| 0.01391
| 0.025502
| 0.341577
| 0.341577
| 0.341577
| 0.341577
| 0.341577
| 0.22102
| 0
| 0.015323
| 0.193011
| 1,860
| 59
| 88
| 31.525424
| 0.846769
| 0.203226
| 0
| 0.292683
| 0
| 0
| 0.088076
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 1
| 0.097561
| false
| 0.195122
| 0.073171
| 0
| 0.170732
| 0.146341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9a7d3f5b98af28c51ffb55578408fad9a1d3f99
| 3,066
|
py
|
Python
|
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
# dataframe: a data-frame implementation using method piping
#
# Copyright (C) 2016 Simon Dirmeier
#
# This file is part of dataframe.
#
# dataframe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dataframe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dataframe. If not, see <http://www.gnu.org/licenses/>.
#
#
# @author = 'Simon Dirmeier'
# @email = 'mail@simon-dirmeier.net'
from itertools import chain
import tabulate
from ._dataframe_column import DataFrameColumn
from ._dataframe_row import DataFrameRow
class DataFrameColumnSet:
def __init__(self, **kwargs):
self.__data_columns = []
self.__nrow = -1
self.cbind(**kwargs)
def __getitem__(self, item):
if isinstance(item, int):
return self.__data_columns[item]
raise ValueError("Item should be integer!")
def __iter__(self):
for col in self.__data_columns:
yield col
def __str__(self):
stri = "\nA dataframe"
ta = []
for col in self.__data_columns:
vals = col.values
if len(vals) > 10:
vals = list(chain(vals[:3], "...", vals[-3:]))
ta.append(vals)
ta = tabulate.tabulate(zip(*ta), headers=self.colnames)
return stri + "\n\n" + ta.__str__()
@property
def nrow(self):
return self.__nrow
@property
def ncol(self):
return len(self.colnames)
@property
def colnames(self):
return [x.colname for x in self.__data_columns]
def rows(self, idxs):
return [self.row(i) for i in idxs]
def row(self, idx):
"""
Returns DataFrameRow of the DataFrame given its index.
:param idx: the index of the row in the DataFrame.
:return: returns a DataFrameRow
"""
return DataFrameRow(idx, [x[idx] for x in self], self.colnames)
def which_colnames(self, *args):
idx = []
for i in range(len(self.__data_columns)):
if self.colnames[i] in args:
idx.append(i)
return idx
def cbind(self, **columns):
keys = sorted([x for x in columns.keys()])
for k in keys:
self.__cbind(DataFrameColumn(str(k), columns.get(k)))
def __cbind(self, column):
if column.colname in self.colnames:
ValueError("Appending duplicate col-name!")
self.__data_columns.append(column)
self.__nrow = self.__data_columns[-1].size()
for col in self.__data_columns:
if col.size() != self.__nrow:
raise ValueError("Columns do not have equal lengths!")
| 30.356436
| 71
| 0.63242
| 405
| 3,066
| 4.62963
| 0.377778
| 0.0384
| 0.072
| 0.036267
| 0.080533
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0.004906
| 0.268754
| 3,066
| 100
| 72
| 30.66
| 0.831401
| 0.302022
| 0
| 0.105263
| 0
| 0
| 0.050986
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.070175
| 0.070175
| 0.438596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9bb675bdbf31f94537da2d2380efe251bd20dd2
| 1,036
|
py
|
Python
|
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
from django.urls import re_path
from django.views.generic import TemplateView
from .views import RegisterView, VerifyEmailView
urlpatterns = [
re_path(r'^$', RegisterView.as_view(), name='rest_register'),
re_path(r'^verify-email/$', VerifyEmailView.as_view(), name='rest_verify_email'),
# This URL is used by django-allauth; the empty TemplateView is defined only
# so that reverse() can be called inside the app, e.g. when an email with a
# verification link is sent and the email content has to be rendered.
# account_confirm_email - override this view to handle the link in your API
# client and then POST the key to the /verify-email/ endpoint.
# If you don't want to use the API for that step, just use the ConfirmEmailView
# from django-allauth:
# https://github.com/pennersr/django-allauth/blob/master/allauth/account/views.py
re_path(r'^account-confirm-email/(?P<key>[-:\w]+)/$', TemplateView.as_view(),
name='account_confirm_email'),
]
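# Sketch of the reverse() call the comments above describe (illustrative; the
# key argument is a placeholder normally produced by django-allauth, and the
# helper name is not part of the original module).
from django.urls import reverse

def build_confirm_url(key):
    # resolves against the account_confirm_email pattern defined above,
    # e.g. /account-confirm-email/<key>/
    return reverse('account_confirm_email', kwargs={'key': key})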
| 41.44
| 100
| 0.721042
| 150
| 1,036
| 4.886667
| 0.56
| 0.032742
| 0.028649
| 0.038199
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17278
| 1,036
| 24
| 101
| 43.166667
| 0.855309
| 0.532819
| 0
| 0
| 0
| 0
| 0.230444
| 0.131078
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
b9cea3f3b51bf703897e952ed45d88260e3502a1
| 1,190
|
py
|
Python
|
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | 2
|
2018-12-17T10:10:49.000Z
|
2018-12-17T11:18:32.000Z
|
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | null | null | null |
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | 1
|
2021-06-06T22:28:12.000Z
|
2021-06-06T22:28:12.000Z
|
class RedisBackend(object):
def __init__(self, settings={}, *args, **kwargs):
self.settings = settings
@property
def connection(self):
# cached redis connection
if not hasattr(self, '_connection'):
self._connection = self.settings.get('redis.connector').get()
return self._connection
@property
def channel(self):
# Fanout channel
if not hasattr(self, '_channel'):
self._channel = self.connection.pubsub()
return self._channel
def subscribe(self, channels=[]):
# Fanout subscriber
for chan_id in channels:
self.channel.subscribe(chan_id)
def listen(self):
# Fanout generator
for m in self.channel.listen():
if m['type'] == 'message':
yield m
def send(self, channel_id, payload):
# Fanout emitter
return self.connection.publish(channel_id, payload)
def listen_queue(self, queue_keys):
# Message queue generator
while 1:
yield self.connection.blpop(queue_keys)
def send_queue(self, queue_key, payload):
return self.connection.rpush(queue_key, payload)
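# Usage sketch (illustrative): the 'redis.connector' settings entry is assumed
# to expose .get() returning a redis-py style client, matching the lookups above.
class StaticConnector(object):
    def __init__(self, client):
        self._client = client

    def get(self):
        return self._client

# Example wiring (requires a running Redis server):
#   import redis
#   backend = RedisBackend(settings={'redis.connector': StaticConnector(redis.StrictRedis())})
#   backend.subscribe(['room-1'])
#   backend.send('room-1', 'hello')
#   for message in backend.listen():   # yields only 'message' events
#       print(message['data'])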
| 28.333333
| 73
| 0.608403
| 131
| 1,190
| 5.381679
| 0.351145
| 0.139007
| 0.085106
| 0.04539
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001185
| 0.290756
| 1,190
| 41
| 74
| 29.02439
| 0.834123
| 0.094118
| 0
| 0.074074
| 0
| 0
| 0.042017
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.296296
| false
| 0
| 0
| 0.074074
| 0.481481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9d22fbf764d6a06a81fe68e7bedb0cb2069ff17
| 2,360
|
py
|
Python
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 13
|
2020-02-24T16:57:37.000Z
|
2021-12-14T16:47:41.000Z
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 3
|
2021-01-08T14:06:33.000Z
|
2021-09-07T13:39:46.000Z
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 3
|
2020-05-30T17:59:43.000Z
|
2021-04-13T04:55:33.000Z
|
from torch import nn as nn
from .base_model import BaseModel
from ..nn.conv2d import DenseConv2d
from ..nn.linear import DenseLinear
__all__ = ["Conv2", "conv2", "Conv4", "conv4"]
class Conv2(BaseModel):
def __init__(self):
super(Conv2, self).__init__()
self.features = nn.Sequential(DenseConv2d(1, 32, kernel_size=5, padding=2), # 32x28x28
nn.ReLU(inplace=True),
nn.MaxPool2d(2, stride=2), # 32x14x14
DenseConv2d(32, 64, kernel_size=5, padding=2), # 64x14x14
nn.ReLU(inplace=True),
nn.MaxPool2d(2, stride=2)) # 64x7x7
self.classifier = nn.Sequential(DenseLinear(64 * 7 * 7, 2048),
nn.ReLU(inplace=True),
DenseLinear(2048, 62))
self.collect_prunable_layers()
def forward(self, inp):
out = self.features(inp)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
class Conv4(BaseModel):
def __init__(self):
super(Conv4, self).__init__()
self.features = nn.Sequential(DenseConv2d(3, 32, kernel_size=3, padding=1),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=1),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=2),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=2),
nn.BatchNorm2d(32),
nn.MaxPool2d(2))
self.classifier = DenseLinear(in_features=32 * 6 * 6, out_features=2)
def forward(self, inp):
out = self.features(inp)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
def conv2() -> Conv2:
return Conv2()
def conv4() -> Conv4:
return Conv4()
# TODO: define pretrain etc.
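# Shape-check sketch (illustrative; assumes torch is installed alongside this
# package). Conv2 expects 1x28x28 inputs and maps them to 62 logits; Conv4
# expects 3-channel inputs whose feature maps reduce to 6x6 before the classifier.
if __name__ == "__main__":
    import torch
    model = conv2()
    dummy = torch.randn(4, 1, 28, 28)   # batch of four grayscale images
    print(model(dummy).shape)           # expected: torch.Size([4, 62])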
| 36.307692
| 96
| 0.469068
| 235
| 2,360
| 4.578723
| 0.255319
| 0.055762
| 0.066915
| 0.048327
| 0.597584
| 0.515799
| 0.515799
| 0.435874
| 0.435874
| 0.368959
| 0
| 0.093658
| 0.425424
| 2,360
| 64
| 97
| 36.875
| 0.699853
| 0.025424
| 0
| 0.5
| 0
| 0
| 0.008718
| 0
| 0
| 0
| 0
| 0.015625
| 0
| 1
| 0.125
| false
| 0
| 0.083333
| 0.041667
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9d992fc9c803eca7ba614c187b28cbfcef4b1f8
| 5,988
|
py
|
Python
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-08-31T02:14:19.000Z
|
2021-12-28T19:20:59.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-07-12T13:55:00.000Z
|
2021-10-04T14:53:21.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-09-16T05:06:18.000Z
|
2021-09-16T05:06:18.000Z
|
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import abc
import importlib
import os
import pkgutil
import re
import time
from typing import Dict, List, Tuple
VERBOSE = False
class Commit(abc.ABC):
"""An interface for accessing details about a commit"""
@abc.abstractmethod
def get_files(self) -> List[str]:
"""Returns a list of local files added/modified by the commit"""
pass
@abc.abstractmethod
def get_removed_files(self) -> List[str]:
"""Returns a list of local files removed by the commit"""
pass
@abc.abstractmethod
def get_file_diff(self, file_name: str) -> str:
"""
Given a file name, returns a string in unified diff format
that represents the changes made to that file for this commit.
Most validators will only pay attention to added lines (with + in front)
"""
pass
@abc.abstractmethod
def get_description(self) -> str:
"""Returns the description of the commit"""
pass
@abc.abstractmethod
def get_author(self) -> str:
"""Returns the author of the commit"""
pass
def validate_commit(commit: Commit, out_errors: List[str] = None, ignore_validators: List[str] = None) -> bool:
"""Validates a commit against all validators
:param commit: The commit to validate
:param out_errors: if not None, will populate with the list of errors given by the validators
:param ignore_validators: Optional list of CommitValidator classes to ignore, by class name
:return: True if there are no validation errors, and False otherwise
"""
failed_count = 0
passed_count = 0
start_time = time.time()
# Find all the validators in the validators package (recursively)
validator_classes = []
validators_dir = os.path.join(os.path.dirname(__file__), 'validators')
for _, module_name, is_package in pkgutil.iter_modules([validators_dir]):
if not is_package:
module = importlib.import_module('commit_validation.validators.' + module_name)
validator = module.get_validator()
if ignore_validators and validator.__name__ in ignore_validators:
print(f"Disabled validation for '{validator.__name__}'")
else:
validator_classes.append(validator)
error_summary = {}
# Process validators
for validator_class in validator_classes:
validator = validator_class()
validator_name = validator.__class__.__name__
error_list = []
passed = validator.run(commit, errors = error_list)
if passed:
passed_count += 1
print(f'{validator.__class__.__name__} PASSED')
else:
failed_count += 1
print(f'{validator.__class__.__name__} FAILED')
error_summary[validator_name] = error_list
end_time = time.time()
if failed_count:
print("VALIDATION FAILURE SUMMARY")
for val_name in error_summary.keys():
errors = error_summary[val_name]
if errors:
for error_message in errors:
first_line = True
for line in error_message.splitlines():
if first_line:
first_line = False
print(f'VALIDATOR_FAILED: {val_name} {line}')
else:
print(f' {line}') # extra detail lines do not need machine parsing
stats_strs = []
if failed_count > 0:
stats_strs.append(f'{failed_count} failed')
if passed_count > 0:
stats_strs.append(f'{passed_count} passed')
stats_str = ', '.join(stats_strs) + f' in {end_time - start_time:.2f}s'
print()
print(stats_str)
return failed_count == 0
def IsFileSkipped(file_name) -> bool:
if os.path.splitext(file_name)[1].lower() not in SOURCE_AND_SCRIPT_FILE_EXTENSIONS:
skipped = True
for pattern in SOURCE_AND_SCRIPT_FILE_PATTERNS:
if pattern.match(file_name):
skipped = False
break
return skipped
return False
class CommitValidator(abc.ABC):
"""A commit validator"""
@abc.abstractmethod
def run(self, commit: Commit, errors: List[str]) -> bool:
"""Validates a commit
:param commit: The commit to validate
:param errors: List of errors generated, append them to this list
:return: True if the commit is valid, and False otherwise
"""
pass
SOURCE_FILE_EXTENSIONS: Tuple[str, ...] = (
'.c', '.cc', '.cpp', '.cxx', '.h', '.hpp', '.hxx', '.inl', '.m', '.mm', '.cs', '.java'
)
"""File extensions for compiled source code"""
SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = (
'.py', '.lua', '.bat', '.cmd', '.sh', '.js'
)
"""File extensions for interpreted code"""
BUILD_FILE_EXTENSIONS: Tuple[str, ...] = (
'.cmake',
)
"""File extensions for build files"""
SOURCE_AND_SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = SOURCE_FILE_EXTENSIONS + SCRIPT_FILE_EXTENSIONS + BUILD_FILE_EXTENSIONS
"""File extensions for both compiled and interpreted code"""
BUILD_FILE_PATTERNS: Tuple[re.Pattern, ...] = (
re.compile(r'.*CMakeLists\.txt'),
re.compile(r'.*Jenkinsfile')
)
"""File patterns for build files"""
SOURCE_AND_SCRIPT_FILE_PATTERNS: Tuple[re.Pattern, ...] = BUILD_FILE_PATTERNS
EXCLUDED_VALIDATION_PATTERNS = [
'*/.git/*',
'*/3rdParty/*',
'*/__pycache__/*',
'*/External/*',
'build',
'Cache',
'*/Code/Framework/AzCore/azgnmx/azgnmx/*',
'Code/Tools/CryFXC',
'Code/Tools/HLSLCrossCompiler',
'Code/Tools/HLSLCrossCompilerMETAL',
'Docs',
'python/runtime',
'restricted/*/Tools/*RemoteControl',
'Tools/3dsmax',
'*/user/Cache/*',
'*/user/log/*',
]
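# Minimal driver sketch (illustrative): a hand-rolled Commit is enough to run
# validate_commit(); a real integration would wrap `git diff`/`git show` output.
class StaticCommit(Commit):
    def __init__(self, files, description, author):
        self._files = files
        self._description = description
        self._author = author

    def get_files(self):
        return self._files

    def get_removed_files(self):
        return []

    def get_file_diff(self, file_name):
        return ''

    def get_description(self):
        return self._description

    def get_author(self):
        return self._author

# validate_commit(StaticCommit(['Code/foo.cpp'], 'Fix crash on startup', 'dev'))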
| 31.68254
| 124
| 0.631096
| 720
| 5,988
| 5.047222
| 0.3
| 0.04623
| 0.033021
| 0.031646
| 0.168134
| 0.118327
| 0.106219
| 0.042928
| 0.022014
| 0.022014
| 0
| 0.003149
| 0.257515
| 5,988
| 188
| 125
| 31.851064
| 0.814215
| 0.215932
| 0
| 0.125
| 0
| 0
| 0.15374
| 0.056325
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.108333
| 0.066667
| 0
| 0.175
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9dd82e962e13070a8526b2d4d0da1d0be6265ee
| 7,417
|
py
|
Python
|
src/py65/devices/mpu65c02.py
|
dabeaz/py65
|
62d790445018f0616508022912b67d8d64935a29
|
[
"BSD-3-Clause"
] | 5
|
2015-03-19T22:22:45.000Z
|
2020-05-15T18:26:59.000Z
|
src/py65/devices/mpu65c02.py
|
BigEd/py65
|
57d5e7191362006c1d6fa20662da3e4854f1b7c2
|
[
"BSD-3-Clause"
] | null | null | null |
src/py65/devices/mpu65c02.py
|
BigEd/py65
|
57d5e7191362006c1d6fa20662da3e4854f1b7c2
|
[
"BSD-3-Clause"
] | 3
|
2015-04-27T02:42:29.000Z
|
2021-07-16T20:50:23.000Z
|
from py65.devices import mpu6502
from py65.utils.devices import make_instruction_decorator
class MPU(mpu6502.MPU):
def __init__(self, *args, **kwargs):
mpu6502.MPU.__init__(self, *args, **kwargs)
self.name = '65C02'
self.waiting = False
def step(self):
if self.waiting:
self.processorCycles += 1
else:
mpu6502.MPU.step(self)
return self
# Make copies of the lists
instruct = mpu6502.MPU.instruct[:]
cycletime = mpu6502.MPU.cycletime[:]
extracycles = mpu6502.MPU.extracycles[:]
disassemble = mpu6502.MPU.disassemble[:]
instruction = make_instruction_decorator(instruct, disassemble,
cycletime, extracycles)
# addressing modes
def ZeroPageIndirectAddr(self):
return self.WordAt( 255 & (self.ByteAt(self.pc)))
def AccumulatorAddr(self):
return self.a
# operations
def opRMB(self, x, mask):
address = x()
self.memory[address] &= mask
def opSMB(self, x, mask):
address = x()
self.memory[address] |= mask
def opSTZ(self, x):
self.memory[x()] = 0x00
def opTSB(self, x):
address = x()
m = self.memory[address]
self.p &= ~self.ZERO
z = m & self.a
if z != 0:
self.p |= self.ZERO
self.memory[address] = m | self.a
def opTRB(self, x):
address = x()
m = self.memory[address]
self.p &= ~self.ZERO
z = m & self.a
if z != 0:
self.p |= self.ZERO
self.memory[address] = m & ~self.a
# instructions
@instruction(name="RMB0", mode="zpg", cycles=5)
def inst_0x07(self):
self.opRMB(self.ZeroPageAddr, 0xFE)
self.pc += 1
@instruction(name="ORA", mode="zpi", cycles=5)
def inst_0x12(self):
self.opORA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="RMB1", mode="zpg", cycles=5)
def inst_0x17(self):
self.opRMB(self.ZeroPageAddr, 0xFD)
self.pc += 1
@instruction(name="RMB2", mode="zpg", cycles=5)
def inst_0x27(self):
self.opRMB(self.ZeroPageAddr, 0xFB)
self.pc += 1
@instruction(name="AND", mode="zpi", cycles=5)
def inst_0x32(self):
self.opAND(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="BIT", mode="zpx", cycles=4)
def inst_0x34(self):
self.opBIT(self.ZeroPageXAddr)
self.pc += 1
@instruction(name="RMB3", mode="zpg", cycles=5)
def inst_0x37(self):
self.opRMB(self.ZeroPageAddr, 0xF7)
self.pc += 1
@instruction(name="BIT", mode="abx", cycles=4)
def inst_0x3c(self):
self.opBIT(self.AbsoluteXAddr)
self.pc += 2
@instruction(name="RMB4", mode="zpg", cycles=5)
def inst_0x47(self):
self.opRMB(self.ZeroPageAddr, 0xEF)
self.pc += 1
@instruction(name="EOR", mode="zpi", cycles=5)
def inst_0x52(self):
self.opEOR(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="RMB5", mode="zpg", cycles=5)
def inst_0x57(self):
self.opRMB(self.ZeroPageAddr, 0xDF)
self.pc += 1
@instruction(name="PHY", mode="imp", cycles=3)
def inst_0x5a(self):
self.stPush(self.y)
@instruction(name="STZ", mode="imp", cycles=3)
def inst_0x64(self):
self.opSTZ(self.ZeroPageAddr)
self.pc += 1
@instruction(name="RMB6", mode="zpg", cycles=5)
def inst_0x67(self):
self.opRMB(self.ZeroPageAddr, 0xBF)
self.pc += 1
@instruction(name="ADC", mode="zpi", cycles=5)
def inst_0x72(self):
self.opADC(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="STZ", mode="zpx", cycles=4)
def inst_0x74(self):
self.opSTZ(self.ZeroPageXAddr)
self.pc += 1
@instruction(name="PHY", mode="imp", cycles=4)
def inst_0x7a(self):
self.y = self.stPop()
self.FlagsNZ(self.y)
@instruction(name="RMB7", mode="zpg", cycles=5)
def inst_0x77(self):
self.opRMB(self.ZeroPageAddr, 0x7F)
self.pc += 1
@instruction(name="SMB0", mode="zpg", cycles=5)
def inst_0x87(self):
self.opSMB(self.ZeroPageAddr, 0x01)
self.pc += 1
@instruction(name="STA", mode="zpi", cycles=5)
def inst_0x92(self):
self.opSTA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SMB1", mode="zpg", cycles=5)
def inst_0x97(self):
self.opSMB(self.ZeroPageAddr, 0x02)
self.pc += 1
@instruction(name="STZ", mode="abs", cycles=4)
def inst_0x9c(self):
self.opSTZ(self.AbsoluteAddr)
self.pc += 2
@instruction(name="STZ", mode="abx", cycles=5)
def inst_0x9e(self):
self.opSTZ(self.AbsoluteXAddr)
self.pc += 2
@instruction(name="SMB2", mode="zpg", cycles=5)
def inst_0xa7(self):
self.opSMB(self.ZeroPageAddr, 0x04)
self.pc += 1
@instruction(name="LDA", mode="zpi", cycles=5)
def inst_0xb2(self):
self.opLDA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SMB3", mode="zpg", cycles=5)
def inst_0xb7(self):
self.opSMB(self.ZeroPageAddr, 0x08)
self.pc += 1
@instruction(name="SMB4", mode="zpg", cycles=5)
def inst_0xc7(self):
self.opSMB(self.ZeroPageAddr, 0x10)
self.pc += 1
@instruction(name="SMB5", mode="zpg", cycles=5)
def inst_0xd7(self):
self.opSMB(self.ZeroPageAddr, 0x20)
self.pc += 1
@instruction(name="PHX", mode="imp", cycles=3)
def inst_0xda(self):
self.stPush(self.x)
@instruction(name="SMB6", mode="zpg", cycles=5)
def inst_0xe7(self):
self.opSMB(self.ZeroPageAddr, 0x40)
self.pc += 1
@instruction(name="SMB7", mode="zpg", cycles=5)
def inst_0xf7(self):
self.opSMB(self.ZeroPageAddr, 0x80)
self.pc += 1
@instruction(name="PLX", mode="imp", cycles=4)
def inst_0xfa(self):
self.x = self.stPop()
self.FlagsNZ(self.x)
@instruction(name="TSB", mode="zpg", cycles=5)
def inst_0x04(self):
self.opTSB(self.ZeroPageAddr)
self.pc += 1
@instruction(name="TSB", mode="abs", cycles=6)
def inst_0x0c(self):
self.opTSB(self.AbsoluteAddr)
self.pc += 2
@instruction(name="TRB", mode="zpg", cycles=5)
def inst_0x14(self):
self.opTRB(self.ZeroPageAddr)
self.pc += 1
@instruction(name="INC", mode="acc", cycles=2)
def inst_0x1a(self):
self.opINCR(None)
@instruction(name="TRB", mode="abs", cycles=6)
def inst_0x1c(self):
self.opTRB(self.AbsoluteAddr)
self.pc += 2
@instruction(name="DEC", mode="acc", cycles=2)
def inst_0x3a(self):
self.opDECR(None)
@instruction(name="BRA", mode="rel", cycles=1, extracycles=1)
def inst_0x80(self):
self.BranchRelAddr()
@instruction(name="WAI", mode='imp', cycles=3)
def inst_0xCB(self):
self.waiting = True
@instruction(name="CMP", mode='zpi', cycles=6) # Don't know cycles
def inst_0xD2(self):
self.opCMP(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SBC", mode="zpi", cycles=5)
def inst_0xf2(self):
self.opSBC(self.ZeroPageIndirectAddr)
self.pc += 1
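# Execution sketch (illustrative; assumes the py65 package providing mpu6502
# is installed). Runs LDA #$01 (A9 01) followed by the 65C02-only INC A (1A)
# defined in this subclass.
if __name__ == "__main__":
    mpu = MPU()
    mpu.memory[0x0200:0x0203] = [0xA9, 0x01, 0x1A]
    mpu.pc = 0x0200
    mpu.step()            # LDA #$01
    mpu.step()            # INC A (opcode 0x1A)
    print(hex(mpu.a))     # expected: 0x2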
| 27.369004
| 71
| 0.58676
| 949
| 7,417
| 4.528978
| 0.201264
| 0.14658
| 0.047231
| 0.117264
| 0.620754
| 0.427873
| 0.255002
| 0.079107
| 0.079107
| 0.061424
| 0
| 0.050804
| 0.262235
| 7,417
| 270
| 72
| 27.47037
| 0.734649
| 0.011191
| 0
| 0.229665
| 0
| 0
| 0.037259
| 0
| 0
| 0
| 0.009281
| 0
| 0
| 1
| 0.244019
| false
| 0
| 0.009569
| 0.009569
| 0.296651
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9ddc98cf55e2bef4fcf498ec4787ca57bad46d0
| 5,623
|
py
|
Python
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 1
|
2021-09-03T04:02:25.000Z
|
2021-09-03T04:02:25.000Z
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 3
|
2020-09-29T21:43:39.000Z
|
2021-07-21T22:18:27.000Z
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 4
|
2019-11-14T13:25:24.000Z
|
2021-04-30T22:08:53.000Z
|
import pandas as pd
import shutil
import os
import io
from ms_mint.Mint import Mint
from pathlib import Path as P
from ms_mint.io import (
ms_file_to_df,
mzml_to_pandas_df_pyteomics,
convert_ms_file_to_feather,
convert_ms_file_to_parquet,
MZMLB_AVAILABLE,
)
from paths import (
TEST_MZML,
TEST_MZXML,
TEST_PARQUET,
TEST_MZMLB_POS,
TEST_MZML_POS,
TEST_MZML_NEG,
)
def test__ms_file_to_df__mzML():
result = ms_file_to_df(TEST_MZML)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__ms_file_to_df__mzML_timeunit_minutes():
result = ms_file_to_df(TEST_MZML, time_unit="minutes")
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__ms_file_to_df__mzXML():
result = ms_file_to_df(TEST_MZXML)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__mzml_to_pandas_df_pyteomics_pos():
result = mzml_to_pandas_df_pyteomics(TEST_MZML_POS)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
assert all(result.polarity == "+"), f'Polarity should be "+"\n{result}'
def test__mzml_to_pandas_df_pyteomics_neg():
result = mzml_to_pandas_df_pyteomics(TEST_MZML_NEG)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
assert all(result.polarity == "-"), f'Polarity should be "-"\n{result}'
def test__read_parquet():
result = ms_file_to_df(TEST_PARQUET)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__write_read_hdf(tmpdir):
df = ms_file_to_df(TEST_PARQUET)
fn = P(tmpdir) / "file.hdf"
df.to_hdf(fn, key="data")
result = ms_file_to_df(fn)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__read_mzMLb(tmpdir):
if not MZMLB_AVAILABLE:
return None
result = ms_file_to_df(TEST_MZMLB_POS)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
# assert all(result.polarity == '+'), f'Polarity should be "+"\n{result}'
def test__convert_ms_file_to_feather(tmpdir):
print(tmpdir)
shutil.copy(TEST_MZML, tmpdir)
fn = P(tmpdir) / P(TEST_MZML).name
fn_out = fn.with_suffix(".feather")
print(fn, fn_out)
convert_ms_file_to_feather(fn)
assert fn_out.is_file(), f"File not generated {fn_out}"
df = ms_file_to_df(fn)
df_fea = ms_file_to_df(fn_out)
assert df_fea.equals(df), "DataFrames not equal"
def test__convert_ms_file_to_parquet(tmpdir):
print(tmpdir)
shutil.copy(TEST_MZML, tmpdir)
fn = P(tmpdir) / P(TEST_MZML).name
fn_out = fn.with_suffix(".parquet")
print(fn, fn_out)
convert_ms_file_to_parquet(fn)
assert fn_out.is_file(), f"File not generated {fn_out}"
df = ms_file_to_df(fn)
df_fea = ms_file_to_df(fn_out)
assert df_fea.equals(df), "DataFrames not equal"
def test__export_to_excel(tmp_path):
filename = os.path.join(tmp_path, "output.xlsx")
mint = Mint(verbose=True)
mint.ms_files = "tests/data/test.mzXML"
mint.run()
mint.export(filename)
assert os.path.isfile(filename)
def test__export_to_excel_without_fn():
mint = Mint(verbose=True)
mint.ms_files = TEST_MZXML
mint.targets = pd.DataFrame(
{
"peak_label": ["A"],
"mz_mean": [200],
"mz_width": [10],
"intensity_threshold": [0],
"rt_min": [0],
"rt_max": [10],
"targets_filename": ["unknown"],
}
)
mint.run()
buffer = mint.export()
assert isinstance(buffer, io.BytesIO)
df = pd.read_excel(buffer, sheet_name="Results")
assert len(df) == 1, len(df)
assert df.loc[0, "peak_label"] == "A", df.loc[0, "peak_label"]
assert df.loc[0, "ms_file"] == P(TEST_MZXML).name, df.loc[0, "ms_file"]
| 27.563725
| 81
| 0.634181
| 771
| 5,623
| 4.293126
| 0.143969
| 0.041692
| 0.050755
| 0.045317
| 0.776435
| 0.712387
| 0.676737
| 0.622659
| 0.583988
| 0.583988
| 0
| 0.003273
| 0.239374
| 5,623
| 203
| 82
| 27.699507
| 0.770634
| 0.012627
| 0
| 0.528736
| 0
| 0
| 0.178198
| 0.003784
| 0.005747
| 0
| 0
| 0
| 0.155172
| 1
| 0.068966
| false
| 0
| 0.045977
| 0
| 0.12069
| 0.022989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9e0543df8f2ae150950f2a9787edb6296aac618
| 2,482
|
py
|
Python
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 43
|
2015-08-04T20:13:41.000Z
|
2019-04-12T17:21:36.000Z
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 966
|
2015-07-29T16:43:21.000Z
|
2019-05-09T21:02:28.000Z
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 40
|
2015-07-29T16:42:41.000Z
|
2019-02-07T02:30:34.000Z
|
from bluesky.plans import scan
from bluesky.simulators import (print_summary, print_summary_wrapper,
summarize_plan,
check_limits,
plot_raster_path)
import pytest
from bluesky.plans import grid_scan
def test_print_summary(hw):
det = hw.det
motor = hw.motor
print_summary(scan([det], motor, -1, 1, 10)) # old name
summarize_plan(scan([det], motor, -1, 1, 10)) # new name
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(hw):
det = hw.det
motor = hw.motor
motor1 = hw.motor1
motor2 = hw.motor2
from bluesky.plan_tools import (print_summary, print_summary_wrapper,
plot_raster_path)
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
with pytest.warns(UserWarning):
plan = grid_scan([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)
plot_raster_path(plan, 'motor1', 'motor2', probe_size=.3)
def test_check_limits(RE, hw):
det = hw.det
motor = hw.motor
# The motor object does not currently implement limits.
# Use an assert to help us out if this changes in the future.
assert not hasattr(motor, 'limits')
# # check_limits should warn if it can't find check_value
# TODO: Is there _any_ object to test?
# with pytest.warns(UserWarning):
# check_limits(scan([det], motor, -1, 1, 3))
# monkey-patch some limits
motor.limits = (-2, 2)
# check_limits should do nothing here
check_limits(scan([det], motor, -1, 1, 3))
# check_limits should error if limits are exceeded only if object raises
# this object does not raise
check_limits(scan([det], motor, -3, 3, 3))
# check_limits should raise if limits are equal only if object raises
# this object does not raise
motor.limits = (2, 2)
check_limits(scan([det], motor, -1, 1, 3))
def test_check_limits_needs_RE():
with pytest.raises(RuntimeError) as ctx:
check_limits([])
assert str(ctx.value) == "Bluesky event loop not running"
def test_plot_raster_path(hw):
det = hw.det
motor1 = hw.motor1
motor2 = hw.motor2
plan = grid_scan([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)
plot_raster_path(plan, 'motor1', 'motor2', probe_size=.3)
| 34
| 76
| 0.636583
| 357
| 2,482
| 4.271709
| 0.268908
| 0.086557
| 0.07082
| 0.068197
| 0.531148
| 0.492459
| 0.372459
| 0.32918
| 0.251803
| 0.199344
| 0
| 0.037695
| 0.251813
| 2,482
| 72
| 77
| 34.472222
| 0.783522
| 0.224013
| 0
| 0.565217
| 0
| 0
| 0.031414
| 0
| 0
| 0
| 0
| 0.013889
| 0.043478
| 1
| 0.108696
| false
| 0
| 0.108696
| 0
| 0.217391
| 0.152174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9e2c12e3855c30001fd37ab610587d3e95c803d
| 535
|
py
|
Python
|
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from distutils.util import strtobool
class Config:
DEBUG = bool(strtobool(os.getenv('DEBUG', "False")))
DATABASE_URI = os.getenv('DATABASE_URI', '127.0.0.1:27017')
WORKERS = int(os.getenv('WORKERS', 2))
LOGO = os.getenv('LOGO', None)
HOST = os.getenv('HOST', '127.0.0.1')
PORT = int(os.getenv('PORT', 8000))
SECRET = os.getenv('SECRET', 'secret')
LOGIN_MIN_LENGTH = int(os.getenv('LOGIN_MIN_LENGTH', 1))
LOGIN_MAX_LENGTH = int(os.getenv('LOGIN_MAX_LENGTH', 32))
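# Usage sketch (illustrative): the values are read from the environment while
# the class body executes, so variables must be set before this module is imported.
#   $ DEBUG=true PORT=9000 python -c "from config import Config; print(Config.DEBUG, Config.PORT)"
#   True 9000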
| 31.470588
| 63
| 0.646729
| 80
| 535
| 4.2
| 0.4375
| 0.214286
| 0.130952
| 0.035714
| 0.130952
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058166
| 0.164486
| 535
| 16
| 64
| 33.4375
| 0.693512
| 0.039252
| 0
| 0
| 0
| 0
| 0.212891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9e36baa14d5265769af32c8ed910969e39eaf3a
| 199
|
py
|
Python
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Temurson/semantic
|
2e9cd2c006cec9a0328791e47d8c6d60af6d5a1b
|
[
"MIT"
] | 8,844
|
2019-05-31T15:47:12.000Z
|
2022-03-31T18:33:51.000Z
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Qanora/semantic
|
b0eda9a61bbc690a342fb177cfc12eec8c1c001c
|
[
"MIT"
] | 401
|
2019-05-31T18:30:26.000Z
|
2022-03-31T16:32:29.000Z
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Qanora/semantic
|
b0eda9a61bbc690a342fb177cfc12eec8c1c001c
|
[
"MIT"
] | 504
|
2019-05-31T17:55:03.000Z
|
2022-03-30T04:15:04.000Z
|
# CHECK-TREE: { const <- \x -> \y -> x; y <- const #true #true; z <- const #false #false; #record { const: const, y : y, z: z, }}
const = lambda x, y: x
y = const(True, True)
z = const(False, False)
| 39.8
| 129
| 0.557789
| 34
| 199
| 3.264706
| 0.294118
| 0.072072
| 0.054054
| 0.072072
| 0.594595
| 0.594595
| 0.594595
| 0.594595
| 0.594595
| 0.594595
| 0
| 0
| 0.221106
| 199
| 4
| 130
| 49.75
| 0.716129
| 0.613065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9e707edd4da101ada4ff00b233330f2c2f9843e
| 148
|
py
|
Python
|
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
h = int(input())
i = 1
a = 1
b = 1
c = 1
while h >= a:
a = 2 ** i
i += 1
s = 0
t = True
for j in range(1, i-1):
c += 2 ** j
print(c)
| 8.705882
| 23
| 0.398649
| 35
| 148
| 1.685714
| 0.542857
| 0.101695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 0.398649
| 148
| 16
| 24
| 9.25
| 0.550562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9ebcddd99e456fbeb39a0191aad31656c7f4943
| 856
|
py
|
Python
|
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='firetv',
version='1.0.7',
description='Communicate with an Amazon Fire TV device via ADB over a network.',
url='https://github.com/happyleavesaoc/python-firetv/',
license='MIT',
author='happyleaves',
author_email='happyleaves.tfr@gmail.com',
packages=['firetv'],
install_requires=['pycryptodome', 'rsa', 'adb-homeassistant', 'pure-python-adb-homeassistant'],
extras_require={
'firetv-server': ['Flask>=0.10.1', 'PyYAML>=3.12']
},
entry_points={
'console_scripts': [
'firetv-server = firetv.__main__:main'
]
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3'
]
)
| 30.571429
| 99
| 0.613318
| 91
| 856
| 5.67033
| 0.725275
| 0.062016
| 0.096899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018209
| 0.23014
| 856
| 27
| 100
| 31.703704
| 0.764795
| 0
| 0
| 0
| 0
| 0
| 0.538551
| 0.063084
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9ffb7c6fff3e245dc8ea1ea786cc6f60c2d4cde
| 2,427
|
py
|
Python
|
generator/cache/cache.py
|
biarmic/OpenCache
|
bb9e110e434deb83900de328cc76b63901ba582f
|
[
"BSD-3-Clause"
] | 5
|
2021-09-15T18:29:49.000Z
|
2022-03-26T04:41:01.000Z
|
generator/cache/cache.py
|
VLSIDA/OpenCache
|
0e79bf353c68d57dcc49d78178b12fd0b468f19a
|
[
"BSD-3-Clause"
] | null | null | null |
generator/cache/cache.py
|
VLSIDA/OpenCache
|
0e79bf353c68d57dcc49d78178b12fd0b468f19a
|
[
"BSD-3-Clause"
] | null | null | null |
# See LICENSE for licensing information.
#
# Copyright (c) 2021 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
import debug
import datetime
from policy import associativity
from globals import OPTS, print_time
class cache:
"""
This is not a design module, but contains a cache design instance.
"""
def __init__(self, cache_config, name):
cache_config.set_local_config(self)
self.name = name
# Import the design module of the cache
if OPTS.associativity == associativity.DIRECT:
from direct_cache import direct_cache as cache
elif OPTS.associativity == associativity.N_WAY:
from n_way_cache import n_way_cache as cache
elif OPTS.associativity == associativity.FULLY:
# TODO: from full_cache import full_cache as cache
debug.error("Fully associative cache is not supported at the moment.", -1)
else:
debug.error("Invalid associativity.", -1)
self.c = cache(cache_config, name)
def config_write(self, paths):
""" Save the config files. """
self.c.config_write(paths)
def verilog_write(self, path):
""" Save the Verilog file. """
self.c.verilog_write(path)
def save(self):
""" Save all the output files. """
debug.print_raw("Saving output files...")
# Write the config files
start_time = datetime.datetime.now()
cpaths = {
"data": OPTS.output_path + OPTS.data_array_name + "_config.py",
"tag": OPTS.output_path + OPTS.tag_array_name + "_config.py",
"use": OPTS.output_path + OPTS.use_array_name + "_config.py"
}
if not OPTS.replacement_policy.has_sram_array(): del cpaths["use"]
for k, cpath in cpaths.items():
debug.print_raw("Config: Writing to {}".format(cpath))
self.config_write(cpaths)
print_time("Config", datetime.datetime.now(), start_time)
# Write the Verilog file
start_time = datetime.datetime.now()
vpath = OPTS.output_path + self.c.name + ".v"
debug.print_raw("Verilog: Writing to {}".format(vpath))
self.verilog_write(vpath)
print_time("Verilog", datetime.datetime.now(), start_time)
| 33.246575
| 86
| 0.646477
| 311
| 2,427
| 4.884244
| 0.327974
| 0.013167
| 0.050033
| 0.03555
| 0.134299
| 0.060566
| 0.060566
| 0
| 0
| 0
| 0
| 0.003319
| 0.255047
| 2,427
| 73
| 87
| 33.246575
| 0.836836
| 0.217965
| 0
| 0.051282
| 0
| 0
| 0.107817
| 0
| 0
| 0
| 0
| 0.013699
| 0
| 1
| 0.102564
| false
| 0
| 0.153846
| 0
| 0.282051
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a00d6b8c83e85268bd294d4e512d54f000cfc8a
| 2,843
|
py
|
Python
|
pytype/tests/py2/test_stdlib.py
|
souravbadami/pytype
|
804fa97e7f9208df2711976085a96f756b3949e6
|
[
"Apache-2.0"
] | 1
|
2020-04-20T02:55:21.000Z
|
2020-04-20T02:55:21.000Z
|
pytype/tests/py2/test_stdlib.py
|
doc22940/pytype
|
4772ad6fe89f4df75ae3d08e7374f68074175d4a
|
[
"Apache-2.0"
] | null | null | null |
pytype/tests/py2/test_stdlib.py
|
doc22940/pytype
|
4772ad6fe89f4df75ae3d08e7374f68074175d4a
|
[
"Apache-2.0"
] | null | null | null |
"""Tests of selected stdlib functions."""
from pytype.tests import test_base
class StdlibTests(test_base.TargetPython27FeatureTest):
"""Tests for files in typeshed/stdlib."""
def testPosix(self):
ty = self.Infer("""
import posix
x = posix.urandom(10)
""")
self.assertTypesMatchPytd(ty, """
posix = ... # type: module
x = ... # type: str
""")
def testXRange(self):
self.Check("""
import random
random.sample(xrange(10), 5)
""")
def testStringTypes(self):
ty = self.Infer("""
import types
if isinstance("", types.StringTypes):
x = 42
if isinstance(False, types.StringTypes):
y = 42
if isinstance(u"", types.StringTypes):
z = 42
""", deep=False)
self.assertTypesMatchPytd(ty, """
types = ... # type: module
x = ... # type: int
z = ... # type: int
""")
def testDefaultDict(self):
self.Check("""
import collections
import itertools
ids = collections.defaultdict(itertools.count(17).next)
""")
def testSysVersionInfoLt(self):
ty = self.Infer("""
import sys
if sys.version_info[0] < 3:
v = 42
else:
v = "hello world"
""")
self.assertTypesMatchPytd(ty, """
sys = ... # type: module
v = ... # type: int
""")
def testSysVersionInfoLe(self):
ty = self.Infer("""
import sys
if sys.version_info[0] <= 2:
v = 42
else:
v = "hello world"
""")
self.assertTypesMatchPytd(ty, """
sys = ... # type: module
v = ... # type: int
""")
def testSysVersionInfoEq(self):
ty = self.Infer("""
import sys
if sys.version_info[0] == 2:
v = 42
elif sys.version_info[0] == 3:
v = "hello world"
else:
v = None
""")
self.assertTypesMatchPytd(ty, """
sys = ... # type: module
v = ... # type: int
""")
def testSysVersionInfoGe(self):
ty = self.Infer("""
import sys
if sys.version_info[0] >= 3:
v = 42
else:
v = "hello world"
""")
self.assertTypesMatchPytd(ty, """
sys = ... # type: module
v = ... # type: str
""")
def testSysVersionInfoGt(self):
ty = self.Infer("""
import sys
if sys.version_info[0] > 2:
v = 42
else:
v = "hello world"
""")
self.assertTypesMatchPytd(ty, """
sys = ... # type: module
v = ... # type: str
""")
def testSysVersionInfoNamedAttribute(self):
ty = self.Infer("""
import sys
if sys.version_info.major == 2:
v = 42
else:
v = "hello world"
""")
self.assertTypesMatchPytd(ty, """
sys: module
v: int
""")
test_base.main(globals(), __name__ == "__main__")
| 21.869231
| 61
| 0.518115
| 299
| 2,843
| 4.866221
| 0.26087
| 0.03299
| 0.054983
| 0.082474
| 0.468729
| 0.439863
| 0.428179
| 0.428179
| 0.428179
| 0.428179
| 0
| 0.020888
| 0.326416
| 2,843
| 129
| 62
| 22.03876
| 0.738903
| 0.024974
| 0
| 0.616071
| 0
| 0
| 0.604853
| 0.03477
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.089286
| false
| 0
| 0.107143
| 0
| 0.205357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a04d1fd425aed6effcc3e48e1eb103f0872ab5a
| 3,621
|
py
|
Python
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2021-04-05T07:15:37.000Z
|
2021-04-05T07:15:37.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2022-02-27T12:17:27.000Z
|
2022-02-27T12:17:27.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 David R. Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
import keyring
from libqtile.log_utils import logger
from libqtile.widget import base
class ImapWidget(base.ThreadPoolText):
"""Email IMAP widget
This widget will scan one of your imap email boxes and report the number of
unseen messages present. I've configured it to only work with imap with
ssl. Your password is obtained from the Gnome Keyring.
Writing your password to the keyring initially is as simple as (changing
out <userid> and <password> for your userid and password):
1) create the file ~/.local/share/python_keyring/keyringrc.cfg with the
following contents::
[backend]
default-keyring=keyring.backends.Gnome.Keyring
keyring-path=/home/<userid>/.local/share/keyring/
2) Execute the following python shell script once::
#!/usr/bin/env python3
import keyring
user = <userid>
password = <password>
keyring.set_password('imapwidget', user, password)
mbox names must include the path to the mbox (except for the default
INBOX). So, for example if your mailroot is ``~/Maildir``, and you want to
look at the mailbox at HomeMail/fred, the mbox setting would be:
``mbox="~/Maildir/HomeMail/fred"``. Note the nested sets of quotes! Labels
can be whatever you choose, of course.
Widget requirements: keyring_.
.. _keyring: https://pypi.org/project/keyring/
"""
defaults = [
('mbox', '"INBOX"', 'mailbox to fetch'),
('label', 'INBOX', 'label for display'),
('user', None, 'email username'),
('server', None, 'email server name'),
]
def __init__(self, **config):
base.ThreadPoolText.__init__(self, "", **config)
self.add_defaults(ImapWidget.defaults)
password = keyring.get_password('imapwidget', self.user)
if password is not None:
self.password = password
else:
logger.critical('Gnome Keyring Error')
def poll(self):
im = imaplib.IMAP4_SSL(self.server, 993)
if self.password == 'Gnome Keyring Error':
self.text = 'Gnome Keyring Error'
else:
im.login(self.user, self.password)
status, response = im.status(self.mbox, '(UNSEEN)')
self.text = response[0].decode()
self.text = self.label + ': ' + re.sub(r'\).*$', '', re.sub(r'^.*N\s', '', self.text))
im.logout()
return self.text
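# Configuration sketch (illustrative; the account, host and bar layout are
# placeholders). In a qtile config.py the widget is added to a bar like any other:
#   from libqtile import bar, widget
#   my_bar = bar.Bar(
#       [widget.ImapWidget(user='fred@example.com', server='imap.example.com',
#                          mbox='"INBOX"', label='INBOX')],
#       24,
#   )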
| 38.521277
| 98
| 0.67219
| 485
| 3,621
| 4.985567
| 0.472165
| 0.036394
| 0.021092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004681
| 0.233085
| 3,621
| 93
| 99
| 38.935484
| 0.866042
| 0.631041
| 0
| 0.064516
| 0
| 0
| 0.150741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0.16129
| 0.16129
| 0
| 0.322581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a04e4f203740a253735948c968506f6632354e6
| 2,486
|
py
|
Python
|
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
from rest_framework.response import Response
from rest_framework.test import APIClient
from game.models import GameDefinition, AppUser
def create_game_definition(api_client: APIClient) -> Response:
return api_client.post("/api/game_definition")
def get_game_definition(api_client: APIClient, game_def_id: str) -> Response:
return api_client.get(f"/api/game_definition/{game_def_id}")
def test_returns_game_def_to_the_current_user_by_hash_id(api_client):
post_game_def_response = create_game_definition(api_client)
assert post_game_def_response.status_code == 201
game_def_id = post_game_def_response.data["id"]
assert isinstance(game_def_id, str)
get_game_def_response = get_game_definition(api_client, game_def_id)
assert get_game_def_response.status_code == 200
assert get_game_def_response.data == post_game_def_response.data
def test_returns_game_def_to_another_user_by_hash_id(api_client):
post_game_def_response = create_game_definition(api_client)
assert post_game_def_response.status_code == 201
game_def_id = post_game_def_response.data["id"]
assert isinstance(game_def_id, str)
user2 = AppUser.objects.create(vk_id=2, username=2)
api_client.force_authenticate(user2)
get_game_def_response = get_game_definition(api_client, game_def_id)
assert get_game_def_response.status_code == 200
assert get_game_def_response.data == post_game_def_response.data
def test_game_def_not_found_by_int_id(api_client):
post_game_def_response = create_game_definition(api_client)
assert post_game_def_response.status_code == 201
game_def_id = post_game_def_response.data["id"]
int_game_def_id = GameDefinition.objects.get(pk=game_def_id).id.id
assert isinstance(int_game_def_id, int)
get_game_def_response = get_game_definition(
api_client, str(int_game_def_id)
)
assert get_game_def_response.status_code == 404
assert get_game_def_response.data == {"detail": "Страница не найдена."}
def test_game_def_permission_denied_if_started(api_client):
post_game_def_response = create_game_definition(api_client)
game_def_id = post_game_def_response.data["id"]
GameDefinition.objects.filter(id=game_def_id).update(started=True)
get_game_def_response = get_game_definition(api_client, game_def_id)
assert get_game_def_response.status_code == 403
assert get_game_def_response.data == {
'detail': 'К игре уже нельзя подключиться'
}
| 35.514286
| 77
| 0.79284
| 382
| 2,486
| 4.657068
| 0.172775
| 0.177066
| 0.210793
| 0.138842
| 0.680157
| 0.639123
| 0.617201
| 0.578977
| 0.578977
| 0.535132
| 0
| 0.011579
| 0.131537
| 2,486
| 69
| 78
| 36.028986
| 0.812413
| 0
| 0
| 0.444444
| 0
| 0
| 0.049879
| 0.013677
| 0
| 0
| 0
| 0
| 0.311111
| 1
| 0.133333
| false
| 0
| 0.066667
| 0.044444
| 0.244444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a0b84b7b59fd4b039d379ec665100c80b070e0d
| 1,347
|
py
|
Python
|
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | 4
|
2020-11-17T05:24:24.000Z
|
2021-06-14T21:01:45.000Z
|
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | null | null | null |
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | null | null | null |
# Time Complexity - O(n) ; Space Complexity - O(n)
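# ListNode is normally supplied by the LeetCode judge; a minimal stand-in is
# assumed here so the solution below can be defined and run locally.
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next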
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
carry = 0
out = temp = ListNode()
while l1 is not None and l2 is not None:
tempsum = l1.val + l2.val
tempsum += carry
if tempsum > 9:
carry = tempsum//10
tempsum %= 10
else:
carry = 0
temp.next = ListNode(tempsum)
temp = temp.next
l1 = l1.next
l2 = l2.next
if l1:
while l1:
tempsum = l1.val + carry
if tempsum > 9:
carry = tempsum//10
tempsum %= 10
else:
carry = 0
temp.next = ListNode(tempsum)
temp = temp.next
l1 = l1.next
elif l2:
while l2:
tempsum = l2.val + carry
if tempsum > 9:
carry = tempsum//10
tempsum %= 10
else:
carry = 0
temp.next = ListNode(tempsum)
temp = temp.next
l2 = l2.next
if carry:
temp.next = ListNode(carry)
return out.next
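# Round-trip sketch (illustrative): 342 + 465 = 807, with digits stored in
# reverse order as the problem specifies.
def to_list(node):
    out = []
    while node:
        out.append(node.val)
        node = node.next
    return out

l1 = ListNode(2, ListNode(4, ListNode(3)))
l2 = ListNode(5, ListNode(6, ListNode(4)))
print(to_list(Solution().addTwoNumbers(l1, l2)))   # [7, 0, 8]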
| 31.325581
| 76
| 0.400148
| 131
| 1,347
| 4.114504
| 0.236641
| 0.103896
| 0.118738
| 0.083488
| 0.539889
| 0.502783
| 0.502783
| 0.502783
| 0.502783
| 0.502783
| 0
| 0.061129
| 0.526355
| 1,347
| 43
| 77
| 31.325581
| 0.783699
| 0.035635
| 0
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0
| 0
| 0.073171
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a186a13afeea2c9ca39fb78982684eb10c871db
| 3,784
|
py
|
Python
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2021-10-14T07:27:47.000Z
|
2021-10-14T07:27:47.000Z
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 4
|
2019-08-06T02:26:32.000Z
|
2021-06-10T21:39:19.000Z
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2018-05-11T18:04:41.000Z
|
2018-05-11T18:04:41.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# login.py
# @Author : Gustavo Freitas (gustavo@gmf-tech.com)
# @Link :
# @Date : 12/12/2019, 11:43:07 AM
from typing import Optional, Any
from fastapi import APIRouter, Body, Depends, HTTPException
from fastapi import Header, Security
from authentication.models.users import User
from fastapi.security import HTTPBasic, HTTPBasicCredentials, APIKeyHeader
from typing import List
from starlette.responses import Response
from fastapi.encoders import jsonable_encoder
from authentication.interfaces.database import database
import jwt
from starlette.status import HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED
from datetime import datetime, timedelta
from hashlib import sha256
from authentication.interfaces.token import verify_token
router = APIRouter()
security = HTTPBasic(auto_error=True)
api_key = APIKeyHeader(name="x-api-key", auto_error=True)
@router.post("/login", tags=["token"])
async def renew_token(
response: Response,
user: dict = Depends(verify_token),
x_api_key: str = Header(None),
):
response.headers["x-api-key"] = x_api_key
return {"verified": True, "user": user["email"]}
@router.put("/login", tags=["token"])
async def renew_token(response: Response, user: dict = Depends(verify_token)):
sql = """UPDATE users.tbl_users
SET token = :token WHERE
id = :id"""
token = f"{user['pwd_updated_at']}-{user['email']}-{datetime.now()}"
mhash = sha256(token.encode("utf-8"))
token = mhash.hexdigest()
await database.execute(query=sql, values={"id": user["id"], "token": token})
response.headers["x-api-key"] = jwt.encode(
{**user, **dict(exp=(datetime.now() + timedelta(hours=8)))},
token,
algorithm="HS256",
).decode()
return {"renew": True}
# @router.post("/login", dependencies=[Depends(verify_token)])
# async def renew_token(x_api_key: str = Header(None)):
# return {"ok": x_api_key}
@router.get(
"/login", response_model=User, tags=["auth"], response_model_exclude_unset=True
)
async def login_basic(
response: Response, authorization: HTTPBasicCredentials = Security(security)
):
sql = """SELECT tu.id, tu.email, tu."name", tu.linkedin_id , tu.pwd_updated_at
FROM users.tbl_users tu
WHERE tu.passwd is NOT NULL
AND tu.passwd = crypt(:secret,tu.passwd)
AND tu.email = :email
AND tu.enabled = true """
users = await database.fetch_one(
query=sql,
values={"email": authorization.username, "secret": authorization.password},
)
if not users:
raise HTTPException(status_code=HTTP_401_UNAUTHORIZED)
user = jsonable_encoder(users)
sql = """SELECT tp.acl_profile as profile
FROM users.tbl_users tu inner join
users.tbl_profile_users tpu on tpu.id_users = tu.id inner join
users.tbl_profile tp on tp.id = tpu.id_profile
WHERE tu.passwd is NOT NULL
AND tu.passwd = crypt(:secret,tu.passwd)
AND tu.email = :email"""
profiles = await database.fetch_all(
query=sql,
values={"email": authorization.username, "secret": authorization.password},
)
if not profiles:
raise HTTPException(status_code=HTTP_401_UNAUTHORIZED)
user["acl"] = jsonable_encoder(profiles)
sql = """UPDATE users.tbl_users
SET token = :token WHERE
id = :id"""
token = f"{user['pwd_updated_at']}-{authorization.username}-{datetime.now()}"
mhash = sha256(token.encode("utf-8"))
token = mhash.hexdigest()
await database.execute(query=sql, values={"id": user["id"], "token": token})
response.headers["x-api-key"] = jwt.encode(
{**user, **dict(exp=(datetime.now() + timedelta(hours=8)))},
token,
algorithm="HS256",
).decode()
return user
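# Client-side sketch (illustrative; the base URL and credentials are placeholders).
# GET /login authenticates via HTTP Basic and returns the signed JWT in the
# x-api-key response header; POST /login verifies that token and renews it.
if __name__ == "__main__":
    import requests  # assumed client dependency, not used by the service itself
    resp = requests.get("http://localhost:8000/login", auth=("user@example.com", "secret"))
    token = resp.headers["x-api-key"]
    requests.post("http://localhost:8000/login", headers={"x-api-key": token})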
| 29.795276
| 83
| 0.681818
| 496
| 3,784
| 5.08871
| 0.294355
| 0.021395
| 0.022187
| 0.021395
| 0.493265
| 0.443344
| 0.443344
| 0.425515
| 0.385103
| 0.385103
| 0
| 0.014815
| 0.17944
| 3,784
| 126
| 84
| 30.031746
| 0.798068
| 0.075846
| 0
| 0.395349
| 0
| 0
| 0.267355
| 0.055651
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.069767
| 0.162791
| 0
| 0.197674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a1f7efcf406b9bcc9bc35cc271b47eed9db309f
| 7,998
|
py
|
Python
|
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | null | null | null |
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | null | null | null |
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | 1
|
2020-02-05T20:02:15.000Z
|
2020-02-05T20:02:15.000Z
|
import re
import os
import cmd
import sys
import common
from getpass import getpass
from kp import KeePassError, get_password
from configmanager import ConfigManager, ConfigManagerError
common.init()
class ParseArgsException(Exception):
def __init__(self, msg):
self.msg = msg
class ModuleCore(cmd.Cmd):
def __init__(self, module = ''):
cmd.Cmd.__init__(self)
self.master = None
if module == '#':
self.prompt_sign = '#>'
elif module != '':
self.prompt_sign = '[' + module + ']>'
else:
self.prompt_sign = '->'
#defaults
self.ruler = '-'
#Completions
self.directories = []
self.file_server_database = []
self.file_server = []
self.do_cd('.')
configs = ConfigManager().get_config_list()
for conf in configs:
self.file_server_database.append(conf)
self.file_server.append(conf)
for srv in ConfigManager('config/' + conf + '.yaml').get_all():
self.file_server_database.append(conf + '.' + srv)
self.file_server.append(conf + '.' + srv)
for db in ConfigManager('config/' + conf + '.yaml').get(srv)['databases']:
self.file_server_database.append(conf + '.' + srv + '.' + db)
def precmd(self, line):
if not sys.stdin.isatty():
print(line)
return line
def postcmd(self, stop, line):
if not sys.stdin.isatty():
print("")
return stop
def parse_args(self, string="", n=0, m=0):
list = re.findall('"+.*"+|[a-zA-Z0-9!@#$%^&*()_+-,./<>?]+', string)
arg_counter = len(list);
if (arg_counter >= n and arg_counter <= m) or (arg_counter == n and m == 0) or n == 0:
r_list = []
for l in list:
r_list.append(l.replace('"', ''))
return (r_list, len(list))
else:
raise ParseArgsException("Incorrect number of arguments")
# runs the given function (callback) on all databases
def exec_on_config(self, callback, args, values, view = ''): # link - file.server.base
if values == '': # run on all config files
files = ConfigManager().get_config_list() # fetch the list of config files
# show what the callback will run on
print("Exec on:")
for file in files:
print('+-',file)
ans = input("Are you sure? [NO/yes/info]: ")
if ans == "yes": #wykonaj callback
for file in files:
if view == 'tree': print('+-', file)
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
if view == 'tree': print("| +-", srv)
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("| | +-", db)
if view == 'list': print('[', file, '->', srv, '->', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
elif ans == "info": # only report what the callback would run on
for file in files:
print('+-', file)
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
print('| +-', srv)
databases = servers[srv]["databases"]
for db in databases:
print('| | +-', db)
else: # nothing confirmed, do not run anything
print("aborted")
else: # if a specific target was given
val = values.split('.') # split into file_name.server.database
params = len(val)
if params == 1: # if only a file name was given, run on every server and database defined in it
file = val[0]
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
if view == 'tree': print("+-", srv)
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("| +-", db)
if view == 'list': print('[', srv, '->', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
elif params == 2: # if a file name and server were given, run on all databases on that server
file = val[0]
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
srv = val[1]
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("+-", db)
if view == 'list': print('[', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
elif params == 3: # a file name, server and database were given - run exactly on that one
try:
callback(val[0], val[1], val[2], *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
# returns a shortened path to the current directory - helper function
def get_shortpath(self):
path = common.get_cdir()
separator = ''
if '\\' in path:
separator = '\\'
else:
separator = '/'
start = path.find(separator)
end = path.rfind(separator, 0, len(path)-1)
if start < end:
return (path[0:start+1] + '...' + path[end:])
else:
return (path)
# tab completion for the cd command
def complete_cd(self, text, line, begidx, endidx):
if not text:
completions = self.directories[:]
else:
completions = [f for f in self.directories if f.startswith(text)]
return completions
# cd command - allows moving between directories
def do_cd(self, args):
"Move to directory"
if args == '':
print(common.get_cdir())
else:
try:
common.chdir(args)
self.prompt = self.get_shortpath() + ' ' + self.prompt_sign
self.directories = []
for name in os.listdir(common.get_cdir()):
if os.path.isdir(os.path.join(common.get_cdir(), name)):
self.directories.append(name)
except FileNotFoundError as e:
print(e)
# lists all files in the current location
def do_ls(self, args):
"List directory"
for name in os.listdir(common.get_cdir()):
print(name)
# prints the full path of the current directory
def do_pwd(self, args):
"Print path"
print(common.get_cdir())
# lets the user decide whether warnings should be displayed
def do_warn(self, args):
"""warn <on/off>"""
try:
(values, values_num) = self.parse_args(args, 0, 1)
if values_num == 1:
if values[0] == 'on':
print('Warnings on')
self.warn = True
elif values[0] == 'off':
print('Warnings off')
self.warn = False
else:
print('Incorrect argument.')
else:
if self.warn:
print('Status: on')
else:
print('Status: off')
except ParseArgsException as e:
print(e)
# sets the master password for KeePass
def do_setMaster(self,args):
"Set master password"
if sys.stdin.isatty(): # when running as an interactive shell
p = getpass('Enter Master Password: ')
else:
p = sys.stdin.readline().rstrip()
self.master = p
def do_exit(self, *args):
return True
def do_EOF(self, line):
return True
def emptyline(self):
return False
# We must catch everything possible - missing file, wrong master, etc. - and raise a single exception
def get_password(self, alias):
keepass_path = common.keepass_path
if self.master is None:
raise KeePassError("Master Password Not Set")
try:
return get_password(keepass_path, self.master, alias)
except KeePassError as e:
raise e
def connect_command_builder(self, connection, perm):
try:
command = connection["adress"] + "_" + connection["user"] + "_" + \
self.get_password(connection["keepass"]) + "_" + str(connection["sshport"]) + "_" + str(connection["remoteport"]) + "_" + perm
except (KeyError, KeePassError) as e1:
try:
command = connection["adress"] + "_" + connection["user"] + "_" + \
connection["passwd"] + "_" + str(connection["sshport"]) + "_" + str(connection["remoteport"]) + "_" + perm
except KeyError:
if isinstance(e1, KeePassError):
raise KeePassError("Unable to use Keepass(" + e1.value + ") or Password")
else:
raise KeePassError("Invalid connection in yaml file")
return command
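# Illustrative sketch with an assumed YAML entry (field names taken from the code above):
#   connection = {"adress": "10.0.0.1", "user": "bob", "passwd": "pw", "sshport": 22, "remoteport": 5432}
#   self.connect_command_builder(connection, "ro")  ->  "10.0.0.1_bob_pw_22_5432_ro"
# Here the missing "keepass" key makes the first attempt fail with KeyError,
# so the plain-password branch builds the command instead.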
| 29.512915
| 132
| 0.635159
| 1,039
| 7,998
| 4.805582
| 0.256015
| 0.006008
| 0.01442
| 0.016223
| 0.286
| 0.276387
| 0.231925
| 0.187863
| 0.175446
| 0.175446
| 0
| 0.0048
| 0.218555
| 7,998
| 271
| 133
| 29.512915
| 0.79408
| 0.147162
| 0
| 0.348416
| 0
| 0
| 0.102377
| 0.005542
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081448
| false
| 0.072398
| 0.036199
| 0.013575
| 0.180995
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a315f9411feef2bef3f2cfb2fab79f19fe80e02
| 7,842
|
py
|
Python
|
weaver/wps_restapi/quotation/quotes.py
|
crim-ca/weaver
|
107fec5e19f20b77061b9405a764da911d2db8a2
|
[
"Apache-2.0"
] | 16
|
2019-03-18T12:23:05.000Z
|
2022-02-25T00:39:11.000Z
|
weaver/wps_restapi/quotation/quotes.py
|
crim-ca/weaver
|
107fec5e19f20b77061b9405a764da911d2db8a2
|
[
"Apache-2.0"
] | 346
|
2019-03-06T21:05:04.000Z
|
2022-03-31T13:38:37.000Z
|
weaver/wps_restapi/quotation/quotes.py
|
crim-ca/weaver
|
107fec5e19f20b77061b9405a764da911d2db8a2
|
[
"Apache-2.0"
] | 5
|
2019-03-15T01:38:28.000Z
|
2021-11-11T15:38:43.000Z
|
import logging
import random
from datetime import timedelta
from typing import TYPE_CHECKING
from duration import to_iso8601
from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk
from weaver import sort
from weaver.config import WEAVER_CONFIGURATION_ADES, WEAVER_CONFIGURATION_EMS, get_weaver_configuration
from weaver.database import get_db
from weaver.datatype import Bill, Quote
from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions
from weaver.formats import OUTPUT_FORMAT_JSON
from weaver.processes.types import PROCESS_APPLICATION, PROCESS_WORKFLOW
from weaver.processes.wps_package import get_package_workflow_steps, get_process_location
from weaver.store.base import StoreBills, StoreQuotes
from weaver.utils import get_settings, get_weaver_url
from weaver.wps_restapi import swagger_definitions as sd
from weaver.wps_restapi.processes.processes import submit_local_job
if TYPE_CHECKING:
from weaver.datatype import Process
from weaver.typedefs import JSON
LOGGER = logging.getLogger(__name__)
def process_quote_estimator(process): # noqa: E811
# type: (Process) -> JSON
"""
Simulate quote parameters for the process execution.
:param process: instance of :class:`weaver.datatype.Process` for which to evaluate the quote.
:return: dict of {price, currency, estimatedTime} values for the process quote.
"""
# TODO: replace by some fancy ml technique or something?
price = random.uniform(0, 10) # nosec
currency = "CAD"
estimated_time = to_iso8601(timedelta(minutes=random.uniform(5, 60))) # nosec
return {"price": price, "currency": currency, "estimatedTime": estimated_time}
@sd.process_quotes_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostProcessQuoteRequestEndpoint(), response_schemas=sd.post_quotes_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def request_quote(request):
"""
Request a quotation for a process.
"""
settings = get_settings(request)
weaver_config = get_weaver_configuration(settings)
if weaver_config not in [WEAVER_CONFIGURATION_ADES, WEAVER_CONFIGURATION_EMS]:
raise HTTPBadRequest("Unsupported request for configuration '{}'.".format(weaver_config))
process_id = request.matchdict.get("process_id")
process_store = get_db(request).get_store("processes")
try:
process = process_store.fetch_by_id(process_id)
except ProcessNotFound:
raise HTTPNotFound("Could not find process with specified 'process_id'.")
store = get_db(request).get_store(StoreQuotes)
process_url = get_process_location(process_id, data_source=get_weaver_url(settings))
process_type = process.type
process_params = dict()
for param in ["inputs", "outputs", "mode", "response"]:
if param in request.json:
process_params[param] = request.json.pop(param)
process_quote_info = process_quote_estimator(process)
process_quote_info.update({
"process": process_id,
"processParameters": process_params,
"location": process_url,
"user": str(request.authenticated_userid)
})
# loop workflow sub-process steps to get individual quotes
if process_type == PROCESS_WORKFLOW and weaver_config == WEAVER_CONFIGURATION_EMS:
workflow_quotes = list()
for step in get_package_workflow_steps(process_url):
# retrieve quote from provider ADES
# TODO: data source mapping
process_step_url = get_process_location(step["reference"])
process_quote_url = "{}/quotations".format(process_step_url)
subreq = request.copy()
subreq.path_info = process_quote_url
resp_json = request.invoke_subrequest(subreq).json()
quote_json = resp_json["quote"]
quote = store.save_quote(Quote(**quote_json))
workflow_quotes.append(quote.id)
process_quote_info.update({"steps": workflow_quotes})
quote = store.save_quote(Quote(**process_quote_info))
return HTTPCreated(json={"quote": quote.json()})
# single application quotes (ADES or EMS)
elif process_type == PROCESS_APPLICATION:
quote = store.save_quote(Quote(**process_quote_info))
quote_json = quote.json()
quote_json.pop("steps", None)
return HTTPCreated(json={"quote": quote_json})
# error if not handled up to this point
raise HTTPBadRequest("Unsupported quoting process type '{0}' on '{1}'.".format(process_type, weaver_config))
@sd.process_quotes_service.get(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.ProcessQuotesEndpoint(), response_schemas=sd.get_quote_list_responses)
@sd.quotes_service.get(tags=[sd.TAG_BILL_QUOTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.QuotesEndpoint(), response_schemas=sd.get_quote_list_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def get_quote_list(request):
"""
Get list of quotes IDs.
"""
page = int(request.params.get("page", "0"))
limit = int(request.params.get("limit", "10"))
filters = {
"process_id": request.params.get("process", None) or request.matchdict.get("process_id", None),
"page": page,
"limit": limit,
"sort": request.params.get("sort", sort.SORT_CREATED),
}
store = get_db(request).get_store(StoreQuotes)
items, count = store.find_quotes(**filters)
return HTTPOk(json={
"count": count,
"page": page,
"limit": limit,
"quotes": [quote.id for quote in items]
})
@sd.process_quote_service.get(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.ProcessQuoteEndpoint(), response_schemas=sd.get_quote_responses)
@sd.quote_service.get(tags=[sd.TAG_BILL_QUOTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.QuoteEndpoint(), response_schemas=sd.get_quote_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def get_quote_info(request):
"""
Get quote information.
"""
quote_id = request.matchdict.get("quote_id")
store = get_db(request).get_store(StoreQuotes)
try:
quote = store.fetch_by_id(quote_id)
except QuoteNotFound:
raise HTTPNotFound("Could not find quote with specified 'quote_id'.")
return HTTPOk(json={"quote": quote.json()})
@sd.process_quote_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_EXECUTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostProcessQuote(), response_schemas=sd.post_quote_responses)
@sd.quote_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_EXECUTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostQuote(), response_schemas=sd.post_quote_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def execute_quote(request):
"""
Execute a quoted process.
"""
quote_info = get_quote_info(request).json["quote"]
quote_bill_info = {
"quote": quote_info.get("id"),
"price": quote_info.get("price"),
"currency": quote_info.get("currency")
}
job_resp = submit_local_job(request)
job_json = job_resp.json
job_id = job_json.get("jobID")
user_id = str(request.authenticated_userid)
store = get_db(request).get_store(StoreBills)
bill = store.save_bill(Bill(user=user_id, job=job_id, **quote_bill_info))
job_json.update({"bill": bill.id})
return HTTPCreated(json=job_json)
| 43.810056
| 119
| 0.718822
| 968
| 7,842
| 5.568182
| 0.206612
| 0.025974
| 0.023748
| 0.016883
| 0.311317
| 0.284972
| 0.219666
| 0.200557
| 0.167532
| 0.156957
| 0
| 0.003418
| 0.179291
| 7,842
| 178
| 120
| 44.05618
| 0.834058
| 0.080719
| 0
| 0.130769
| 0
| 0
| 0.067594
| 0
| 0
| 0
| 0
| 0.011236
| 0
| 1
| 0.038462
| false
| 0
| 0.153846
| 0
| 0.238462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a31701fc7c063904134f212988d1c0c79559f82
| 6,722
|
py
|
Python
|
pysnmp/CISCO-VSI-CONTROLLER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/CISCO-VSI-CONTROLLER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/CISCO-VSI-CONTROLLER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module CISCO-VSI-CONTROLLER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-VSI-CONTROLLER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:03:33 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
ObjectIdentity, NotificationType, Gauge32, Bits, Unsigned32, IpAddress, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, Counter32, Counter64, iso, Integer32, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "NotificationType", "Gauge32", "Bits", "Unsigned32", "IpAddress", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "Counter32", "Counter64", "iso", "Integer32", "TimeTicks")
TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "DisplayString")
ciscoVSIControllerMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 141))
if mibBuilder.loadTexts: ciscoVSIControllerMIB.setLastUpdated('9906080000Z')
if mibBuilder.loadTexts: ciscoVSIControllerMIB.setOrganization('Cisco Systems, Inc.')
class CvcControllerShelfLocation(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("internal", 1), ("external", 2))
class CvcControllerType(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("par", 1), ("pnni", 2), ("lsc", 3))
cvcMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 141, 1))
cvcConfController = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1))
cvcConfTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1), )
if mibBuilder.loadTexts: cvcConfTable.setStatus('current')
cvcConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1), ).setIndexNames((0, "CISCO-VSI-CONTROLLER-MIB", "cvcConfControllerID"))
if mibBuilder.loadTexts: cvcConfEntry.setStatus('current')
cvcConfControllerID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: cvcConfControllerID.setStatus('current')
cvcConfControllerType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 2), CvcControllerType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfControllerType.setStatus('current')
cvcConfControllerShelfLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 3), CvcControllerShelfLocation()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfControllerShelfLocation.setStatus('current')
cvcConfControllerLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfControllerLocation.setStatus('current')
cvcConfControllerName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 5), DisplayString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfControllerName.setStatus('current')
cvcConfVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfVpi.setStatus('current')
cvcConfVci = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfVci.setStatus('current')
cvcConfRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 141, 1, 1, 1, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cvcConfRowStatus.setStatus('current')
cvcMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 141, 3))
cvcMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 141, 3, 1))
cvcMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 141, 3, 2))
cvcMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 141, 3, 1, 1)).setObjects(("CISCO-VSI-CONTROLLER-MIB", "cvcConfGroup"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfGroupExternal"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cvcMIBCompliance = cvcMIBCompliance.setStatus('current')
cvcConfGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 141, 3, 2, 1)).setObjects(("CISCO-VSI-CONTROLLER-MIB", "cvcConfControllerType"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfControllerShelfLocation"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfControllerLocation"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfControllerName"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cvcConfGroup = cvcConfGroup.setStatus('current')
cvcConfGroupExternal = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 141, 3, 2, 2)).setObjects(("CISCO-VSI-CONTROLLER-MIB", "cvcConfVpi"), ("CISCO-VSI-CONTROLLER-MIB", "cvcConfVci"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cvcConfGroupExternal = cvcConfGroupExternal.setStatus('current')
mibBuilder.exportSymbols("CISCO-VSI-CONTROLLER-MIB", cvcConfTable=cvcConfTable, cvcMIBGroups=cvcMIBGroups, cvcConfControllerType=cvcConfControllerType, cvcConfVpi=cvcConfVpi, CvcControllerShelfLocation=CvcControllerShelfLocation, cvcConfControllerLocation=cvcConfControllerLocation, cvcConfController=cvcConfController, cvcConfControllerName=cvcConfControllerName, PYSNMP_MODULE_ID=ciscoVSIControllerMIB, cvcConfControllerID=cvcConfControllerID, cvcConfGroupExternal=cvcConfGroupExternal, cvcMIBCompliance=cvcMIBCompliance, cvcConfEntry=cvcConfEntry, ciscoVSIControllerMIB=ciscoVSIControllerMIB, cvcConfControllerShelfLocation=cvcConfControllerShelfLocation, cvcConfRowStatus=cvcConfRowStatus, cvcConfGroup=cvcConfGroup, CvcControllerType=CvcControllerType, cvcConfVci=cvcConfVci, cvcMIBObjects=cvcMIBObjects, cvcMIBCompliances=cvcMIBCompliances, cvcMIBConformance=cvcMIBConformance)
| 105.03125
| 883
| 0.759298
| 739
| 6,722
| 6.903924
| 0.197564
| 0.012544
| 0.01176
| 0.014896
| 0.356919
| 0.253038
| 0.217758
| 0.217758
| 0.217758
| 0.215602
| 0
| 0.070471
| 0.090152
| 6,722
| 63
| 884
| 106.698413
| 0.763734
| 0.050878
| 0
| 0.09434
| 0
| 0
| 0.1849
| 0.063569
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.132075
| 0
| 0.283019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a415615b9b2bc4e4bdf10ab3d417314a169e277
| 44,836
|
py
|
Python
|
phi/math/backend/_backend.py
|
marc-gav/PhiFlow
|
b6186fd1503d040997b52d49aa18cd875267c27e
|
[
"MIT"
] | null | null | null |
phi/math/backend/_backend.py
|
marc-gav/PhiFlow
|
b6186fd1503d040997b52d49aa18cd875267c27e
|
[
"MIT"
] | null | null | null |
phi/math/backend/_backend.py
|
marc-gav/PhiFlow
|
b6186fd1503d040997b52d49aa18cd875267c27e
|
[
"MIT"
] | null | null | null |
from collections import namedtuple
from contextlib import contextmanager
from threading import Barrier
from typing import List, Callable
import numpy
from ._dtype import DType, combine_types
SolveResult = namedtuple('SolveResult', [
'method', 'x', 'residual', 'iterations', 'function_evaluations', 'converged', 'diverged', 'message',
])
class ComputeDevice:
"""
A physical device that can be selected to perform backend computations.
"""
def __init__(self, backend: 'Backend', name: str, device_type: str, memory: int, processor_count: int, description: str, ref=None):
self.name: str = name
""" Name of the compute device. CPUs are typically called `'CPU'`. """
self.device_type: str = device_type
""" Type of device such as `'CPU'`, `'GPU'` or `'TPU'`. """
self.memory: int = memory
""" Maximum memory of the device that can be allocated (in bytes). -1 for n/a. """
self.processor_count: int = processor_count
""" Number of CPU cores or GPU multiprocessors. -1 for n/a. """
self.description: str = description
""" Further information about the device such as driver version. """
self.ref = ref
""" (Optional) Reference to the internal device representation. """
self.backend: 'Backend' = backend
""" Backend that this device belongs to. Different backends represent the same device with different objects. """
def __repr__(self):
mem = f"{(self.memory / 1024 ** 2)} MB" if self.memory > 0 else "memory: n/a"
pro = f"{self.processor_count} processors" if self.processor_count > 0 else "processors: n/a"
descr = self.description.replace('\n', ' ')
if len(descr) > 30:
descr = descr[:28] + "..."
return f"'{self.name}' ({self.device_type}) | {mem} | {pro} | {descr}"
class Backend:
def __init__(self, name: str, default_device: ComputeDevice):
"""
Backends delegate low-level operations to a compute library or emulate them.
The methods of `Backend` form a comprehensive list of available operations.
To support a compute library, subclass `Backend` and register it by adding it to `BACKENDS`.
Args:
name: Human-readable string
default_device: `ComputeDevice` being used by default
"""
self._name = name
self._default_device = default_device
def __enter__(self):
_DEFAULT.append(self)
def __exit__(self, exc_type, exc_val, exc_tb):
_DEFAULT.pop(-1)
@property
def name(self) -> str:
return self._name
def supports(self, feature: str or Callable) -> bool:
"""
Tests if this backend supports the given feature.
Features correspond to a method of this backend that must be implemented if the feature is supported.
Possible features:
* `sparse_tensor`
* `gradients`
Args:
feature: `str` or unbound Backend method, e.g. `Backend.sparse_tensor`
Returns:
Whether the feature is supported.
"""
feature = feature if isinstance(feature, str) else feature.__name__
if not hasattr(Backend, feature):
raise ValueError(f"Not a valid feature: '{feature}'")
backend_fun = getattr(Backend, feature)
impl_fun = getattr(self.__class__, feature)
return impl_fun is not backend_fun
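# Usage sketch (assumes some concrete Backend subclass instance `backend` is at hand):
# a feature counts as supported iff the subclass overrides the corresponding method.
#   backend.supports(Backend.sparse_tensor)  # True only if sparse_tensor is overridden
#   backend.supports('gradients')            # same check by feature name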
def prefers_channels_last(self) -> bool:
raise NotImplementedError()
@property
def precision(self) -> int:
""" Short for math.backend.get_precision() """
return get_precision()
@property
def float_type(self) -> DType:
return DType(float, self.precision)
@property
def as_registered(self) -> 'Backend':
from phi.math.backend import BACKENDS
for backend in BACKENDS:
if self.name in backend.name:
return backend
raise RuntimeError(f"Backend '{self}' is not visible.")
@property
def complex_type(self) -> DType:
return DType(complex, max(64, self.precision))
def combine_types(self, *dtypes: DType) -> DType:
return combine_types(*dtypes, fp_precision=self.precision)
def auto_cast(self, *tensors) -> list:
"""
Determines the appropriate value type resulting from operations involving the tensors as input.
This method is called by the default implementations of basic operators.
Backends can override this method to prevent unnecessary casting.
Args:
*tensors: tensors to cast and to consider when determining the common data type
Returns:
tensors cast to a common data type
"""
dtypes = [self.dtype(t) for t in tensors]
result_type = self.combine_types(*dtypes)
if result_type.kind in (int, float, complex, bool):
tensors = [self.cast(t, result_type) for t in tensors]
return tensors
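# Sketch of the intended effect (hypothetical int and float inputs): both tensors
# come back cast to the common type picked by combine_types() under the current
# precision setting, so mixing int32 with float data yields two float tensors:
#   a, b = backend.auto_cast(int_tensor, float_tensor)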
def __str__(self):
return self.name
def __repr__(self):
return self.name
def list_devices(self, device_type: str or None = None) -> List[ComputeDevice]:
"""
Fetches information about all available compute devices this backend can use.
Implementations:
* NumPy: [`os.cpu_count`](https://docs.python.org/3/library/os.html#os.cpu_count)
* PyTorch: [`torch.cuda.get_device_properties`](https://pytorch.org/docs/stable/cuda.html#torch.cuda.get_device_properties)
* TensorFlow: `tensorflow.python.client.device_lib.list_local_devices`
* Jax: [`jax.devices`](https://jax.readthedocs.io/en/latest/jax.html#jax.devices)
Args:
device_type: (optional) Return only devices of this type, e.g. `'GPU'` or `'CPU'`. See `ComputeDevice.device_type`.
Returns:
`list` of all currently available devices.
"""
raise NotImplementedError()
def get_default_device(self) -> ComputeDevice:
return self._default_device
def set_default_device(self, device: ComputeDevice or str):
if isinstance(device, str):
devices = self.list_devices(device)
assert len(devices) >= 1, f"{self.name}: Cannot select '{device}' because no device of this type is available."
device = devices[0]
self._default_device = device
def seed(self, seed: int):
raise NotImplementedError()
def is_tensor(self, x, only_native=False):
"""
An object is considered a native tensor by a backend if no internal conversion is required by backend methods.
An object is considered a tensor (native or otherwise) by a backend if it is not a struct (e.g. tuple, list) and all methods of the backend accept it as a tensor argument.
Args:
x: object to check
only_native: If True, only accepts true native tensor representations, not Python numbers or others that are also supported as tensors (Default value = False)
Returns:
bool: whether `x` is considered a tensor by this backend
"""
raise NotImplementedError()
def as_tensor(self, x, convert_external=True):
"""
Converts a tensor-like object to the native tensor representation of this backend.
If x is a native tensor of this backend, it is returned without modification.
If x is a Python number (numbers.Number instance), `convert_external` decides whether to convert it unless the backend cannot handle Python numbers.
*Note:* There may be objects that are considered tensors by this backend but are not native and thus, will be converted by this method.
Args:
x: tensor-like, e.g. list, tuple, Python number, tensor
convert_external: if False and `x` is a Python number that is understood by this backend, this method returns the number as-is. This can help prevent type clashes like int32 vs int64. (Default value = True)
Returns:
tensor representation of `x`
"""
raise NotImplementedError()
def is_available(self, tensor) -> bool:
"""
Tests if the value of the tensor is known and can be read at this point.
If true, `numpy(tensor)` must return a valid NumPy representation of the value.
Tensors are typically available when the backend operates in eager mode.
Args:
tensor: backend-compatible tensor
Returns:
bool
"""
raise NotImplementedError()
def numpy(self, tensor) -> numpy.ndarray:
"""
Returns a NumPy representation of the given tensor.
If `tensor` is already a NumPy array, it is returned without modification.
This method raises an error if the value of the tensor is not known at this point, e.g. because it represents a node in a graph.
Use `is_available(tensor)` to check if the value can be represented as a NumPy array.
Args:
tensor: backend-compatible tensor
Returns:
NumPy representation of the values stored in the tensor
"""
raise NotImplementedError()
def to_dlpack(self, tensor):
raise NotImplementedError()
def from_dlpack(self, capsule):
raise NotImplementedError()
def copy(self, tensor, only_mutable=False):
raise NotImplementedError()
def call(self, f: Callable, *args, name=None):
"""
Calls `f(*args)` and returns the result.
This method may be used to register internal calls with the profiler.
Usage:
choose_backend(key).call(custom_function, *args)
"""
return f(*args)
def block_until_ready(self, values):
pass
def jit_compile(self, f: Callable) -> Callable:
return NotImplemented
def functional_gradient(self, f, wrt: tuple or list, get_output: bool):
raise NotImplementedError(self)
def custom_gradient(self, f: Callable, gradient: Callable) -> Callable:
"""
Creates a function based on `f` that uses a custom gradient for backprop.
Args:
f: Forward function.
gradient: Function for backprop. Will be called as `gradient(*d_out)` to compute the gradient of `f`.
Returns:
Function with similar signature and return values as `f`. However, the returned function does not support keyword arguments.
"""
return NotImplemented
def jit_compile_grad(self, f, wrt: tuple or list, get_output: bool):
raise NotImplementedError()
def transpose(self, tensor, axes):
raise NotImplementedError()
def random_uniform(self, shape):
""" Float tensor of selected precision containing random values in the range [0, 1) """
raise NotImplementedError(self)
def random_normal(self, shape):
""" Float tensor of selected precision containing random values sampled from a normal distribution with mean 0 and std 1. """
raise NotImplementedError(self)
def stack(self, values, axis=0):
raise NotImplementedError(self)
def concat(self, values, axis):
raise NotImplementedError(self)
def pad(self, value, pad_width, mode: str = 'constant', constant_values=0):
"""
Pad a tensor with values as specified by `mode` and `constant_values`.
If the mode is not supported, returns NotImplemented.
Args:
value: tensor
pad_width: 2D tensor specifying the number of values padded to the edges of each axis in the form [[axis 0 lower, axis 0 upper], ...] including batch and component axes.
mode: One of 'constant', 'boundary', 'periodic', 'symmetric', 'reflect' (Default value = 'constant')
constant_values: used for out-of-bounds points if mode='constant' (Default value = 0)
Returns:
padded tensor or NotImplemented
"""
raise NotImplementedError(self)
def reshape(self, value, shape):
raise NotImplementedError(self)
def flip(self, value, axes: tuple or list):
slices = tuple(slice(None, None, -1 if i in axes else None) for i in range(self.ndims(value)))
return value[slices]
def sum(self, value, axis=None, keepdims=False):
raise NotImplementedError(self)
def prod(self, value, axis=None):
raise NotImplementedError(self)
def divide_no_nan(self, x, y):
"""
Computes x/y but returns 0 if y=0.
Args:
x:
y:
Returns:
"""
raise NotImplementedError(self)
def where(self, condition, x=None, y=None):
raise NotImplementedError(self)
def nonzero(self, values):
"""
Args:
values: Tensor with only spatial dimensions
Returns:
non-zero multi-indices as tensor of shape (nnz, vector)
"""
raise NotImplementedError(self)
def mean(self, value, axis=None, keepdims=False):
raise NotImplementedError(self)
def range(self, start, limit=None, delta=1, dtype: DType = DType(int, 32)):
raise NotImplementedError(self)
def zeros(self, shape, dtype: DType = None):
raise NotImplementedError(self)
def zeros_like(self, tensor):
raise NotImplementedError(self)
def ones(self, shape, dtype: DType = None):
raise NotImplementedError(self)
def ones_like(self, tensor):
raise NotImplementedError(self)
def meshgrid(self, *coordinates):
raise NotImplementedError(self)
def linspace(self, start, stop, number):
raise NotImplementedError(self)
def tensordot(self, a, a_axes: tuple or list, b, b_axes: tuple or list):
""" Multiply-sum-reduce a_axes of a with b_axes of b. """
raise NotImplementedError(self)
def matmul(self, A, b):
raise NotImplementedError(self)
def einsum(self, equation, *tensors):
raise NotImplementedError(self)
def while_loop(self, loop: Callable, values: tuple):
"""
```python
while any(values[0]):
values = loop(*values)
return values
```
This operation does not support backpropagation.
Args:
loop: Loop function, must return a `tuple` with entries equal to `values` in shape and data type.
values: Initial values of loop variables.
Returns:
Loop variables upon loop completion.
"""
raise NotImplementedError(self)
def abs(self, x):
raise NotImplementedError(self)
def sign(self, x):
raise NotImplementedError(self)
def round(self, x):
raise NotImplementedError(self)
def ceil(self, x):
raise NotImplementedError(self)
def floor(self, x):
raise NotImplementedError(self)
def max(self, x, axis=None, keepdims=False):
raise NotImplementedError(self)
def min(self, x, axis=None, keepdims=False):
raise NotImplementedError(self)
def maximum(self, a, b):
raise NotImplementedError(self)
def minimum(self, a, b):
raise NotImplementedError(self)
def clip(self, x, minimum, maximum):
raise NotImplementedError(self)
def sqrt(self, x):
raise NotImplementedError(self)
def exp(self, x):
raise NotImplementedError(self)
def conv(self, value, kernel, zero_padding=True):
"""
Convolve value with kernel.
Depending on the tensor rank, the convolution is either 1D (rank=3), 2D (rank=4) or 3D (rank=5).
Higher dimensions may not be supported.
Args:
value: tensor of shape (batch_size, in_channel, spatial...)
kernel: tensor of shape (batch_size or 1, out_channel, in_channel, spatial...)
zero_padding: If True, pads the edges of `value` with zeros so that the result has the same shape as `value`.
Returns:
Convolution result as tensor of shape (batch_size, out_channel, spatial...)
"""
raise NotImplementedError(self)
def expand_dims(self, a, axis=0, number=1):
raise NotImplementedError(self)
def shape(self, tensor):
raise NotImplementedError(self)
def staticshape(self, tensor):
raise NotImplementedError(self)
def cast(self, x, dtype: DType):
raise NotImplementedError(self)
def to_float(self, x):
"""
Converts a tensor to floating point values with precision equal to the currently set default precision.
See Also:
`Backend.precision()`.
If `x` is mutable and of the correct floating type, returns a copy of `x`.
To convert float tensors to the backend precision but leave non-float tensors untouched, use `Backend.as_tensor()`.
Args:
x: tensor of bool, int or float
Returns:
Values of `x` as float tensor
"""
return self.cast(x, self.float_type)
def to_int32(self, x):
return self.cast(x, DType(int, 32))
def to_int64(self, x):
return self.cast(x, DType(int, 64))
def to_complex(self, x):
return self.cast(x, DType(complex, max(64, min(self.precision * 2, 128))))
def batched_gather_nd(self, values, indices):
"""
Gathers values from the tensor `values` at locations `indices`.
The first dimension of `values` and `indices` is the batch dimension which must be either equal for both or one for either.
Args:
values: tensor of shape (batch, spatial..., channel)
indices: int tensor of shape (batch, any..., multi_index) where the size of multi_index is values.rank - 2.
Returns:
Gathered values as tensor of shape (batch, any..., channel)
"""
raise NotImplementedError(self)
def flatten(self, x):
return self.reshape(x, (-1,))
def std(self, x, axis=None, keepdims=False):
raise NotImplementedError(self)
def boolean_mask(self, x, mask, axis=0):
"""
Args:
x: tensor with any number of dimensions
mask: 1D mask tensor
axis: Axis index >= 0
"""
raise NotImplementedError(self)
def isfinite(self, x):
raise NotImplementedError(self)
def scatter(self, base_grid, indices, values, mode: str):
"""
Depending on `mode`, performs scatter_update or scatter_add.
Args:
base_grid: Tensor into which scatter values are inserted at indices. Tensor of shape (batch_size, spatial..., channels)
indices: Tensor of shape (batch_size or 1, update_count, index_vector)
values: Values to scatter at indices. Tensor of shape (batch_size or 1, update_count or 1, channels or 1)
mode: One of ('update', 'add')
Returns:
Copy of base_grid with values at `indices` updated by `values`.
"""
raise NotImplementedError(self)
def any(self, boolean_tensor, axis=None, keepdims=False):
raise NotImplementedError(self)
def all(self, boolean_tensor, axis=None, keepdims=False):
raise NotImplementedError(self)
def fft(self, x):
"""
Computes the n-dimensional FFT along all but the first and last dimensions.
Args:
x: tensor of dimension 3 or higher
Returns:
"""
raise NotImplementedError(self)
def ifft(self, k):
"""
Computes the n-dimensional inverse FFT along all but the first and last dimensions.
Args:
k: tensor of dimension 3 or higher
Returns:
"""
raise NotImplementedError(self)
def imag(self, x):
raise NotImplementedError(self)
def real(self, x):
raise NotImplementedError(self)
def sin(self, x):
raise NotImplementedError(self)
def cos(self, x):
raise NotImplementedError(self)
def tan(self, x):
raise NotImplementedError(self)
def log(self, x):
""" Natural logarithm """
raise NotImplementedError(self)
def log2(self, x):
raise NotImplementedError(self)
def log10(self, x):
raise NotImplementedError(self)
def dtype(self, array) -> DType:
raise NotImplementedError(self)
def tile(self, value, multiples):
"""
Repeats the tensor along each axis the number of times given by multiples.
If `multiples` has more dimensions than `value`, these dimensions are added to `value` as outer dimensions.
Args:
value: tensor
multiples: tuple or list of integers
Returns:
tiled tensor
"""
raise NotImplementedError(self)
def sparse_tensor(self, indices, values, shape):
"""
Optional features.
Args:
indices: tuple/list matching the dimensions (pair for matrix)
values:
shape:
Returns:
"""
raise NotImplementedError(self)
def coordinates(self, tensor):
"""
Returns the coordinates and values of a tensor.
Args:
tensor: Sparse tensor
Returns:
coordinates: `tuple` of tensor holding the coordinate vectors, i.e. (row, col) for matrices.
values: Tensor holding the corresponding values
"""
raise NotImplementedError(self)
def minimize(self, method: str, f, x0, atol, max_iter, trj: bool):
from scipy.optimize import OptimizeResult, minimize
from threading import Thread
assert self.supports(Backend.functional_gradient)
assert len(self.staticshape(x0)) == 2 # (batch, parameters)
batch_size = self.staticshape(x0)[0]
fg = self.functional_gradient(f, [0], get_output=True)
method_description = f"SciPy {method} with {self.name}"
iterations = [0] * batch_size
function_evaluations = [0] * batch_size
xs = [None] * batch_size
final_losses = [None] * batch_size
converged = [False] * batch_size
diverged = [False] * batch_size
messages = [""] * batch_size
f_inputs = [None] * batch_size
f_b_losses = None
f_b_losses_np = None
f_grad_np = None
f_input_available = Barrier(batch_size + 1)
f_output_available = Barrier(batch_size + 1)
finished = [False] * batch_size
all_finished = False
trajectories = [[] for _ in range(batch_size)] if trj else None
threads = []
for b in range(batch_size):
def b_thread(b=b):
recent_b_losses = []
def b_fun(x: numpy.ndarray):
function_evaluations[b] += 1
f_inputs[b] = self.as_tensor(x, convert_external=True)
f_input_available.wait()
f_output_available.wait()
recent_b_losses.append(f_b_losses[b])
if final_losses[b] is None: # first evaluation
final_losses[b] = f_b_losses[b]
if trajectories is not None:
trajectories[b].append(SolveResult(method_description, x0[b], f_b_losses[b], 0, 1, False, False, ""))
return f_b_losses_np[b], f_grad_np[b]
def callback(x, *args): # L-BFGS-B only passes x but the documentation says (x, state)
iterations[b] += 1
loss = min(recent_b_losses)
recent_b_losses.clear()
final_losses[b] = loss
if trajectories is not None:
trajectories[b].append(SolveResult(method_description, x, loss, iterations[b], function_evaluations[b], False, False, ""))
res = minimize(fun=b_fun, x0=x0[b], jac=True, method=method, tol=atol[b], options={'maxiter': max_iter[b]}, callback=callback)
assert isinstance(res, OptimizeResult)
# res.nit, res.nfev
xs[b] = res.x
converged[b] = res.success
diverged[b] = res.status not in (0, 1) # 0=success
messages[b] = res.message
finished[b] = True
while not all_finished:
f_input_available.wait()
f_output_available.wait()
b_thread = Thread(target=b_thread)
threads.append(b_thread)
b_thread.start()
while True:
f_input_available.wait()
if all(finished):
all_finished = True
f_output_available.wait()
break
_, f_b_losses, f_grad = fg(self.stack(f_inputs))
f_b_losses_np = self.numpy(f_b_losses).astype(numpy.float64)
f_grad_np = self.numpy(f_grad).astype(numpy.float64)
f_output_available.wait()
for b_thread in threads:
b_thread.join() # make sure threads exit correctly
if trj:
max_trajectory_length = max([len(t) for t in trajectories])
last_points = [SolveResult(method_description, xs[b], final_losses[b], iterations[b], function_evaluations[b], converged[b], diverged[b], "") for b in range(batch_size)]
trajectories = [t[:-1] + [last_point] * (max_trajectory_length - len(t) + 1) for t, last_point in zip(trajectories, last_points)]
trajectory = []
for states in zip(*trajectories):
x = self.stack([self.to_float(state.x) for state in states])
residual = self.stack([state.residual for state in states])
iterations = [state.iterations for state in states]
function_evaluations = [state.function_evaluations for state in states]
converged = [state.converged for state in states]
diverged = [state.diverged for state in states]
trajectory.append(SolveResult(method_description, x, residual, iterations, function_evaluations, converged, diverged, messages))
return trajectory
else:
x = self.stack(xs)
residual = self.stack(final_losses)
return SolveResult(method_description, x, residual, iterations, function_evaluations, converged, diverged, messages)
def linear_solve(self, method: str, lin, y, x0, rtol, atol, max_iter, trj: bool) -> SolveResult or List[SolveResult]:
"""
Solve the system of linear equations A · x = y.
This method need not provide a gradient for the operation.
Args:
method: Which algorithm to use. One of `('auto', 'CG', 'CG-adaptive')`.
lin: Linear operation. One of
* sparse/dense matrix valid for all instances
* tuple/list of sparse/dense matrices for varying matrices along batch, must have the same nonzero locations.
* linear function A(x), must be called on all instances in parallel
y: target result of A * x. 2nd order tensor (batch, vector) or list of vectors.
x0: Initial guess of size (batch, parameters)
rtol: Relative tolerance of size (batch,)
atol: Absolute tolerance of size (batch,)
max_iter: Maximum number of iterations of size (batch,)
trj: Whether to record and return the optimization trajectory as a `List[SolveResult]`.
Returns:
result: `SolveResult` or `List[SolveResult]`, depending on `trj`.
"""
if method == 'auto':
return self.conjugate_gradient_adaptive(lin, y, x0, rtol, atol, max_iter, trj)
elif method == 'CG':
return self.conjugate_gradient(lin, y, x0, rtol, atol, max_iter, trj)
elif method == 'CG-adaptive':
return self.conjugate_gradient_adaptive(lin, y, x0, rtol, atol, max_iter, trj)
else:
raise NotImplementedError(f"Method '{method}' not supported for linear solve.")
def conjugate_gradient(self, lin, y, x0, rtol, atol, max_iter, trj: bool) -> SolveResult or List[SolveResult]:
""" Standard conjugate gradient algorithm. Signature matches to `Backend.linear_solve()`. """
# Based on "An Introduction to the Conjugate Gradient Method Without the Agonizing Pain" by Jonathan Richard Shewchuk
# symbols: dx=d, dy=q, step_size=alpha, residual_squared=delta, residual=r, y=b
method = f"Φ-Flow CG ({self.name})"
y = self.to_float(y)
x0 = self.copy(self.to_float(x0), only_mutable=True)
batch_size = self.staticshape(y)[0]
tolerance_sq = self.maximum(rtol ** 2 * self.sum(y ** 2, -1), atol ** 2)
x = x0
dx = residual = y - self.linear(lin, x)
it_counter = 0
iterations = self.zeros([batch_size], DType(int, 32))
function_evaluations = self.ones([batch_size], DType(int, 32))
residual_squared = rsq0 = self.sum(residual ** 2, -1, keepdims=True)
diverged = self.any(~self.isfinite(x), axis=(1,))
converged = self.all(residual_squared <= tolerance_sq, axis=(1,))
trajectory = [SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, "")] if trj else None
finished = converged | diverged | (iterations >= max_iter); not_finished_1 = self.to_int32(~finished) # ; active = self.to_float(self.expand_dims(not_finished_1, -1))
while ~self.all(finished):
it_counter += 1; iterations += not_finished_1
dy = self.linear(lin, dx); function_evaluations += not_finished_1
dx_dy = self.sum(dx * dy, axis=-1, keepdims=True)
step_size = self.divide_no_nan(residual_squared, dx_dy)
step_size *= self.expand_dims(self.to_float(not_finished_1), -1) # this is not really necessary but ensures batch-independence
x += step_size * dx
if it_counter % 50 == 0:
residual = y - self.linear(lin, x); function_evaluations += 1
else:
residual = residual - step_size * dy # in-place subtraction affects convergence
residual_squared_old = residual_squared
residual_squared = self.sum(residual ** 2, -1, keepdims=True)
dx = residual + self.divide_no_nan(residual_squared, residual_squared_old) * dx
diverged = self.any(residual_squared / rsq0 > 100, axis=(1,)) & (iterations >= 8)
converged = self.all(residual_squared <= tolerance_sq, axis=(1,))
if trajectory is not None:
trajectory.append(SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, ""))
x = self.copy(x)
iterations = self.copy(iterations)
finished = converged | diverged | (iterations >= max_iter); not_finished_1 = self.to_int32(~finished) # ; active = self.to_float(self.expand_dims(not_finished_1, -1))
return trajectory if trj else SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, "")
def conjugate_gradient_adaptive(self, lin, y, x0, rtol, atol, max_iter, trj: bool) -> SolveResult or List[SolveResult]:
""" Conjugate gradient algorithm with adaptive step size. Signature matches to `Backend.linear_solve()`. """
# Based on the variant described in "Methods of Conjugate Gradients for Solving Linear Systems" by Magnus R. Hestenes and Eduard Stiefel
# https://nvlpubs.nist.gov/nistpubs/jres/049/jresv49n6p409_A1b.pdf
method = f"Φ-Flow CG-adaptive ({self.name})"
y = self.to_float(y)
x0 = self.copy(self.to_float(x0), only_mutable=True)
batch_size = self.staticshape(y)[0]
tolerance_sq = self.maximum(rtol ** 2 * self.sum(y ** 2, -1), atol ** 2)
x = x0
dx = residual = y - self.linear(lin, x)
dy = self.linear(lin, dx)
iterations = self.zeros([batch_size], DType(int, 32))
function_evaluations = self.ones([batch_size], DType(int, 32))
residual_squared = rsq0 = self.sum(residual ** 2, -1, keepdims=True)
diverged = self.any(~self.isfinite(x), axis=(1,))
converged = self.all(residual_squared <= tolerance_sq, axis=(1,))
trajectory = [SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, "")] if trj else None
continue_ = ~converged & ~diverged & (iterations < max_iter)
def loop(continue_, it_counter, x, dx, dy, residual, iterations, function_evaluations, _converged, _diverged):
continue_1 = self.to_int32(continue_)
it_counter += 1
iterations += continue_1
dx_dy = self.sum(dx * dy, axis=-1, keepdims=True)
step_size = self.divide_no_nan(self.sum(dx * residual, axis=-1, keepdims=True), dx_dy)
step_size *= self.expand_dims(self.to_float(continue_1), -1) # this is not really necessary but ensures batch-independence
x += step_size * dx
# if it_counter % 50 == 0: # Not traceable since Python bool
# residual = y - self.linear(lin, x); function_evaluations += 1
# else:
residual = residual - step_size * dy # in-place subtraction affects convergence
residual_squared = self.sum(residual ** 2, -1, keepdims=True)
dx = residual - self.divide_no_nan(self.sum(residual * dy, axis=-1, keepdims=True) * dx, dx_dy)
dy = self.linear(lin, dx); function_evaluations += continue_1
diverged = self.any(residual_squared / rsq0 > 100, axis=(1,)) & (iterations >= 8)
converged = self.all(residual_squared <= tolerance_sq, axis=(1,))
if trajectory is not None:
trajectory.append(SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, ""))
x = self.copy(x)
iterations = self.copy(iterations)
continue_ = ~converged & ~diverged & (iterations < max_iter)
return continue_, it_counter, x, dx, dy, residual, iterations, function_evaluations, converged, diverged
_, _, x, _, _, residual, iterations, function_evaluations, converged, diverged =\
self.while_loop(loop, (continue_, 0, x, dx, dy, residual, iterations, function_evaluations, converged, diverged))
return trajectory if trj else SolveResult(method, x, residual, iterations, function_evaluations, converged, diverged, "")
def linear(self, lin, vector):
if callable(lin):
return lin(vector)
elif isinstance(lin, (tuple, list)):
for lin_i in lin:
lin_shape = self.staticshape(lin_i)
assert len(lin_shape) == 2
return self.stack([self.matmul(m, v) for m, v in zip(lin, self.unstack(vector))])
else:
lin_shape = self.staticshape(lin)
assert len(lin_shape) == 2, f"A must be a matrix but got shape {lin_shape}"
return self.matmul(lin, vector)
def gradients(self, y, xs: tuple or list, grad_y) -> tuple:
raise NotImplementedError(self)
def record_gradients(self, xs: tuple or list, persistent=False):
raise NotImplementedError(self)
def stop_gradient(self, value):
raise NotImplementedError(self)
def grid_sample(self, grid, spatial_dims: tuple, coordinates, extrapolation='constant'):
"""
Interpolates a regular grid at the specified coordinates.
Args:
grid: Tensor
spatial_dims: Dimension indices that correspond to coordinate vectors
coordinates: Tensor of floating grid indices.
The last dimension must match `spatial_dims`.
The first grid point of dimension i lies at position 0, the last at values.shape[i]-1.
extrapolation: Values to use for coordinates outside the grid.
One of `('undefined', 'zeros', 'boundary', 'periodic', 'symmetric', 'reflect')`.
Returns:
sampled values with linear interpolation
"""
return NotImplemented
def variable(self, value):
return NotImplemented
def ndims(self, tensor):
return len(self.staticshape(tensor))
def size(self, array):
return self.prod(self.shape(array))
def batch_gather(self, tensor, batches):
if isinstance(batches, int):
batches = [batches]
return tensor[batches, ...]
def unstack(self, tensor, axis=0, keepdims=False) -> tuple:
if axis < 0:
axis += len(tensor.shape)
if axis >= len(tensor.shape) or axis < 0:
raise ValueError("Illegal axis value")
result = []
for slice_idx in range(tensor.shape[axis]):
if keepdims:
component = tensor[tuple([slice(slice_idx, slice_idx + 1) if d == axis else slice(None) for d in range(len(tensor.shape))])]
else:
component = tensor[tuple([slice_idx if d == axis else slice(None) for d in range(len(tensor.shape))])]
result.append(component)
return tuple(result)
def equal(self, x, y):
""" Element-wise equality check """
raise NotImplementedError(self)
def not_equal(self, x, y):
return ~self.equal(x, y)
def greater_than(self, x, y):
x, y = self.auto_cast(x, y)
return x > y
def greater_or_equal(self, x, y):
x, y = self.auto_cast(x, y)
return x >= y
def add(self, a, b):
a, b = self.auto_cast(a, b)
return a + b
def sub(self, a, b):
a, b = self.auto_cast(a, b)
return a - b
def mul(self, a, b):
a, b = self.auto_cast(a, b)
return a * b
def div(self, numerator, denominator):
numerator, denominator = self.auto_cast(numerator, denominator)
return numerator / denominator
def pow(self, base, exp):
base, exp = self.auto_cast(base, exp)
return base ** exp
def mod(self, dividend, divisor):
dividend, divisor = self.auto_cast(dividend, divisor)
return dividend % divisor
def and_(self, a, b):
a, b = self.auto_cast(a, b)
return a & b
def or_(self, a, b):
a, b = self.auto_cast(a, b)
return a | b
def xor(self, a, b):
a, b = self.auto_cast(a, b)
return a ^ b
def floordiv(self, a, b):
a, b = self.auto_cast(a, b)
return a // b
BACKENDS = []
""" Global list of all registered backends. Register a `Backend` by adding it to the list. """
_DEFAULT = [] # [0] = global default, [1:] from 'with' blocks
_PRECISION = [32] # [0] = global precision in bits, [1:] from 'with' blocks
def choose_backend(*values, prefer_default=False) -> Backend:
"""
Selects a suitable backend to handle the given values.
This function is used by most math functions operating on `Tensor` objects to delegate the actual computations.
Args:
*values:
prefer_default: if True, selects the default backend assuming it can handle the values, see `default_backend()`.
Returns:
the selected `Backend`
"""
# --- Default Backend has priority ---
if _is_applicable(_DEFAULT[-1], values) and (prefer_default or _is_specific(_DEFAULT[-1], values)):
return _DEFAULT[-1]
# --- Filter out non-applicable ---
backends = [backend for backend in BACKENDS if _is_applicable(backend, values)]
if len(backends) == 0:
raise NoBackendFound(f"No backend found for types {[type(v).__name__ for v in values]}; registered backends are {BACKENDS}")
# --- Native tensors? ---
for backend in backends:
if _is_specific(backend, values):
return backend
return backends[0]
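# Usage sketch (illustrative): for plain NumPy arrays the NumPy backend wins the
# "native tensor" check and is returned, assuming it is registered in BACKENDS.
#   b = choose_backend(numpy.zeros(4), 1.5)
#   b.sum(numpy.zeros(4))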
class NoBackendFound(Exception):
"""
Thrown by `choose_backend` if no backend can handle the given values.
"""
def __init__(self, msg):
Exception.__init__(self, msg)
def default_backend() -> Backend:
"""
The default backend is preferred by `choose_backend()`.
The default backend can be set globally using `set_global_default_backend()` and locally using `with backend:`.
Returns:
current default `Backend`
"""
return _DEFAULT[-1]
def context_backend() -> Backend or None:
"""
Returns the backend set by the inner-most surrounding `with backend:` block.
If called outside a backend context, returns `None`.
Returns:
`Backend` or `None`
"""
return _DEFAULT[-1] if len(_DEFAULT) > 1 else None
def set_global_default_backend(backend: Backend):
"""
Sets the given backend as default.
This setting can be overridden using `with backend:`.
See `default_backend()`, `choose_backend()`.
Args:
backend: `Backend` to set as default
"""
assert isinstance(backend, Backend)
_DEFAULT[0] = backend
def set_global_precision(floating_point_bits: int):
"""
Sets the floating point precision of DYNAMIC_BACKEND which affects all registered backends.
If `floating_point_bits` is an integer, all floating point tensors created henceforth will be of the corresponding data type, float16, float32 or float64.
Operations may also convert floating point values to this precision, even if the input had a different precision.
If `floating_point_bits` is None, new tensors will default to float32 unless specified otherwise.
The output of math operations has the same precision as its inputs.
Args:
floating_point_bits: one of (16, 32, 64, None)
"""
_PRECISION[0] = floating_point_bits
def get_precision() -> int:
"""
Gets the current target floating point precision in bits.
The precision can be set globally using `set_global_precision()` or locally using `with precision(p):`.
Any Backend method may convert floating point values to this precision, even if the input had a different precision.
Returns:
16 for half, 32 for single, 64 for double
"""
return _PRECISION[-1]
@contextmanager
def precision(floating_point_bits: int):
"""
Sets the floating point precision for the local context.
Usage: `with precision(p):`
This overrides the global setting, see `set_global_precision()`.
Args:
floating_point_bits: 16 for half, 32 for single, 64 for double
"""
_PRECISION.append(floating_point_bits)
try:
yield None
finally:
_PRECISION.pop(-1)
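# Usage sketch: float tensors created inside the block use 64-bit precision;
# the previous setting is restored on exit, even if the block raises.
#   with precision(64):
#       x = default_backend().ones([3])  # created at float64 under this context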
def convert(tensor, backend: Backend = None, use_dlpack=True):
"""
Convert a Tensor to the native format of `backend`.
If the target backend can operate natively on `tensor`, returns `tensor`.
If both backends support *DLPack* and `use_dlpack=True`, uses zero-copy conversion using the DLPack library.
Else, intermediately converts `tensor` to a NumPy array.
*Warning*: This operation breaks the automatic differentiation chain.
Args:
tensor: Native tensor belonging to any registered backend.
backend: Target backend. If `None`, uses the current default backend, see `default_backend()`.
use_dlpack: Whether to attempt zero-copy conversion via the DLPack protocol when both backends support it.
Returns:
Tensor belonging to `backend`.
"""
backend = backend or default_backend()
current_backend = choose_backend(tensor, prefer_default=False)
if backend.is_tensor(tensor, True) or backend is current_backend:
return tensor
if use_dlpack and current_backend.supports(Backend.to_dlpack) and backend.supports(Backend.from_dlpack):
capsule = current_backend.to_dlpack(tensor)
return backend.from_dlpack(capsule)
else:
nparray = current_backend.numpy(tensor)
return backend.as_tensor(nparray)
# Backend choice utility functions
def _is_applicable(backend, values):
for value in values:
if not backend.is_tensor(value, only_native=False):
return False
return True
def _is_specific(backend, values):
for value in values:
if backend.is_tensor(value, only_native=True):
return True
return False
# Other low-level helper functions
def combined_dim(dim1, dim2, type_str: str = 'batch'):
if dim1 is None and dim2 is None:
return None
if dim1 is None or dim1 == 1:
return dim2
if dim2 is None or dim2 == 1:
return dim1
assert dim1 == dim2, f"Incompatible {type_str} dimensions: x0 {dim1}, y {dim2}"
return dim1
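# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Assumes that a NumPy-capable backend has been registered in BACKENDS elsewhere.
#
#   import numpy as np
#   x = np.zeros((4, 4))
#   backend = choose_backend(x)             # picks a backend that can handle x
#   with precision(64):                     # compute in float64 within this block
#       y = convert(x, default_backend())   # move x to the default backend's native format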
| 37.677311
| 216
| 0.629293
| 5,596
| 44,836
| 4.937098
| 0.138134
| 0.068626
| 0.066889
| 0.074055
| 0.31041
| 0.258651
| 0.223252
| 0.202295
| 0.191871
| 0.180976
| 0
| 0.008629
| 0.276274
| 44,836
| 1,189
| 217
| 37.708999
| 0.842769
| 0.322754
| 0
| 0.314035
| 0
| 0.003509
| 0.029159
| 0.000804
| 0
| 0
| 0
| 0
| 0.014035
| 1
| 0.254386
| false
| 0.001754
| 0.015789
| 0.02807
| 0.396491
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbde5b0dbcab23e1ef72b1961f7810d2ab8cc002
| 6,452
|
py
|
Python
|
file_importer0.py
|
Alva789ro/Regional-Comprehensive-Economic-Partnership-RCEP-Economic-Default-Risk-Analysis
|
454583f47883edae17391f101b10b38b68c9834f
|
[
"MIT"
] | 1
|
2021-03-15T19:44:36.000Z
|
2021-03-15T19:44:36.000Z
|
file_importer0.py
|
Alva789ro/Regional-Comprehensive-Economic-Partnership-RCEP-Economic-Default-Risk-Analysis
|
454583f47883edae17391f101b10b38b68c9834f
|
[
"MIT"
] | null | null | null |
file_importer0.py
|
Alva789ro/Regional-Comprehensive-Economic-Partnership-RCEP-Economic-Default-Risk-Analysis
|
454583f47883edae17391f101b10b38b68c9834f
|
[
"MIT"
] | 1
|
2022-02-06T01:33:41.000Z
|
2022-02-06T01:33:41.000Z
|
import xlsxwriter
import pandas as pd
import numpy as np
import mysql.connector
australia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Australia')
brunei=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Brunei')
cambodia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Cambodia')
china=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='China')
indonesia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Indonesia')
japan=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Japan')
lao=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Lao')
malaysia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Malaysia')
myanmar=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Myanmar')
new_zeland=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='New Zeland')
philipines=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Philipines')
singapore=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Singapore')
thailand=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Thailand')
vietnam=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Vietnam')
'''
mydb = mysql.connector.connect(
host = "localhost",
user = "root",
passwd = "",
database = ""
)
mycursor = mydb.cursor()
sqlformula1 = "INSERT INTO australia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(australia['Year'], australia['RGDP'], australia['NGDP'], australia['GDP_pc'], australia['Inflation'], australia['Unemployment_Rate'], australia['Net_LB'], australia['Account_Balance']):
mycursor.execute(sqlformula1, [a, b, c, d, e, f, g, h])
sqlformula2 = "INSERT INTO brunei VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(brunei['Year'], brunei['RGDP'], brunei['NGDP'], brunei['GDP_pc'], brunei['Inflation'], brunei['Unemployment_Rate'], brunei['Net_LB'], brunei['Account_Balance']):
mycursor.execute(sqlformula2, [a, b, c, d, e, f, g, h])
sqlformula3 = "INSERT INTO cambodia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(cambodia['Year'], cambodia['RGDP'], cambodia['NGDP'], cambodia['GDP_pc'], cambodia['Inflation'], cambodia['Unemployment_Rate'], cambodia['Net_LB'], cambodia['Account_Balance']):
mycursor.execute(sqlformula3, [a, b, c, d, e, f, g, h])
sqlformula4 = "INSERT INTO china VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(china['Year'], china['RGDP'], china['NGDP'], china['GDP_pc'], china['Inflation'], china['Unemployment_Rate'], china['Net_LB'], china['Account_Balance']):
mycursor.execute(sqlformula4, [a, b, c, d, e, f, g, h])
sqlformula5 = "INSERT INTO indonesia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(indonesia['Year'], indonesia['RGDP'], indonesia['NGDP'], indonesia['GDP_pc'], indonesia['Inflation'], indonesia['Unemployment_Rate'], indonesia['Net_LB'], indonesia['Account_Balance']):
mycursor.execute(sqlformula5, [a, b, c, d, e, f, g, h])
sqlformula6 = "INSERT INTO japan VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(japan['Year'], japan['RGDP'], japan['NGDP'], japan['GDP_pc'], japan['Inflation'], japan['Unemployment_Rate'], japan['Net_LB'], japan['Account_Balance']):
mycursor.execute(sqlformula6, [a, b, c, d, e, f, g, h])
sqlformula7 = "INSERT INTO lao VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(lao['Year'], lao['RGDP'], lao['NGDP'], lao['GDP_pc'], lao['Inflation'], lao['Unemployment_Rate'], lao['Net_LB'], lao['Account_Balance']):
mycursor.execute(sqlformula7, [a, b, c, d, e, f, g, h])
sqlformula8 = "INSERT INTO malaysia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(malaysia['Year'], malaysia['RGDP'], malaysia['NGDP'], malaysia['GDP_pc'], malaysia['Inflation'], malaysia['Unemployment_Rate'], malaysia['Net_LB'], malaysia['Account_Balance']):
mycursor.execute(sqlformula8, [a, b, c, d, e, f, g, h])
sqlformula9 = "INSERT INTO myanmar VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(myanmar['Year'], myanmar['RGDP'], myanmar['NGDP'], myanmar['GDP_pc'], myanmar['Inflation'], myanmar['Unemployment_Rate'], myanmar['Net_LB'], myanmar['Account_Balance']):
mycursor.execute(sqlformula9, [a, b, c, d, e, f, g, h])
sqlformula10 = "INSERT INTO new_zeland VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(new_zeland['Year'], new_zeland['RGDP'], new_zeland['NGDP'], new_zeland['GDP_pc'], new_zeland['Inflation'], new_zeland['Unemployment_Rate'], new_zeland['Net_LB'], new_zeland['Account_Balance']):
mycursor.execute(sqlformula10, [a, b, c, d, e, f, g, h])
sqlformula11 = "INSERT INTO philipines VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(philipines['Year'], philipines['RGDP'], philipines['NGDP'], philipines['GDP_pc'], philipines['Inflation'], philipines['Unemployment_Rate'], philipines['Net_LB'], philipines['Account_Balance']):
mycursor.execute(sqlformula11, [a, b, c, d, e, f, g, h])
sqlformula12 = "INSERT INTO singapore VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(singapore['Year'], singapore['RGDP'], singapore['NGDP'], singapore['GDP_pc'], singapore['Inflation'], singapore['Unemployment_Rate'], singapore['Net_LB'], singapore['Account_Balance']):
mycursor.execute(sqlformula12, [a, b, c, d, e, f, g, h])
sqlformula13 = "INSERT INTO thailand VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(thailand['Year'], thailand['RGDP'], thailand['NGDP'], thailand['GDP_pc'], thailand['Inflation'], thailand['Unemployment_Rate'], thailand['Net_LB'], thailand['Account_Balance']):
mycursor.execute(sqlformula13, [a, b, c, d, e, f, g, h])
sqlformula14 = "INSERT INTO vietnam VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(vietnam['Year'], vietnam['RGDP'], vietnam['NGDP'], vietnam['GDP_pc'], vietnam['Inflation'], vietnam['Unemployment_Rate'], vietnam['Net_LB'], vietnam['Account_Balance']):
mycursor.execute(sqlformula14, [a, b, c, d, e, f, g, h])
'''
#mydb.commit()
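# --- Editor's sketch (not part of the original file) ---
# The per-country blocks above repeat the same pattern; a dictionary-driven loop such as
# the following (shown for two tables only, column names taken from the code above) would
# avoid the duplication:
#
#   frames = {'australia': australia, 'brunei': brunei}  # extend with the remaining sheets
#   columns = ['Year', 'RGDP', 'NGDP', 'GDP_pc', 'Inflation', 'Unemployment_Rate', 'Net_LB', 'Account_Balance']
#   sql = "INSERT INTO {} VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
#   for table, df in frames.items():
#       for row in df[columns].itertuples(index=False):
#           mycursor.execute(sql.format(table), list(row))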
| 72.494382
| 227
| 0.67359
| 1,019
| 6,452
| 4.14524
| 0.087341
| 0.046402
| 0.059659
| 0.066288
| 0.334754
| 0.334754
| 0.334754
| 0.334754
| 0.308239
| 0.308239
| 0
| 0.006621
| 0.110508
| 6,452
| 88
| 228
| 73.318182
| 0.729395
| 0.002015
| 0
| 0
| 0
| 0
| 0.538147
| 0.467302
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbe3e139f969d2b0c02202b763923425574d8d2e
| 2,764
|
py
|
Python
|
default.py
|
SimonPreissner/get-shifty
|
aff49220932921c77e419a34ca472b51e0b26b72
|
[
"MIT"
] | null | null | null |
default.py
|
SimonPreissner/get-shifty
|
aff49220932921c77e419a34ca472b51e0b26b72
|
[
"MIT"
] | null | null | null |
default.py
|
SimonPreissner/get-shifty
|
aff49220932921c77e419a34ca472b51e0b26b72
|
[
"MIT"
] | null | null | null |
"""
This file contains meta information and default configurations of the project
"""
RSC_YEARS = [1660, 1670, 1680, 1690,
1700, 1710, 1720, 1730, 1740, 1750, 1760, 1770, 1780, 1790,
1800, 1810, 1820, 1830, 1840, 1850, 1860, 1870, 1880, 1890,
1900, 1910, 1920]
# cf. Chapter 4.4.1 of the thesis
SPACE_PAIR_SELECTION = [(1740,1750), (1750,1760),
(1680,1710), (1710,1740), (1740,1770), (1770,1800), (1800,1830), (1830,1860), (1860,1890),
(1700,1800), (1800,1900),
(1700,1900)]
COUPLING_CONFIG = { # Alternatives
# parameters passed to the GWOT object
'metric': "cosine", # 'euclidian',
'normalize_vecs': "both", # 'mean', 'whiten', 'whiten_zca'
'normalize_dists': "mean", # 'max', 'median'
'score_type': "coupling", # #TODO fill in the rest of the options in the comments
'adjust': None, # 'csls', ...
'distribs': "uniform", # 'custom', 'zipf'
'share_vocs':False, # True
'size':1000, # 100 is small, 1e4
'max_anchors':100, # used with small couplings (for projection)
# parameters to be passed to the optimizer
'opt_loss_fun': "square_loss", # 'kl_loss'
'opt_entropic': True, # False
'opt_entreg': 5e-4, # stay within the range of e-4 (originally: 1e-4)
'opt_tol': 1e-9, # no limits
'opt_round_g': False, # True
'opt_compute_accuracy': False, # True would require a test dict, but that's not implemented!
'opt_gpu': False, # GPU optimization not tested
# parameters for calling fit()
'fit_maxiter': 300, # no limits; normally converges within 150 iterations
'fit_tol': 1e-9, # no limits
'fit_plot_every': 100000, # normally 20; 'deactivate' the file spam by choosing a large value
'fit_print_every': 1, # no limits
'fit_verbose': True, # False
'fit_save_plots': None # "/my_dir/my_optimizer_plots"
}
DIST_SHAPES = ['uniform', 'zipf', 'custom']
SHIFT_EXPERIMENTS = ["all",
"unsup_bi",
"unsup_mono",
"dis_tech"]
| 52.150943
| 119
| 0.458032
| 272
| 2,764
| 4.511029
| 0.625
| 0.02608
| 0.01793
| 0.01304
| 0.02282
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15557
| 0.434877
| 2,764
| 53
| 120
| 52.150943
| 0.629962
| 0.281476
| 0
| 0
| 0
| 0
| 0.164447
| 0
| 0
| 0
| 0
| 0.018868
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.027027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbe44f6e05680f0d1dad7aaee47f96f07f3de643
| 2,128
|
py
|
Python
|
tests/python/metaclass_inheritance.py
|
gmgunter/pyre
|
e9ff3f8c04661f8b2cd2ba0caded08b6fe8054e2
|
[
"BSD-3-Clause"
] | 25
|
2018-04-23T01:45:39.000Z
|
2021-12-10T06:01:23.000Z
|
tests/python/metaclass_inheritance.py
|
gmgunter/pyre
|
e9ff3f8c04661f8b2cd2ba0caded08b6fe8054e2
|
[
"BSD-3-Clause"
] | 53
|
2018-05-31T04:55:00.000Z
|
2021-10-07T21:41:32.000Z
|
tests/python/metaclass_inheritance.py
|
gmgunter/pyre
|
e9ff3f8c04661f8b2cd2ba0caded08b6fe8054e2
|
[
"BSD-3-Clause"
] | 12
|
2018-04-23T22:50:40.000Z
|
2022-02-20T17:27:23.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2021 all rights reserved
#
#
"""
When a metaclass understands the extra keywords that can be passed during class declaration,
it has to override all these to accommodate the change in signature
"""
class meta(type):
@classmethod
def __prepare__(metacls, name, bases, **kwds):
assert metacls.__name__ == 'meta'
assert name in ['base', 'derived']
if name == 'base':
assert bases == (object,)
assert kwds == {'arg1': True, 'arg2': False}
if name == 'derived':
assert bases == (base,)
assert kwds == {'arg1': False, 'arg2': True}
return super().__prepare__(name, bases)
def __new__(metacls, name, bases, attributes, **kwds):
assert metacls.__name__ == 'meta'
assert name in ['base', 'derived']
if name == 'base':
assert bases == (object,)
assert kwds == {'arg1': True, 'arg2': False}
if name == 'derived':
assert bases == (base,)
assert kwds == {'arg1': False, 'arg2': True}
return super().__new__(metacls, name, bases, attributes)
def __init__(self, name, bases, attributes, **kwds):
assert self.__name__ in ['base', 'derived']
if self.__name__ == 'base':
assert bases == (object,)
assert kwds == {'arg1': True, 'arg2': False}
if self.__name__ == 'derived':
assert bases == (base,)
assert kwds == {'arg1': False, 'arg2': True}
super().__init__(name, bases, attributes)
return
class base(object, metaclass=meta, arg1=True, arg2=False):
def __init__(self, **kwds):
assert type(self).__name__ == 'base'
assert kwds == {}
return
class derived(base, arg1=False, arg2=True):
def __init__(self, **kwds):
assert type(self).__name__ == 'derived'
assert kwds == {}
return
def test():
b = base()
d = derived()
return
# main
if __name__ == "__main__":
test()
# end of file
| 25.035294
| 92
| 0.56156
| 239
| 2,128
| 4.715481
| 0.297071
| 0.070985
| 0.074534
| 0.060337
| 0.545697
| 0.451642
| 0.451642
| 0.451642
| 0.393079
| 0.393079
| 0
| 0.017264
| 0.292293
| 2,128
| 84
| 93
| 25.333333
| 0.731076
| 0.134868
| 0
| 0.583333
| 0
| 0
| 0.077303
| 0
| 0
| 0
| 0
| 0
| 0.4375
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbe7a0b13a437a6e05e68098ff2efe008a915ee9
| 862
|
py
|
Python
|
bin/sort.py
|
pelavarre/pybashish
|
03f74356fb0a2a0ef7106f09c059fd9b375ce89a
|
[
"CNRI-Python"
] | 4
|
2020-07-10T20:16:13.000Z
|
2022-02-16T02:11:20.000Z
|
bin/sort.py
|
pelavarre/pybashish
|
03f74356fb0a2a0ef7106f09c059fd9b375ce89a
|
[
"CNRI-Python"
] | null | null | null |
bin/sort.py
|
pelavarre/pybashish
|
03f74356fb0a2a0ef7106f09c059fd9b375ce89a
|
[
"CNRI-Python"
] | 2
|
2020-06-24T20:37:36.000Z
|
2020-07-10T20:16:17.000Z
|
#!/usr/bin/env python3
"""
usage: sort.py [-h]
sort lines
options:
-h, --help show this help message and exit
quirks:
sorts tabs as different than spaces
sorts some spaces ending a line as different than none ending a line
examples:
Oh no! No examples disclosed!! 💥 💔 💥
"""
# FIXME: doc -k$N,$N and -n and maybe little else is worth learning
# FIXME: add -k-1,-1 for negative field indexing
# FIXME: think into the mess at "sort" vs "LC_ALL=C sort"
import sys
import argdoc
def main():
args = argdoc.parse_args()
sys.stderr.write("{}\n".format(args))
sys.stderr.write("{}\n".format(argdoc.format_usage().rstrip()))
sys.stderr.write("sort.py: error: not implemented\n")
sys.exit(2) # exit 2 from rejecting usage
if __name__ == "__main__":
main()
# copied from: git clone https://github.com/pelavarre/pybashish.git
| 21.02439
| 70
| 0.678654
| 139
| 862
| 4.151079
| 0.618705
| 0.046794
| 0.07279
| 0.062392
| 0.086655
| 0.086655
| 0
| 0
| 0
| 0
| 0
| 0.007133
| 0.186775
| 862
| 40
| 71
| 21.55
| 0.811698
| 0.62761
| 0
| 0
| 0
| 0
| 0.159091
| 0
| 0
| 0
| 0
| 0.025
| 0
| 1
| 0.1
| false
| 0
| 0.2
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbec13a8be9b82963156b2e9e29130d14a7c09eb
| 975
|
py
|
Python
|
tests/formatters/fseventsd.py
|
SamuelePilleri/plaso
|
f5687f12a89c7309797ccc285da78e855c120579
|
[
"Apache-2.0"
] | null | null | null |
tests/formatters/fseventsd.py
|
SamuelePilleri/plaso
|
f5687f12a89c7309797ccc285da78e855c120579
|
[
"Apache-2.0"
] | null | null | null |
tests/formatters/fseventsd.py
|
SamuelePilleri/plaso
|
f5687f12a89c7309797ccc285da78e855c120579
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the fseventsd record event formatter."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import fseventsd
from tests.formatters import test_lib
class FseventsdFormatterTest(test_lib.EventFormatterTestCase):
"""Tests for the fseventsd record event formatter."""
def testInitialization(self):
"""Tests the initialization."""
event_formatter = fseventsd.FSEventsdEventFormatter()
self.assertIsNotNone(event_formatter)
def testGetFormatStringAttributeNames(self):
"""Tests the GetFormatStringAttributeNames function."""
event_formatter = fseventsd.FSEventsdEventFormatter()
expected_attribute_names = [
u'event_identifier', u'flag_values', u'hex_flags', u'path']
self._TestGetFormatStringAttributeNames(
event_formatter, expected_attribute_names)
# TODO: add test for GetSources.
if __name__ == '__main__':
unittest.main()
| 26.351351
| 67
| 0.756923
| 100
| 975
| 7.11
| 0.52
| 0.118143
| 0.030942
| 0.056259
| 0.112518
| 0.112518
| 0.112518
| 0
| 0
| 0
| 0
| 0.001202
| 0.146667
| 975
| 36
| 68
| 27.083333
| 0.853365
| 0.251282
| 0
| 0.125
| 0
| 0
| 0.067893
| 0
| 0
| 0
| 0
| 0.027778
| 0.0625
| 1
| 0.125
| false
| 0
| 0.25
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbef5ddea825a12fdea28a38b148d831f47bd566
| 1,446
|
py
|
Python
|
python_modules/lakehouse/lakehouse/snowflake_table.py
|
vatervonacht/dagster
|
595d78c883ef20618052ac1575fe46cde51fd541
|
[
"Apache-2.0"
] | 3
|
2020-04-28T16:27:33.000Z
|
2020-07-22T07:43:30.000Z
|
python_modules/lakehouse/lakehouse/snowflake_table.py
|
vatervonacht/dagster
|
595d78c883ef20618052ac1575fe46cde51fd541
|
[
"Apache-2.0"
] | 2
|
2021-05-11T13:36:27.000Z
|
2021-09-03T01:53:11.000Z
|
python_modules/lakehouse/lakehouse/snowflake_table.py
|
vatervonacht/dagster
|
595d78c883ef20618052ac1575fe46cde51fd541
|
[
"Apache-2.0"
] | 1
|
2021-02-21T12:16:47.000Z
|
2021-02-21T12:16:47.000Z
|
from dagster import check
from .house import Lakehouse
from .table import create_lakehouse_table_def
class SnowflakeLakehouse(Lakehouse):
def __init__(self):
pass
def hydrate(self, _context, _table_type, _table_metadata, table_handle, _dest_metadata):
return None
def materialize(self, context, table_type, table_metadata, value):
return None, None
def snowflake_table(
name=None,
input_tables=None,
other_input_defs=None,
tags=None,
required_resource_keys=None,
description=None,
):
tags = check.opt_dict_param(tags, 'tags')
tags['lakehouse_type'] = 'snowflake_table'
tags['kind'] = 'snowflake'
required_resource_keys = check.opt_set_param(required_resource_keys, 'required_resource_keys')
required_resource_keys.add('snowflake')
if callable(name):
fn = name
return create_lakehouse_table_def(
name=fn.__name__,
lakehouse_fn=fn,
input_tables=[],
required_resource_keys=required_resource_keys,
)
def _wrap(fn):
return create_lakehouse_table_def(
name=name if name is not None else fn.__name__,
lakehouse_fn=fn,
input_tables=input_tables,
other_input_defs=other_input_defs,
tags=tags,
description=description,
required_resource_keys=required_resource_keys,
)
return _wrap
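# --- Illustrative usage (editor's sketch, not part of the original module) ---
# The decorator supports both a bare and a parameterized form; the table bodies and their
# expected signatures below are assumptions for demonstration only.
#
#   @snowflake_table
#   def my_table(_context):
#       ...
#
#   @snowflake_table(name='my_named_table', input_tables=[], tags={'team': 'data'})
#   def my_other_table(_context):
#       ...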
| 26.777778
| 98
| 0.670816
| 168
| 1,446
| 5.357143
| 0.285714
| 0.16
| 0.2
| 0.124444
| 0.368889
| 0.368889
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0.253804
| 1,446
| 53
| 99
| 27.283019
| 0.834106
| 0
| 0
| 0.142857
| 0
| 0
| 0.05325
| 0.015214
| 0
| 0
| 0
| 0
| 0
| 1
| 0.119048
| false
| 0.02381
| 0.071429
| 0.071429
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbf19789118428ff5f8d3aa59b32b64fa444b8b7
| 984
|
py
|
Python
|
agent_based_models/abm_allelopathy/plot_data.py
|
mattsmart/biomodels
|
237f87489553fa1ebf5c676fab563166dd0c39e9
|
[
"MIT"
] | null | null | null |
agent_based_models/abm_allelopathy/plot_data.py
|
mattsmart/biomodels
|
237f87489553fa1ebf5c676fab563166dd0c39e9
|
[
"MIT"
] | null | null | null |
agent_based_models/abm_allelopathy/plot_data.py
|
mattsmart/biomodels
|
237f87489553fa1ebf5c676fab563166dd0c39e9
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import os
def data_plotter(lattice_dict, datafile_dir, plot_dir):
# total spaces on grid implies grid size
total_cells = lattice_dict['E'][0] + lattice_dict['D_a'][0] + lattice_dict['D_b'][0] + lattice_dict['B'][0]
n = int(total_cells**0.5)
plt.figure(1)
plt.plot(lattice_dict['time'], lattice_dict['E'], label='Empty lattice points')
plt.plot(lattice_dict['time'], lattice_dict['D_a'], label='Donors (Type A)')
plt.plot(lattice_dict['time'], lattice_dict['D_b'], label='Donors (Type B)')
plt.plot(lattice_dict['time'], lattice_dict['B'], label='Debris')
ax = plt.gca()
ax.set_title('Cell Populations over time (n = %d)' % n)
ax.set_ylabel('Number of cells')
ax.set_xlabel('Time (h)')
plt.legend()
f = plt.gcf()
f.set_size_inches(20.0, 8.0) # alternative: 20.0, 8.0
f.tight_layout()
plt.savefig(os.path.join(plot_dir, 'population_vs_time.png'))
plt.clf()
return
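# --- Illustrative usage (editor's sketch, not part of the original module) ---
# The lattice_dict layout is inferred from the keys accessed above; the numbers are made up.
if __name__ == '__main__':
    example_lattice = {
        'time': [0, 1, 2],
        'E': [100, 90, 80],
        'D_a': [20, 25, 30],
        'D_b': [20, 22, 24],
        'B': [4, 7, 10],
    }
    data_plotter(example_lattice, datafile_dir='.', plot_dir='.')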
| 30.75
| 111
| 0.650407
| 159
| 984
| 3.830189
| 0.421384
| 0.234811
| 0.078818
| 0.118227
| 0.220033
| 0.220033
| 0.220033
| 0.111658
| 0
| 0
| 0
| 0.02091
| 0.17378
| 984
| 31
| 112
| 31.741935
| 0.728167
| 0.061992
| 0
| 0
| 0
| 0
| 0.182609
| 0.023913
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.095238
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbf3d541561ba11217ad33d7f2e880d8ae1b4729
| 1,567
|
py
|
Python
|
FOR/Analisador-completo/main.py
|
lucasf5/Python
|
c5649121e2af42922e2d9c19cec98322e132bdab
|
[
"MIT"
] | 1
|
2021-09-28T13:11:56.000Z
|
2021-09-28T13:11:56.000Z
|
FOR/Analisador-completo/main.py
|
lucasf5/Python
|
c5649121e2af42922e2d9c19cec98322e132bdab
|
[
"MIT"
] | null | null | null |
FOR/Analisador-completo/main.py
|
lucasf5/Python
|
c5649121e2af42922e2d9c19cec98322e132bdab
|
[
"MIT"
] | null | null | null |
# Python Exercise 56: Write a program that reads the name, age and sex of 4 people. At the end of the program, show: the group's average age, the name of the oldest man, and how many women are under 20 years old.
mediaidade = ''
nomelista = []
idadelista = []
sexolista = []
homens = []
mulherescommenosde20 = 0
nomedelas = []
# -------------------------------------------------------------------
for i in range(1,5):
print(f'{i} PESSOA')
nome = (input('Seu nome: '))
idade = int(input('Sua idade: '))
sexo = int(input('Sexo? [0]Masculino [1]Feminino: '))
if sexo == 1 and idade < 20:
nomedelas.append(nome)
mulherescommenosde20 += 1
elif sexo == 0:
homens.append(nome)
# Added all the ages to a list
idadelista.append(idade)
# Took the average of those ages  // First part
mediaidade = ((sum(idadelista))/4)
# Added all the names to a list
nomelista.append(nome)
# -------------------------------------------------------------------
# Stored in maximo the largest value found in the list
maximo = max(idadelista)
# Stored in indexidade the index of the largest value
indexidade = idadelista.index(maximo)
# Stored in indexnome the name of the person with the highest age
indexnome = nomelista[indexidade]
# -------------------------------------------------------------------
print(f'A media das idades é: {mediaidade}')
print(f'A pessoa que tem a maior idade, com {maximo} é essa: {indexnome}')
print(f'As mulheres que possuem menos de 20 anos: {mulherescommenosde20} e são: {nomedelas}')
| 27.982143
| 221
| 0.612636
| 201
| 1,567
| 4.776119
| 0.477612
| 0.025
| 0.01875
| 0.027083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018405
| 0.167837
| 1,567
| 55
| 222
| 28.490909
| 0.717791
| 0.454371
| 0
| 0
| 0
| 0
| 0.2891
| 0.026066
| 0
| 0
| 0
| 0.018182
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e0023b6272774adf06f1384bdb4cb510043c4a82
| 224
|
py
|
Python
|
task/w2/trenirovka/12-rivnist 2.py
|
beregok/pythontask
|
50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e
|
[
"MIT"
] | 1
|
2019-09-29T14:19:54.000Z
|
2019-09-29T14:19:54.000Z
|
task/w2/trenirovka/12-rivnist 2.py
|
beregok/pythontask
|
50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e
|
[
"MIT"
] | null | null | null |
task/w2/trenirovka/12-rivnist 2.py
|
beregok/pythontask
|
50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e
|
[
"MIT"
] | null | null | null |
a = int(input())
b = int(input())
c = int(input())
d = int(input())
if a == 0 and b == 0:
print("INF")
else:
if (d - b * c / a) != 0 and (- b / a) == (- b // a):
print(- b // a)
else:
print("NO")
| 18.666667
| 56
| 0.397321
| 38
| 224
| 2.342105
| 0.342105
| 0.359551
| 0.11236
| 0.134831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02027
| 0.339286
| 224
| 11
| 57
| 20.363636
| 0.581081
| 0
| 0
| 0.181818
| 0
| 0
| 0.022321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.272727
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e01a5f16e11613ae6cff496ae606faff7b1d0e27
| 460
|
py
|
Python
|
home/push/mipush/APIError.py
|
he0119/smart-home
|
bdd3a59a8c46c0fdc07ac3049bf589c7f95a2683
|
[
"MIT"
] | null | null | null |
home/push/mipush/APIError.py
|
he0119/smart-home
|
bdd3a59a8c46c0fdc07ac3049bf589c7f95a2683
|
[
"MIT"
] | 223
|
2020-02-21T06:16:56.000Z
|
2022-03-01T22:24:19.000Z
|
home/push/mipush/APIError.py
|
he0119/smart-home
|
bdd3a59a8c46c0fdc07ac3049bf589c7f95a2683
|
[
"MIT"
] | null | null | null |
class APIError(Exception):
"""
Raised when a received JSON message indicates failure.
"""
def __init__(self, error_code, error, request):
self.error_code = error_code
self.error = error
self.request = request
Exception.__init__(self, error)
def __str__(self):
return "APIError: %s: %s, request: %s" % (
self.error_code,
self.error,
self.request,
)
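# --- Illustrative usage (editor's sketch, not part of the original module) ---
# The payload field names below are assumptions for demonstration only.
#
#   result = {'code': 10008, 'description': 'invalid device id'}
#   if 'code' in result:
#       raise APIError(result['code'], result.get('description', ''), 'push message')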
| 25.555556
| 64
| 0.576087
| 49
| 460
| 5.081633
| 0.387755
| 0.216867
| 0.156627
| 0.144578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.321739
| 460
| 17
| 65
| 27.058824
| 0.798077
| 0.130435
| 0
| 0
| 0
| 0
| 0.075521
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0.083333
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e0201884251a727105b3a8b3946ca3bc3aefd73d
| 480
|
py
|
Python
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 199
|
2016-08-18T23:33:05.000Z
|
2019-12-24T07:08:48.000Z
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 949
|
2016-04-25T11:41:34.000Z
|
2019-12-27T10:43:40.000Z
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 78
|
2016-08-30T07:42:34.000Z
|
2019-12-13T20:34:45.000Z
|
from devito.ir import Call
from devito.passes.iet.definitions import DataManager
from devito.passes.iet.langbase import LangBB
__all__ = ['CBB', 'CDataManager']
class CBB(LangBB):
mapper = {
'aligned': lambda i:
'__attribute__((aligned(%d)))' % i,
'host-alloc': lambda i, j, k:
Call('posix_memalign', (i, j, k)),
'host-free': lambda i:
Call('free', (i,)),
}
class CDataManager(DataManager):
lang = CBB
| 21.818182
| 53
| 0.591667
| 57
| 480
| 4.824561
| 0.508772
| 0.109091
| 0.116364
| 0.138182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.260417
| 480
| 21
| 54
| 22.857143
| 0.774648
| 0
| 0
| 0
| 0
| 0
| 0.18125
| 0.058333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.133333
| 0.2
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e0215d4c222f248ad7105000615a748c88340354
| 2,026
|
py
|
Python
|
tests/_test_image.py
|
Freakwill/ell
|
8aa510cefb5d63db35071820208971013fac154c
|
[
"MIT"
] | null | null | null |
tests/_test_image.py
|
Freakwill/ell
|
8aa510cefb5d63db35071820208971013fac154c
|
[
"MIT"
] | null | null | null |
tests/_test_image.py
|
Freakwill/ell
|
8aa510cefb5d63db35071820208971013fac154c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Test methods about image process
Make sure the existance of the images
"""
from ell import *
import numpy as np
_filter = Filter.from_name('db4')
def test_resize():
channel = 0
c = ImageRGB.open('src/lenna.jpg')
d=c.resize(minInd=(-100,-100), maxInd=(100,100))
d.to_image()
assert True
def test_quantize():
im = ImageRGB.open('src/lenna.jpg')
d = im.quantize(128)
d.to_image()
assert True
def test_convolve():
im = ImageRGB.open('src/lenna.jpg')
d = (im @ _filter.H).D
# print(f"{d:i}, {d.shape}")
assert True
def test_filter():
im = ImageRGB.open('src/lenna.jpg')
rec = (im @ _filter.H).D.U @ _filter
assert True
def test_rec():
im = ImageRGB.open('src/lenna.jpg')
def _f(im, h1, h2=None):
if h2 is None: h2 = h1
return (im.conv1d(h1.H, axis=0).conv1d(h2.H, axis=1)).P.conv1d(h1, axis=0).conv1d(h2, axis=1)
rec = _f(im, _filter) + _f(im, _filter.H) + _f(im, _filter, _filter.H) + _f(im, _filter.H, _filter)
assert True
def test_rec2():
im = ImageRGB.open('../src/lenna.jpg')
def _f(im, h1, h2=None):
if h2 is None: h2 = h1
# return (im @ h1.tensor(h2).H).P @ h1.tensor(h2)
return (im.conv1d(h1.H, axis=0).conv1d(h2.H, axis=1)).P.conv1d(h1, axis=0).conv1d(h2, axis=1)
im1 = _f(im, _filter)
rec1 = _f(im1, _filter) + _f(im1, _filter.H) + _f(im1, _filter, _filter.H) + _f(im1, _filter.H, _filter)
rec2 = rec1 + _f(im, _filter.H) + _f(im, _filter, _filter.H) + _f(im, _filter.H, _filter)
assert True
def test_rec3():
im = ImageRGB.open('src/lenna.jpg')
def _f(im, h1, h2=None):
if h2 is None: h2 = h1
f = h1.tensor(h2)
return im.reduce(f).expand(f)
im1 = im.reduce(_filter)
rec1 = _f(im1, _filter) + _f(im1, _filter.H) + _f(im1, _filter, _filter.H) + _f(im1, _filter.H, _filter)
rec2 = rec1.expand(_filter) + _f(im, _filter.H) + _f(im, _filter, _filter.H) + _f(im, _filter.H, _filter)
assert True
| 28.535211
| 109
| 0.605133
| 338
| 2,026
| 3.428994
| 0.204142
| 0.102675
| 0.085418
| 0.120794
| 0.713546
| 0.669543
| 0.627265
| 0.584124
| 0.535807
| 0.535807
| 0
| 0.051169
| 0.218657
| 2,026
| 70
| 110
| 28.942857
| 0.680985
| 0.082922
| 0
| 0.5
| 0
| 0
| 0.052489
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 1
| 0.208333
| false
| 0
| 0.041667
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e026dd61a71f4c0236cf71cd04ff440228426371
| 1,303
|
py
|
Python
|
bot/views.py
|
eyobofficial/COVID-19-Mutual-Aid
|
42d30ce95b0e9c717c5eda3ecaafea2812ec34f7
|
[
"MIT"
] | null | null | null |
bot/views.py
|
eyobofficial/COVID-19-Mutual-Aid
|
42d30ce95b0e9c717c5eda3ecaafea2812ec34f7
|
[
"MIT"
] | 5
|
2020-03-19T17:49:50.000Z
|
2021-06-10T20:06:14.000Z
|
bot/views.py
|
eyobofficial/COVID-19-Mutual-Aid
|
42d30ce95b0e9c717c5eda3ecaafea2812ec34f7
|
[
"MIT"
] | null | null | null |
import telegram
from django.conf import settings
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
from braces.views import CsrfExemptMixin
from rest_framework.authentication import BasicAuthentication
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny
from .bots import TelegramBot
from .models import TelegramUser as User
@method_decorator(csrf_exempt, name='dispatch')
class TelegramBotView(APIView):
permission_classes = (AllowAny, )
def post(self, request, *args, **kwargs):
context = request.data
bot = TelegramBot(context)
user, _ = User.objects.get_or_create(
id=bot.sender['id'],
defaults={
'first_name': bot.sender['first_name'],
'last_name': bot.sender.get('last_name', ''),
'username': bot.sender.get('username', ''),
'is_bot': bot.sender.get('is_bot', False)
}
)
user.access_count += 1
user.save()
bot.process(user)
return Response(status=status.HTTP_200_OK)
| 29.613636
| 61
| 0.692249
| 154
| 1,303
| 5.714286
| 0.454545
| 0.056818
| 0.096591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003922
| 0.217191
| 1,303
| 43
| 62
| 30.302326
| 0.858824
| 0
| 0
| 0
| 0
| 0
| 0.058462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.424242
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
e0274bb01109146cf480d663260e32b7e8a8cc2d
| 580
|
py
|
Python
|
portfolio/urls.py
|
ramza007/Ramza.io
|
2172d9ac13e87becbc8644ad5755070f48fab8da
|
[
"Apache-2.0"
] | 3
|
2019-12-16T16:47:16.000Z
|
2020-07-28T19:47:34.000Z
|
portfolio/urls.py
|
ramza007/Ramza.io
|
2172d9ac13e87becbc8644ad5755070f48fab8da
|
[
"Apache-2.0"
] | 15
|
2019-12-05T03:38:19.000Z
|
2022-03-13T02:35:30.000Z
|
portfolio/urls.py
|
ramza007/Ramza.io
|
2172d9ac13e87becbc8644ad5755070f48fab8da
|
[
"Apache-2.0"
] | null | null | null |
from django.conf.urls import url
from django.urls import path, include,re_path
from . import views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
path('', views.index, name='index'),
path('about', views.about, name='about'),
path('projects', views.projects, name='projects'),
path('photos', views.photos, name='photos'),
re_path(r'^api/projects/$', views.ProjectList.as_view()),
re_path(r'^api-token-auth/', obtain_auth_token),
re_path(r'api/project/project-id/(?P<pk>[0-9]+)/$', views.ProjectDescription.as_view()),
]
| 34.117647
| 92
| 0.7
| 83
| 580
| 4.759036
| 0.409639
| 0.060759
| 0.053165
| 0.075949
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003922
| 0.12069
| 580
| 16
| 93
| 36.25
| 0.770588
| 0
| 0
| 0
| 0
| 0
| 0.194828
| 0.067241
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.307692
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
e0315471bd1a35e31c6a9cdd93a2a2a27365d479
| 2,702
|
py
|
Python
|
TWLight/emails/views.py
|
jajodiaraghav/TWLight
|
22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70
|
[
"MIT"
] | 1
|
2019-10-24T04:49:52.000Z
|
2019-10-24T04:49:52.000Z
|
TWLight/emails/views.py
|
jajodiaraghav/TWLight
|
22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70
|
[
"MIT"
] | 1
|
2019-03-29T15:29:45.000Z
|
2019-03-29T15:57:20.000Z
|
TWLight/emails/views.py
|
jajodiaraghav/TWLight
|
22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70
|
[
"MIT"
] | 1
|
2019-09-26T14:40:27.000Z
|
2019-09-26T14:40:27.000Z
|
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse, reverse_lazy
from django.core.mail import BadHeaderError, send_mail
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from TWLight.emails.forms import ContactUsForm
from TWLight.emails.signals import ContactUs
@method_decorator(login_required, name='post')
class ContactUsView(FormView):
template_name = 'emails/contact.html'
form_class = ContactUsForm
success_url = reverse_lazy('contact')
def get_initial(self):
initial = super(ContactUsView, self).get_initial()
# @TODO: This sort of gets repeated in ContactUsForm.
# This could probably be factored out to a common place for DRYness.
if self.request.user.is_authenticated():
if self.request.user.email:
initial.update({
'email': self.request.user.email,
})
if ('message' in self.request.GET):
initial.update({
'message': self.request.GET['message'],
})
initial.update({
'next': reverse_lazy('contact'),
})
return initial
def form_valid(self, form):
# Adding an extra check to ensure the user is a wikipedia editor.
try:
assert self.request.user.editor
email = form.cleaned_data['email']
message = form.cleaned_data['message']
carbon_copy = form.cleaned_data['cc']
ContactUs.new_email.send(
sender=self.__class__,
user_email=email,
cc=carbon_copy,
editor_wp_username=self.request.user.editor.wp_username,
body=message
)
messages.add_message(self.request, messages.SUCCESS,
# Translators: Shown to users when they successfully submit a new message using the contact us form.
_('Your message has been sent. We\'ll get back to you soon!'))
return HttpResponseRedirect(reverse('contact'))
except (AssertionError, AttributeError) as e:
messages.add_message (self.request, messages.WARNING,
# Translators: This message is shown to non-wikipedia editors who attempt to post data to the contact us form.
_('You must be a Wikipedia editor to do that.'))
raise PermissionDenied
return self.request.user.editor
| 43.580645
| 126
| 0.657661
| 315
| 2,702
| 5.530159
| 0.422222
| 0.063146
| 0.051665
| 0.036165
| 0.04248
| 0.04248
| 0
| 0
| 0
| 0
| 0
| 0
| 0.263879
| 2,702
| 62
| 127
| 43.580645
| 0.875817
| 0.143597
| 0
| 0.115385
| 0
| 0
| 0.069727
| 0
| 0
| 0
| 0
| 0.016129
| 0.038462
| 1
| 0.038462
| false
| 0
| 0.211538
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e035deed8737a8c4ccc24d990b915152d4728210
| 3,115
|
py
|
Python
|
cogs/events.py
|
rompdodger/RompDodger
|
9c8b481d9f69e05c15f01271f6c18e09ab2723e6
|
[
"MIT"
] | null | null | null |
cogs/events.py
|
rompdodger/RompDodger
|
9c8b481d9f69e05c15f01271f6c18e09ab2723e6
|
[
"MIT"
] | null | null | null |
cogs/events.py
|
rompdodger/RompDodger
|
9c8b481d9f69e05c15f01271f6c18e09ab2723e6
|
[
"MIT"
] | null | null | null |
import json
import discord
from utils.time import format_time
from utils import utilities
from discord.ext import commands
from discord import Embed
class Events(commands.Cog):
"""Event Handler for RompDodger"""
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
if hasattr(ctx.command, 'on_error'):
return
if isinstance(error, (commands.CommandNotFound, commands.NoPrivateMessage)):
return
elif isinstance(error, commands.MissingRequiredArgument):
await ctx.send(embed=await utilities.generate_embed(f"Command {ctx.prefix} {ctx.command} requires **{error.param.name}** argument, but you missed giving that"))
elif isinstance(error, commands.BotMissingPermissions):
perms = "".join(error.missing_perms)
await ctx.send(embed=await utilities.generate_embed(f"To finish the command bot must have {perms} permission, give the bot appropriate permissions and re-try"))
self.bot.logger.critical(f"Ignoring Exception in {ctx.command}\nError: {error}")
@commands.Cog.listener()
async def on_guild_join(self, guild):
# TODO: implement blacklist system
self.bot.logger.info(f"Joined on {guild} > Total Guilds: {len(self.bot.guilds)}")
@commands.Cog.listener()
async def on_guild_remove(self, guild):
self.bot.logger.info(f"Removed on {guild} > Total Guilds: {len(self.bot.guilds)}")
@commands.Cog.listener()
async def on_member_join(self, member):
cursor = await self.bot.db.execute(f"SELECT channel FROM welcomer WHERE guild_id = {member.guild.id}")
chrow = await cursor.fetchone()
if chrow is None:
return
else:
msgrow = await self.bot.db.execute(f"SELECT message FROM welcomer WHERE guild_id = {member.guild.id}")
msg = await msgrow.fetchone()
name = member.name
mention = member.mention
members = member.guild.member_count
server = member.guild
embed = discord.Embed(color=discord.Color.dark_green(), description=msg[0].format(name=name, mention=mention, members=members, server=server))
embed.set_thumbnail(url=f"{member.avatar_url_as(format='png', size=2048)}")
created = format_time(member.created_at)
embed.set_footer(text=f"{member.name} Created on {created}")
ch = self.bot.get_channel(int(chrow[0]))
await ch.send(embed=embed)
await cursor.close()
@commands.Cog.listener()
async def on_member_remove(self, member):
cursor = await self.bot.db.execute(f"SELECT channel FROM leaver WHERE guild_id = {ctx.guild.id}")
chrow = await cursor.fetchone()
if chrow is None:
return
else:
msg = await self.bot.db.execute(f"SELECT msg FROM leaver WHERE guild_id = {member.guild.id}")
name = member.name
mention = member.mention
server = member.server
members = member.guild.member_count
embed.set_thumbnail(url=f"{member.avatar_url_as(format='png', size=2048)}")
created = format_time(member.joined_at)
embed.set_footer(text=f"{member.name} Created joined on {joined}")
ch = self.bot.get_channel(int(chrow[0]))
await ch.send(embed=embed)
await cursor.close()
def setup(bot):
bot.add_cog(Events(bot))
| 39.43038
| 163
| 0.733547
| 452
| 3,115
| 4.964602
| 0.285398
| 0.040553
| 0.042335
| 0.053476
| 0.552139
| 0.497772
| 0.443405
| 0.382799
| 0.354724
| 0.280749
| 0
| 0.004104
| 0.139647
| 3,115
| 79
| 164
| 39.43038
| 0.833209
| 0.019262
| 0
| 0.432836
| 0
| 0.029851
| 0.258033
| 0.051475
| 0
| 0
| 0
| 0.012658
| 0
| 1
| 0.029851
| false
| 0
| 0.089552
| 0
| 0.19403
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e036f44b7fa0f2862267ed2ae2bb354dffc8bc0b
| 260
|
py
|
Python
|
setup.py
|
clin366/airpollutionnowcast
|
f9152583eebc4ad747c8d0510460334a5fb23ff9
|
[
"MIT"
] | null | null | null |
setup.py
|
clin366/airpollutionnowcast
|
f9152583eebc4ad747c8d0510460334a5fb23ff9
|
[
"MIT"
] | 9
|
2020-03-24T18:12:45.000Z
|
2022-02-10T00:36:57.000Z
|
setup.py
|
clin366/airpollutionnowcast
|
f9152583eebc4ad747c8d0510460334a5fb23ff9
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Project: Nowcasting the air pollution using online search log',
author='Emory University(IR Lab)',
license='MIT',
)
| 23.636364
| 80
| 0.692308
| 33
| 260
| 5.393939
| 0.848485
| 0.134831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014151
| 0.184615
| 260
| 10
| 81
| 26
| 0.825472
| 0
| 0
| 0
| 0
| 0
| 0.369231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e0404632a7378b088279de3e94aac11c26a9e183
| 1,540
|
py
|
Python
|
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | null | null | null |
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | 1
|
2020-03-13T12:30:29.000Z
|
2020-03-13T12:38:16.000Z
|
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | null | null | null |
# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
influxdb_opts = [
cfg.StrOpt('database_name',
help='database name where metrics are stored',
default='mon'),
cfg.HostAddressOpt('ip_address',
help='Valid IP address or hostname '
'to InfluxDB instance'),
cfg.PortOpt('port',
help='port to influxdb',
default=8086),
cfg.StrOpt('user',
help='influxdb user ',
default='mon_persister'),
cfg.StrOpt('password',
secret=True,
help='influxdb password')]
influxdb_group = cfg.OptGroup(name='influxdb',
title='influxdb')
def register_opts(conf):
conf.register_group(influxdb_group)
conf.register_opts(influxdb_opts, influxdb_group)
def list_opts():
return influxdb_group, influxdb_opts
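# --- Illustrative usage (editor's sketch, not part of the original module) ---
if __name__ == '__main__':
    conf = cfg.ConfigOpts()
    register_opts(conf)
    conf([])  # parse an empty argument list so the defaults above take effect
    print(conf.influxdb.database_name)  # -> 'mon'
    print(conf.influxdb.port)           # -> 8086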
| 32.765957
| 69
| 0.653896
| 190
| 1,540
| 5.221053
| 0.573684
| 0.060484
| 0.02621
| 0.032258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.25974
| 1,540
| 46
| 70
| 33.478261
| 0.852632
| 0.398701
| 0
| 0
| 0
| 0
| 0.225275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0.083333
| 0.041667
| 0.041667
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e044ab975c816db8531273f338dcef5b52d8c7ce
| 1,061
|
py
|
Python
|
src/geneflow/extend/local_workflow.py
|
jhphan/geneflow2
|
a39ab97e6425ee45584cfc15b5740e94a5bf7512
|
[
"Apache-2.0"
] | 7
|
2019-04-11T03:50:51.000Z
|
2020-03-27T15:59:04.000Z
|
src/geneflow/extend/local_workflow.py
|
jhphan/geneflow2
|
a39ab97e6425ee45584cfc15b5740e94a5bf7512
|
[
"Apache-2.0"
] | 1
|
2019-05-06T14:18:42.000Z
|
2019-05-08T22:06:12.000Z
|
src/geneflow/extend/local_workflow.py
|
jhphan/geneflow2
|
a39ab97e6425ee45584cfc15b5740e94a5bf7512
|
[
"Apache-2.0"
] | 6
|
2019-04-10T20:25:27.000Z
|
2021-12-16T15:59:59.000Z
|
"""This module contains the GeneFlow LocalWorkflow class."""
class LocalWorkflow:
"""
A class that represents a Local Workflow object.
"""
def __init__(
self,
job,
config,
parsed_job_work_uri
):
"""
Instantiate LocalWorkflow class.
"""
self._job = job
self._config = config
self._parsed_job_work_uri = parsed_job_work_uri
def initialize(self):
"""
Initialize the LocalWorkflow class.
This workflow class has no additional functionality.
Args:
None.
Returns:
True.
"""
return True
def init_data(self):
"""
Initialize any data specific to this context.
"""
return True
def get_context_options(self):
"""
Return dict of options specific for this context.
Args:
None.
Returns:
{} - no options specific for this context.
"""
return {}
| 18.293103
| 60
| 0.524034
| 100
| 1,061
| 5.37
| 0.42
| 0.100559
| 0.072626
| 0.089385
| 0.108007
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.402451
| 1,061
| 57
| 61
| 18.614035
| 0.847003
| 0.410933
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e0453a8ff093c7c5f6bb2239656a47c98c50cec7
| 2,849
|
py
|
Python
|
S12/tensornet/engine/ops/lr_scheduler.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | 4
|
2020-06-18T13:07:19.000Z
|
2022-01-07T10:51:10.000Z
|
S12/tensornet/engine/ops/lr_scheduler.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | 1
|
2021-07-31T04:34:46.000Z
|
2021-08-11T05:55:57.000Z
|
S12/tensornet/engine/ops/lr_scheduler.py
|
abishek-raju/EVA4B2
|
189f4062c85d91f43c1381087a9c89ff794e5428
|
[
"Apache-2.0"
] | 4
|
2020-08-09T07:10:46.000Z
|
2021-01-16T14:57:23.000Z
|
from torch.optim.lr_scheduler import StepLR, ReduceLROnPlateau, OneCycleLR
def step_lr(optimizer, step_size, gamma=0.1, last_epoch=-1):
"""Create LR step scheduler.
Args:
optimizer (torch.optim): Model optimizer.
step_size (int): Frequency for changing learning rate.
gamma (float): Factor for changing learning rate. (default: 0.1)
last_epoch (int): The index of last epoch. (default: -1)
Returns:
StepLR: Learning rate scheduler.
"""
return StepLR(optimizer, step_size=step_size, gamma=gamma, last_epoch=last_epoch)
def reduce_lr_on_plateau(optimizer, factor=0.1, patience=10, verbose=False, min_lr=0):
"""Create LR plateau reduction scheduler.
Args:
optimizer (torch.optim): Model optimizer.
factor (float, optional): Factor by which the learning rate will be reduced.
(default: 0.1)
patience (int, optional): Number of epoch with no improvement after which learning
rate will be will be reduced. (default: 10)
verbose (bool, optional): If True, prints a message to stdout for each update.
(default: False)
min_lr (float, optional): A scalar or a list of scalars. A lower bound on the
learning rate of all param groups or each group respectively. (default: 0)
Returns:
ReduceLROnPlateau instance.
"""
return ReduceLROnPlateau(
optimizer, factor=factor, patience=patience, verbose=verbose, min_lr=min_lr
)
def one_cycle_lr(
optimizer, max_lr, epochs, steps_per_epoch, pct_start=0.5, div_factor=10.0, final_div_factor=10000
):
"""Create One Cycle Policy for Learning Rate.
Args:
optimizer (torch.optim): Model optimizer.
max_lr (float): Upper learning rate boundary in the cycle.
epochs (int): The number of epochs to train for. This is used along with
steps_per_epoch in order to infer the total number of steps in the cycle.
steps_per_epoch (int): The number of steps per epoch to train for. This is
used along with epochs in order to infer the total number of steps in the cycle.
pct_start (float, optional): The percentage of the cycle (in number of steps)
spent increasing the learning rate. (default: 0.5)
div_factor (float, optional): Determines the initial learning rate via
initial_lr = max_lr / div_factor. (default: 10.0)
final_div_factor (float, optional): Determines the minimum learning rate via
min_lr = initial_lr / final_div_factor. (default: 1e4)
Returns:
OneCycleLR instance.
"""
return OneCycleLR(
optimizer, max_lr, epochs=epochs, steps_per_epoch=steps_per_epoch,
pct_start=pct_start, div_factor=div_factor, final_div_factor=final_div_factor
)
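# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Assumes a toy model and SGD optimizer purely for demonstration.
if __name__ == '__main__':
    import torch

    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

    # One Cycle Policy over 10 epochs with 100 optimizer steps per epoch.
    scheduler = one_cycle_lr(optimizer, max_lr=0.1, epochs=10, steps_per_epoch=100)

    for _ in range(10 * 100):
        optimizer.step()   # normally preceded by a forward/backward pass
        scheduler.step()   # advance the learning-rate schedule once per batch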
| 40.7
| 102
| 0.679537
| 394
| 2,849
| 4.774112
| 0.27665
| 0.070175
| 0.041467
| 0.036683
| 0.239766
| 0.184476
| 0.127592
| 0.078682
| 0.047847
| 0.047847
| 0
| 0.01532
| 0.243945
| 2,849
| 69
| 103
| 41.289855
| 0.857939
| 0.667954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e048929c57d8279d48bbfdb7b6430abd2459ceab
| 243
|
py
|
Python
|
Others/code_festival/code-festival-2015-final-open/a.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | 2
|
2020-06-12T09:54:23.000Z
|
2021-05-04T01:34:07.000Z
|
Others/code_festival/code-festival-2015-final-open/a.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | 961
|
2020-06-23T07:26:22.000Z
|
2022-03-31T21:34:52.000Z
|
Others/code_festival/code-festival-2015-final-open/a.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
def main():
s, t, u = map(str, input().split())
if len(s) == 5 and len(t) == 7 and len(u) == 5:
print('valid')
else:
print('invalid')
if __name__ == '__main__':
main()
| 16.2
| 52
| 0.440329
| 33
| 243
| 3
| 0.666667
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025157
| 0.345679
| 243
| 14
| 53
| 17.357143
| 0.597484
| 0.08642
| 0
| 0
| 0
| 0
| 0.097087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0
| 0
| 0.125
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e04d583757322341dcf56eb5852389f9fd5b2748
| 1,634
|
py
|
Python
|
mistral/tests/unit/utils/test_utils.py
|
shubhamdang/mistral
|
3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d
|
[
"Apache-2.0"
] | 205
|
2015-06-21T11:51:47.000Z
|
2022-03-05T04:00:04.000Z
|
mistral/tests/unit/utils/test_utils.py
|
shubhamdang/mistral
|
3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d
|
[
"Apache-2.0"
] | 8
|
2015-06-23T14:47:58.000Z
|
2021-01-28T06:06:44.000Z
|
mistral/tests/unit/utils/test_utils.py
|
shubhamdang/mistral
|
3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d
|
[
"Apache-2.0"
] | 110
|
2015-06-14T03:34:38.000Z
|
2021-11-11T12:12:56.000Z
|
# Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
# Copyright 2015 - Huawei Technologies Co. Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral import exceptions as exc
from mistral.tests.unit import base
from mistral.utils import ssh_utils
from mistral_lib import utils
class UtilsTest(base.BaseTest):
def test_itersubclasses(self):
class A(object):
pass
class B(A):
pass
class C(A):
pass
class D(C):
pass
self.assertEqual([B, C, D], list(utils.iter_subclasses(A)))
def test_paramiko_to_private_key(self):
self.assertRaises(
exc.DataAccessException,
ssh_utils._to_paramiko_private_key,
"../dir"
)
self.assertRaises(
exc.DataAccessException,
ssh_utils._to_paramiko_private_key,
"..\\dir"
)
self.assertIsNone(
ssh_utils._to_paramiko_private_key(private_key_filename=None,
password='pass')
)
| 29.178571
| 77
| 0.632191
| 200
| 1,634
| 5.04
| 0.53
| 0.059524
| 0.029762
| 0.053571
| 0.168651
| 0.168651
| 0.140873
| 0.140873
| 0.140873
| 0.140873
| 0
| 0.013865
| 0.293758
| 1,634
| 55
| 78
| 29.709091
| 0.859619
| 0.403305
| 0
| 0.333333
| 0
| 0
| 0.017727
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 1
| 0.066667
| false
| 0.166667
| 0.133333
| 0
| 0.366667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e04ec585b764ff6cb1ec40221ed614d384e735f8
| 581
|
py
|
Python
|
django_app_permissions/management/commands/resolve_app_groups.py
|
amp89/django-app-permissions
|
11f576d2118f5b73fdbefa0675acc3374a5a9749
|
[
"MIT"
] | 2
|
2020-09-04T04:12:30.000Z
|
2020-10-20T00:12:01.000Z
|
django_app_permissions/management/commands/resolve_app_groups.py
|
amp89/django-app-permissions
|
11f576d2118f5b73fdbefa0675acc3374a5a9749
|
[
"MIT"
] | 4
|
2020-09-06T22:29:18.000Z
|
2020-09-11T01:19:50.000Z
|
django_app_permissions/management/commands/resolve_app_groups.py
|
amp89/django-app-permissions
|
11f576d2118f5b73fdbefa0675acc3374a5a9749
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand, no_translations
from django.contrib.auth.models import Group
from django.conf import settings
import sys


class Command(BaseCommand):
    def handle(self, *args, **options):
        sys.stdout.write("\nResolving app groups")
        app_list = [app_name.lower() for app_name in settings.ACCESS_CONTROLLED_INSTALLED_APPS]
        for app_name in app_list:
            # get_or_create returns a (group, created) tuple; unpack it so the
            # log line reports the created flag rather than the whole tuple.
            group, created = Group.objects.get_or_create(name=app_name)
            sys.stdout.write(f"\n{app_name}, new={created}")
        sys.stdout.write("\n")
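A hedged usage sketch for this management command: only the setting name ACCESS_CONTROLLED_INSTALLED_APPS and the command module name come from the code above; the app names are illustrative.

# settings.py -- illustrative values for the setting the command reads.
ACCESS_CONTROLLED_INSTALLED_APPS = ["billing", "reports"]

Running python manage.py resolve_app_groups then ensures one auth Group exists per listed name (lower-cased), logging new=True the first time each group is created.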
| 32.277778
| 95
| 0.693632
| 79
| 581
| 4.936709
| 0.56962
| 0.089744
| 0.107692
| 0.061538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203098
| 581
| 18
| 96
| 32.277778
| 0.842333
| 0
| 0
| 0
| 0
| 0
| 0.087629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
e053d242f75ab9ddd50217184c0c2cd558a9aad9
| 5,591
|
py
|
Python
|
library/__mozilla__/pyjamas/DOM.py
|
certik/pyjamas
|
5bb72e63e50f09743ac986f4c9690ba50c499ba9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
library/__mozilla__/pyjamas/DOM.py
|
certik/pyjamas
|
5bb72e63e50f09743ac986f4c9690ba50c499ba9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
library/__mozilla__/pyjamas/DOM.py
|
certik/pyjamas
|
5bb72e63e50f09743ac986f4c9690ba50c499ba9
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-08-13T20:32:25.000Z
|
2019-08-13T20:32:25.000Z
|
def buttonClick(button):
    JS("""
    var doc = button.ownerDocument;
    if (doc != null) {
        var evt = doc.createEvent('MouseEvents');
        evt.initMouseEvent('click', true, true, null, 0, 0,
            0, 0, 0, false, false, false, false, 0, null);
        button.dispatchEvent(evt);
    }
    """)

def compare(elem1, elem2):
    JS("""
    if (!elem1 && !elem2) {
        return true;
    } else if (!elem1 || !elem2) {
        return false;
    }
    if (!elem1.isSameNode) {
        return (elem1 == elem2);
    }
    return (elem1.isSameNode(elem2));
    """)

def eventGetButton(evt):
    JS("""
    var button = evt.which;
    if(button == 2) {
        return 4;
    } else if (button == 3) {
        return 2;
    } else {
        return button || 0;
    }
    """)

# This is what is in GWT 1.5 for getAbsoluteLeft. err...
#"""
#    // We cannot use DOMImpl here because offsetLeft/Top return erroneous
#    // values when overflow is not visible. We have to difference screenX
#    // here due to a change in getBoxObjectFor which causes inconsistencies
#    // on whether the calculations are inside or outside of the element's
#    // border.
#    try {
#        return $doc.getBoxObjectFor(elem).screenX
#            - $doc.getBoxObjectFor($doc.documentElement).screenX;
#    } catch (e) {
#        // This works around a bug in the FF3 betas. The bug
#        // should be fixed before they release, so this can
#        // be removed at a later date.
#        // https://bugzilla.mozilla.org/show_bug.cgi?id=409111
#        // DOMException.WRONG_DOCUMENT_ERR == 4
#        if (e.code == 4) {
#            return 0;
#        }
#        throw e;
#    }
#"""
def getAbsoluteLeft(elem):
    JS("""
    // Firefox 3 expects getBoundingClientRect
    // getBoundingClientRect can be float: 73.1 instead of 74, see
    // gwt's workaround at user/src/com/google/gwt/dom/client/DOMImplMozilla.java:47
    // Please note, their implementation has 1px offset.
    if ( typeof elem.getBoundingClientRect == 'function' ) {
        var left = Math.ceil(elem.getBoundingClientRect().left);
        return left + $doc.body.scrollLeft + $doc.documentElement.scrollLeft;
    }
    // Older Firefox can use getBoxObjectFor
    else {
        var left = $doc.getBoxObjectFor(elem).x;
        var parent = elem.parentNode;
        while (parent) {
            if (parent.scrollLeft > 0) {
                left = left - parent.scrollLeft;
            }
            parent = parent.parentNode;
        }
        return left + $doc.body.scrollLeft + $doc.documentElement.scrollLeft;
    }
    """)

# This is what is in GWT 1.5 for getAbsoluteTop. err...
#"""
#    // We cannot use DOMImpl here because offsetLeft/Top return erroneous
#    // values when overflow is not visible. We have to difference screenY
#    // here due to a change in getBoxObjectFor which causes inconsistencies
#    // on whether the calculations are inside or outside of the element's
#    // border.
#    try {
#        return $doc.getBoxObjectFor(elem).screenY
#            - $doc.getBoxObjectFor($doc.documentElement).screenY;
#    } catch (e) {
#        // This works around a bug in the FF3 betas. The bug
#        // should be fixed before they release, so this can
#        // be removed at a later date.
#        // https://bugzilla.mozilla.org/show_bug.cgi?id=409111
#        // DOMException.WRONG_DOCUMENT_ERR == 4
#        if (e.code == 4) {
#            return 0;
#        }
#        throw e;
#    }
#"""
def getAbsoluteTop(elem):
    JS("""
    // Firefox 3 expects getBoundingClientRect
    if ( typeof elem.getBoundingClientRect == 'function' ) {
        var top = Math.ceil(elem.getBoundingClientRect().top);
        return top + $doc.body.scrollTop + $doc.documentElement.scrollTop;
    }
    // Older Firefox can use getBoxObjectFor
    else {
        var top = $doc.getBoxObjectFor(elem).y;
        var parent = elem.parentNode;
        while (parent) {
            if (parent.scrollTop > 0) {
                top -= parent.scrollTop;
            }
            parent = parent.parentNode;
        }
        return top + $doc.body.scrollTop + $doc.documentElement.scrollTop;
    }
    """)

def getChildIndex(parent, child):
    JS("""
    var count = 0, current = parent.firstChild;
    while (current) {
        if (! current.isSameNode) {
            if (current == child) {
                return count;
            }
        }
        else if (current.isSameNode(child)) {
            return count;
        }
        if (current.nodeType == 1) {
            ++count;
        }
        current = current.nextSibling;
    }
    return -1;
    """)

def isOrHasChild(parent, child):
    JS("""
    while (child) {
        if ((!parent.isSameNode)) {
            if (parent == child) {
                return true;
            }
        }
        else if (parent.isSameNode(child)) {
            return true;
        }
        try {
            child = child.parentNode;
        } catch(e) {
            // Give up on 'Permission denied to get property
            // HTMLDivElement.parentNode'
            // See https://bugzilla.mozilla.org/show_bug.cgi?id=208427
            return false;
        }
        if (child && (child.nodeType != 1)) {
            child = null;
        }
    }
    return false;
    """)

def releaseCapture(elem):
    JS("""
    if ((DOM.sCaptureElem != null) && DOM.compare(elem, DOM.sCaptureElem))
        DOM.sCaptureElem = null;
    if (!elem.isSameNode) {
        if (elem == $wnd.__captureElem) {
            $wnd.__captureElem = null;
        }
    }
    else if (elem.isSameNode($wnd.__captureElem)) {
        $wnd.__captureElem = null;
    }
    """)
| 29.119792
| 84
| 0.571275
| 609
| 5,591
| 5.220033
| 0.280788
| 0.033973
| 0.027682
| 0.021705
| 0.505505
| 0.485373
| 0.431268
| 0.406103
| 0.301353
| 0.287512
| 0
| 0.017414
| 0.301556
| 5,591
| 191
| 85
| 29.272251
| 0.796671
| 0.296548
| 0
| 0.330827
| 0
| 0.007519
| 0.898584
| 0.183784
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06015
| false
| 0
| 0
| 0
| 0.195489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e0554c3395746111d418fbf380163f0e080e4265
| 1,260
|
py
|
Python
|
pytorch_gleam/search/rerank_format.py
|
Supermaxman/pytorch-gleam
|
8b0d8dddc812e8ae120c9760fd44fe93da3f902d
|
[
"Apache-2.0"
] | null | null | null |
pytorch_gleam/search/rerank_format.py
|
Supermaxman/pytorch-gleam
|
8b0d8dddc812e8ae120c9760fd44fe93da3f902d
|
[
"Apache-2.0"
] | null | null | null |
pytorch_gleam/search/rerank_format.py
|
Supermaxman/pytorch-gleam
|
8b0d8dddc812e8ae120c9760fd44fe93da3f902d
|
[
"Apache-2.0"
] | null | null | null |
import torch
import argparse
from collections import defaultdict
import os
import json


def load_predictions(input_path):
    pred_list = []
    for file_name in os.listdir(input_path):
        if file_name.endswith('.pt'):
            preds = torch.load(os.path.join(input_path, file_name))
            pred_list.extend(preds)
    question_scores = defaultdict(lambda: defaultdict(dict))
    p_count = 0
    u_count = 0
    for prediction in pred_list:
        doc_pass_id = prediction['id']
        q_p_id = prediction['question_id']
        # score = prediction['pos_score']
        score = prediction['pos_score'] - prediction['neg_score']
        # p_count tracks unique (passage, question) pairs; u_count counts
        # every prediction seen, matching the two print labels below.
        if doc_pass_id not in question_scores or q_p_id not in question_scores[doc_pass_id]:
            p_count += 1
        u_count += 1
        question_scores[doc_pass_id][q_p_id] = score
    print(f'{p_count} unique predictions')
    print(f'{u_count} total predictions')
    return question_scores


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input_path', required=True)
    parser.add_argument('-o', '--output_path', required=True)
    args = parser.parse_args()
    input_path = args.input_path
    output_path = args.output_path

    question_scores = load_predictions(input_path)
    with open(output_path, 'w') as f:
        json.dump(question_scores, f)


if __name__ == '__main__':
    main()
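A hedged end-to-end sketch of the expected inputs: the prediction keys ('id', 'question_id', 'pos_score', 'neg_score') and the .pt scan come from the script above; the file and directory names are illustrative.

# make_fake_preds.py -- writes one illustrative predictions shard.
import os
import torch

os.makedirs('preds', exist_ok=True)
preds = [
    {'id': 'doc0-pass0', 'question_id': 'q1', 'pos_score': 0.9, 'neg_score': 0.2},
    {'id': 'doc0-pass0', 'question_id': 'q2', 'pos_score': 0.1, 'neg_score': 0.7},
]
torch.save(preds, 'preds/part-0.pt')  # the script scans *.pt files under -i

Running python rerank_format.py -i preds -o scores.json then writes a nested JSON mapping of passage id to question id to (pos_score - neg_score).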
| 25.714286
| 86
| 0.743651
| 191
| 1,260
| 4.586387
| 0.350785
| 0.071918
| 0.041096
| 0.054795
| 0.084475
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003683
| 0.138095
| 1,260
| 48
| 87
| 26.25
| 0.802947
| 0.024603
| 0
| 0
| 0
| 0
| 0.103589
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0.081081
| 0.135135
| 0
| 0.216216
| 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e055f89145eb203a0a63bfdad54931948d02ec37
| 388
|
py
|
Python
|
des036.py
|
LeonardoPereirajr/Curso_em_video_Python
|
9d8a97ba3389c8e86b37dfd089fab5d04adc146d
|
[
"MIT"
] | null | null | null |
des036.py
|
LeonardoPereirajr/Curso_em_video_Python
|
9d8a97ba3389c8e86b37dfd089fab5d04adc146d
|
[
"MIT"
] | null | null | null |
des036.py
|
LeonardoPereirajr/Curso_em_video_Python
|
9d8a97ba3389c8e86b37dfd089fab5d04adc146d
|
[
"MIT"
] | null | null | null |
casa = int(input('What is the price of the house? '))
sal = int(input('What is your salary? '))
prazo = int(input('Over how many months do you want to pay? '))
parcela = casa / prazo
margem = sal * (30 / 100)
if parcela > margem:
    print('This deal was not approved; increase the term.')
else:
    print("Deal approved: the installment is R$ {} and you can pay R$ {} per month".format(parcela, margem))
| 38.8
| 111
| 0.664948
| 59
| 388
| 4.372881
| 0.644068
| 0.093023
| 0.093023
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016026
| 0.195876
| 388
| 9
| 112
| 43.111111
| 0.810897
| 0
| 0
| 0
| 0
| 0
| 0.503958
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|