hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e6b492a8569382b674ad38edea3efaacc9af93df
| 412
|
py
|
Python
|
tests/lightgbm/model-server/serve.py
|
basisai/bedrock-express
|
273b6377f080e1f6125dfd8ec465a8aaf3dee468
|
[
"Apache-2.0"
] | 9
|
2020-10-22T06:42:38.000Z
|
2020-10-22T08:38:17.000Z
|
tests/lightgbm/model-server/serve.py
|
basisai/bedrock-express
|
273b6377f080e1f6125dfd8ec465a8aaf3dee468
|
[
"Apache-2.0"
] | 69
|
2020-10-23T02:15:36.000Z
|
2022-03-31T00:03:18.000Z
|
tests/lightgbm/model-server/serve.py
|
basisai/bedrock-express
|
273b6377f080e1f6125dfd8ec465a8aaf3dee468
|
[
"Apache-2.0"
] | 1
|
2021-09-28T01:36:41.000Z
|
2021-09-28T01:36:41.000Z
|
import pickle
from typing import List, Optional
from bedrock_client.bedrock.model import BaseModel
class Model(BaseModel):
def __init__(self, path: Optional[str] = None):
with open(path or "/artefact/model.pkl", "rb") as f:
self.model = pickle.load(f)
def predict(self, features: List[List[float]]) -> List[float]:
return self.model.predict_proba(features)[:, 0].tolist()
| 29.428571
| 66
| 0.679612
| 56
| 412
| 4.892857
| 0.589286
| 0.065693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002994
| 0.18932
| 412
| 13
| 67
| 31.692308
| 0.817365
| 0
| 0
| 0
| 0
| 0
| 0.050971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0.111111
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
e6fd74084df825010079f5238c86ccfc9f4993cb
| 420
|
py
|
Python
|
venv/lib/python3.8/site-packages/mathpy/numerical/__init__.py
|
sonakshibhalla/sonakshicode
|
5242d1b128a6be3d184b5c64cf5f9448ccdc49be
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/mathpy/numerical/__init__.py
|
sonakshibhalla/sonakshicode
|
5242d1b128a6be3d184b5c64cf5f9448ccdc49be
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/mathpy/numerical/__init__.py
|
sonakshibhalla/sonakshicode
|
5242d1b128a6be3d184b5c64cf5f9448ccdc49be
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from .differentiation import forward_difference, backward_difference, central_difference, \
approximate_derivative_finite
from .integration import trapezoidal_rule, simpsons_rule, composite_simpsons_rule, \
composite_trapezoidal
from .polynomial import horner_eval, lagrange_interpolate, neville, divided_differences
from .roots import newtonraph, bisection, secant
| 52.5
| 92
| 0.845238
| 45
| 420
| 7.488889
| 0.644444
| 0.071217
| 0.124629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 420
| 8
| 93
| 52.5
| 0.905914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
fc09f0423cd4757c3a8df0a3a16ed0b68bde9af9
| 155
|
py
|
Python
|
objects/__init__.py
|
ionicsolutions/pyfecs
|
38bf28decc5caf6c1f94263c9788880dd9c17707
|
[
"Apache-2.0"
] | 1
|
2021-07-31T04:27:09.000Z
|
2021-07-31T04:27:09.000Z
|
objects/__init__.py
|
ionicsolutions/pyfecs
|
38bf28decc5caf6c1f94263c9788880dd9c17707
|
[
"Apache-2.0"
] | null | null | null |
objects/__init__.py
|
ionicsolutions/pyfecs
|
38bf28decc5caf6c1f94263c9788880dd9c17707
|
[
"Apache-2.0"
] | 1
|
2018-10-30T01:09:08.000Z
|
2018-10-30T01:09:08.000Z
|
"""PyFECS models a FECS sequence as a structure of instances of Python classes.
This structure can then be modified and compiled to FPGA instructions.
"""
| 38.75
| 79
| 0.787097
| 24
| 155
| 5.083333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 155
| 3
| 80
| 51.666667
| 0.938462
| 0.948387
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fc1314cdf21eebc306d3ca68c6a19aea68021097
| 5,750
|
py
|
Python
|
python/find_vsan_storage_ctrl_queue_depth.py
|
droorda/vmware-scripts
|
1bdd04ca419b874d6ebd90ba11bf44de13e886d4
|
[
"BSD-2-Clause"
] | null | null | null |
python/find_vsan_storage_ctrl_queue_depth.py
|
droorda/vmware-scripts
|
1bdd04ca419b874d6ebd90ba11bf44de13e886d4
|
[
"BSD-2-Clause"
] | null | null | null |
python/find_vsan_storage_ctrl_queue_depth.py
|
droorda/vmware-scripts
|
1bdd04ca419b874d6ebd90ba11bf44de13e886d4
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
# Author: William Lam
# Website: www.williamlam.com
# Product: VMware vSphere + VSAN
# Description: This script extracts the queue depth of a VSAN Storage Controller if found in the VSAN HCL (offline list)
# Reference: http://www.williamlam.com/2014/06/community-vsan-storage-controller-queue-depth-list.html
import json
import os
from xml.etree import ElementTree as ET
show_non_vsan_hcl_ctr = False
#VSAN HCL Supported Controllers
vsan_controllers = '{"103C:323B:103c:3355": "Smart Array P220i", "1000:005B:1014:041D": "ServeRAID M5115 SAS/SATA Controller for IBM Flex System (90Y4390)", "1000:0065:1000:30C0": "SAS9201-16i", "1000:0079:8086:9261": "Intel RAID Controller RS2BL080", "1000:005B:8086:351D": "Intel RAID Controller RMS25CB080N", "1000:005B:1000:1F31": "PERC H710P Adapter", "1000:005B:1734:11E5": "SAS RAID HDD Module (D2816C )", "1000:005B:1734:11E4": "RAID Ctrl SAS 6G 1GB (D3116C)", "1000:005B:1000:9270": "MegaRAID SAS 9270-8i", "1000:005B:1000:9271": "UCS-RAID9271CV-8I", "1000:005B:1000:9272": "MegaRAID SAS 9272-8i", "1000:0072:1000:3040": "SAS9210-8i", "1000:005B:1000:9275": "MegaRAID SAS 9271-8iCC", "1000:005B:1000:9276": "MegaRAID SAS 9271-4i", "1000:0087:1000:3040": "SAS9207-4i4e", "1000:0079:8086:9276": "Intel RAID Controller RS2WG160", "103C:323B:103c:3351": "Smart Array P420", "1000:0079:1000:9268": "LSI MegaRAID SAS 9260CV-8i", "103c:323b:103c:3353": "Smart Array P822", "103C:323B:103c:3354": "Smart Array P420i", "1000:0087:8086:3060": "Intel RAID Controller RS25FB044", "1000:005B:1000:9268": "MegaRAID SAS 9265CV-8i", "1000:0087:1000:3050": "SAS9217-8i", "1000:0079:1000:9267": "LSI MegaRAID SAS 9260CV-4i", "1000:0079:1000:9264": "LSI MegaRAID SAS 9264-8i", "1000:0087:1000:3020": "SAS9207-8i", "1000:0079:1000:9263": "LSI MegaRAID SAS 9261-8i", "1000:0079:15d9:0070": "SMC2108", "1000:005B:1028:1F35": "PERC H710 Adapter", "1000:005B:15d9:0690": "SMC2208", "1000:0079:1000:9282": "LSI MegaRAID SAS 9280-4i4e", "1000:005B:1014:040B": "ServeRAID M5110 SAS/SATA Controller for IBM System x (81Y4481)", "1000:005b:1137:008d": "LSI 2208R", "1000:0070:1000:3010": "SAS9211-4i", "1000:0087:1000:3060": "SAS9217-4i4e", "1000:0079:1000:9277": "LSI MegaRAID SAS 9280-16i4e", "1000:005B:1734:11D3": "RAID Ctrl SAS 6G 1GB (D3116)", "1000:005B:1734:11D4": "SAS RAID HDD Module (D2816)", "1000:0072:1000:3050": "SAS9211-8i", "1000:005B:8086:9265": "Intel RAID Controller RS25DB080", 
"1000:005B:1000:9267": "MegaRAID SAS 9267-8i", "1000:005B:1000:9266": "MegaRAID SAS 9266-8i", "1000:005B:1000:9265": "MegaRAID SAS 9265-8i", "1000:0086:15d9:0691": "SMC2308", "1000:0079:8086:9290": "Intel RAID Controller RS2SG244", "1000:005B:1000:9269": "MegaRAID SAS 9266-4i", "1000:005B:8086:351C": "Intel RAID Controller RMS25PB080N", "1000:0087:8086:3518": "Intel RAID Controller RMS25KB080", "1000:0087:8086:3519": "Intel RAID Controller RMS25KB040", "1000:0072:1000:3020": "SAS9211-8i", "1000:0079:1734:1176": "RAID Crtl SAS 6G 5/6 512MB", "1000:0079:1014:03c7": "IBM ServeRAID-M5014 SAS/SATA Controller", "1000:0079:8086:350B": "Intel RAID Controller RMS2MH080", "1000:0079:1734:11B3": "PY SAS RAID Mezz Card 6Gb", "1000:0079:1000:9276": "LSI MegaRAID SAS 9260-16i", "1000:0087:8087:3516": "Intel RAID Controller RMS25JB080", "1000:0079:1000:9261": "LSI MegaRAID SAS 9260-8i", "1000:0079:1014:03b3": "ServeRAID M5025 SAS/SATA Controller (46M0830)", "1000:0079:1014:0411": "ServeRAID M5016 SAS/SATA Controller for IBM System x (90Y4304)", "1000:005b:1000:9273": "MegaRAID SAS 9270CV-8i", "1000:0087:8086:3517": "Intel RAID Controller RMS25JB040", "1000:0079:1000:9262": "LSI MegaRAID SAS 9262-8i", "1000:005B:8086:3514": "Intel RAID Controller RMS25CB040", "1000:005B:8086:3515": "Intel RAID Controller RMS25CB080", "1000:0072:1028:1F1D": "Dell PERC H200 Adapter", "1000:0079:1000:9290": "LSI MegaRAID SAS 9280-24i4e", "1000:005B:8086:3510": "Intel RAID Controller RMS25PB080", "1000:0072:1000:3060": "SAS9212-4i4e"}'
json_data = json.loads(vsan_controllers)
#run esxcfg-info -s -F xml and store output to /tmp/esxcfginfo.xml
os.system("esxcfg-info -s -F xml > /tmp/esxcfginfo.xml")
# Load up XML output
root = ET.parse("/tmp/esxcfginfo.xml").getroot()
# SCSI Adatpers root starts at 'all-scsi-iface'
for allscsiadapters in root.findall('all-scsi-iface'):
for allscsiadapter in allscsiadapters:
scsiinterfaces = allscsiadapter.find('scsi-interface')
for scsiinterface in scsiinterfaces:
if scsiinterface.get('name') == 'queue-depth':
queue_depth = scsiinterface.text
pcidevices = scsiinterfaces.find('pci-device')
if pcidevices != None:
for pcidevice in pcidevices:
if pcidevice.get('name') == 'vendor-id':
vendor_id = pcidevice.text
if pcidevice.get('name') == 'device-id':
device_id = pcidevice.text
if pcidevice.get('name') == 'sub-vendor-id':
sub_vendor_id = pcidevice.text
if pcidevice.get('name') == 'sub-device-id':
sub_device_id = pcidevice.text
if pcidevice.get('name') == 'vendor-name':
vendor_name = pcidevice.text
if pcidevice.get('name') == 'device-name':
device_name = pcidevice.text
# used for non-VSAN HCL storage controllers
adapter = vendor_name + " " + device_name
custom_pci_id = (vendor_id + ":" + device_id + ":" + sub_vendor_id + ":" + sub_device_id).replace('0x','')
if custom_pci_id in json_data:
print "VSAN HCL: Yes"
print "Adapter: " + json_data[custom_pci_id]
print "Identifier: " + custom_pci_id
print "QueueDepth: " + queue_depth + "\n"
if show_non_vsan_hcl_ctr:
print "VSAN HCL: No"
print "Adapter: " + adapter
print "Identifier: " + custom_pci_id
print "QueueDepth: " + queue_depth + "\n"
| 95.833333
| 3,503
| 0.706957
| 855
| 5,750
| 4.705263
| 0.311111
| 0.053691
| 0.070843
| 0.026846
| 0.12876
| 0.091723
| 0.091723
| 0.06612
| 0.025851
| 0.025851
| 0
| 0.275195
| 0.128522
| 5,750
| 59
| 3,504
| 97.457627
| 0.527639
| 0.090609
| 0
| 0.097561
| 0
| 0.02439
| 0.725949
| 0.286892
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.073171
| null | null | 0.195122
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fc38b2952fbf119278b23ba53a38c4e21b95fc1b
| 1,263
|
py
|
Python
|
Tests/test_statistics.py
|
jimishapatel/statistic
|
73430aeb5c11ae2683047bab16dc976cc7b3e403
|
[
"MIT"
] | null | null | null |
Tests/test_statistics.py
|
jimishapatel/statistic
|
73430aeb5c11ae2683047bab16dc976cc7b3e403
|
[
"MIT"
] | null | null | null |
Tests/test_statistics.py
|
jimishapatel/statistic
|
73430aeb5c11ae2683047bab16dc976cc7b3e403
|
[
"MIT"
] | 1
|
2019-12-22T07:27:47.000Z
|
2019-12-22T07:27:47.000Z
|
import unittest
from Statistics.statistics import Statistics
class MyTestCase(unittest.TestCase):
def setUp(self) -> None:
self.statistics = Statistics('Tests/Data/statistics.csv')
def test_instantiate_calculator(self):
self.assertIsInstance(self.statistics, Statistics)
def test_decorator_calculator(self):
self.assertIsInstance(self.statistics, Statistics)
def test_mean(self):
self.assertEqual(self.statistics.mean(), 0.25)
def test_median(self):
self.assertEqual(self.statistics.median(), 4)
def test_mode(self):
self.assertEqual(self.statistics.mod(), 1)
def test_popstand(self):
self.assertEqual(round(self.statistics.popstand(), 4), 0.2233)
def test_vpop(self):
self.assertEqual(self.statistics.vpop(), 0.0498442)
def test_confidence(self):
self.assertEqual(self.statistics.confidence(), 0.2232581)
def test_popuvar(self):
self.assertEqual(self.statistics.confidence(), 0.2232581)
def test_samplestand(self):
self.assertEqual(self.statistics.confidence(), 0.2232581)
def test_zscore(self):
self.assertEqual(self.statistics.confidence(), 0.2232581)
if __name__ == '__main__':
unittest.main()
| 29.372093
| 70
| 0.697546
| 146
| 1,263
| 5.890411
| 0.280822
| 0.195349
| 0.198837
| 0.213953
| 0.566279
| 0.412791
| 0.412791
| 0.412791
| 0.353488
| 0.202326
| 0
| 0.049371
| 0.182106
| 1,263
| 43
| 71
| 29.372093
| 0.783156
| 0
| 0
| 0.206897
| 0
| 0
| 0.026108
| 0.019778
| 0
| 0
| 0
| 0
| 0.37931
| 1
| 0.413793
| false
| 0
| 0.068966
| 0
| 0.517241
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
fc4ef406430e8f41589fc87b90a4fe7776f6b861
| 28
|
py
|
Python
|
venv-lib/lib/python3.7/enum.py
|
migmaciasdiaz/venvs
|
bcdbb75931cb27fc4b5b30f12fc44be85952157e
|
[
"MIT"
] | 2
|
2020-03-30T14:17:10.000Z
|
2020-10-04T12:33:00.000Z
|
venv-lib/lib/python3.7/enum.py
|
migmaciasdiaz/venvs
|
bcdbb75931cb27fc4b5b30f12fc44be85952157e
|
[
"MIT"
] | 1
|
2020-11-24T03:31:13.000Z
|
2020-11-24T03:31:13.000Z
|
venv/lib/python3.7/enum.py
|
wensu425/aws-eb-webapp
|
4b149c75c11fe5b33c9a080313ec336fabb45824
|
[
"MIT"
] | 1
|
2021-05-04T09:18:22.000Z
|
2021-05-04T09:18:22.000Z
|
/usr/lib64/python3.7/enum.py
| 28
| 28
| 0.785714
| 6
| 28
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 28
| 1
| 28
| 28
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fc59ea037d6526648755b25f4fb9aba7764b26c7
| 344
|
py
|
Python
|
src/network/bo/messages/connection_accepted.py
|
TimmMoetz/blockchain-lab
|
02bb55cc201586dbdc8fdc252a32381f525e83ff
|
[
"RSA-MD"
] | 2
|
2021-11-08T12:00:02.000Z
|
2021-11-12T18:37:52.000Z
|
src/network/bo/messages/connection_accepted.py
|
TimmMoetz/blockchain-lab
|
02bb55cc201586dbdc8fdc252a32381f525e83ff
|
[
"RSA-MD"
] | null | null | null |
src/network/bo/messages/connection_accepted.py
|
TimmMoetz/blockchain-lab
|
02bb55cc201586dbdc8fdc252a32381f525e83ff
|
[
"RSA-MD"
] | 1
|
2022-03-28T13:49:37.000Z
|
2022-03-28T13:49:37.000Z
|
from .message import Message
class Connection_accepted(Message):
def __init__(self) -> None:
super().__init__()
self._name = "connection-accepted"
def to_dict(self):
return {
"name": self.get_name()
}
@staticmethod
def from_dict(dict):
return Connection_accepted()
| 20.235294
| 42
| 0.590116
| 35
| 344
| 5.4
| 0.485714
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.305233
| 344
| 17
| 43
| 20.235294
| 0.790795
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.083333
| 0.166667
| 0.583333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
5d826d4c386cd459637ce9e23eb52cc44c3880f1
| 121
|
py
|
Python
|
tests/cases/test_indexing.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | 2
|
2022-02-02T11:37:53.000Z
|
2022-03-30T18:19:06.000Z
|
tests/cases/test_indexing.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | 25
|
2022-02-28T21:19:11.000Z
|
2022-03-23T21:26:20.000Z
|
tests/cases/test_indexing.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | null | null | null |
if __name__ == "__main__":
a = [1,2,3]
i = -1
print(a[-1])
for i in range(-1,-4,-1):
print(a[i])
| 17.285714
| 29
| 0.429752
| 22
| 121
| 2
| 0.590909
| 0.090909
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 0.330579
| 121
| 7
| 30
| 17.285714
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5d852685fb3ae99a9448f0ad9bc94151a4a37757
| 264
|
py
|
Python
|
abc032_d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc032_d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc032_d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
N, W = map(int, input().split())
vw = [list(map(int, input().split())) for _ in range(N)]
dp = [[0] * W for _ in range(N)]
for i in range(N):
for w in range(W):
if w >= vw[i][1]:
dp[i + 1][w] = max(dp[i][w - vw[i][1]] + vw[i][0], dp[i][w])
| 33
| 72
| 0.465909
| 55
| 264
| 2.2
| 0.309091
| 0.231405
| 0.198347
| 0.264463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02551
| 0.257576
| 264
| 7
| 73
| 37.714286
| 0.591837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5d8c423482f860eb783401948ef4ef79bec03dc3
| 224
|
py
|
Python
|
djangosaml2/apps.py
|
mz-techops/banhammer
|
02476db3d2bb617dbe50827687065fbea7553caf
|
[
"BSD-3-Clause"
] | 3
|
2018-03-09T23:29:25.000Z
|
2020-11-25T15:34:13.000Z
|
djangosaml2/apps.py
|
whyallyn/banhammer
|
59fc81b15d9950a7a40279a9d1df8101c58df569
|
[
"BSD-3-Clause"
] | 3
|
2018-05-08T01:10:43.000Z
|
2021-03-19T21:56:36.000Z
|
djangosaml2/apps.py
|
whyallyn/banhammer
|
59fc81b15d9950a7a40279a9d1df8101c58df569
|
[
"BSD-3-Clause"
] | 2
|
2018-05-10T15:07:24.000Z
|
2018-06-20T16:24:00.000Z
|
"""Register Djangosaml2 as Django app."""
from __future__ import unicode_literals
from django.apps import AppConfig
class Djangosaml2Config(AppConfig):
"""BanHammer Djangosaml2 Django app."""
name = 'djangosaml2'
| 22.4
| 43
| 0.758929
| 24
| 224
| 6.875
| 0.666667
| 0.109091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020942
| 0.147321
| 224
| 9
| 44
| 24.888889
| 0.842932
| 0.308036
| 0
| 0
| 0
| 0
| 0.076389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
5da9aedf0ea72f033604e2d2cea3c170efe10297
| 124
|
py
|
Python
|
alignments/forms.py
|
LDWLab/DESIRE
|
fda3e10c6fdfe5ce47c151d506239dbc9f65f2f0
|
[
"MIT"
] | null | null | null |
alignments/forms.py
|
LDWLab/DESIRE
|
fda3e10c6fdfe5ce47c151d506239dbc9f65f2f0
|
[
"MIT"
] | null | null | null |
alignments/forms.py
|
LDWLab/DESIRE
|
fda3e10c6fdfe5ce47c151d506239dbc9f65f2f0
|
[
"MIT"
] | null | null | null |
from django import forms
class ParalogForm(forms.Form):
your_name = forms.CharField(label='Your name', max_length=100)
| 24.8
| 66
| 0.766129
| 18
| 124
| 5.166667
| 0.777778
| 0.172043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.129032
| 124
| 4
| 67
| 31
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.072581
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
5dad52037c697b1dec7a92cbb77cab50e0aa001c
| 177
|
py
|
Python
|
setup.py
|
thomascbrs/mpc-tsid
|
a20da07fd285a628c6dd32afd76075e3963bf005
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
thomascbrs/mpc-tsid
|
a20da07fd285a628c6dd32afd76075e3963bf005
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
thomascbrs/mpc-tsid
|
a20da07fd285a628c6dd32afd76075e3963bf005
|
[
"BSD-2-Clause"
] | 1
|
2021-06-30T06:31:26.000Z
|
2021-06-30T06:31:26.000Z
|
from distutils.core import setup
from Cython.Build import cythonize
modules = ["FootTrajectoryGenerator.py"]
setup(name='MPC TSID app',
ext_modules=cythonize(modules))
| 19.666667
| 40
| 0.768362
| 22
| 177
| 6.136364
| 0.727273
| 0.237037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 177
| 8
| 41
| 22.125
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.214689
| 0.146893
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
5db24472973d7bd82732f63f526e4a3fbf0abe5c
| 96
|
py
|
Python
|
code/arc020_1_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/arc020_1_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/arc020_1_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
A,B=map(int,input().split())
A,B=abs(A),abs(B)
print("Ant" if A<B else "Bug" if A>B else "Draw")
| 32
| 49
| 0.614583
| 24
| 96
| 2.458333
| 0.541667
| 0.135593
| 0.135593
| 0.271186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 96
| 3
| 49
| 32
| 0.686047
| 0
| 0
| 0
| 0
| 0
| 0.103093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5dde527c31104a93d7ee67e853c6a768a778e966
| 291
|
py
|
Python
|
jupyter/jupyter_config.py
|
eleflow/estudo_cartola
|
ce1d3d7cac4be4cdda9afcd1b826d3cb506051c2
|
[
"Apache-2.0"
] | 1
|
2022-01-25T21:40:00.000Z
|
2022-01-25T21:40:00.000Z
|
jupyter/jupyter_config.py
|
eleflow/estudo_cartola
|
ce1d3d7cac4be4cdda9afcd1b826d3cb506051c2
|
[
"Apache-2.0"
] | null | null | null |
jupyter/jupyter_config.py
|
eleflow/estudo_cartola
|
ce1d3d7cac4be4cdda9afcd1b826d3cb506051c2
|
[
"Apache-2.0"
] | null | null | null |
# Kernel config
c.IPKernelApp.pylab = 'inline' # if you want plotting support always in your notebook
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.NotebookApp.token = ''
c.NotebookApp.password = u''
c.notebookApp.open_browser = True
| 32.333333
| 86
| 0.752577
| 42
| 291
| 5.142857
| 0.642857
| 0.333333
| 0.148148
| 0.212963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015748
| 0.127148
| 291
| 9
| 87
| 32.333333
| 0.834646
| 0.226804
| 0
| 0
| 0
| 0
| 0.03139
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
5de372e7bf5ec6726b3342d073bea54dd533dd1b
| 353
|
py
|
Python
|
slixmpp/plugins/xep_0422/__init__.py
|
anirudhrata/slixmpp
|
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
|
[
"BSD-3-Clause"
] | 86
|
2016-07-04T13:26:02.000Z
|
2022-02-19T10:26:21.000Z
|
slixmpp/plugins/xep_0422/__init__.py
|
anirudhrata/slixmpp
|
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
|
[
"BSD-3-Clause"
] | 10
|
2016-09-30T18:55:41.000Z
|
2020-05-01T14:22:47.000Z
|
slixmpp/plugins/xep_0422/__init__.py
|
anirudhrata/slixmpp
|
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
|
[
"BSD-3-Clause"
] | 45
|
2016-09-30T18:48:41.000Z
|
2022-03-18T21:39:33.000Z
|
# Slixmpp: The Slick XMPP Library
# Copyright (C) 2020 Mathieu Pasquet <mathieui@mathieui.net>
# This file is part of Slixmpp.
# See the file LICENSE for copying permission.
from slixmpp.plugins.base import register_plugin
from slixmpp.plugins.xep_0422.stanza import *
from slixmpp.plugins.xep_0422.fastening import XEP_0422
register_plugin(XEP_0422)
| 32.090909
| 60
| 0.810198
| 53
| 353
| 5.283019
| 0.603774
| 0.1
| 0.192857
| 0.15
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.121813
| 353
| 10
| 61
| 35.3
| 0.83871
| 0.467422
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
5defa2a16accaa03aefed3c4e8c56728f8986a20
| 616
|
py
|
Python
|
tests/fixtures/python3_marshmallow/string_property_default.py
|
expobrain/json-schema-codegen
|
e22b386333c6230e5d6f5984fd947fdd7b947e82
|
[
"MIT"
] | 21
|
2018-06-15T16:08:57.000Z
|
2022-02-11T16:16:11.000Z
|
tests/fixtures/python3_marshmallow/string_property_default.py
|
expobrain/json-schema-codegen
|
e22b386333c6230e5d6f5984fd947fdd7b947e82
|
[
"MIT"
] | 14
|
2018-08-09T18:02:19.000Z
|
2022-01-24T18:04:17.000Z
|
tests/fixtures/python3_marshmallow/string_property_default.py
|
expobrain/json-schema-codegen
|
e22b386333c6230e5d6f5984fd947fdd7b947e82
|
[
"MIT"
] | 4
|
2018-11-30T18:19:10.000Z
|
2021-11-18T04:04:36.000Z
|
from marshmallow import Schema, fields as fields_, post_load
from typing import Optional, List, Any
class TestSchema(Schema):
x = fields_.String(default="42")
@post_load
def make_test(self, test):
return Test(test)
class Test:
def __init__(self, test: dict):
self.x: str = test.get("x", "42")
def to_json(self):
return TestSchema(strict=True).dumps(self).data
def to_dict(self):
return TestSchema(strict=True).dump(self).data
@staticmethod
def from_json(json: str, only=None):
return TestSchema(strict=True, only=only).loads(json).data
| 23.692308
| 66
| 0.665584
| 86
| 616
| 4.627907
| 0.44186
| 0.120603
| 0.165829
| 0.19598
| 0.150754
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.214286
| 616
| 25
| 67
| 24.64
| 0.81405
| 0
| 0
| 0
| 0
| 0
| 0.008117
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.117647
| 0.235294
| 0.823529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
5df61c4fd0bc46260132ad4d0503d0c1e21aabe5
| 146
|
py
|
Python
|
demo.py
|
Sjord/whey
|
f06477f32820679bc9e860d77a0f1d46c7adade4
|
[
"MIT"
] | 16
|
2021-03-11T06:32:46.000Z
|
2022-02-04T22:31:17.000Z
|
demo.py
|
Sjord/whey
|
f06477f32820679bc9e860d77a0f1d46c7adade4
|
[
"MIT"
] | 24
|
2021-02-22T21:07:28.000Z
|
2022-03-19T03:11:48.000Z
|
demo.py
|
Sjord/whey
|
f06477f32820679bc9e860d77a0f1d46c7adade4
|
[
"MIT"
] | 2
|
2021-08-08T17:34:39.000Z
|
2021-09-21T08:53:23.000Z
|
# stdlib
from pprint import pprint
# this package
from whey.config import load_toml
# Parse the example pyproject file with whey's TOML loader and pretty-print
# the resulting configuration mapping.
config = load_toml("example_pyproject.toml")
pprint(config)
| 16.222222
| 44
| 0.794521
| 21
| 146
| 5.380952
| 0.571429
| 0.141593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130137
| 146
| 8
| 45
| 18.25
| 0.889764
| 0.130137
| 0
| 0
| 0
| 0
| 0.177419
| 0.177419
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 4
|
5d1432ad4d14446fda8f8193c6c0c3dc34fb1ae3
| 103
|
py
|
Python
|
src/jazzit/settings.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
src/jazzit/settings.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
src/jazzit/settings.py
|
gtaylor/jazzit
|
52256d5d9a477dc95da5c41daaf31020a662cb90
|
[
"Apache-2.0"
] | null | null | null |
import os
# Silence pygame's import-time support banner before pygame is imported anywhere.
os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "hide"
# Directory containing this settings module.
current_dir = os.path.dirname(__file__)
| 17.166667
| 49
| 0.757282
| 15
| 103
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106796
| 103
| 5
| 50
| 20.6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.291262
| 0.252427
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
5d1785f4be7d5bb3de775b36a61eb4856ff1615c
| 34
|
py
|
Python
|
data/studio21_generated/introductory/4083/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4083/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4083/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
def performant_smallest(arr, n):
| 17
| 32
| 0.764706
| 5
| 34
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 2
| 33
| 17
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5d1ad2ffc5e8705faf35b91223fca80efa0cf3ef
| 450
|
py
|
Python
|
pettingzoo/utils/__init__.py
|
mlanas/PettingZoo
|
58d47c68057bdf37720f961c1a372b4671b8b777
|
[
"Apache-2.0"
] | 1
|
2021-09-13T17:47:48.000Z
|
2021-09-13T17:47:48.000Z
|
pettingzoo/utils/__init__.py
|
mlanas/PettingZoo
|
58d47c68057bdf37720f961c1a372b4671b8b777
|
[
"Apache-2.0"
] | null | null | null |
pettingzoo/utils/__init__.py
|
mlanas/PettingZoo
|
58d47c68057bdf37720f961c1a372b4671b8b777
|
[
"Apache-2.0"
] | null | null | null |
from .agent_selector import agent_selector
from .average_total_reward import average_total_reward
from .conversions import from_parallel, to_parallel
from .env import AECEnv, ParallelEnv
from .random_demo import random_demo
from .save_observation import save_observation
from .wrappers import (AssertOutOfBoundsWrapper, BaseWrapper, CaptureStdoutWrapper,
ClipOutOfBoundsWrapper, OrderEnforcingWrapper, TerminateIllegalWrapper)
| 50
| 94
| 0.835556
| 47
| 450
| 7.744681
| 0.510638
| 0.071429
| 0.098901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128889
| 450
| 8
| 95
| 56.25
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0
| true
| 0
| 0.875
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
5d1bd4e074bd20cc1ae5c7735f06f86bc04863ec
| 447
|
py
|
Python
|
perilib/hal/__init__.py
|
perilib/perilib-python-core
|
a67de15ab715186f8072782f09ac0acff76a2955
|
[
"MIT"
] | 2
|
2020-06-11T16:30:35.000Z
|
2021-12-03T04:23:43.000Z
|
perilib/hal/__init__.py
|
perilib/perilib-python-hal
|
a67de15ab715186f8072782f09ac0acff76a2955
|
[
"MIT"
] | null | null | null |
perilib/hal/__init__.py
|
perilib/perilib-python-hal
|
a67de15ab715186f8072782f09ac0acff76a2955
|
[
"MIT"
] | null | null | null |
"""
This module provides the lowest-level framework for defining protocols and
packets, including data type definitions that all protocols inherit.
Submodules include extended classes for streaming protocols such as what you
typically need for devices that communicate over UART or USB CDC (virtual
serial).
TODO: Support for register-based protocols used by many I2C slaves
"""
# .py files
from .UartManager import *
from .UartStream import *
| 29.8
| 76
| 0.800895
| 63
| 447
| 5.68254
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002646
| 0.154362
| 447
| 14
| 77
| 31.928571
| 0.944444
| 0.856823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
5d44a1ff6af7b25ca22ad63b4c87af03a52bdfdb
| 250
|
py
|
Python
|
tests/test_byUsername.py
|
crazygmr101/TikTok-Api
|
32734422fecd9c3fb4eadad451d050fe1ab22e2a
|
[
"MIT"
] | 9
|
2020-08-27T18:35:22.000Z
|
2022-01-18T20:19:22.000Z
|
tests/test_byUsername.py
|
crazygmr101/TikTok-Api
|
32734422fecd9c3fb4eadad451d050fe1ab22e2a
|
[
"MIT"
] | null | null | null |
tests/test_byUsername.py
|
crazygmr101/TikTok-Api
|
32734422fecd9c3fb4eadad451d050fe1ab22e2a
|
[
"MIT"
] | 1
|
2021-06-11T06:48:33.000Z
|
2021-06-11T06:48:33.000Z
|
from TikTokApi import TikTokApi
def test_trending():
    """byUsername should return roughly the requested number of videos."""
    api = TikTokApi()
    for requested in (5, 10, 20):
        returned = len(api.byUsername('therock', requested))
        # The API is allowed a tolerance of +/-2 videos around the request.
        assert abs(returned - requested) <= 2
| 35.714286
| 58
| 0.66
| 36
| 250
| 4.555556
| 0.444444
| 0.164634
| 0.219512
| 0.27439
| 0.597561
| 0.597561
| 0.402439
| 0
| 0
| 0
| 0
| 0.061905
| 0.16
| 250
| 7
| 59
| 35.714286
| 0.719048
| 0
| 0
| 0
| 0
| 0
| 0.083665
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
536f7aa75d0bc8fe5b540ba6765621f8996993dc
| 1,008
|
py
|
Python
|
website/website.py
|
alomk/FSDD
|
071b6654298699aa340575cdd63ed24514ecdb31
|
[
"MIT"
] | null | null | null |
website/website.py
|
alomk/FSDD
|
071b6654298699aa340575cdd63ed24514ecdb31
|
[
"MIT"
] | null | null | null |
website/website.py
|
alomk/FSDD
|
071b6654298699aa340575cdd63ed24514ecdb31
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
# Module-level Flask application and Flask-Login wiring.
app = Flask(__name__)
login_manager = LoginManager()
login_manager.init_app(app)
# Endpoint users are redirected to when a protected view requires login.
login_manager.login_view = "users.login"
# Message flashed on that redirect.
login_manager.login_message = u"hello"
@login_manager.user_loader
def load_user(user_id):
    # Flask-Login callback: resolve a session user id to a user object.
    # NOTE(review): `User` is not imported in the visible code — calling this
    # will raise NameError; confirm where User is meant to come from.
    return User.get(user_id)
@app.route("/")
@app.route("/index")
@app.route("/logout")
def index():
    """Serve the landing page (also bound to /index and /logout)."""
    page = "index.html"
    return render_template(page)
@app.route("/search", methods=['GET', 'POST'])
def search():
    """Serve the search page for both form display (GET) and submission (POST)."""
    page = "search.html"
    return render_template(page)
@app.route("/fresh")
def fresh():
    """Serve the fresh-items page."""
    page = "fresh.html"
    return render_template(page)
@app.route("/expire")
def deliver():
    """Serve the expiring-items page (route name and view name differ)."""
    page = "expire.html"
    return render_template(page)
@app.route("/location")
def stock():
    """Serve the location page (route name and view name differ)."""
    page = "location.html"
    return render_template(page)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Serve the login page for both form display (GET) and submission (POST)."""
    page = "login.html"
    return render_template(page)
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page together with the 404 status code."""
    body = render_template("404.html")
    return body, 404
| 22.4
| 46
| 0.725198
| 138
| 1,008
| 5.094203
| 0.304348
| 0.159317
| 0.199147
| 0.048364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010078
| 0.114087
| 1,008
| 44
| 47
| 22.909091
| 0.777156
| 0
| 0
| 0
| 0
| 0
| 0.150794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0
| 0.088235
| 0.235294
| 0.558824
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
537c0c673c75c20dace9d4cbc7c043487b8f44de
| 284
|
py
|
Python
|
django_cockroachdb_gis/introspection.py
|
mlazowik/django-cockroachdb
|
e6a01c093f6f22f3764f845f04504434cb6aaa8c
|
[
"Apache-2.0"
] | 94
|
2019-11-21T06:52:08.000Z
|
2022-01-03T14:14:40.000Z
|
django_cockroachdb_gis/introspection.py
|
mlazowik/django-cockroachdb
|
e6a01c093f6f22f3764f845f04504434cb6aaa8c
|
[
"Apache-2.0"
] | 83
|
2019-11-14T19:25:28.000Z
|
2022-03-12T17:41:57.000Z
|
django_cockroachdb_gis/introspection.py
|
mlazowik/django-cockroachdb
|
e6a01c093f6f22f3764f845f04504434cb6aaa8c
|
[
"Apache-2.0"
] | 15
|
2019-11-21T06:52:48.000Z
|
2022-02-06T02:22:05.000Z
|
from django.contrib.gis.db.backends.postgis.introspection import (
PostGISIntrospection,
)
from django_cockroachdb.introspection import (
DatabaseIntrospection as CockroachIntrospection,
)
class DatabaseIntrospection(CockroachIntrospection, PostGISIntrospection):
    """Combine CockroachDB introspection overrides with PostGIS type handling.

    MRO puts CockroachIntrospection first, so its overrides win over PostGIS's.
    """
    pass
| 23.666667
| 74
| 0.823944
| 23
| 284
| 10.130435
| 0.695652
| 0.085837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116197
| 284
| 11
| 75
| 25.818182
| 0.928287
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.125
| 0.25
| 0
| 0.375
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
5385cc621aaafc0c7547b40aa9945c1d37f2c4a2
| 25,600
|
py
|
Python
|
egret/model_library/transmission/tx_calc.py
|
kdheepak/Egret
|
f982247637137e098191453e682376adc1b4c2ac
|
[
"BSD-3-Clause"
] | null | null | null |
egret/model_library/transmission/tx_calc.py
|
kdheepak/Egret
|
f982247637137e098191453e682376adc1b4c2ac
|
[
"BSD-3-Clause"
] | 1
|
2019-05-23T02:56:29.000Z
|
2019-05-23T02:56:29.000Z
|
egret/model_library/transmission/tx_calc.py
|
kdheepak/Egret
|
f982247637137e098191453e682376adc1b4c2ac
|
[
"BSD-3-Clause"
] | 2
|
2019-11-18T20:18:51.000Z
|
2020-05-08T15:56:17.000Z
|
# ___________________________________________________________________________
#
# EGRET: Electrical Grid Research and Engineering Tools
# Copyright 2019 National Technology & Engineering Solutions of Sandia, LLC
# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
# Government retains certain rights in this software.
# This software is distributed under the Revised BSD License.
# ___________________________________________________________________________
"""
This module collects some helper functions useful for performing
different computations for transmission models
"""
import math
import numpy as np
import scipy as sp
from math import cos, sin
from egret.model_library.defn import BasePointType, ApproximationType
def calculate_conductance(branch):
    """Return the series conductance g = r / (r**2 + x**2) of *branch*."""
    resistance = branch['resistance']
    reactance = branch['reactance']
    impedance_sq = resistance**2 + reactance**2
    return resistance / impedance_sq
def calculate_susceptance(branch):
    """Return the series susceptance b = -x / (r**2 + x**2) of *branch*."""
    resistance = branch['resistance']
    reactance = branch['reactance']
    impedance_sq = resistance**2 + reactance**2
    return -reactance / impedance_sq
def calculate_y_matrix_from_branch(branch):
    """Pull the electrical parameters out of *branch* and build its y matrix.

    Non-transformer branches use the identity tap ratio (1.0) and no phase shift.
    """
    resistance = branch['resistance']
    reactance = branch['reactance']
    charging = branch['charging_susceptance']
    tap_ratio = 1.0
    phase_shift = 0.0
    if branch['branch_type'] == 'transformer':
        tap_ratio = branch['transformer_tap_ratio']
        phase_shift = branch['transformer_phase_shift']
    return calculate_y_matrix(resistance, reactance, charging, tap_ratio, phase_shift)
def calculate_y_matrix(rs, xs, bc, tau, shift):
    """
    Compute the y matrix from various branch properties.

    Parameters
    ----------
    rs : float
        Branch resistance
    xs : float
        Branch reactance
    bc : float
        Branch charging susceptance
    tau : float
        Branch transformer tap ratio
    shift : float
        Branch transformer phase shift (degrees)

    Returns
    -------
    dict : maps (current component, voltage component) pairs such as
        ('ifr', 'vfr') to the corresponding y-matrix coefficient.
    """
    # Half of the charging susceptance sits at each end of the branch.
    bc = bc/2
    tr = tau * math.cos(math.radians(shift))
    tj = tau * math.sin(math.radians(shift))
    mag = rs**2 + xs**2
    # Only eight distinct coefficients exist; the remaining eight entries
    # are copies or negations of these.
    c1 = rs/(tau**2*mag)
    c2 = (1/tau**2) * (xs/mag - bc)
    c3 = (-rs*tr - xs*tj)/(tau**2 * mag)
    c4 = (rs*tj - xs*tr)/(tau**2 * mag)
    c5 = rs/mag
    c6 = xs/mag - bc
    c7 = (xs*tj - rs*tr)/(tau**2 * mag)
    c8 = (-rs*tj - xs*tr)/(tau**2 * mag)
    return {
        ('ifr', 'vfr'): c1, ('ifr', 'vfj'): c2, ('ifr', 'vtr'): c3, ('ifr', 'vtj'): c4,
        ('ifj', 'vfr'): -c2, ('ifj', 'vfj'): c1, ('ifj', 'vtr'): -c4, ('ifj', 'vtj'): c3,
        ('itr', 'vfr'): c7, ('itr', 'vfj'): c8, ('itr', 'vtr'): c5, ('itr', 'vtj'): c6,
        ('itj', 'vfr'): -c8, ('itj', 'vfj'): c7, ('itj', 'vtr'): -c6, ('itj', 'vtj'): c5,
    }
def calculate_ifr(vfr, vfj, vtr, vtj, y_matrix):
    """
    Compute ifr from voltages and the y_matrix (computed
    from the branch properties using :py:meth:`calculate_branch_y_matrix`)
    """
    # Accumulate left-to-right to match the original summation order.
    ifr = y_matrix['ifr', 'vfr'] * vfr
    ifr += y_matrix['ifr', 'vfj'] * vfj
    ifr += y_matrix['ifr', 'vtr'] * vtr
    ifr += y_matrix['ifr', 'vtj'] * vtj
    return ifr
def calculate_ifj(vfr, vfj, vtr, vtj, y_matrix):
    """
    Compute ify from voltages and the y_matrix (computed
    from the branch properties using :py:meth:`calculate_branch_y_matrix`)
    """
    # Accumulate left-to-right to match the original summation order.
    ifj = y_matrix['ifj', 'vfr'] * vfr
    ifj += y_matrix['ifj', 'vfj'] * vfj
    ifj += y_matrix['ifj', 'vtr'] * vtr
    ifj += y_matrix['ifj', 'vtj'] * vtj
    return ifj
def calculate_itr(vfr, vfj, vtr, vtj, y_matrix):
    """
    Compute itr from voltages and the y_matrix (computed
    from the branch properties using :py:meth:`calculate_branch_y_matrix`)
    """
    # Accumulate left-to-right to match the original summation order.
    itr = y_matrix['itr', 'vfr'] * vfr
    itr += y_matrix['itr', 'vfj'] * vfj
    itr += y_matrix['itr', 'vtr'] * vtr
    itr += y_matrix['itr', 'vtj'] * vtj
    return itr
def calculate_itj(vfr, vfj, vtr, vtj, y_matrix):
    """
    Compute itj from voltages and the y_matrix (computed
    from the branch properties using :py:meth:`calculate_branch_y_matrix`)
    """
    # Accumulate left-to-right to match the original summation order.
    itj = y_matrix['itj', 'vfr'] * vfr
    itj += y_matrix['itj', 'vfj'] * vfj
    itj += y_matrix['itj', 'vtr'] * vtr
    itj += y_matrix['itj', 'vtj'] * vtj
    return itj
def calculate_ir(p, q, vr, vj):
    """
    Compute ir (real current component) from power flows and voltages.
    """
    vmag_sq = vj**2 + vr**2
    return (q*vj+p*vr) / vmag_sq
def calculate_ij(p, q, vr, vj):
    """
    Compute ij (imaginary current component) from power flows and voltages.
    """
    vmag_sq = vj**2 + vr**2
    return (p*vj-q*vr) / vmag_sq
def calculate_p(ir, ij, vr, vj):
    """
    Compute real power flow from currents and voltages.
    """
    return vr * ir + vj * ij
def calculate_q(ir, ij, vr, vj):
    """
    Compute reactive power flow from currents and voltages.
    """
    return vj * ir - vr * ij
def calculate_vr_from_vm_va(vm, va):
    """
    Compute the real voltage component vr from magnitude *vm* (p.u.) and
    angle *va* (degrees). Returns None when either input is None.
    """
    # Guard clause replaces the original positive-test nesting.
    if vm is None or va is None:
        return None
    return vm * math.cos(va*math.pi/180)
def calculate_vj_from_vm_va(vm, va):
    """
    Compute the imaginary voltage component vj from magnitude *vm* (p.u.) and
    angle *va* (degrees). Returns None when either input is None.
    """
    # Guard clause replaces the original positive-test nesting.
    if vm is None or va is None:
        return None
    return vm * math.sin(va*math.pi/180)
def calculate_vm_from_vj_vr(vj,vr):
    """
    Compute the voltage magnitude vm from the imaginary (*vj*) and real (*vr*)
    parts of the voltage phasor. Returns None when either input is None.
    """
    # Guard clause replaces the original positive-test nesting.
    if vj is None or vr is None:
        return None
    return math.sqrt(vj**2 + vr**2)
def calculate_va_from_vj_vr(vj, vr):
    """
    Compute the voltage angle va (degrees) from the imaginary (*vj*) and real
    (*vr*) parts of the voltage phasor. Returns None when either input is None.
    """
    # Guard clause replaces the original positive-test nesting.
    if vj is None or vr is None:
        return None
    # NOTE(review): atan (not atan2) limits the result to (-90, 90) degrees
    # and divides by zero when vr == 0 — confirm callers guarantee vr > 0.
    return math.degrees(math.atan(vj/vr))
def _calculate_J11(branches,buses,index_set_branch,index_set_bus,mapping_bus_to_idx,base_point=BasePointType.FLATSTART,approximation_type=ApproximationType.PTDF):
    """
    Compute the power flow Jacobian for partial derivative of real power flow to voltage angle

    Returns a CSR matrix of shape (len(index_set_branch), len(index_set_bus)).
    """
    _len_bus = len(index_set_bus)
    _len_branch = len(index_set_branch)
    # COO triplets: one +val / -val pair per branch.
    data = []
    row = []
    col = []
    for idx_row, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Transformers override the unit tap ratio.
        tau = 1.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
        if approximation_type == ApproximationType.PTDF:
            # Lossless approximation: susceptance from reactance alone.
            x = branch['reactance']
            b = -1/(tau*x)
        elif approximation_type == ApproximationType.PTDF_LOSSES:
            b = calculate_susceptance(branch)/tau
        # NOTE(review): b (and vn/vm/tn/tm below) stay unbound for any enum
        # value outside the handled members — confirm inputs are restricted.
        if base_point == BasePointType.FLATSTART:
            vn = 1.
            vm = 1.
            tn = 0.
            tm = 0.
        elif base_point == BasePointType.SOLUTION: # TODO: check that we are loading the correct values (or results)
            vn = buses[from_bus]['vm']
            vm = buses[to_bus]['vm']
            tn = buses[from_bus]['va']
            tm = buses[to_bus]['va']
        val = -b * vn * vm * cos(tn - tm)
        # +val at the from bus, -val at the to bus.
        idx_col = mapping_bus_to_idx[from_bus]
        row.append(idx_row)
        col.append(idx_col)
        data.append(val)
        idx_col = mapping_bus_to_idx[to_bus]
        row.append(idx_row)
        col.append(idx_col)
        data.append(-val)
    J11 = sp.sparse.coo_matrix( (data, (row,col)), shape=(_len_branch, _len_bus))
    return J11.tocsr()
def _calculate_L11(branches,buses,index_set_branch,index_set_bus,mapping_bus_to_idx,base_point=BasePointType.FLATSTART):
    """
    Compute the power flow Jacobian for partial derivative of real power losses to voltage angle

    Returns a CSR matrix of shape (len(index_set_branch), len(index_set_bus)).
    """
    _len_bus = len(index_set_bus)
    _len_branch = len(index_set_branch)
    # COO triplets: one +val / -val pair per branch.
    row = []
    col = []
    data = []
    for idx_row, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Transformers override the unit tap ratio.
        tau = 1.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
        g = calculate_conductance(branch)/tau
        # NOTE(review): vn/vm/tn/tm stay unbound for any enum value outside
        # the handled members — confirm inputs are restricted.
        if base_point == BasePointType.FLATSTART:
            vn = 1.
            vm = 1.
            tn = 0.
            tm = 0.
        elif base_point == BasePointType.SOLUTION: # TODO: check that we are loading the correct values (or results)
            vn = buses[from_bus]['vm']
            vm = buses[to_bus]['vm']
            tn = buses[from_bus]['va']
            tm = buses[to_bus]['va']
        val = 2 * g * vn * vm * sin(tn - tm)
        # +val at the from bus, -val at the to bus.
        idx_col = mapping_bus_to_idx[from_bus]
        row.append(idx_row)
        col.append(idx_col)
        data.append(val)
        idx_col = mapping_bus_to_idx[to_bus]
        row.append(idx_row)
        col.append(idx_col)
        data.append(-val)
    L11 = sp.sparse.coo_matrix((data,(row,col)),shape=(_len_branch,_len_bus))
    return L11.tocsr()
def calculate_phi_constant(branches,index_set_branch,index_set_bus,approximation_type=ApproximationType.PTDF, mapping_bus_to_idx=None):
    """
    Compute the phase shifter constant for fixed phase shift transformers

    Returns a pair of CSR matrices (phi_from, phi_to), each of shape
    (len(index_set_bus), len(index_set_branch)).
    """
    _len_bus = len(index_set_bus)
    if mapping_bus_to_idx is None:
        mapping_bus_to_idx = {bus_n: i for i, bus_n in enumerate(index_set_bus)}
    _len_branch = len(index_set_branch)
    # Same column/data triplets are scattered to the from-bus and to-bus rows.
    row_from = []
    row_to = []
    col = []
    data = []
    for idx_col, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Non-transformer branches contribute b = 0 (no phase shift).
        tau = 1.0
        shift = 0.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
            shift = math.radians(branch['transformer_phase_shift'])
        b = 0.
        if approximation_type == ApproximationType.PTDF:
            x = branch['reactance']
            b = -(1/x)*(shift/tau)
        elif approximation_type == ApproximationType.PTDF_LOSSES:
            b = calculate_susceptance(branch)*(shift/tau)
        row_from.append(mapping_bus_to_idx[from_bus])
        row_to.append(mapping_bus_to_idx[to_bus])
        col.append(idx_col)
        data.append(b)
    phi_from = sp.sparse.coo_matrix((data,(row_from,col)), shape=(_len_bus,_len_branch))
    phi_to = sp.sparse.coo_matrix((data,(row_to,col)), shape=(_len_bus,_len_branch))
    return phi_from.tocsr(), phi_to.tocsr()
def calculate_phi_loss_constant(branches,index_set_branch,index_set_bus,approximation_type=ApproximationType.PTDF_LOSSES, mapping_bus_to_idx=None):
    """
    Compute the phase shifter constant for fixed phase shift transformers

    Loss variant of calculate_phi_constant: uses conductance and shift**2.
    Returns a pair of CSR matrices (phi_loss_from, phi_loss_to), each of
    shape (len(index_set_bus), len(index_set_branch)).
    """
    _len_bus = len(index_set_bus)
    if mapping_bus_to_idx is None:
        mapping_bus_to_idx = {bus_n: i for i, bus_n in enumerate(index_set_bus)}
    _len_branch = len(index_set_branch)
    # Same column/data triplets are scattered to the from-bus and to-bus rows.
    row_from = []
    row_to = []
    col = []
    data = []
    for idx_col, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Non-transformer branches contribute g = 0 (no phase shift).
        tau = 1.0
        shift = 0.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
            shift = math.radians(branch['transformer_phase_shift'])
        g = 0.
        if approximation_type == ApproximationType.PTDF:
            r = branch['resistance']
            g = (1/r)*(1/tau)*shift**2
        elif approximation_type == ApproximationType.PTDF_LOSSES:
            g = calculate_conductance(branch)*(1/tau)*shift**2
        row_from.append(mapping_bus_to_idx[from_bus])
        row_to.append(mapping_bus_to_idx[to_bus])
        col.append(idx_col)
        data.append(g)
    phi_loss_from = sp.sparse.coo_matrix((data,(row_from,col)),shape=(_len_bus,_len_branch))
    phi_loss_to = sp.sparse.coo_matrix((data,(row_to,col)),shape=(_len_bus,_len_branch))
    return phi_loss_from.tocsr(), phi_loss_to.tocsr()
def _calculate_pf_constant(branches,buses,index_set_branch,base_point=BasePointType.FLATSTART):
    """
    Compute the power flow constant for the taylor series expansion of real power flow as
    a convex combination of the from/to directions, i.e.,
    pf = 0.5*g*((tau*vn)^2 - vm^2) - tau*vn*vm*b*sin(tn-tm-shift)

    Returns a dense 1-D numpy array with one entry per branch.
    """
    _len_branch = len(index_set_branch)
    ## this will be fully dense
    pf_constant = np.zeros(_len_branch)
    for idx_row, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Transformers override the unit tap ratio and zero phase shift.
        tau = 1.0
        shift = 0.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
            shift = math.radians(branch['transformer_phase_shift'])
        g = calculate_conductance(branch)
        b = calculate_susceptance(branch)/tau
        # NOTE(review): vn/vm/tn/tm stay unbound for any enum value outside
        # the handled members — confirm inputs are restricted.
        if base_point == BasePointType.FLATSTART:
            vn = 1.
            vm = 1.
            tn = 0.
            tm = 0.
        elif base_point == BasePointType.SOLUTION: # TODO: check that we are loading the correct values (or results)
            vn = buses[from_bus]['vm']
            vm = buses[to_bus]['vm']
            tn = buses[from_bus]['va']
            tm = buses[to_bus]['va']
        pf_constant[idx_row] = 0.5 * g * ((vn/tau) ** 2 - vm ** 2) \
                               - b * vn * vm * (sin(tn - tm + shift) - cos(tn - tm + shift)*(tn - tm))
    return pf_constant
def _calculate_pfl_constant(branches,buses,index_set_branch,base_point=BasePointType.FLATSTART):
    """
    Compute the power losses constant for the taylor series expansion of real power losses as
    a convex combination of the from/to directions, i.e.,
    pfl = g*((tau*vn)^2 + vm^2) - 2*tau*vn*vm*g*cos(tn-tm-shift)

    Returns a dense 1-D numpy array with one entry per branch.
    """
    _len_branch = len(index_set_branch)
    ## this will be fully dense
    pfl_constant = np.zeros(_len_branch)
    for idx_row, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        from_bus = branch['from_bus']
        to_bus = branch['to_bus']
        # Transformers override the unit tap ratio and zero phase shift.
        tau = 1.0
        shift = 0.0
        if branch['branch_type'] == 'transformer':
            tau = branch['transformer_tap_ratio']
            shift = math.radians(branch['transformer_phase_shift'])
        # Conductance scaled by the tap ratio (g) and its square (g2).
        _g = calculate_conductance(branch)
        g = _g/tau
        g2 = _g/tau**2
        # NOTE(review): vn/vm/tn/tm stay unbound for any enum value outside
        # the handled members — confirm inputs are restricted.
        if base_point == BasePointType.FLATSTART:
            vn = 1.
            vm = 1.
            tn = 0.
            tm = 0.
        elif base_point == BasePointType.SOLUTION: # TODO: check that we are loading the correct values (or results)
            vn = buses[from_bus]['vm']
            vm = buses[to_bus]['vm']
            tn = buses[from_bus]['va']
            tm = buses[to_bus]['va']
        pfl_constant[idx_row] = g2 * (vn ** 2) + _g * (vm ** 2) \
                                - 2 * g * vn * vm * (sin(tn - tm + shift) * (tn - tm) + cos(tn - tm + shift))
    return pfl_constant
def calculate_ptdf(branches,buses,index_set_branch,index_set_bus,reference_bus,base_point=BasePointType.FLATSTART,sparse_index_set_branch=None,mapping_bus_to_idx=None):
    """
    Calculates the sensitivity of the voltage angle to real power injections
    Parameters
    ----------
    branches: dict{}
        The dictionary of branches for the test case
    buses: dict{}
        The dictionary of buses for the test case
    index_set_branch: list
        The list of keys for branches for the test case
    index_set_bus: list
        The list of keys for buses for the test case
    reference_bus: key value
        The reference bus key value
    base_point: egret.model_library_defn.BasePointType
        The base-point type for calculating the PTDF matrix
    sparse_index_set_branch: list
        The list of keys for branches needed to compute a sparse PTDF matrix
        If this is None, a dense PTDF matrix is returned
    mapping_bus_to_idx: dict
        A map from bus names to indices for matrix construction. If None,
        will be inferred from index_set_bus.
    """
    _len_bus = len(index_set_bus)
    if mapping_bus_to_idx is None:
        mapping_bus_to_idx = {bus_n: i for i, bus_n in enumerate(index_set_bus)}
    _len_branch = len(index_set_branch)
    _ref_bus_idx = mapping_bus_to_idx[reference_bus]
    J = _calculate_J11(branches,buses,index_set_branch,index_set_bus,mapping_bus_to_idx,base_point,approximation_type=ApproximationType.PTDF)
    A = calculate_adjacency_matrix_transpose(branches,index_set_branch,index_set_bus,mapping_bus_to_idx)
    M = A@J
    # Augment M with a reference-bus row/column so the system is non-singular.
    ref_bus_row = sp.sparse.coo_matrix(([1],([0],[_ref_bus_idx])), shape=(1,_len_bus))
    ref_bus_col = sp.sparse.coo_matrix(([1],([_ref_bus_idx],[0])), shape=(_len_bus,1))
    J0 = sp.sparse.bmat([[M,ref_bus_col],[ref_bus_row,0]], format='coo')
    if sparse_index_set_branch is None or len(sparse_index_set_branch) == _len_branch:
        ## the resulting matrix after inversion will be fairly dense,
        ## the scipy documenation recommends using dense for the inversion
        ## as well
        try:
            SENSI = np.linalg.inv(J0.A)
        except np.linalg.LinAlgError:
            print("Matrix not invertible. Calculating pseudo-inverse instead.")
            SENSI = np.linalg.pinv(J0.A,rcond=1e-7)
        # Drop the augmentation row/column before forming the PTDF.
        SENSI = SENSI[:-1,:-1]
        PTDF = np.matmul(J.A,SENSI)
    elif len(sparse_index_set_branch) < _len_branch:
        B = np.array([], dtype=np.int64).reshape(_len_bus + 1,0)
        _sparse_mapping_branch = {i: branch_n for i, branch_n in enumerate(index_set_branch) if branch_n in sparse_index_set_branch}
        ## TODO: Maybe just keep the sparse PTDFs as a dict of ndarrays?
        ## Right now the return type depends on the options
        ## passed in
        for idx, branch_name in _sparse_mapping_branch.items():
            # Unit vector selecting this branch, padded for the augmentation row.
            b = np.zeros((_len_branch,1))
            b[idx] = 1
            _tmp = np.matmul(J.transpose(),b)
            _tmp = np.vstack([_tmp,0])
            B = np.concatenate((B,_tmp), axis=1)
        row_idx = list(_sparse_mapping_branch.keys())
        PTDF = sp.sparse.lil_matrix((_len_branch,_len_bus))
        _ptdf = sp.sparse.linalg.spsolve(J0.transpose().tocsr(), B).T
        PTDF[row_idx] = _ptdf[:,:-1]
    return PTDF
def calculate_ptdf_ldf(branches,buses,index_set_branch,index_set_bus,reference_bus,base_point=BasePointType.SOLUTION,sparse_index_set_branch=None,mapping_bus_to_idx=None):
    """
    Calculates the sensitivity of the voltage angle to real power injections and losses on the lines. Includes the
    calculation of the constant term for the quadratic losses on the lines.
    Parameters
    ----------
    branches: dict{}
        The dictionary of branches for the test case
    buses: dict{}
        The dictionary of buses for the test case
    index_set_branch: list
        The list of keys for branches for the test case
    index_set_bus: list
        The list of keys for buses for the test case
    reference_bus: key value
        The reference bus key value
    base_point: egret.model_library_defn.BasePointType
        The base-point type for calculating the PTDF and LDF matrix
    sparse_index_set_branch: list
        The list of keys for branches needed to compute a sparse PTDF matrix
    mapping_bus_to_idx: dict
        A map from bus names to indices for matrix construction. If None,
        will be inferred from index_set_bus.

    Returns
    -------
    tuple : (PTDF, LDF, LDF_constant)
    """
    _len_bus = len(index_set_bus)
    if mapping_bus_to_idx is None:
        mapping_bus_to_idx = {bus_n: i for i, bus_n in enumerate(index_set_bus)}
    _len_branch = len(index_set_branch)
    _ref_bus_idx = mapping_bus_to_idx[reference_bus]
    J = _calculate_J11(branches,buses,index_set_branch,index_set_bus,mapping_bus_to_idx,base_point,approximation_type=ApproximationType.PTDF_LOSSES)
    L = _calculate_L11(branches,buses,index_set_branch,index_set_bus,mapping_bus_to_idx,base_point)
    Jc = _calculate_pf_constant(branches,buses,index_set_branch,base_point)
    Lc = _calculate_pfl_constant(branches,buses,index_set_branch,base_point)
    # Degenerate case: both constant vectors vanish, so all sensitivities are zero.
    if np.all(Jc == 0) and np.all(Lc == 0):
        return np.zeros((_len_branch, _len_bus)), np.zeros((_len_branch, _len_bus)), np.zeros((1,_len_branch))
    A = calculate_adjacency_matrix_transpose(branches,index_set_branch,index_set_bus, mapping_bus_to_idx)
    AA = calculate_absolute_adjacency_matrix(A)
    M1 = A@J
    M2 = AA@L
    M = M1 + 0.5 * M2
    # Augment M with a reference-bus row/column so the system is non-singular.
    ref_bus_row = sp.sparse.coo_matrix(([1],([0],[_ref_bus_idx])), shape=(1,_len_bus))
    ref_bus_col = sp.sparse.coo_matrix(([1],([_ref_bus_idx],[0])), shape=(_len_bus,1))
    J0 = sp.sparse.bmat([[M,ref_bus_col],[ref_bus_row,0]], format='coo')
    if sparse_index_set_branch is None or len(sparse_index_set_branch) == _len_branch:
        ## the resulting matrix after inversion will be fairly dense,
        ## the scipy documenation recommends using dense for the inversion
        ## as well
        try:
            SENSI = np.linalg.inv(J0.A)
        except np.linalg.LinAlgError:
            print("Matrix not invertible. Calculating pseudo-inverse instead.")
            SENSI = np.linalg.pinv(J0.A,rcond=1e-7)
        # Drop the augmentation row/column before forming the sensitivities.
        SENSI = SENSI[:-1,:-1]
        PTDF = np.matmul(J.A, SENSI)
        LDF = np.matmul(L.A, SENSI)
    elif len(sparse_index_set_branch) < _len_branch:
        B_J = np.array([], dtype=np.int64).reshape(_len_bus + 1, 0)
        B_L = np.array([], dtype=np.int64).reshape(_len_bus + 1, 0)
        _sparse_mapping_branch = {i: branch_n for i, branch_n in enumerate(index_set_branch) if branch_n in sparse_index_set_branch}
        for idx, branch_name in _sparse_mapping_branch.items():
            # Unit vector selecting this branch, padded for the augmentation row.
            b = np.zeros((_len_branch, 1))
            b[idx] = 1
            _tmp_J = np.matmul(J.transpose(), b)
            _tmp_J = np.vstack([_tmp_J, 0])
            B_J = np.concatenate((B_J, _tmp_J), axis=1)
            _tmp_L = np.matmul(L.transpose(), b)
            _tmp_L = np.vstack([_tmp_L, 0])
            B_L = np.concatenate((B_L, _tmp_L), axis=1)
        row_idx = list(_sparse_mapping_branch.keys())
        PTDF = sp.sparse.lil_matrix((_len_branch, _len_bus))
        _ptdf = sp.sparse.linalg.spsolve(J0.transpose().tocsr(), B_J).T
        PTDF[row_idx] = _ptdf[:, :-1]
        LDF = sp.sparse.lil_matrix((_len_branch, _len_bus))
        # BUGFIX: was `sp.sparselinalg.spsolve` (AttributeError at runtime);
        # now matches the spelling used for the PTDF solve above and in
        # calculate_ptdf.
        _ldf = sp.sparse.linalg.spsolve(J0.transpose().tocsr(), B_L).T
        LDF[row_idx] = _ldf[:, :-1]
    M1 = A@Jc
    M2 = AA@Lc
    M = M1 + 0.5 * M2
    LDF_constant = -LDF@M + Lc
    return PTDF, LDF, LDF_constant
def calculate_adjacency_matrix_transpose(branches,index_set_branch,index_set_bus, mapping_bus_to_idx):
    """
    Bus-by-branch incidence matrix in CSR form: for each branch, (-1) marks
    flow leaving the from bus and (1) marks flow entering the to bus.
    """
    n_bus = len(index_set_bus)
    n_branch = len(index_set_branch)
    rows, cols, vals = [], [], []
    for branch_idx, branch_name in enumerate(index_set_branch):
        branch = branches[branch_name]
        # -1 in the from-bus row of this branch's column.
        rows.append(mapping_bus_to_idx[branch['from_bus']])
        cols.append(branch_idx)
        vals.append(-1)
        # +1 in the to-bus row of this branch's column.
        rows.append(mapping_bus_to_idx[branch['to_bus']])
        cols.append(branch_idx)
        vals.append(1)
    incidence = sp.sparse.coo_matrix((vals, (rows, cols)), shape=(n_bus, n_branch))
    return incidence.tocsr()
def calculate_absolute_adjacency_matrix(adjacency_matrix):
    """
    Return the element-wise absolute value of the adjacency matrix.

    Uses the builtin abs() (dispatched to the matrix's __abs__), which works
    for both scipy.sparse matrices and dense numpy arrays. The previous
    implementation called sp.absolute, a NumPy alias that was deprecated and
    then removed from the top-level scipy namespace (SciPy 1.12), so it
    breaks on current SciPy versions.
    """
    return abs(adjacency_matrix)
| 33.684211
| 171
| 0.616563
| 3,659
| 25,600
| 4.040448
| 0.089642
| 0.040584
| 0.045455
| 0.034497
| 0.769413
| 0.748985
| 0.726326
| 0.709483
| 0.695211
| 0.678774
| 0
| 0.011366
| 0.267969
| 25,600
| 759
| 172
| 33.72859
| 0.777535
| 0.251836
| 0
| 0.563415
| 0
| 0
| 0.05268
| 0.014303
| 0
| 0
| 0
| 0.006588
| 0
| 1
| 0.063415
| false
| 0.002439
| 0.012195
| 0
| 0.15122
| 0.004878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
538b88713dcad7a49225b45caca487a524f4c2ec
| 160
|
py
|
Python
|
Section07_Bridge/Practice/VectorRenderer.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | 1
|
2020-10-20T07:41:51.000Z
|
2020-10-20T07:41:51.000Z
|
Section07_Bridge/Practice/VectorRenderer.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
Section07_Bridge/Practice/VectorRenderer.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
from Section07_Bridge.Practice.Renderer import Renderer
class VectorRenderer(Renderer):
    """Concrete renderer (Bridge pattern) that draws shapes as vector lines."""

    @property
    def what_to_render_as(self):
        """Rendering primitive name used when describing a shape."""
        return 'lines'
| 20
| 55
| 0.75
| 19
| 160
| 6.105263
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015267
| 0.18125
| 160
| 7
| 56
| 22.857143
| 0.870229
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
5395418f585b04fb034a3d5be657cd9a946c3eab
| 2,914
|
py
|
Python
|
netforce_general/netforce_general/models/__init__.py
|
nfco/netforce
|
35252eecd0a6633ab9d82162e9e3ff57d4da029a
|
[
"MIT"
] | 27
|
2015-09-30T23:53:30.000Z
|
2021-06-07T04:56:25.000Z
|
netforce_general/netforce_general/models/__init__.py
|
nfco/netforce
|
35252eecd0a6633ab9d82162e9e3ff57d4da029a
|
[
"MIT"
] | 191
|
2015-10-08T11:46:30.000Z
|
2019-11-14T02:24:36.000Z
|
netforce_general/netforce_general/models/__init__.py
|
nfco/netforce
|
35252eecd0a6633ab9d82162e9e3ff57d4da029a
|
[
"MIT"
] | 32
|
2015-10-01T03:59:43.000Z
|
2022-01-13T07:31:05.000Z
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from . import tag
from . import tag_type
from . import base_user
from . import permission
from . import profile
from . import model
from . import field
from . import profile_access
from . import field_access
from . import share_access
from . import role
from . import login
from . import product_categ
from . import product
from . import settings
from . import uom
from . import cron_job
from . import activity
from . import translation
from . import translation_field
from . import import_data
from . import sequence
from . import sequence_running
from . import country
from . import province
from . import district
from . import subdistrict
from . import postal_code
from . import language
from . import feedback
from . import attach
from . import log
from . import view
from . import change_passwd
from . import forgot_passwd
from . import user_group
from . import report_template
from . import workflow_rule
from . import address
from . import bank_account
from . import company_type
from . import bank
from . import share_record
from . import ws_listener
from . import ws_event
from . import inline_help
from . import wkf_rule
from . import create_db
from . import copy_db
from . import upgrade_db
from . import delete_db
from . import field_cache
from . import user_pref
from . import company
from . import select_company
from . import field_value
from . import field_default
from . import view_layout
from . import action
from . import update_ui
from . import update_db
from . import print_wizard
from . import send_wizard
from . import approve_wizard
from . import template
from . import theme
from . import module
from . import import_module
from . import script
from . import import_inline_help
from . import reason_code
from . import menu_access
from . import ui_params
from . import report_custom
| 30.673684
| 80
| 0.78792
| 437
| 2,914
| 5.15103
| 0.393593
| 0.328743
| 0.033319
| 0.01777
| 0.023101
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003296
| 0.167124
| 2,914
| 94
| 81
| 31
| 0.924186
| 0.365477
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.027027
| 1
| 0
| 1
| 0.013514
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
53999565d746fdb9f01307d8f9db882556e4efc8
| 182
|
py
|
Python
|
books/python-3-oop-packt/Chapter9/9_23_generate_colors.py
|
phiratio/lpthw
|
a32240d4355fb331805d515f96e1d009914e5c47
|
[
"MIT"
] | 73
|
2016-09-15T23:07:04.000Z
|
2022-03-05T15:09:48.000Z
|
books/python-3-oop-packt/Chapter9/9_23_generate_colors.py
|
phiratio/lpthw
|
a32240d4355fb331805d515f96e1d009914e5c47
|
[
"MIT"
] | 34
|
2019-12-16T16:53:24.000Z
|
2022-01-13T02:29:30.000Z
|
books/python-3-oop-packt/Chapter9/9_23_generate_colors.py
|
phiratio/lpthw
|
a32240d4355fb331805d515f96e1d009914e5c47
|
[
"MIT"
] | 51
|
2016-10-07T20:47:51.000Z
|
2021-12-22T21:00:24.000Z
|
from random import random
def generate_colors(count=100):
    """Lazily yield `count` random RGB tuples, each component in [0.0, 1.0)."""
    yield from ((random(), random(), random()) for _ in range(count))
# Demo: print the default batch of 100 random color tuples.
for color in generate_colors():
    print(color)
| 18.2
| 44
| 0.67033
| 25
| 182
| 4.8
| 0.6
| 0.233333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.208791
| 182
| 9
| 45
| 20.222222
| 0.8125
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0.166667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
53b2d5e83eb0462eaa3f62c2a5d55353f00658a5
| 837
|
py
|
Python
|
collectors/NSXTMgmtServiceAlertCollector.py
|
sapcc/vrops-exporter
|
c342f319e1f69590fa514ba07c783e586aeb223a
|
[
"Apache-2.0"
] | 18
|
2019-10-24T03:36:50.000Z
|
2022-01-22T20:39:42.000Z
|
collectors/NSXTMgmtServiceAlertCollector.py
|
richardtief/vrops-exporter
|
7e7543f5561f85aab1828e4c9d6fada4b687639f
|
[
"Apache-2.0"
] | 55
|
2019-10-16T09:51:36.000Z
|
2022-03-28T11:46:08.000Z
|
collectors/NSXTMgmtServiceAlertCollector.py
|
richardtief/vrops-exporter
|
7e7543f5561f85aab1828e4c9d6fada4b687639f
|
[
"Apache-2.0"
] | 19
|
2019-10-15T14:07:27.000Z
|
2022-02-17T21:41:14.000Z
|
from collectors.AlertCollector import AlertCollector
class NSXTMgmtServiceAlertCollector(AlertCollector):
    """Alert collector for NSX-T management services."""

    def __init__(self):
        super().__init__()
        # vROps entity this collector reports on, and its Prometheus label set.
        self.vrops_entity_name = 'nsxt_mgmt_service'
        self.label_names = ['nsxt_mgmt_cluster', 'nsxt_adapter', 'nsxt_mgmt_node', 'nsxt_mgmt_service']
        self.resourcekind = ["ManagementService"]

    def get_resource_uuids(self):
        """UUIDs of the NSX-T management services for the current target."""
        return self.get_nsxt_mgmt_service_by_target()

    def get_labels(self, resource_id, project_ids):
        """Label values for `resource_id`, or an empty list when it is unknown."""
        if resource_id not in self.nsxt_mgmt_service:
            return []
        service = self.nsxt_mgmt_service[resource_id]
        return [service['mgmt_cluster_name'],
                service['nsxt_adapter_name'],
                service['mgmt_node_name'],
                service['name']]
| 44.052632
| 109
| 0.718041
| 102
| 837
| 5.392157
| 0.352941
| 0.145455
| 0.218182
| 0.172727
| 0.247273
| 0.247273
| 0.247273
| 0
| 0
| 0
| 0
| 0
| 0.181601
| 837
| 19
| 109
| 44.052632
| 0.80292
| 0
| 0
| 0
| 0
| 0
| 0.174224
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0.142857
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
53cda44ed70383d7e26e0707e6a0013540c209e7
| 336
|
py
|
Python
|
acloud-guru/func.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
acloud-guru/func.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
acloud-guru/func.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
class Car:
    """Minimal demo class: a car identified only by its name."""

    def __init__(self, car_name) -> None:
        self.car_name = car_name

    def __str__(self) -> str:
        # Demo text kept verbatim (Portuguese: "just a string, dude").
        return "Soh uma string mano {}".format(self.car_name)

    def anda(self):
        # "anda" (Portuguese) = "go/drive".
        return "Anda {}".format(self.car_name)
# Demo usage: exercise anda() and the __str__ representation.
car = Car("ecosport")
print(car.anda())
print(car)
| 17.684211
| 53
| 0.60119
| 47
| 336
| 4.021277
| 0.425532
| 0.185185
| 0.232804
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264881
| 336
| 18
| 54
| 18.666667
| 0.765182
| 0.089286
| 0
| 0
| 0
| 0
| 0.217241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0
| 0.2
| 0.6
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
53d3eb49383b275cbf7c7041d9e96584de27c7cf
| 99
|
py
|
Python
|
matplotsoccer/__init__.py
|
TomDecroos/matplotsoccer
|
fac4c5554d873f57a8997d735220da653bb3593b
|
[
"MIT"
] | 78
|
2019-08-24T12:47:44.000Z
|
2022-02-10T06:26:47.000Z
|
matplotsoccer/__init__.py
|
eddwebster/matplotsoccer
|
fac4c5554d873f57a8997d735220da653bb3593b
|
[
"MIT"
] | 3
|
2019-08-23T10:24:16.000Z
|
2021-04-18T07:50:46.000Z
|
matplotsoccer/__init__.py
|
eddwebster/matplotsoccer
|
fac4c5554d873f57a8997d735220da653bb3593b
|
[
"MIT"
] | 11
|
2019-08-29T10:39:15.000Z
|
2021-09-30T17:28:01.000Z
|
from matplotsoccer.fns import (
spadl_config, field, heatmap, heatmap_green, actions, count,
)
| 33
| 64
| 0.757576
| 12
| 99
| 6.083333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 3
| 65
| 33
| 0.869048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
54ccab2d303d6e6c5cdb84bf878f4d7012f41a43
| 704
|
py
|
Python
|
tha2/nn/batch_module/batch_input_model_factory.py
|
jyoonab/EasyVtuber
|
6a1341b632f1cf56a570bbf2689cbd568360a46c
|
[
"MIT"
] | 151
|
2021-12-15T15:24:01.000Z
|
2022-03-28T07:40:57.000Z
|
tha2/nn/batch_module/batch_input_model_factory.py
|
jyoonab/EasyVtuber
|
6a1341b632f1cf56a570bbf2689cbd568360a46c
|
[
"MIT"
] | 2
|
2021-12-19T04:04:20.000Z
|
2021-12-25T06:33:02.000Z
|
tha2/nn/batch_module/batch_input_model_factory.py
|
jyoonab/EasyVtuber
|
6a1341b632f1cf56a570bbf2689cbd568360a46c
|
[
"MIT"
] | 14
|
2021-12-16T20:17:27.000Z
|
2022-01-04T14:16:10.000Z
|
from typing import Dict, Set
from tha2.nn.batch_module.batch_input_module import BatchInputModule, BatchInputModuleFactory
class BatchInputModelFactory:
    """Aggregates named BatchInputModuleFactory instances and builds their modules."""

    def __init__(self, module_factories: Dict[str, BatchInputModuleFactory]):
        self.module_factories = module_factories

    def get_module_names(self) -> Set[str]:
        """Names of all registered module factories."""
        return set(self.module_factories)

    def create(self) -> Dict[str, BatchInputModule]:
        """Instantiate every registered module, keyed by its factory name."""
        return {name: factory.create() for name, factory in self.module_factories.items()}

    def get_module_factory(self, module_name) -> BatchInputModuleFactory:
        """Factory registered under `module_name` (KeyError if absent)."""
        return self.module_factories[module_name]
| 35.2
| 93
| 0.728693
| 80
| 704
| 6.1625
| 0.3625
| 0.141988
| 0.231237
| 0.10142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001745
| 0.18608
| 704
| 20
| 94
| 35.2
| 0.858639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
54fbf6107ef062b972ad67d8d4db3e9f59e246c9
| 1,011
|
py
|
Python
|
module_04_datetime/psl_04.01_dt_03_formatting_date.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
module_04_datetime/psl_04.01_dt_03_formatting_date.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
module_04_datetime/psl_04.01_dt_03_formatting_date.py
|
CodingGearsCourses/Python-3-Standard-Library-Essentials
|
8b80bc8b77fa477b6ccbe2886ed9239c2defdfda
|
[
"Apache-2.0"
] | null | null | null |
# --------------------------------
# CodingGears.io
# --------------------------------
# Datetime Module: strftime formatting examples.
from datetime import datetime

# TODO: datetime.now
# Current local date and time (naive, local timezone).
now = datetime.now()
print("Now : {}".format(now))
# TODO: Formatting - Day of the week
print("Day of the week : " + now.strftime("%a"))  # abbreviated name, e.g. 'Mon'
print("Day of the week : " + now.strftime("%A"))  # full name, e.g. 'Monday'
# TODO: Formatting - Month
print("Month : " + now.strftime("%m"))  # zero-padded number, e.g. '01'
print("Month : " + now.strftime("%b"))  # abbreviated name, e.g. 'Jan'
print("Month : " + now.strftime("%B"))  # full name, e.g. 'January'
# TODO: Formatting - Day
print("Day : " + now.strftime("%d"))  # zero-padded day of month
# NOTE(review): duplicate of the "%B" month line above — likely copy-paste leftover.
print("Month : " + now.strftime("%B"))
print("Day : " + now.strftime("%A, %B %d"))
# TODO: Formatting - Year
print("Year YY : " + now.strftime("%y"))  # two-digit year
print("Year YYYY : " + now.strftime("%Y"))  # four-digit year
# TODO: Formatting - Date
print("Date : " + now.strftime("%A, %B %d, %y"))
print("Date : " + now.strftime("%A, %B %d, %Y"))
print("Date : " + now.strftime("%m-%d-%Y"))
print("Date : " + now.strftime("%m/%d/%Y"))
print("Date : " + now.strftime("%b/%d/%Y"))
| 26.605263
| 48
| 0.544016
| 133
| 1,011
| 4.135338
| 0.195489
| 0.3
| 0.109091
| 0.181818
| 0.467273
| 0.410909
| 0.312727
| 0.312727
| 0.207273
| 0.207273
| 0
| 0
| 0.153314
| 1,011
| 37
| 49
| 27.324324
| 0.642523
| 0.243323
| 0
| 0.111111
| 0
| 0
| 0.301061
| 0
| 0
| 0
| 0
| 0.027027
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0.888889
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
07012362c3c536035406bfb2731ddb1b140e054c
| 217
|
py
|
Python
|
apps/node/src/app/main/routes/general.py
|
hivecell-io/federated_learning
|
e251bfa65c32abd83359c2b6847b9d0b62c4f5c3
|
[
"Apache-2.0"
] | 7
|
2020-04-20T22:22:08.000Z
|
2020-07-25T17:32:08.000Z
|
apps/node/src/app/main/routes/general.py
|
hivecell-io/federated_learning
|
e251bfa65c32abd83359c2b6847b9d0b62c4f5c3
|
[
"Apache-2.0"
] | 3
|
2020-04-24T21:20:57.000Z
|
2020-05-28T09:17:02.000Z
|
apps/node/src/app/main/routes/general.py
|
hivecell-io/federated_learning
|
e251bfa65c32abd83359c2b6847b9d0b62c4f5c3
|
[
"Apache-2.0"
] | 4
|
2020-04-24T22:32:37.000Z
|
2020-05-25T19:29:20.000Z
|
"""All Network routes (REST API)."""
from flask import render_template
from .. import main_routes
@main_routes.route("/", methods=["GET"])
def index():
    """Serve the application's main (index) page for GET requests at the site root."""
    return render_template("index.html")
| 19.727273
| 40
| 0.672811
| 28
| 217
| 5.071429
| 0.678571
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147465
| 217
| 10
| 41
| 21.7
| 0.767568
| 0.18894
| 0
| 0
| 0
| 0
| 0.084848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
072858bf1bc0e8144d9e75151c8c5c8aa93df558
| 20
|
py
|
Python
|
processlst/__init__.py
|
bucricket/projectMASlst2
|
9976ce0958e9dff7d5d8475a9242f743edc8a6b3
|
[
"BSD-3-Clause"
] | null | null | null |
processlst/__init__.py
|
bucricket/projectMASlst2
|
9976ce0958e9dff7d5d8475a9242f743edc8a6b3
|
[
"BSD-3-Clause"
] | null | null | null |
processlst/__init__.py
|
bucricket/projectMASlst2
|
9976ce0958e9dff7d5d8475a9242f743edc8a6b3
|
[
"BSD-3-Clause"
] | 2
|
2017-05-25T09:06:02.000Z
|
2018-12-03T11:33:30.000Z
|
# processlst package version (PEP 440 string).
__version__='0.4.0'
| 10
| 19
| 0.7
| 4
| 20
| 2.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.05
| 20
| 1
| 20
| 20
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4adffb15a6230bdf98eea40adca1afdf593262df
| 38
|
py
|
Python
|
DjangoStrannikVk/DjangoStrannikVk/__init__.py
|
StrannikVK/strannikvk.github.io
|
8c76c54b36da292b4341e2bc3ab5602d098a53e6
|
[
"MIT"
] | null | null | null |
DjangoStrannikVk/DjangoStrannikVk/__init__.py
|
StrannikVK/strannikvk.github.io
|
8c76c54b36da292b4341e2bc3ab5602d098a53e6
|
[
"MIT"
] | null | null | null |
DjangoStrannikVk/DjangoStrannikVk/__init__.py
|
StrannikVK/strannikvk.github.io
|
8c76c54b36da292b4341e2bc3ab5602d098a53e6
|
[
"MIT"
] | null | null | null |
"""
Package for DjangoStrannikVk.
"""
| 9.5
| 29
| 0.684211
| 3
| 38
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 3
| 30
| 12.666667
| 0.787879
| 0.763158
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4af1a1aa84b2fe0447c5ec459f04ea22b47e22e6
| 668
|
py
|
Python
|
tests/test_unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
tests/test_unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
tests/test_unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
from django_fixtures.decorators import exclude_fixtures
from django_fixtures.unittests import FixturesTestCase
class FixturesUnittestsTestCase(FixturesTestCase):
    """Tests for fixture loading and the exclude_fixtures decorator."""

    # Fixtures loaded for every test unless explicitly excluded.
    fixtures = ('app_a.GeneratedNumbers', 'app_a.OSMFullData')

    def test_count_of_numbers(self):
        # GeneratedNumbers fixture is expected to expose 100 generated numbers.
        self.assertEqual(len(self.fixtures.generated_numbers), 100)

    def test_count_of_objects(self):
        from tests.project.app_a.models import OSMData
        # OSMFullData fixture is expected to create 8 OSMData rows.
        self.assertEqual(OSMData.objects.count(), 8)

    @exclude_fixtures('app_a.OSMFullData')
    def test_exclude_fixtures(self):
        from tests.project.app_a.models import OSMData
        # With OSMFullData excluded, no OSMData rows should exist.
        self.assertEqual(OSMData.objects.count(), 0)
| 35.157895
| 67
| 0.760479
| 81
| 668
| 6.049383
| 0.382716
| 0.040816
| 0.073469
| 0.073469
| 0.404082
| 0.314286
| 0.314286
| 0.314286
| 0.314286
| 0.314286
| 0
| 0.008803
| 0.149701
| 668
| 18
| 68
| 37.111111
| 0.853873
| 0
| 0
| 0.153846
| 1
| 0
| 0.083832
| 0.032934
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.230769
| false
| 0
| 0.307692
| 0
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
4afbb18e83fe583c88cd10e2330649c5bd3947c6
| 118
|
py
|
Python
|
readable/settings/development.py
|
amalchuk/readable
|
ce5397039516299a105ed975d79d9e62d0fe747f
|
[
"MIT"
] | null | null | null |
readable/settings/development.py
|
amalchuk/readable
|
ce5397039516299a105ed975d79d9e62d0fe747f
|
[
"MIT"
] | null | null | null |
readable/settings/development.py
|
amalchuk/readable
|
ce5397039516299a105ed975d79d9e62d0fe747f
|
[
"MIT"
] | null | null | null |
from readable.settings.common import *
# Core Settings:
# Development-only overrides of the common settings imported above.
DEBUG: bool = True
DEBUG_PROPAGATE_EXCEPTIONS: bool = True
| 14.75
| 39
| 0.771186
| 15
| 118
| 5.933333
| 0.733333
| 0.179775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 118
| 7
| 40
| 16.857143
| 0.89
| 0.118644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ab0527ac26c6a03751f5e5d8b55a155d59e8af59
| 5,789
|
py
|
Python
|
python3_sample_project/app/migrations/0001_initial.py
|
laborautonomo/django-admin-sortable
|
ba552549c26461bcbd0e624e4b45e078e2c51d6c
|
[
"MS-PL",
"Naumen",
"Condor-1.1",
"Apache-1.1"
] | 1
|
2015-11-05T17:33:04.000Z
|
2015-11-05T17:33:04.000Z
|
python3_sample_project/app/migrations/0001_initial.py
|
laborautonomo/django-admin-sortable
|
ba552549c26461bcbd0e624e4b45e078e2c51d6c
|
[
"MS-PL",
"Naumen",
"Condor-1.1",
"Apache-1.1"
] | null | null | null |
python3_sample_project/app/migrations/0001_initial.py
|
laborautonomo/django-admin-sortable
|
ba552549c26461bcbd0e624e4b45e078e2c51d6c
|
[
"MS-PL",
"Naumen",
"Condor-1.1",
"Apache-1.1"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import adminsortable.fields
class Migration(migrations.Migration):
    """Initial schema for the sample app's sortable demo models.

    Auto-generated Django migration: every model gets an `order`
    PositiveIntegerField (ordering key used by django-admin-sortable)
    plus its own payload fields.
    """

    # contenttypes is required for GenericNote's content_type FK.
    dependencies = [
        ('contenttypes', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('title', models.CharField(max_length=50)),
            ],
            options={
                'ordering': ['order'],
                'verbose_name_plural': 'Categories',
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Component',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('title', models.CharField(max_length=50)),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Credit',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='GenericNote',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('title', models.CharField(max_length=50)),
                # Generic FK target: (content_type, object_id) pair.
                ('object_id', models.PositiveIntegerField(verbose_name='Content id')),
                ('content_type', models.ForeignKey(related_name='generic_notes', verbose_name='Content type', to='contenttypes.ContentType')),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Note',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('text', models.CharField(max_length=100)),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('is_board_member', models.BooleanField(verbose_name='Board Member', default=False)),
            ],
            options={
                'ordering': ['order'],
                'verbose_name_plural': 'People',
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('title', models.CharField(max_length=50)),
                ('description', models.TextField()),
                # SortableForeignKey: category also acts as the sorting group.
                ('category', adminsortable.fields.SortableForeignKey(to='app.Category')),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Widget',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('order', models.PositiveIntegerField(editable=False, default=1, db_index=True)),
                ('title', models.CharField(max_length=50)),
            ],
            options={
                'ordering': ['order'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        # FKs added after model creation to avoid forward references.
        migrations.AddField(
            model_name='note',
            name='project',
            field=models.ForeignKey(to='app.Project'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='credit',
            name='project',
            field=models.ForeignKey(to='app.Project'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='component',
            name='widget',
            field=adminsortable.fields.SortableForeignKey(to='app.Widget'),
            preserve_default=True,
        ),
    ]
| 39.380952
| 142
| 0.520988
| 487
| 5,789
| 6.047228
| 0.174538
| 0.048557
| 0.061121
| 0.081494
| 0.782343
| 0.75382
| 0.711036
| 0.711036
| 0.674363
| 0.674363
| 0
| 0.008887
| 0.339091
| 5,789
| 146
| 143
| 39.650685
| 0.760847
| 0.003628
| 0
| 0.7
| 0
| 0
| 0.112903
| 0.004162
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021429
| 0
| 0.042857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ab3e5ec01fafc1a1d808ddfa7236ee80623e262d
| 3,265
|
py
|
Python
|
generated/machine.py
|
cpwood/Pico-Stub
|
176af2962b4701805c81afed2e540d39e1adad82
|
[
"Apache-2.0"
] | 19
|
2021-01-25T23:56:09.000Z
|
2022-02-21T13:55:16.000Z
|
generated/machine.py
|
cpwood/Pico-Stub
|
176af2962b4701805c81afed2e540d39e1adad82
|
[
"Apache-2.0"
] | 18
|
2021-02-06T09:03:09.000Z
|
2021-10-04T16:36:35.000Z
|
generated/machine.py
|
cpwood/Pico-Stub
|
176af2962b4701805c81afed2e540d39e1adad82
|
[
"Apache-2.0"
] | 6
|
2021-01-26T08:41:47.000Z
|
2021-04-27T11:33:33.000Z
|
"""
Module: 'machine' on micropython-rp2-1.15
"""
# MCU: {'family': 'micropython', 'sysname': 'rp2', 'version': '1.15.0', 'build': '', 'mpy': 5637, 'port': 'rp2', 'platform': 'rp2', 'name': 'micropython', 'arch': 'armv7m', 'machine': 'Raspberry Pi Pico with RP2040', 'nodename': 'rp2', 'ver': '1.15', 'release': '1.15.0'}
# Stubber: 1.3.9
class ADC:
''
CORE_TEMP = 4
def read_u16():
pass
class I2C:
''
def init():
pass
def readfrom():
pass
def readfrom_into():
pass
def readfrom_mem():
pass
def readfrom_mem_into():
pass
def readinto():
pass
def scan():
pass
def start():
pass
def stop():
pass
def write():
pass
def writeto():
pass
def writeto_mem():
pass
def writevto():
pass
class PWM:
''
def deinit():
pass
def duty_ns():
pass
def duty_u16():
pass
def freq():
pass
PWRON_RESET = 1
class Pin:
''
ALT = 3
IN = 0
IRQ_FALLING = 4
IRQ_RISING = 8
OPEN_DRAIN = 2
OUT = 1
PULL_DOWN = 2
PULL_UP = 1
def high():
pass
def init():
pass
def irq():
pass
def low():
pass
def off():
pass
def on():
pass
def toggle():
pass
def value():
pass
class SPI:
''
LSB = 0
MSB = 1
def deinit():
pass
def init():
pass
def read():
pass
def readinto():
pass
def write():
pass
def write_readinto():
pass
class Signal:
''
def off():
pass
def on():
pass
def value():
pass
class SoftI2C:
''
def init():
pass
def readfrom():
pass
def readfrom_into():
pass
def readfrom_mem():
pass
def readfrom_mem_into():
pass
def readinto():
pass
def scan():
pass
def start():
pass
def stop():
pass
def write():
pass
def writeto():
pass
def writeto_mem():
pass
def writevto():
pass
class SoftSPI:
''
LSB = 0
MSB = 1
def deinit():
pass
def init():
pass
def read():
pass
def readinto():
pass
def write():
pass
def write_readinto():
pass
class Timer:
''
ONE_SHOT = 0
PERIODIC = 1
def deinit():
pass
def init():
pass
class UART:
''
INV_RX = 2
INV_TX = 1
def any():
pass
def read():
pass
def readinto():
pass
def readline():
pass
def sendbreak():
pass
def write():
pass
class WDT:
''
def feed():
pass
WDT_RESET = 3
def bootloader():
pass
def deepsleep():
pass
def disable_irq():
pass
def enable_irq():
pass
def freq():
pass
def idle():
pass
def lightsleep():
pass
mem16 = None
mem32 = None
mem8 = None
def reset():
pass
def reset_cause():
pass
def soft_reset():
pass
def time_pulse_us():
pass
def unique_id():
pass
| 11.416084
| 271
| 0.464931
| 370
| 3,265
| 4.013514
| 0.308108
| 0.292256
| 0.080808
| 0.047138
| 0.497643
| 0.461953
| 0.461953
| 0.415488
| 0.393266
| 0.393266
| 0
| 0.032139
| 0.418683
| 3,265
| 285
| 272
| 11.45614
| 0.750263
| 0.100153
| 0
| 0.737113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.386598
| false
| 0.386598
| 0
| 0
| 0.530928
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
ab4036034b5cd8ee94bb3335e14992b7b52653ec
| 71
|
py
|
Python
|
Contest/ABC138/a/main.py
|
mpses/AtCoder
|
9c101fcc0a1394754fcf2385af54b05c30a5ae2a
|
[
"CC0-1.0"
] | null | null | null |
Contest/ABC138/a/main.py
|
mpses/AtCoder
|
9c101fcc0a1394754fcf2385af54b05c30a5ae2a
|
[
"CC0-1.0"
] | null | null | null |
Contest/ABC138/a/main.py
|
mpses/AtCoder
|
9c101fcc0a1394754fcf2385af54b05c30a5ae2a
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
# ABC138 A: print "red" when the rating is below 3200, otherwise echo the
# string given on the second input line.
rating = int(input())
print("red" if rating < 3200 else input())
| 35.5
| 48
| 0.661972
| 12
| 71
| 3.916667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079365
| 0.112676
| 71
| 2
| 48
| 35.5
| 0.666667
| 0.295775
| 0
| 0
| 0
| 0
| 0.06
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
ab5e0f4adf474568f3f9176f20ed81b0da1f7063
| 174
|
py
|
Python
|
Event/EventLogger.py
|
dimiroul/BacktestFramePublic
|
15c48c98e67cb32550d4bfca3ce40e2e0c84c67d
|
[
"MIT"
] | null | null | null |
Event/EventLogger.py
|
dimiroul/BacktestFramePublic
|
15c48c98e67cb32550d4bfca3ce40e2e0c84c67d
|
[
"MIT"
] | null | null | null |
Event/EventLogger.py
|
dimiroul/BacktestFramePublic
|
15c48c98e67cb32550d4bfca3ce40e2e0c84c67d
|
[
"MIT"
] | null | null | null |
from Logger.Logger import (LoggerStringUnit)
# EVENT_LOGGER is the event-logging unit used by the backtesting framework,
# defined here as a global variable. (Translated from the original Chinese comment.)
EVENT_LOGGER: LoggerStringUnit = LoggerStringUnit(head_="event_datetime,event_type,info")
| 34.8
| 89
| 0.856322
| 18
| 174
| 8
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063218
| 174
| 4
| 90
| 43.5
| 0.883436
| 0.201149
| 0
| 0
| 0
| 0
| 0.218978
| 0.218978
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
db65924b53241dee41fead4ede71f27d988d5915
| 1,960
|
py
|
Python
|
mlist/test/test_binary_search.py
|
fwu03/Software_Testing_Python
|
51d559c9649481fdff94f35e027c3424e9fd600f
|
[
"MIT"
] | null | null | null |
mlist/test/test_binary_search.py
|
fwu03/Software_Testing_Python
|
51d559c9649481fdff94f35e027c3424e9fd600f
|
[
"MIT"
] | 5
|
2019-02-15T23:31:10.000Z
|
2019-03-05T20:17:03.000Z
|
mlist/test/test_binary_search.py
|
fwu03/Software_Testing_Python
|
51d559c9649481fdff94f35e027c3424e9fd600f
|
[
"MIT"
] | null | null | null |
# test_binary_search.py
import pytest
from mlist import binary_search
def test_format():
    """binary_search must reject malformed arguments.

    TypeError is expected for wrong argument types (string/float x,
    nested or mixed-type lists, non-list lst); ValueError is expected
    for an unsorted list.
    """
    bad_type_cases = [
        ("hello", [1,2,3,4,6,8]),    # x is a string
        (3.0, [1,3,8,9,15]),         # x is a float
        (4, [[1,2], [3, 5]]),        # lst is a nested list
        (4, [1,2, "hello",6,8]),     # lst contains strings
        (3, [1,3.0,8,9,15]),         # lst contains a float
    ]
    for x, lst in bad_type_cases:
        with pytest.raises(TypeError):
            binary_search.binary_search(x, lst)
    with pytest.raises(ValueError):
        binary_search.binary_search(3, [3,1,7,2,8])  # lst is not sorted
    with pytest.raises(TypeError):
        binary_search.binary_search(3, 'a')          # lst is not a list
def test_values():
    """Values over 1000 — either x or any element of lst — raise ValueError."""
    out_of_range_cases = [
        (3000, [1,3,25,36,800,900]),   # x is over 1000
        (3, [1,3,25,36,800,5550]),     # lst contains a value over 1000
    ]
    for x, lst in out_of_range_cases:
        with pytest.raises(ValueError):
            binary_search.binary_search(x, lst)
def test_output():
    """Validate the shape and content of binary_search's return value."""
    # Element present: [found_flag, target, index].
    hit = binary_search.binary_search(3, [1,2,3,4])
    assert hit == [True,3,2], "Assertion Failed, the output is wrong"
    # Element absent: index slot is None.
    miss = binary_search.binary_search(3, [1,2,4])
    assert miss == [False,3,None], "Assertion Failed, the output is wrong"
    # The return value must be a plain list.
    result = binary_search.binary_search(3, [1,2,3,4])
    assert isinstance(result, list), "Assertion Failed, the output should be a list"
    # The return value always has exactly three entries.
    short = binary_search.binary_search(2, [2,3])
    assert len(short) == 3, "Assertion Failed, the output has incorrect length"
| 49
| 119
| 0.694898
| 313
| 1,960
| 4.249201
| 0.220447
| 0.252632
| 0.17594
| 0.234586
| 0.714286
| 0.702256
| 0.613534
| 0.518797
| 0.388722
| 0.17594
| 0
| 0.062343
| 0.189796
| 1,960
| 39
| 120
| 50.25641
| 0.775189
| 0.263265
| 0
| 0.333333
| 0
| 0
| 0.12535
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.111111
| true
| 0
| 0.074074
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
db6b94d3861119df93a29a4660786f1d427f50b8
| 156
|
py
|
Python
|
Krakatau-master/Krakatau/Krakatau/ssa/ssa_jumps/placeholder.py
|
orneryhippo/saturdays
|
525ce086452e96a01d1762418c79d4c84fd605b5
|
[
"Apache-2.0"
] | null | null | null |
Krakatau-master/Krakatau/Krakatau/ssa/ssa_jumps/placeholder.py
|
orneryhippo/saturdays
|
525ce086452e96a01d1762418c79d4c84fd605b5
|
[
"Apache-2.0"
] | null | null | null |
Krakatau-master/Krakatau/Krakatau/ssa/ssa_jumps/placeholder.py
|
orneryhippo/saturdays
|
525ce086452e96a01d1762418c79d4c84fd605b5
|
[
"Apache-2.0"
] | null | null | null |
from .base import BaseJump
class Placeholder(BaseJump):
    """Presumably a stand-in jump used before a concrete jump is chosen —
    TODO confirm against BaseJump's contract (defined outside this file).

    Any extra positional or keyword arguments are accepted and deliberately
    discarded; only ``parent`` is forwarded to BaseJump.
    """
    def __init__(self, parent, *args, **kwargs):
        # py2-compatible super() call; *args/**kwargs are intentionally unused.
        super(Placeholder, self).__init__(parent)
| 26
| 49
| 0.711538
| 18
| 156
| 5.722222
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 156
| 5
| 50
| 31.2
| 0.792308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
db8b9b6974df6efe7b7e6e5f091bc2811d20b17f
| 101
|
py
|
Python
|
mzcli/__main__.py
|
quodlibetor/mzcli
|
8c69419a36d73ab990f8bba4fb26a11b549699cc
|
[
"BSD-3-Clause"
] | 8
|
2020-05-04T22:23:52.000Z
|
2022-03-21T04:02:35.000Z
|
mzcli/__main__.py
|
quodlibetor/mzcli
|
8c69419a36d73ab990f8bba4fb26a11b549699cc
|
[
"BSD-3-Clause"
] | 7
|
2020-02-18T18:21:42.000Z
|
2021-12-10T14:00:38.000Z
|
mzcli/__main__.py
|
quodlibetor/mzcli
|
8c69419a36d73ab990f8bba4fb26a11b549699cc
|
[
"BSD-3-Clause"
] | null | null | null |
"""
mzcli package main entry point
"""
from .main import cli
if __name__ == "__main__":
cli()
| 10.1
| 30
| 0.633663
| 13
| 101
| 4.307692
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227723
| 101
| 9
| 31
| 11.222222
| 0.717949
| 0.29703
| 0
| 0
| 0
| 0
| 0.126984
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
db91d8112dba93ea9883b738997a69fb0c8e77d2
| 45,565
|
py
|
Python
|
PyLib/GameUnitTests.py
|
Lyapunov/Adventure-engine
|
f8786e6fc2bbb8b363178520759ef42a46b02615
|
[
"MIT"
] | null | null | null |
PyLib/GameUnitTests.py
|
Lyapunov/Adventure-engine
|
f8786e6fc2bbb8b363178520759ef42a46b02615
|
[
"MIT"
] | null | null | null |
PyLib/GameUnitTests.py
|
Lyapunov/Adventure-engine
|
f8786e6fc2bbb8b363178520759ef42a46b02615
|
[
"MIT"
] | null | null | null |
import unittest
import json
from GameObject import Game
from GameObject import GameObject
from GameObject import GameObjectAttribute
from GameObject import GameObjectUseAction
from GameObject import GameObjectRevealAction
from GameObject import GamePassageRevealAction
from GameObject import GamePassage
from GameObject import GameSyntaxChecker
from GameObject import GameSolver
from GameObject import GameEncoder
from GameObject import GameDecoder
class GameUnitTests(unittest.TestCase):
# TODO: (IDEA) descriptions and images, the entire view should be a completely separated layer,
# which just portrays the game objects according to their attributes
# TODO: Write serializer
# TODO: Write view layer - first step is an object that returns an empty hash of texts indexed by game names
def setUp( self ):
# Test game1, just to start with something
self.game1_text_blueprints = """
[
[{"obj_content": {"attributes": [],
"childObjects": [{"obj_content": {"attributes": ["immobile"], "childObjects": [], "name": "table"}, "obj_name": "GameObject"},
{"obj_content": {"attributes": [], "childObjects": [], "name": "candle"}, "obj_name": "GameObject"},
{"obj_content": {"attributes": [], "childObjects": [], "name": "match"}, "obj_name": "GameObject"},
{"obj_content": {"attributes": [], "childObjects": [], "name": "bird"}, "obj_name": "GameObject"},
{"obj_content": {"attributes": [], "childObjects": [], "name": "stone"}, "obj_name": "GameObject"},
{"obj_content": {"attributes": ["immobile", "invisible"], "childObjects": [], "name": "picture"}, "obj_name": "GameObject"}],
"name": "dark_room"},
"obj_name": "GameObject"},
{"obj_content": {"attributes": [],
"childObjects": [{"obj_content": {"attributes": ["immobile"],
"childObjects": [{"obj_content": {"attributes": [], "childObjects": [], "name": "knife"}, "obj_name": "GameObject"}],
"name": "cabinet"},
"obj_name": "GameObject"}],
"name": "bathroom"},
"obj_name": "GameObject"},
{"obj_content": {"attributes": [],
"childObjects": [],
"name": "secret_room"},
"obj_name": "GameObject"}],
[{"obj_content": {"attributes": [],
"childObjects": [],
"name": "burning_candle"},
"obj_name": "GameObject"},
{"obj_content": {"attributes": [],
"childObjects": [],
"name": "injured_bird"},
"obj_name": "GameObject"}],
[{"obj_content": {"room_name2": "bathroom", "room_name1": "dark_room", "direction2": "S", "attributes": [], "direction1": "N", "identifier": 11},
"obj_name": "GamePassage"},
{"obj_content": {"room_name2": "secret_room", "room_name1": "dark_room", "direction2": "E", "attributes": ["invisible"], "direction1": "W", "identifier": 12},
"obj_name": "GamePassage"}],
[{"obj_content": {"subjectname": "candle", "toolname": "match", "resultname": "burning_candle"},
"obj_name": "GameObjectUseAction"},
{"obj_content": {"subjectname": "bird", "toolname": "stone", "resultname": "injured_bird"},
"obj_name": "GameObjectUseAction"},
{"obj_content": {"subjectname": "picture", "toolname": "", "identifier": 12},
"obj_name": "GamePassageRevealAction"}],
[{"obj_content": {"subjectname": "picture", "toolname": "burning_candle"},
"obj_name": "GameObjectRevealAction"}],
"secret_room",
{"go#dark_room": "dark_room", "go#bathroom": "bathroom"}
]
"""
self.game1 = Game( [ [ GameObject( 'dark_room', [], [ GameObject( 'table', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'candle' ),
GameObject( 'match' ),
GameObject( 'bird' ),
GameObject( 'stone' ),
GameObject( 'picture', [GameObjectAttribute.IMMOBILE, GameObjectAttribute.INVISIBLE] ) ] ),
GameObject( 'bathroom', [], [ GameObject( 'cabinet', [GameObjectAttribute.IMMOBILE], [ GameObject( 'knife' ) ] ) ] ),
GameObject( 'secret_room' ) ],
[ GameObject( 'burning_candle' ), GameObject( 'injured_bird' ) ],
[ GamePassage( 11, 'dark_room', 'bathroom' , 'N', 'S' ),
GamePassage( 12, 'dark_room', 'secret_room', 'W', 'E', [GameObjectAttribute.INVISIBLE] ) ],
[ GameObjectUseAction( 'candle', 'match', 'burning_candle' ),
GameObjectUseAction( 'bird', 'stone', 'injured_bird' ),
GamePassageRevealAction( 'picture', '', 12 ) ],
[ GameObjectRevealAction( 'picture', 'burning_candle' ) ],
'secret_room',
{ 'go#dark_room' : 'dark_room', 'go#bathroom' : 'bathroom' } ] );
assert ( self.game1.look() == 'dark_room' )
assert ( self.game1.has( 'burning_candle' ) is None )
assert ( self.game1.has( 'candle' ) is None )
assert ( self.game1.has( 'match' ) is None )
assert ( 'candle' in self.game1.stuffs() )
assert ( 'match' in self.game1.stuffs() )
assert ( 'table' in self.game1.stuffs() )
assert ( not 'picture' in self.game1.stuffs() )
assert ( self.game1.directions() == [['N', 'bathroom']] )
assert ( self.game1.won() == 0 )
def test_syntax_checker_wrong_game_1(self):
# there is no room
game_internal = Game( [ [], [], [], [], [], '', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'must have at least one room' )
def test_syntax_checker_wrong_game_2(self):
# starting in the ending room
game_internal = Game( [ [ GameObject( 'room1', [], []) ], [], [], [], [], 'room1', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'cannot start in the ending room' )
def test_syntax_checker_wrong_game_3(self):
# starting in the ending room
game_internal = Game( [ [ GameObject( 'room1', [], []) ], [], [], [], [], 'final_room', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'final room does not exist' )
def test_syntax_checker_wrong_game_4(self):
game_internal = Game( [ [ GameObject( 'starting_room' ), GameObject( 'final_room' ) ], [], [], [], [], 'final_room', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'final room is not reachable' )
def test_syntax_checker_wrong_game_5(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
GameObject( 'roomC' ), GameObject( 'roomD' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomC', 'roomD', 'N', 'S' ) ],
[], [], 'roomD', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'final room is not reachable' )
def test_syntax_checker_wrong_game_6(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
GameObject( 'roomC' ), GameObject( 'roomD' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomB', 'roomC', 'N', 'S' ) ],
[], [], 'roomD', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'final room is not reachable' )
def test_syntax_checker_wrong_game_7(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
GameObject( 'roomC' ), GameObject( 'roomD' ),
GameObject( 'roomE' ), GameObject( 'roomF' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomA', 'roomE', 'E', 'W' ),
GamePassage(13, 'roomE', 'roomB', 'N', 'E' ),
GamePassage(14, 'roomD', 'roomE', 'N', 'S' ),
GamePassage(15, 'roomC', 'roomF', 'E', 'W' ) ],
[], [], 'roomF', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'final room is not reachable' )
def test_syntax_checker_wrong_game_8(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomA', 'roomB', 'W', 'S' ) ], [], [], 'roomB', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'multiple passages between the rooms roomA, roomB' )
def test_syntax_checker_wrong_game_9(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomB', 'roomA', 'W', 'S' ) ], [], [], 'roomB', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'multiple passages between the rooms roomA, roomB' )
def test_syntax_checker_wrong_game_10(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ), GameObject( 'roomC' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(11, 'roomB', 'roomC', 'W', 'S' ) ], [], [], 'roomC', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'passage identifiers are not unique, 11' )
def test_syntax_checker_wrong_game_11(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
GameObject( 'roomC' ), GameObject( 'roomD' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomC', 'roomD', 'N', 'S' ) ],
[], [], 'roomB', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'not all rooms are accessible, roomC' )
def test_syntax_checker_wrong_game_12(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'button', [GameObjectAttribute.IMMOBILE], [] ) ] ),
GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'button', '', 13 ) ],
[],
'roomB',
{} ] )
assert ( GameSyntaxChecker().check( game_internal ) == 'invalid passage identifiers in an action, 13' )
def test_syntax_checker_wrong_game_13(self):
game_internal = Game( [ [ GameObject( 'roomA',[], [ GameObject( 'button1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'button2', [GameObjectAttribute.IMMOBILE], [] ) ] ),
GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'button', '', 11 ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found invalid object in an action, button' )
def test_syntax_checker_wrong_game_14(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'button1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'button1', [GameObjectAttribute.IMMOBILE], [] ) ] ),
GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'button1', '', 11 ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found two objects with the same name, button1' )
def test_syntax_checker_wrong_game_15(self):
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomC' ),
GameObject( 'roomB' ), GameObject( 'roomC' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomB', 'roomC', 'N', 'S' ) ],
[], [], 'roomC', {} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found two objects with the same name, roomC' )
def test_syntax_checker_wrong_game16(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( '', '', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found an action without actors' )
def test_syntax_checker_wrong_game17(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'door', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found invalid action with the same actor twice, door' )
def test_syntax_checker_wrong_game_18(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'button1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'button2', [GameObjectAttribute.IMMOBILE], [] ) ] ),
GameObject( 'roomB' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'button1', '', 11 ),
GamePassageRevealAction( 'button1', '', 11 ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found multiple actions for the same actor, button1' )
def test_syntax_checker_wrong_game_19(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'button1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'button2', [GameObjectAttribute.IMMOBILE], [] ) ] ),
GameObject( 'roomB' ) ],
[ GameObject( 'broken button' ) ],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'button1', '', 11 ),
GameObjectUseAction( '', 'button1', 'broken button' ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found multiple actions for the same actor, button1' )
def test_syntax_checker_wrong_game_20(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'handle1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'handle2', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'crowbar' ) ] ),
GameObject( 'roomB' ) ],
[ GameObject( 'broken handle' ) ],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'handle1', 'crowbar', 11 ),
GameObjectUseAction( 'handle2', 'crowbar', 'broken handle' ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found multiple actions for the same actor, crowbar' )
def test_syntax_checker_wrong_game_21(self):
game_internal = Game( [ [ GameObject( 'roomA', [], [ GameObject( 'handle1', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'handle2', [GameObjectAttribute.IMMOBILE], [] ),
GameObject( 'crowbar' ) ] ),
GameObject( 'roomB' ) ],
[ GameObject( 'handle1' ) ],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'handle1', 'crowbar', 11 ),
GameObjectUseAction( 'handle2', 'crowbar', 'handle1' ) ],
[],
'roomB',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'found two objects with the same name, handle1' )
def test_syntax_checker_wrong_game22(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE],
[GameObject( 'key', [GameObjectAttribute.IMMOBILE] ) ] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'not top level stuffs cannot have attributes, key' )
def test_syntax_checker_wrong_game23(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'keypart1' ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE], [GameObject( 'keypart2' ) ] ) ] ),
GameObject( 'ending_room' ) ],
[ GameObject( 'key', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ),
GameObjectUseAction( 'keypart1', 'keypart2', 'key' ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'not top level stuffs cannot have attributes, key' )
def test_syntax_checker_wrong_game24(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'key', [GameObjectAttribute.INVISIBLE] ) ] ),
GameObject( 'middle_room' , [], [ GameObject( 'burning_candle' ),
GameObject( 'door', [GameObjectAttribute.IMMOBILE] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'middle_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ),
GamePassage( 12, 'starting_room', 'middle_room' , 'N', 'S' ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[ GameObjectRevealAction( 'burning_candle', 'key') ],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'subjects of revealing actions must be invisible initially, burning_candle' )
def test_syntax_checker_wrong_game25(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' , [GameObjectAttribute.IMMOBILE] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == "action actor key must be mobile" )
def test_syntax_checker_wrong_game26(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE, GameObjectAttribute.INVISIBLE] ),
GameObject( 'key' ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'there must be exactly one action for each invisible object which reveals it, door' )
def test_syntax_checker_wrong_game27(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' , [] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'key', 'door', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == "action actor door must be mobile" )
def test_syntax_checker_wrong_game28(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' , [] ) ] ),
GameObject( 'ending_room' ) ],
[ GameObject( 'broken_key' ) ],
[ GamePassage( 11, 'starting_room', 'strange_room' , 'W', 'E', [] ),
GamePassage( 12, 'strange_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GameObjectUseAction( 'door', 'key', 'broken_key' ) ],
[ GamePassageRevealAction( 'broken_key', '', 12 )],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == "found not existing room in a passage: strange_room" )
def test_syntax_checker_wrong_game29(self):
game_internal = Game( [ [ GameObject( 'starting_room' ), GameObject( 'final/room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'final/room', 'N', 'S' ) ], [], [], 'final/room', {} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == 'game object names can contain only lower case alphabets and _, final/room' )
def test_syntax_checker_good_game1(self):
# minimal valid game
game_internal = Game( [ [ GameObject( 'starting_room' ), GameObject( 'final_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'final_room', 'N', 'S' ) ], [], [], 'final_room', {} ] )
assert ( GameSyntaxChecker().check( game_internal ) == '' )
assert ( GameSolver().solve( game_internal ) == [ [ 'go', 'N' ] ] )
def test_syntax_checker_good_game_2(self):
# testing whether final room is accessible
game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
GameObject( 'roomC' ), GameObject( 'roomD' ),
GameObject( 'roomE' ), GameObject( 'roomF' ) ],
[],
[ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
GamePassage(12, 'roomA', 'roomE', 'E', 'W' ),
GamePassage(13, 'roomD', 'roomC', 'E', 'W' ),
GamePassage(14, 'roomE', 'roomB', 'N', 'E' ),
GamePassage(15, 'roomD', 'roomE', 'N', 'S' ),
GamePassage(16, 'roomC', 'roomF', 'E', 'W' ) ],
[],
[],
'roomF',
{} ] )
assert ( GameSyntaxChecker().check( game_internal ) == '' )
assert ( GameSolver().solve( game_internal ) == [ [ 'go', 'N' ], [ 'go', 'E' ], [ 'go', 'S' ], [ 'go', 'E' ], [ 'go', 'E' ] ] )
def test_syntax_checker_good_game3(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', '', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
assert ( GameSolver().solve( game_internal ) == [['use', '', 'door'], ['go', 'N']] )
def test_syntax_checker_good_game4(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
assert ( GameSolver().solve( game_internal ) == [['take', 'key'], ['use', 'door', 'key'], ['go', 'N']] )
def test_syntax_checker_good_game5(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE], [GameObject( 'key' ) ] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
solution = GameSolver().solve( game_internal )
assert ( solution == [['open', 'box'], ['take', 'key'], ['use', 'door', 'key'], ['go', 'N']] )
def test_syntax_checker_good_game6(self):
game_internal = Game( [ [ GameObject( 'starting_room' ),
GameObject( 'middle_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE], [GameObject( 'key' ) ] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'middle_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ),
GamePassage( 12, 'starting_room', 'middle_room' , 'N', 'S' ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
solution = GameSolver().solve( game_internal )
assert ( solution == [['go', 'N'],['open', 'box'], ['take', 'key'], ['use', 'door', 'key'], ['go', 'N']] )
def test_syntax_checker_good_game7(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'key', [GameObjectAttribute.INVISIBLE] ) ] ),
GameObject( 'middle_room' , [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE], [GameObject( 'burning_candle' ) ] ) ] ),
GameObject( 'ending_room' ) ],
[],
[ GamePassage( 11, 'middle_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ),
GamePassage( 12, 'starting_room', 'middle_room' , 'N', 'S' ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ) ],
[ GameObjectRevealAction( 'key', 'burning_candle') ],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
solution = GameSolver().solve( game_internal )
assert ( solution == [['go', 'N'], ['open', 'box'], ['take', 'burning_candle'], ['go', 'S'], ['take', 'key'], ['go', 'N'], ['use', 'door', 'key'], ['go', 'N']] )
def test_syntax_checker_good_game8(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'keypart1' ),
GameObject( 'box', [GameObjectAttribute.IMMOBILE], [GameObject( 'keypart2' ) ] ) ] ),
GameObject( 'ending_room' ) ],
[ GameObject( 'key' ) ],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GamePassageRevealAction( 'door', 'key', 11 ),
GameObjectUseAction( 'keypart1', 'keypart2', 'key' ) ],
[],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == '' )
solution = GameSolver().solve( game_internal )
assert ( solution == [['take', 'keypart1'], ['open', 'box'], ['take', 'keypart2'], ['use', 'keypart1', 'keypart2'], ['use', 'door', 'key'], ['go', 'N']] )
# Here use action + passage reval view = use passage reveal, so it is just
# pure complication. game10 may make the possibility of the separaton more meaningful.
def test_syntax_checker_good_game9(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' , [] ) ] ),
GameObject( 'ending_room' ) ],
[ GameObject( 'broken_key' ) ],
[ GamePassage( 11, 'starting_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GameObjectUseAction( 'door', 'key', 'broken_key' ) ],
[ GamePassageRevealAction( 'broken_key', '', 11 )],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == "" )
assert ( GameSolver().solve( game_internal ) == [['take', 'key'], ['use', 'door', 'key'], ['go', 'N']] )
def test_syntax_checker_good_game10(self):
game_internal = Game( [ [ GameObject( 'starting_room', [], [ GameObject( 'door', [GameObjectAttribute.IMMOBILE] ),
GameObject( 'key' , [] ) ] ),
GameObject( 'strange_room' ),
GameObject( 'ending_room' ) ],
[ GameObject( 'broken_key' ) ],
[ GamePassage( 11, 'starting_room', 'strange_room' , 'W', 'E', [] ),
GamePassage( 12, 'strange_room', 'ending_room' , 'N', 'S', [GameObjectAttribute.INVISIBLE] ) ],
[ GameObjectUseAction( 'door', 'key', 'broken_key' ) ],
[ GamePassageRevealAction( 'broken_key', '', 12 )],
'ending_room',
{} ] )
verdict = GameSyntaxChecker().check( game_internal )
assert ( verdict == "" )
assert ( GameSolver().solve( game_internal ) == [['take', 'key'], ['use', 'door', 'key'], ['go', 'W'], ['go', 'N']] )
def test_take_and_drop_existing_object(self):
subject = self.game1.do_it( 'take', 'candle' )
assert ( not subject is None )
assert ( not self.game1.has( 'candle' ) is None )
assert ( not 'candle' in self.game1.stuffs() )
subject = self.game1.do_it( 'drop', 'candle' )
assert ( not subject is None )
assert ( self.game1.has( 'candle' ) is None )
def test_trying_take_not_existing_object(self):
subject = self.game1.do_it( 'take', 'banana' )
assert ( subject is None )
assert ( self.game1.has( 'banana' ) is None )
def test_trying_take_immobile_object(self):
subject = self.game1.do_it( 'take', 'table' )
assert ( subject is None )
assert ( self.game1.has( 'table' ) is None )
def test_action_hit_the_bird_with_the_stone(self):
self.game1.do_it( 'take', 'stone' )
object1 = self.game1.do_it( 'use', 'stone', 'bird' )
assert ( not object1 is None )
assert ( not 'bird' in self.game1.stuffs() )
assert ( self.game1.has( 'stone' ) is None )
assert ( 'injured_bird' in self.game1.stuffs() )
object2 = self.game1.do_it( 'use', 'stone', 'bird' )
assert ( object2 is None )
def test_action_hit_the_bird_with_the_stone_but_both_are_in_inventory(self):
self.game1.do_it( 'take', 'stone' )
self.game1.do_it( 'take', 'bird' )
object1 = self.game1.do_it( 'use', 'stone', 'bird' )
assert ( not self.game1.has( 'injured_bird' ) is None )
def test_action_hit_the_bird_with_the_stone_but_use_params_are_reversed(self):
self.game1.do_it( 'take', 'stone' )
self.game1.do_it( 'use', 'bird', 'stone' )
assert ( 'injured_bird' in self.game1.stuffs() )
def test_room_goes_light_from_dark_if_we_burn_the_candle_without_taking_it_first(self):
    """Burning the candle in place lights the room and reveals the picture."""
    self.game1.do_it('take', 'match')
    self.game1.do_it('use', 'candle', 'match')
    assert 'candle' not in self.game1.stuffs()
    assert 'burning_candle' in self.game1.stuffs()
    assert 'picture' in self.game1.stuffs()

def test_room_goes_light_from_dark_if_we_burn_the_candle_with_taking_it_first(self):
    """Burning a carried candle keeps the burning candle in the inventory and still reveals the picture."""
    self.game1.do_it('take', 'candle')
    self.game1.do_it('take', 'match')
    self.game1.do_it('use', 'candle', 'match')
    assert self.game1.has('burning_candle') is not None
    assert 'picture' in self.game1.stuffs()
def test_moving_between_rooms(self):
    """Going north and back south returns us to the starting room."""
    self.game1.do_it('go', 'N')
    assert self.game1.look() == 'bathroom'
    assert self.game1.directions() == [['S', 'dark_room']]
    self.game1.do_it('go', 'S')
    assert self.game1.look() == 'dark_room'

def test_opening_objects(self):
    """Opening the cabinet exposes the knife it contains."""
    self.game1.do_it('go', 'N')
    assert 'knife' not in self.game1.stuffs()
    assert self.game1.do_it('open', 'cabinet')
    assert 'knife' in self.game1.stuffs()

def test_moving_between_rooms_and_carrying_object(self):
    """An object dropped in another room stays there rather than following us."""
    self.game1.do_it('take', 'candle')
    self.game1.do_it('go', 'N')
    self.game1.do_it('drop', 'candle')
    self.game1.do_it('go', 'S')
    assert self.game1.look() == 'dark_room'
    assert 'candle' not in self.game1.stuffs()
def test_recognizing_a_new_object_through_a_view_and_it_becomes_permanent(self):
    """Once seen in the lit room, the picture stays visible even after the room goes dark again."""
    self.game1.do_it('take', 'match')
    self.game1.do_it('use', 'candle', 'match')
    self.game1.do_it('take', 'burning_candle')
    self.game1.do_it('go', 'N')
    self.game1.do_it('drop', 'burning_candle')
    self.game1.do_it('go', 'S')
    assert self.game1.look() == 'dark_room'
    assert 'picture' in self.game1.stuffs()

def test_finding_a_new_passage(self):
    """Using the picture reveals the hidden westward passage."""
    self.test_recognizing_a_new_object_through_a_view_and_it_becomes_permanent()
    assert 'picture' in self.game1.stuffs()
    self.game1.do_it('use', 'picture')
    assert self.game1.directions() == [['N', 'bathroom'], ['W', 'secret_room']]

def test_winning_the_game(self):
    """Entering the secret room wins the game."""
    self.test_finding_a_new_passage()
    self.game1.do_it('go', 'W')
    assert self.game1.won() == 1

def test_solver_on_full_game(self):
    """The automatic solver finds the canonical winning move sequence on a syntactically valid game."""
    assert GameSyntaxChecker().check(self.game1) == ''
    expected = [['take', 'candle'], ['take', 'match'], ['take', 'bird'], ['take', 'stone'],
                ['use', 'candle', 'match'], ['use', 'bird', 'stone'],
                ['use', '', 'picture'], ['go', 'W']]
    assert GameSolver().solve(self.game1) == expected
def test_json_serializer_deserializer(self):
    """Blueprints survive an encode/decode round trip."""
    encoded = json.dumps(self.game1.get_blueprints(), cls=GameEncoder)
    decoded = GameDecoder().decode(encoded)
    assert self.game1.get_blueprints() == decoded

def test_json_deserializer_serializer(self):
    """Decoding, re-encoding and decoding again yields the same description."""
    first_pass = GameDecoder().decode(self.game1_text_blueprints)
    second_pass = GameDecoder().decode(json.dumps(first_pass, cls=GameEncoder))
    assert first_pass == second_pass
def test_json_game_deserializer_serializer_1(self):
    """A Game decoded from JSON text equals the same Game built directly from objects."""
    # Runtime test data: this JSON literal must stay exactly as-is (the
    # backslashes are line continuations inside one string literal).
    game_internal_text = '[[{"obj_content": {"attributes": [], "childObjects": [], "name": "roomA"}, "obj_name": "GameObject"},\
{"obj_content": {"attributes": [], "childObjects": [], "name": "roomB"}, "obj_name": "GameObject"},\
{"obj_content": {"attributes": [], "childObjects": [], "name": "roomC"}, "obj_name": "GameObject"},\
{"obj_content": {"attributes": [], "childObjects": [], "name": "roomD"}, "obj_name": "GameObject"},\
{"obj_content": {"attributes": [], "childObjects": [], "name": "roomE"}, "obj_name": "GameObject"},\
{"obj_content": {"attributes": [], "childObjects": [], "name": "roomF"}, "obj_name": "GameObject"}],\
[],\
[{"obj_content": {"room_name2": "roomB", "room_name1": "roomA", "direction2": "S", "attributes": [], "direction1": "N", "identifier": 11},\
"obj_name": "GamePassage"},\
{"obj_content": {"room_name2": "roomE", "room_name1": "roomA", "direction2": "W", "attributes": [], "direction1": "E", "identifier": 12},\
"obj_name": "GamePassage"},\
{"obj_content": {"room_name2": "roomB", "room_name1": "roomE", "direction2": "E", "attributes": [], "direction1": "N", "identifier": 13},\
"obj_name": "GamePassage"},\
{"obj_content": {"room_name2": "roomE", "room_name1": "roomD", "direction2": "S", "attributes": [], "direction1": "N", "identifier": 14},\
"obj_name": "GamePassage"},\
{"obj_content": {"room_name2": "roomF", "room_name1": "roomC", "direction2": "W", "attributes": [], "direction1": "E", "identifier": 15},\
"obj_name": "GamePassage"}], [], [], "roomF", {}]'
    game_from_text = Game( GameDecoder().decode( game_internal_text ) )
    # The same six-room world described directly with Python objects.
    game_internal = Game( [ [ GameObject( 'roomA' ), GameObject( 'roomB' ),
                              GameObject( 'roomC' ), GameObject( 'roomD' ),
                              GameObject( 'roomE' ), GameObject( 'roomF' ) ],
                            [],
                            [ GamePassage(11, 'roomA', 'roomB', 'N', 'S' ),
                              GamePassage(12, 'roomA', 'roomE', 'E', 'W' ),
                              GamePassage(13, 'roomE', 'roomB', 'N', 'E' ),
                              GamePassage(14, 'roomD', 'roomE', 'N', 'S' ),
                              GamePassage(15, 'roomC', 'roomF', 'E', 'W' ) ],
                            [], [], 'roomF', {} ] )
    assert( game_internal == game_from_text )
def test_json_game_deserializer_serializer_2(self):
    """game1 rebuilt from its text blueprints equals the original game."""
    rebuilt = Game(GameDecoder().decode(self.game1_text_blueprints))
    assert self.game1 == rebuilt
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__' :
    unittest.main()
| 62.761708
| 181
| 0.479249
| 3,577
| 45,565
| 5.900475
| 0.081353
| 0.052307
| 0.031081
| 0.036956
| 0.807022
| 0.766701
| 0.715152
| 0.669667
| 0.620913
| 0.588174
| 0
| 0.014894
| 0.372259
| 45,565
| 725
| 182
| 62.848276
| 0.723001
| 0.013826
| 0
| 0.580595
| 0
| 0.034429
| 0.213731
| 0.002248
| 0
| 0
| 0
| 0.001379
| 0.15493
| 1
| 0.092332
| false
| 0.165884
| 0.020344
| 0
| 0.114241
| 0.00939
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
dba687d4fe645d6ead2435d80e320a12d165b7d2
| 29
|
py
|
Python
|
temp/mapjdeo.py
|
doublejtoh/mission_ide
|
dc5ee5ee26dfad428fcf9744195d1ed910b50341
|
[
"MIT"
] | null | null | null |
temp/mapjdeo.py
|
doublejtoh/mission_ide
|
dc5ee5ee26dfad428fcf9744195d1ed910b50341
|
[
"MIT"
] | null | null | null |
temp/mapjdeo.py
|
doublejtoh/mission_ide
|
dc5ee5ee26dfad428fcf9744195d1ed910b50341
|
[
"MIT"
] | null | null | null |
# Echo one line read from stdin back to stdout.
user_text = input('as: ')
print(user_text)
| 5.8
| 17
| 0.482759
| 5
| 29
| 2.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 29
| 4
| 18
| 7.25
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
dba88a0578acd65c4de18a45c2a669259e8852e4
| 11,906
|
py
|
Python
|
tests/integration/test_async_http_client.py
|
TimonPeng/solana-py
|
e2d53a61fd4bfd30a8d3726aa87398c04b248b38
|
[
"MIT"
] | null | null | null |
tests/integration/test_async_http_client.py
|
TimonPeng/solana-py
|
e2d53a61fd4bfd30a8d3726aa87398c04b248b38
|
[
"MIT"
] | null | null | null |
tests/integration/test_async_http_client.py
|
TimonPeng/solana-py
|
e2d53a61fd4bfd30a8d3726aa87398c04b248b38
|
[
"MIT"
] | null | null | null |
"""Tests for the HTTP API Client."""
import pytest
import solana.system_program as sp
from solana.rpc.api import DataSliceOpt
from solana.transaction import Transaction
from .utils import assert_valid_response, aconfirm_transaction, AIRDROP_AMOUNT, generate_expected_meta_after_airdrop
@pytest.mark.integration
@pytest.mark.asyncio
async def test_request_air_drop(alt_stubbed_sender, test_http_client_async):
    """Test air drop to alt_stubbed_sender."""
    # Request the airdrop, then wait for the transaction to be confirmed.
    resp = await test_http_client_async.request_airdrop(alt_stubbed_sender.public_key(), AIRDROP_AMOUNT)
    assert_valid_response(resp)
    resp = await aconfirm_transaction(test_http_client_async, resp["result"])
    assert_valid_response(resp)
    # The confirmed transaction's metadata must match the canonical airdrop meta.
    expected_meta = generate_expected_meta_after_airdrop(resp)
    assert resp["result"]["meta"] == expected_meta
@pytest.mark.integration
@pytest.mark.asyncio
async def test_send_transaction_and_get_balance(alt_stubbed_sender, alt_stubbed_receiver, test_http_client_async):
    """Test sending a transaction to localnet."""
    # Create transfer tx to transfer lamports from stubbed sender to alt_stubbed_receiver
    transfer_tx = Transaction().add(
        sp.transfer(
            sp.TransferParams(
                from_pubkey=alt_stubbed_sender.public_key(), to_pubkey=alt_stubbed_receiver, lamports=1000
            )
        )
    )
    resp = await test_http_client_async.send_transaction(transfer_tx, alt_stubbed_sender)
    assert_valid_response(resp)
    # Confirm transaction
    resp = await aconfirm_transaction(test_http_client_async, resp["result"])
    assert_valid_response(resp)
    # Expected metadata of the confirmed transfer.  NOTE(review): the literal
    # balances/fee/rent values assume the exact localnet state left by the
    # preceding airdrop test and a 5000-lamport fee — brittle against
    # validator version changes; confirm when bumping solana-core.
    expected_meta = {
        "err": None,
        "fee": 5000,
        "innerInstructions": [],
        "logMessages": [
            "Program 11111111111111111111111111111111 invoke [1]",
            "Program 11111111111111111111111111111111 success",
        ],
        "postBalances": [9999994000, 954, 1],
        "postTokenBalances": [],
        "preBalances": [10000000000, 0, 1],
        "preTokenBalances": [],
        "rewards": [
            {
                "commission": None,
                "lamports": -46,
                "postBalance": 954,
                "pubkey": "J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i98",
                "rewardType": "Rent",
            }
        ],
        "status": {"Ok": None},
    }
    assert resp["result"]["meta"] == expected_meta
    # Check balances
    resp = await test_http_client_async.get_balance(alt_stubbed_sender.public_key())
    assert_valid_response(resp)
    assert resp["result"]["value"] == 9999994000
    resp = await test_http_client_async.get_balance(alt_stubbed_receiver)
    assert_valid_response(resp)
    assert resp["result"]["value"] == 954
@pytest.mark.integration
@pytest.mark.asyncio
async def test_send_raw_transaction_and_get_balance(alt_stubbed_sender, alt_stubbed_receiver, test_http_client_async):
    """Test sending a raw transaction to localnet."""
    # Get a recent blockhash
    resp = await test_http_client_async.get_recent_blockhash()
    assert_valid_response(resp)
    recent_blockhash = resp["result"]["value"]["blockhash"]
    # Create transfer tx transfer lamports from stubbed sender to alt_stubbed_receiver
    transfer_tx = Transaction(recent_blockhash=recent_blockhash).add(
        sp.transfer(
            sp.TransferParams(
                from_pubkey=alt_stubbed_sender.public_key(), to_pubkey=alt_stubbed_receiver, lamports=1000
            )
        )
    )
    # Sign transaction
    transfer_tx.sign(alt_stubbed_sender)
    # Send raw transaction
    resp = await test_http_client_async.send_raw_transaction(transfer_tx.serialize())
    assert_valid_response(resp)
    # Confirm transaction
    resp = await aconfirm_transaction(test_http_client_async, resp["result"])
    assert_valid_response(resp)
    # NOTE(review): balances continue from the previous test's end state
    # (9999994000 / 954), so this test depends on test ordering — confirm the
    # suite always runs these tests in file order.
    expected_meta = {
        "err": None,
        "fee": 5000,
        "innerInstructions": [],
        "logMessages": [
            "Program 11111111111111111111111111111111 invoke [1]",
            "Program 11111111111111111111111111111111 success",
        ],
        "postBalances": [9999988000, 1954, 1],
        "postTokenBalances": [],
        "preBalances": [9999994000, 954, 1],
        "preTokenBalances": [],
        "rewards": [],
        "status": {"Ok": None},
    }
    assert resp["result"]["meta"] == expected_meta
    # Check balances
    resp = await test_http_client_async.get_balance(alt_stubbed_sender.public_key())
    assert_valid_response(resp)
    assert resp["result"]["value"] == 9999988000
    resp = await test_http_client_async.get_balance(alt_stubbed_receiver)
    assert_valid_response(resp)
    assert resp["result"]["value"] == 1954
# The tests below are smoke tests: each asserts only that the RPC wrapper
# returns a well-formed JSON-RPC response, not any particular value.
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_block_commitment(test_http_client_async):
    """Test get block commitment."""
    resp = await test_http_client_async.get_block_commitment(5)
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_block_time(test_http_client_async):
    """Test get block time."""
    resp = await test_http_client_async.get_block_time(5)
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_cluster_nodes(test_http_client_async):
    """Test get cluster nodes."""
    resp = await test_http_client_async.get_cluster_nodes()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_confirmed_block(test_http_client_async):
    """Test get confirmed block."""
    resp = await test_http_client_async.get_confirmed_block(1)
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_confirmed_block_with_encoding(test_http_client_async):
    """Test get confirmed block with encoding."""
    resp = await test_http_client_async.get_confirmed_block(1, encoding="base64")
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_confirmed_blocks(test_http_client_async):
    """Test get confirmed blocks."""
    resp = await test_http_client_async.get_confirmed_blocks(5, 10)
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_confirmed_signature_for_address2(test_http_client_async):
    """Test get confirmed signature for address2."""
    resp = await test_http_client_async.get_confirmed_signature_for_address2(
        "Vote111111111111111111111111111111111111111", limit=1
    )
    assert_valid_response(resp)
# TODO(michael): This RPC call is only available in solana-core v1.7 or newer.
# @pytest.mark.integration
# @pytest.mark.asyncio
# async def test_get_signatures_for_address(test_http_client_async_async):
# """Test get signatures for addresses."""
# resp = await test_http_client_async_async.get_signatures_for_address(
# "Vote111111111111111111111111111111111111111", limit=1
# )
# assert_valid_response(resp)
# Smoke tests: each asserts only that the RPC wrapper returns a well-formed
# JSON-RPC response.
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_epoch_info(test_http_client_async):
    """Test get epoch info."""
    resp = await test_http_client_async.get_epoch_info()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_epoch_schedule(test_http_client_async):
    """Test get epoch schedule."""
    resp = await test_http_client_async.get_epoch_schedule()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_fee_calculator_for_blockhash(test_http_client_async):
    """Test get fee calculator for blockhash."""
    # Fetch a live blockhash first; the fee calculator is keyed on it.
    resp = await test_http_client_async.get_recent_blockhash()
    assert_valid_response(resp)
    resp = await test_http_client_async.get_fee_calculator_for_blockhash(resp["result"]["value"]["blockhash"])
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_slot(test_http_client_async):
    """Test get slot."""
    resp = await test_http_client_async.get_slot()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_fees(test_http_client_async):
    """Test get fees."""
    resp = await test_http_client_async.get_fees()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_first_available_block(test_http_client_async):
    """Test get first available block."""
    resp = await test_http_client_async.get_first_available_block()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_genesis_hash(test_http_client_async):
    """Test get genesis hash."""
    resp = await test_http_client_async.get_genesis_hash()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_identity(test_http_client_async):
    """Test get identity."""
    # Bug fix: this test previously called get_genesis_hash() (copy-paste from
    # the test above), so the getIdentity endpoint was never exercised.
    resp = await test_http_client_async.get_identity()
    assert_valid_response(resp)
# Smoke tests: each asserts only that the RPC wrapper returns a well-formed
# JSON-RPC response.
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_inflation_governor(test_http_client_async):
    """Test get inflation governor."""
    resp = await test_http_client_async.get_inflation_governor()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_inflation_rate(test_http_client_async):
    """Test get inflation rate."""
    resp = await test_http_client_async.get_inflation_rate()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_largest_accounts(test_http_client_async):
    """Test get largest accounts."""
    resp = await test_http_client_async.get_largest_accounts()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_leader_schedule(test_http_client_async):
    """Test get leader schedule."""
    resp = await test_http_client_async.get_leader_schedule()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_minimum_balance_for_rent_exemption(test_http_client_async):
    """Test get minimum balance for rent exemption."""
    # 50 is an arbitrary account data size in bytes.
    resp = await test_http_client_async.get_minimum_balance_for_rent_exemption(50)
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_slot_leader(test_http_client_async):
    """Test get slot leader."""
    resp = await test_http_client_async.get_slot_leader()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_supply(test_http_client_async):
    """Test get supply."""
    resp = await test_http_client_async.get_supply()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_transaction_count(test_http_client_async):
    """Test get transaction count."""
    resp = await test_http_client_async.get_transaction_count()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_version(test_http_client_async):
    """Test get version."""
    resp = await test_http_client_async.get_version()
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_account_info(alt_stubbed_sender, test_http_client_async):
    """Test get_account_info."""
    # Exercise the three request variants: default encoding, jsonParsed,
    # and a 1-byte data slice starting at offset 1.
    resp = await test_http_client_async.get_account_info(alt_stubbed_sender.public_key())
    assert_valid_response(resp)
    resp = await test_http_client_async.get_account_info(alt_stubbed_sender.public_key(), encoding="jsonParsed")
    assert_valid_response(resp)
    resp = await test_http_client_async.get_account_info(alt_stubbed_sender.public_key(), data_slice=DataSliceOpt(1, 1))
    assert_valid_response(resp)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_get_vote_accounts(test_http_client_async):
    """Test get vote accounts."""
    resp = await test_http_client_async.get_vote_accounts()
    assert_valid_response(resp)
| 34.311239
| 120
| 0.749538
| 1,524
| 11,906
| 5.484908
| 0.105643
| 0.067951
| 0.118914
| 0.161383
| 0.83132
| 0.800455
| 0.741596
| 0.636799
| 0.565618
| 0.559636
| 0
| 0.033866
| 0.154292
| 11,906
| 346
| 121
| 34.410405
| 0.796305
| 0.062406
| 0
| 0.597458
| 0
| 0
| 0.06793
| 0.021105
| 0
| 0
| 0
| 0.00289
| 0.20339
| 1
| 0
| false
| 0
| 0.021186
| 0
| 0.021186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
916cabad219d7b77533fff8043a3a972535766c3
| 237
|
py
|
Python
|
pyretri/extract/splitter/__init__.py
|
dongan-beta/PyRetri
|
8756d5d5813a5211b58855373b6c6cd33d7a11f6
|
[
"Apache-2.0"
] | 1,063
|
2020-04-21T12:42:05.000Z
|
2022-03-31T06:32:50.000Z
|
pyretri/extract/splitter/__init__.py
|
dongan-beta/PyRetri
|
8756d5d5813a5211b58855373b6c6cd33d7a11f6
|
[
"Apache-2.0"
] | 39
|
2020-05-07T07:24:19.000Z
|
2022-02-02T23:49:23.000Z
|
pyretri/extract/splitter/__init__.py
|
dongan-beta/PyRetri
|
8756d5d5813a5211b58855373b6c6cd33d7a11f6
|
[
"Apache-2.0"
] | 174
|
2020-04-26T04:33:11.000Z
|
2022-03-17T02:58:45.000Z
|
# -*- coding: utf-8 -*-
from yacs.config import CfgNode
from .splitter_impl.identity import Identity
from .splitter_impl.pcb import PCB
from .splitter_base import SplitterBase
# Public API of the splitter package.
__all__ = [
    'SplitterBase',
    'Identity', 'PCB',
]
| 16.928571
| 44
| 0.7173
| 29
| 237
| 5.62069
| 0.517241
| 0.220859
| 0.196319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005076
| 0.168776
| 237
| 13
| 45
| 18.230769
| 0.822335
| 0.088608
| 0
| 0
| 0
| 0
| 0.107477
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9183d01977f3b641e68197d0966e83729339627d
| 72
|
py
|
Python
|
makenew_serverless_python/__init__.py
|
makenew/serverless-python
|
cbfc860ab82de76c8a0a95167e5eea2fec665816
|
[
"MIT"
] | 6
|
2019-04-30T16:44:20.000Z
|
2021-12-25T23:23:41.000Z
|
makenew_serverless_python/__init__.py
|
makenew/serverless-python
|
cbfc860ab82de76c8a0a95167e5eea2fec665816
|
[
"MIT"
] | 15
|
2020-03-28T20:29:38.000Z
|
2021-09-11T07:19:46.000Z
|
makenew_serverless_python/__init__.py
|
makenew/serverless-python
|
cbfc860ab82de76c8a0a95167e5eea2fec665816
|
[
"MIT"
] | 3
|
2019-11-21T08:29:14.000Z
|
2021-01-10T17:22:41.000Z
|
"""
Package skeleton for a Python Serverless project on AWS Lambda.
"""
| 18
| 63
| 0.736111
| 10
| 72
| 5.3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 3
| 64
| 24
| 0.883333
| 0.875
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9189832ab2f5c095f75e48d8971bf6b836144730
| 142
|
py
|
Python
|
dns_messages/dns_objects/__init__.py
|
wahlflo/dns-messages
|
f42a1c1d0c933f44ca819b9a7f6e54daf48a7140
|
[
"MIT"
] | null | null | null |
dns_messages/dns_objects/__init__.py
|
wahlflo/dns-messages
|
f42a1c1d0c933f44ca819b9a7f6e54daf48a7140
|
[
"MIT"
] | null | null | null |
dns_messages/dns_objects/__init__.py
|
wahlflo/dns-messages
|
f42a1c1d0c933f44ca819b9a7f6e54daf48a7140
|
[
"MIT"
] | null | null | null |
from .rr_data import *
from .question import Question
from .dns_message import DnsMessage
from .opcode import OPCODE
from .rocde import RCODE
| 23.666667
| 35
| 0.816901
| 21
| 142
| 5.428571
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 142
| 5
| 36
| 28.4
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
918d1cc6cbaa7cdd70a957ff4b45ae50fc86266d
| 748
|
py
|
Python
|
overview_table/models.py
|
danmcelroy/VoSeq
|
e22bd5d971154170bf3f4f24b684b95a12418637
|
[
"BSD-3-Clause"
] | 2
|
2019-08-20T04:16:12.000Z
|
2020-08-25T02:05:12.000Z
|
overview_table/models.py
|
danmcelroy/VoSeq
|
e22bd5d971154170bf3f4f24b684b95a12418637
|
[
"BSD-3-Clause"
] | 65
|
2016-09-27T23:14:51.000Z
|
2022-03-19T14:17:58.000Z
|
overview_table/models.py
|
danmcelroy/VoSeq
|
e22bd5d971154170bf3f4f24b684b95a12418637
|
[
"BSD-3-Clause"
] | 4
|
2018-07-02T16:57:44.000Z
|
2021-03-23T02:12:15.000Z
|
from django.db import models
class OverviewTable(models.Model):
    """Bulk create does not work on inherited models so we create a new one."""
    # Pre-rendered HTML table cells; one cell per gene with the sequence length.
    sequence_string = models.TextField(help_text="HTML string of cells with "
                                                 "length of sequences for each "
                                                 "gene.")
    # NOTE(review): presumably the specimen/voucher code — confirm with callers.
    o_code = models.CharField(max_length=300)
    # Taxonomic classification, from broadest to most specific rank.
    # ("orden" is Spanish for "order" — kept to match the existing schema.)
    orden = models.TextField(blank=True)
    superfamily = models.TextField(blank=True)
    family = models.TextField(blank=True)
    subfamily = models.TextField(blank=True)
    genus = models.TextField(blank=True)
    species = models.TextField(blank=True)
    def __str__(self):
        return "OverviewTable: {0}".format(self.o_code)
| 39.368421
| 80
| 0.624332
| 87
| 748
| 5.264368
| 0.609195
| 0.229258
| 0.262009
| 0.31441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007435
| 0.280749
| 748
| 18
| 81
| 41.555556
| 0.843866
| 0.092246
| 0
| 0
| 0
| 0
| 0.115899
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0.071429
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
918e75300fe04c031168c214fb97059362ff96a6
| 92
|
py
|
Python
|
2014/12/social-security-benefits-age/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2014/12/social-security-benefits-age/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2014/12/social-security-benefits-age/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
# Key of the Google Doc holding this graphic's COPY text (NPR graphics rig).
COPY_GOOGLE_DOC_KEY = '1rn4wrIqnhLzikYukOvdxlbcPZL0h6GCLluwEZ57sKPM'
| 23
| 68
| 0.847826
| 9
| 92
| 8.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0.065217
| 92
| 3
| 69
| 30.666667
| 0.802326
| 0.217391
| 0
| 0
| 0
| 0
| 0.619718
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
37cfe0f36dfa4aeba7a668c5f8cc7b32ad2c5a2c
| 23
|
py
|
Python
|
mathbox/calculus/__init__.py
|
freedeaths/mathbox-py
|
e294dc1b916bb634807378883b1ba941a924bec5
|
[
"MIT"
] | 7
|
2021-12-23T07:03:12.000Z
|
2021-12-31T06:35:34.000Z
|
mathbox/calculus/__init__.py
|
freedeaths/mathbox-py
|
e294dc1b916bb634807378883b1ba941a924bec5
|
[
"MIT"
] | 8
|
2021-12-23T06:12:19.000Z
|
2022-01-07T15:01:47.000Z
|
mathbox/calculus/__init__.py
|
freedeaths/mathbox-py
|
e294dc1b916bb634807378883b1ba941a924bec5
|
[
"MIT"
] | null | null | null |
"""
Calculus module
"""
| 7.666667
| 15
| 0.608696
| 2
| 23
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 3
| 16
| 7.666667
| 0.7
| 0.652174
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
53093447ee14888e947d2e785d197e90e5ea4039
| 142
|
py
|
Python
|
dropcopy-run.py
|
juniorbl/dropcopy
|
5ea76a8e2322bd86d5abed0adb50ac43ced8520a
|
[
"MIT"
] | 1
|
2015-10-25T19:27:26.000Z
|
2015-10-25T19:27:26.000Z
|
dropcopy-run.py
|
juniorbl/dropcopy
|
5ea76a8e2322bd86d5abed0adb50ac43ced8520a
|
[
"MIT"
] | null | null | null |
dropcopy-run.py
|
juniorbl/dropcopy
|
5ea76a8e2322bd86d5abed0adb50ac43ced8520a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import gobject
from dropcopy.dropcopy import Dropcopy
if __name__ == "__main__":
    # Enable GLib thread support before any GUI/watcher threads start.
    gobject.threads_init()
    # NOTE(review): Dropcopy() appears to launch the app from its constructor
    # (the instance is discarded) — confirm against the Dropcopy class.
    Dropcopy()
| 14.2
| 38
| 0.753521
| 18
| 142
| 5.444444
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133803
| 142
| 9
| 39
| 15.777778
| 0.796748
| 0.140845
| 0
| 0
| 0
| 0
| 0.066116
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
5316135edababd5a0e6953a5060fe8f54f7e214d
| 1,174
|
py
|
Python
|
check-engine-lib/checkengine/_constraints/_Constraint.py
|
mikulskibartosz/correct-horse
|
383cf4106605cc6f94e800bdc707789c0cedbe95
|
[
"MIT"
] | 14
|
2020-07-06T05:37:02.000Z
|
2021-06-30T16:59:31.000Z
|
check-engine-lib/checkengine/_constraints/_Constraint.py
|
mikulskibartosz/correct-horse
|
383cf4106605cc6f94e800bdc707789c0cedbe95
|
[
"MIT"
] | 1
|
2021-10-15T23:33:34.000Z
|
2021-10-16T10:15:06.000Z
|
check-engine-lib/checkengine/_constraints/_Constraint.py
|
mikulskibartosz/correct-horse
|
383cf4106605cc6f94e800bdc707789c0cedbe95
|
[
"MIT"
] | 4
|
2020-10-08T05:14:32.000Z
|
2021-07-02T14:07:46.000Z
|
from typing import List, Tuple
from abc import ABC, abstractmethod
import random
import string
from pyspark.sql import DataFrame
def _generate_constraint_column_name(constraint_type, column_name):
random_suffix = ''.join(random.choice(string.ascii_lowercase) for i in range(12))
return f"__checkengine__{column_name}_{constraint_type}_{random_suffix}"
class _Constraint(ABC):
    """Base class for a single-column data-quality constraint.

    Subclasses implement constraint_name() plus the three DataFrame hooks:
    prepare the frame for checking, then filter passing / failing rows.
    """
    def __init__(self, column_name: str):
        self.column_name = column_name
        # Unique helper-column name used during the check, so multiple
        # constraints on the same column cannot clash.
        self.constraint_column_name = _generate_constraint_column_name(self.constraint_name(), column_name)
    @abstractmethod
    def constraint_name(self):
        """Return the short identifier of this constraint type."""
        pass
    @abstractmethod
    def prepare_df_for_check(self, data_frame: DataFrame) -> DataFrame:
        """Add whatever helper columns the check needs; default is a no-op."""
        return data_frame
    @abstractmethod
    def filter_success(self, data_frame: DataFrame) -> DataFrame:
        """Return only the rows satisfying the constraint."""
        return data_frame
    @abstractmethod
    def filter_failure(self, data_frame: DataFrame) -> DataFrame:
        """Return only the rows violating the constraint."""
        return data_frame
    def validate_self(self, data_frame: DataFrame, df_columns: List[str]) -> Tuple[bool, str]:
        """Check the target column exists; returns (ok, error_message)."""
        return self.column_name in df_columns, f"There is no '{self.column_name}' column"
| 31.72973
| 107
| 0.74276
| 150
| 1,174
| 5.473333
| 0.333333
| 0.133983
| 0.06821
| 0.107186
| 0.224117
| 0.224117
| 0.224117
| 0.224117
| 0.168088
| 0.168088
| 0
| 0.002066
| 0.175468
| 1,174
| 36
| 108
| 32.611111
| 0.846074
| 0
| 0
| 0.269231
| 1
| 0
| 0.086031
| 0.052811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.269231
| false
| 0.038462
| 0.192308
| 0.153846
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
532600249eae56c852a821cfdfb9697d65d358b1
| 157
|
py
|
Python
|
src/try_django/bin/django-admin.py
|
aiegoo/django2-udemy
|
7bf383dee57b15859193c3d0e1d9e7987eef58b6
|
[
"MIT"
] | null | null | null |
src/try_django/bin/django-admin.py
|
aiegoo/django2-udemy
|
7bf383dee57b15859193c3d0e1d9e7987eef58b6
|
[
"MIT"
] | 8
|
2020-02-12T03:26:47.000Z
|
2021-09-08T01:40:27.000Z
|
src/try_django/bin/django-admin.py
|
aiegoo/django2-udemy
|
7bf383dee57b15859193c3d0e1d9e7987eef58b6
|
[
"MIT"
] | null | null | null |
#!/root/repos/udemy/django2/try_django/bin/python3
from django.core import management
if __name__ == "__main__":
    # Delegate to Django's command-line dispatcher (reads sys.argv).
    management.execute_from_command_line()
| 26.166667
| 50
| 0.789809
| 21
| 157
| 5.333333
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.095541
| 157
| 5
| 51
| 31.4
| 0.774648
| 0.312102
| 0
| 0
| 0
| 0
| 0.074766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
5333748b2cd7514623c36006a3669b18f1d773f5
| 154
|
py
|
Python
|
progressivis/core/column_update.py
|
jdfekete/progressivis
|
3bc79ce229cd628ef0aa4663136a674743697b47
|
[
"BSD-2-Clause"
] | 51
|
2015-09-14T16:31:02.000Z
|
2022-01-12T17:56:53.000Z
|
progressivis/core/column_update.py
|
jdfekete/progressivis
|
3bc79ce229cd628ef0aa4663136a674743697b47
|
[
"BSD-2-Clause"
] | 10
|
2017-11-15T15:10:05.000Z
|
2022-01-19T07:36:43.000Z
|
progressivis/core/column_update.py
|
jdfekete/progressivis
|
3bc79ce229cd628ef0aa4663136a674743697b47
|
[
"BSD-2-Clause"
] | 5
|
2017-11-14T20:20:56.000Z
|
2020-01-22T06:26:51.000Z
|
"""
Manage changes in table columns
"""
from collections import namedtuple
# Immutable record of how a column changed.  NOTE(review): the three fields
# presumably hold the created / updated / deleted row identifiers — confirm
# against the producers of ColumnUpdate.
ColumnUpdate = namedtuple('ColumnUpdate', ['created', 'updated', 'deleted'])
| 19.25
| 76
| 0.733766
| 15
| 154
| 7.533333
| 0.866667
| 0.389381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123377
| 154
| 7
| 77
| 22
| 0.837037
| 0.201299
| 0
| 0
| 0
| 0
| 0.286957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
534e972c0d201ac2543e11e122e220987d2b9b24
| 221
|
py
|
Python
|
muteria/drivers/criteria/tools_by_languages/c/gcov/__init__.py
|
muteria/muteria
|
2cb72ff04548b011bce9296833bceb295199ae8e
|
[
"MIT"
] | 5
|
2020-05-06T03:13:01.000Z
|
2021-12-09T22:39:26.000Z
|
muteria/drivers/criteria/tools_by_languages/c/gcov/__init__.py
|
muteria/muteria
|
2cb72ff04548b011bce9296833bceb295199ae8e
|
[
"MIT"
] | 6
|
2019-11-27T18:38:09.000Z
|
2021-12-16T20:40:50.000Z
|
muteria/drivers/criteria/tools_by_languages/c/gcov/__init__.py
|
muteria/muteria
|
2cb72ff04548b011bce9296833bceb295199ae8e
|
[
"MIT"
] | 4
|
2019-06-24T08:54:36.000Z
|
2022-03-31T15:38:35.000Z
|
# Re-export the GCov criteria tool under the generic name that the
# criteria-driver loader looks up in every language/tool package.
from muteria.drivers.criteria.tools_by_languages.c.gcov.gcov import (
    CriteriaToolGCov,
)

StaticCriteriaTool = CriteriaToolGCov
| 27.625
| 80
| 0.628959
| 18
| 221
| 7.611111
| 0.666667
| 0.145985
| 0.379562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.325792
| 221
| 7
| 81
| 31.571429
| 0.919463
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
53537535ec28ff1ec23375c42a5518508554689f
| 151
|
py
|
Python
|
picturic/schemas.py
|
ThePokerFaCcCe/messenger
|
2db3d5c2ccd05ac40d2442a13d664ca9ad3cb14c
|
[
"MIT"
] | 4
|
2021-11-24T21:48:29.000Z
|
2021-12-07T00:44:44.000Z
|
picturic/schemas.py
|
ThePokerFaCcCe/myblog
|
9b24f381148b7f3262dd59e320f5e1600d1af68f
|
[
"MIT"
] | null | null | null |
picturic/schemas.py
|
ThePokerFaCcCe/myblog
|
9b24f381148b7f3262dd59e320f5e1600d1af68f
|
[
"MIT"
] | null | null | null |
# Schema shared by the full-size image and its thumbnail: each entry
# records the expected type of the picture's URL and file name.
_PICTURE_ENTRY = {
    "url": str,
    "name": str,
}

PICTURE_DEFAULT = {
    "image": dict(_PICTURE_ENTRY),
    "thumbnail": dict(_PICTURE_ENTRY),
}
| 11.615385
| 19
| 0.364238
| 12
| 151
| 4.5
| 0.583333
| 0.222222
| 0.37037
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.450331
| 151
| 12
| 20
| 12.583333
| 0.650602
| 0
| 0
| 0.4
| 0
| 0
| 0.187919
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5357be7e4858fa080a891cb56fb6d833d39d5199
| 222
|
py
|
Python
|
about/views.py
|
Gao-Chuan/blog
|
4dcbf0720e50be7f11841778889301b7dcb14e2d
|
[
"Apache-2.0"
] | null | null | null |
about/views.py
|
Gao-Chuan/blog
|
4dcbf0720e50be7f11841778889301b7dcb14e2d
|
[
"Apache-2.0"
] | null | null | null |
about/views.py
|
Gao-Chuan/blog
|
4dcbf0720e50be7f11841778889301b7dcb14e2d
|
[
"Apache-2.0"
] | null | null | null |
import datetime
from django.shortcuts import render
# Create your views here.
def about(request):
    """Render the about page with the current local date/time (second precision)."""
    # str(datetime) yields "YYYY-MM-DD HH:MM:SS.ffffff"; dropping the
    # fractional part leaves a second-precision timestamp.
    now = str(datetime.datetime.now()).split('.')[0]
    context = {'date': now}
    return render(request, 'about.html', context)
| 24.666667
| 61
| 0.68018
| 29
| 222
| 5.206897
| 0.724138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005376
| 0.162162
| 222
| 9
| 62
| 24.666667
| 0.806452
| 0.103604
| 0
| 0
| 0
| 0
| 0.075758
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
5367e683e65a9d614614547f9df3d58193bf63db
| 323
|
py
|
Python
|
src/model/mutual_net.py
|
TencentYoutuResearch/SelfSupervisedLearning-DSM
|
655a0a23a47bf2559f3d435384ae59a8871a5ff5
|
[
"Apache-2.0"
] | 27
|
2021-01-07T11:09:33.000Z
|
2021-08-31T02:46:23.000Z
|
src/model/mutual_net.py
|
TencentYoutuResearch/SelfSupervisedLearning-DSM
|
655a0a23a47bf2559f3d435384ae59a8871a5ff5
|
[
"Apache-2.0"
] | null | null | null |
src/model/mutual_net.py
|
TencentYoutuResearch/SelfSupervisedLearning-DSM
|
655a0a23a47bf2559f3d435384ae59a8871a5ff5
|
[
"Apache-2.0"
] | 3
|
2021-01-08T08:31:06.000Z
|
2021-11-26T04:10:23.000Z
|
import torch.nn as nn
class MutualNet(nn.Module):
    """Siamese-style wrapper: applies one shared embedding network to two inputs."""

    def __init__(self, embeddingnet):
        super().__init__()
        # Shared backbone used for both branches of forward().
        self.embeddingnet = embeddingnet

    def forward(self, x, y, z):
        # NOTE(review): `z` is accepted but never used; kept so existing
        # three-argument callers continue to work.
        out_a = self.embeddingnet(x)
        out_b = self.embeddingnet(y)
        return out_a, out_b
| 26.916667
| 41
| 0.659443
| 41
| 323
| 4.902439
| 0.439024
| 0.318408
| 0.199005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241486
| 323
| 12
| 42
| 26.916667
| 0.820408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
536cb6f7a3412ce3428828ed95647de166c57fbb
| 1,176
|
py
|
Python
|
grr/core/grr_response_core/config/__init__.py
|
tsehori/grr
|
048506f22f74642bfe61749069a45ddf496fdab3
|
[
"Apache-2.0"
] | 1
|
2021-07-01T01:43:06.000Z
|
2021-07-01T01:43:06.000Z
|
grr/core/grr_response_core/config/__init__.py
|
tsehori/grr
|
048506f22f74642bfe61749069a45ddf496fdab3
|
[
"Apache-2.0"
] | 44
|
2021-05-14T22:49:24.000Z
|
2022-03-13T21:54:02.000Z
|
grr/core/grr_response_core/config/__init__.py
|
tsehori/grr
|
048506f22f74642bfe61749069a45ddf496fdab3
|
[
"Apache-2.0"
] | 1
|
2020-06-25T14:25:54.000Z
|
2020-06-25T14:25:54.000Z
|
#!/usr/bin/env python
# Lint as: python3
"""This module will load all the configuration parameters."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# pylint: disable=unused-import
from grr_response_core.config import acls
from grr_response_core.config import api
from grr_response_core.config import artifacts
from grr_response_core.config import build
from grr_response_core.config import checks
from grr_response_core.config import client
from grr_response_core.config import config
from grr_response_core.config import contexts
from grr_response_core.config import data_store
from grr_response_core.config import gui
from grr_response_core.config import local
from grr_response_core.config import logging
from grr_response_core.config import output_plugins
from grr_response_core.config import server
from grr_response_core.config import test
# pylint: enable=unused-import
from grr_response_core.lib import config_lib
# By this time it's guaranteed that all configuration options
# and filters are imported and known to the config system.
# NOTE(review): reaching into the private _CONFIG singleton is deliberate;
# this module is the sanctioned public access point for it.
CONFIG = config_lib._CONFIG  # pylint: disable=protected-access
| 37.935484
| 63
| 0.85119
| 180
| 1,176
| 5.277778
| 0.35
| 0.117895
| 0.252632
| 0.32
| 0.534737
| 0.534737
| 0
| 0
| 0
| 0
| 0
| 0.000953
| 0.107993
| 1,176
| 30
| 64
| 39.2
| 0.904671
| 0.256803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.95
| 0
| 0.95
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
727dd4d203f65f5147f46d42f8735192c28f5ebd
| 16,149
|
py
|
Python
|
prepare_data/gen_hard_bbox_rnet_onet.py
|
thaiph99/MTCNN
|
d6acfcdba972beb47751d63a34f3cf168d0488d2
|
[
"MIT"
] | null | null | null |
prepare_data/gen_hard_bbox_rnet_onet.py
|
thaiph99/MTCNN
|
d6acfcdba972beb47751d63a34f3cf168d0488d2
|
[
"MIT"
] | null | null | null |
prepare_data/gen_hard_bbox_rnet_onet.py
|
thaiph99/MTCNN
|
d6acfcdba972beb47751d63a34f3cf168d0488d2
|
[
"MIT"
] | null | null | null |
# coding:utf-8
import sys
import numpy as np
import cv2
import os
import argparse
import pickle
# Make the repository root importable so the `detection`, `tools` and
# `training` packages below resolve regardless of the working directory.
rootPath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../"))
sys.path.insert(0, rootPath)
from detection.MtcnnDetector_plate import MtcnnDetector_plate
from detection.MtcnnDetector import MtcnnDetector
from detection.fcn_detector import FcnDetector
from detection.detector_plate import Detector
from tools.loader import TestLoader
from training.mtcnn_config import config
from training.mtcnn_plate_model import P_Net, R_Net
from tools.common_utils import IoU, convert_to_square, enlarge_det
def read_wider_annotation(widerImagesPath, annoTxtPath):
    """Parse a WIDER-face style annotation file.

    The file alternates: an image path line, a line with the number of
    faces, then that many lines of "x y w h ..." box descriptions.

    :param widerImagesPath: directory the image paths are relative to.
    :param annoTxtPath: path to the annotation text file.
    :return: dict with 'images' (list of joined image paths) and 'bboxes'
             (per-image list of [xmin, ymin, xmax, ymax] floats).
    """
    data = dict()
    images = []
    bboxes = []
    # `with` guarantees the handle is closed (the original leaked it).
    with open(annoTxtPath, 'r') as labelfile:
        while True:
            # image path; an empty readline() means EOF
            imagepath = labelfile.readline().strip('\n')
            if not imagepath:
                break
            imagepath = os.path.join(widerImagesPath, imagepath)
            images.append(imagepath)
            # face numbers
            nums = labelfile.readline().strip('\n')
            one_image_bboxes = []
            for _ in range(int(nums)):
                bb_info = labelfile.readline().strip('\n').split(' ')
                # only need x, y, w, h; convert width/height to corner coords
                face_box = [float(bb_info[i]) for i in range(4)]
                xmin = face_box[0]
                ymin = face_box[1]
                xmax = xmin + face_box[2]
                ymax = ymin + face_box[3]
                one_image_bboxes.append([xmin, ymin, xmax, ymax])
            bboxes.append(one_image_bboxes)
    data['images'] = images  # all image paths
    data['bboxes'] = bboxes  # all image bboxes
    return data
def read_plate_annotation(plateImagesPath, annoTxtPath):
    """Parse a plate annotation file with one line per image.

    Each line is "relative/path x1 y1 x2 y2"; the four box fields are
    kept as strings, matching the original behavior.

    :param plateImagesPath: directory the image paths are relative to.
    :param annoTxtPath: path to the annotation text file.
    :return: dict with 'images' (list of joined paths) and 'bboxes'
             (list of [x1, y1, x2, y2] string lists).
    """
    data = dict()
    images = []
    bboxes = []
    # `with` + iterating the handle fixes the unclosed-file leak and avoids
    # materialising the whole file via readlines().
    with open(annoTxtPath, 'r') as labelfile:
        for line in labelfile:
            line = line.strip('\n')
            components = line.split(' ')
            # image path
            imagepath = os.path.join(plateImagesPath, components[0])
            images.append(imagepath)
            # plate box corners (still strings)
            bboxes.append([components[1], components[2], components[3], components[4]])
    data['images'] = images
    data['bboxes'] = bboxes
    return data
def __save_data(stage, data, save_path):
    """Mine hard pos/neg/part training crops from pickled detections.

    Each detected box (made square) is compared to the ground-truth boxes:
    IoU < 0.3 -> negative (at most 60 per image), IoU >= 0.65 -> positive,
    0.4 <= IoU < 0.65 -> part face.  Crops are resized to 24px for "rnet"
    or 48px otherwise and written under tmp/data/<stage>/ together with
    pos.txt/neg.txt/part.txt label files.

    :param stage: "rnet" or "onet"; selects output folder and crop size.
    :param data: dict with 'images' (paths) and 'bboxes' (ground truths).
    :param save_path: folder containing detections.pkl from the detector run.
    """
    im_idx_list = data['images']
    gt_boxes_list = data['bboxes']
    num_of_images = len(im_idx_list)
    # save files
    saveFolder = os.path.join(rootPath, "tmp/data/%s/" % (stage))
    print(">>>>>> Gen hard samples for %s..." % (stage))
    typeName = ["pos", "neg", "part"]
    saveFiles = {}
    for tp in typeName:
        _saveFolder = os.path.join(saveFolder, tp)
        if not os.path.isdir(_saveFolder):
            os.makedirs(_saveFolder)
        saveFiles[tp] = open(os.path.join(saveFolder, "{}.txt".format(tp)), 'w')
    # read detect result
    # NOTE(review): the pickle file handle is never closed explicitly.
    det_boxes = pickle.load(open(os.path.join(save_path, 'detections.pkl'), 'rb'))
    assert len(det_boxes) == num_of_images, "incorrect detections or ground truths"
    # index of neg, pos and part face, used as their image names
    n_idx, p_idx, d_idx = 0, 0, 0
    total_idx = 0
    for im_idx, dets, gts in zip(im_idx_list, det_boxes, gt_boxes_list):
        gts = np.array(gts, dtype=np.float32).reshape(-1, 4)
        if dets.shape[0] == 0:
            continue
        img = cv2.imread(im_idx)
        total_idx += 1
        # change to square
        dets = convert_to_square(dets)
        dets[:, 0:4] = np.round(dets[:, 0:4])
        neg_num = 0
        for box in dets:
            x_left, y_top, x_right, y_bottom, _ = box.astype(int)
            width = x_right - x_left + 1
            height = y_bottom - y_top + 1
            # ignore box that is too small or beyond image border
            if width < 20 or x_left < 0 or y_top < 0 or x_right > img.shape[1] - 1 or y_bottom > img.shape[0] - 1:
                continue
            # compute intersection over union(IoU) between current box and all gt boxes
            Iou = IoU(box, gts)
            cropped_im = img[y_top:y_bottom + 1, x_left:x_right + 1, :]
            image_size = 24 if stage == "rnet" else 48
            resized_im = cv2.resize(cropped_im, (image_size, image_size),
                                    interpolation=cv2.INTER_LINEAR)
            # save negative images and write label
            # Iou with all gts must below 0.3
            if np.max(Iou) < 0.3 and neg_num < 60:
                # now to save it
                save_file = os.path.join(saveFolder, "neg", "%s.jpg" % n_idx)
                saveFiles['neg'].write(save_file + ' 0\n')
                cv2.imwrite(save_file, resized_im)
                n_idx += 1
                neg_num += 1
            else:
                # find gt_box with the highest iou
                idx = np.argmax(Iou)
                assigned_gt = gts[idx]
                x1, y1, x2, y2 = assigned_gt
                # compute bbox reg label
                offset_x1 = (x1 - x_left) / float(width)
                offset_y1 = (y1 - y_top) / float(height)
                offset_x2 = (x2 - x_right) / float(width)
                offset_y2 = (y2 - y_bottom) / float(height)
                # save positive and part-face images and write labels
                if np.max(Iou) >= 0.65:
                    save_file = os.path.join(saveFolder, "pos", "%s.jpg" % p_idx)
                    saveFiles['pos'].write(save_file + ' 1 %.2f %.2f %.2f %.2f\n' %
                                           (offset_x1, offset_y1, offset_x2, offset_y2))
                    cv2.imwrite(save_file, resized_im)
                    p_idx += 1
                elif np.max(Iou) >= 0.4:
                    save_file = os.path.join(saveFolder, "part", "%s.jpg" % d_idx)
                    saveFiles['part'].write(save_file + ' -1 %.2f %.2f %.2f %.2f\n' %
                                            (offset_x1, offset_y1, offset_x2, offset_y2))
                    cv2.imwrite(save_file, resized_im)
                    d_idx += 1
        printStr = "\r[{}] pos: {} neg: {} part:{}".format(total_idx, p_idx, n_idx, d_idx)
        sys.stdout.write(printStr)
        sys.stdout.flush()
    for f in saveFiles.values():
        f.close()
    print('\n')
def __save_plate_data(stage, data, save_path):
    """Mine hard pos/neg/part training crops for the plate dataset.

    Near-duplicate of __save_data with two differences: boxes are enlarged
    to a 3:1 aspect via enlarge_det (instead of squared) and crops are
    resized to (3*size, size).  Same IoU thresholds: < 0.3 negative
    (max 60 per image), >= 0.65 positive, >= 0.4 part.

    :param stage: "rnet" or "onet"; selects output folder and crop size.
    :param data: dict with 'images' (paths) and 'bboxes' (ground truths).
    :param save_path: folder containing detections.pkl from the detector run.
    """
    im_idx_list = data['images']
    gt_boxes_list = data['bboxes']
    num_of_images = len(im_idx_list)
    # save files
    saveFolder = os.path.join(rootPath, "tmp/data/%s/" % (stage))
    print(">>>>>> Gen hard samples for %s..." % (stage))
    typeName = ["pos", "neg", "part"]
    saveFiles = {}
    for tp in typeName:
        _saveFolder = os.path.join(saveFolder, tp)
        if not os.path.isdir(_saveFolder):
            os.makedirs(_saveFolder)
        saveFiles[tp] = open(os.path.join(saveFolder, "{}.txt".format(tp)), 'w')
    # read detect result
    # NOTE(review): the pickle file handle is never closed explicitly.
    det_boxes = pickle.load(open(os.path.join(save_path, 'detections.pkl'), 'rb'))
    assert len(det_boxes) == num_of_images, "incorrect detections or ground truths"
    # index of neg, pos and part face, used as their image names
    n_idx, p_idx, d_idx = 0, 0, 0
    total_idx = 0
    for im_idx, dets, gts in zip(im_idx_list, det_boxes, gt_boxes_list):
        gts = np.array(gts, dtype=np.float32).reshape(-1, 4)
        if dets.shape[0] == 0:
            continue
        img = cv2.imread(im_idx)
        total_idx += 1
        # enlarge the det box to w:h==3:1 size
        dets = enlarge_det(dets)
        dets[:, 0:4] = np.round(dets[:, 0:4])
        neg_num = 0
        for box in dets:
            x_left, y_top, x_right, y_bottom, _ = box.astype(int)
            width = x_right - x_left + 1
            height = y_bottom - y_top + 1
            # ignore box that is too small or beyond image border
            if width < 20 or x_left < 0 or y_top < 0 or x_right > img.shape[1] - 1 or y_bottom > img.shape[0] - 1:
                continue
            # compute intersection over union(IoU) between current box and all gt boxes
            Iou = IoU(box, gts)
            cropped_im = img[y_top:y_bottom + 1, x_left:x_right + 1, :]
            image_size = 24 if stage == "rnet" else 48
            resized_im = cv2.resize(cropped_im, (image_size * 3, image_size),
                                    interpolation=cv2.INTER_LINEAR)
            # save negative images and write label
            # Iou with all gts must below 0.3
            if np.max(Iou) < 0.3 and neg_num < 60:
                # now to save it
                save_file = os.path.join(saveFolder, "neg", "%s.jpg" % n_idx)
                saveFiles['neg'].write(save_file + ' 0\n')
                cv2.imwrite(save_file, resized_im)
                n_idx += 1
                neg_num += 1
            else:
                # find gt_box with the highest iou
                idx = np.argmax(Iou)
                assigned_gt = gts[idx]
                x1, y1, x2, y2 = assigned_gt
                # compute bbox reg label
                offset_x1 = (x1 - x_left) / float(width)
                offset_y1 = (y1 - y_top) / float(height)
                offset_x2 = (x2 - x_right) / float(width)
                offset_y2 = (y2 - y_bottom) / float(height)
                # save positive and part-face images and write labels
                if np.max(Iou) >= 0.65:
                    save_file = os.path.join(saveFolder, "pos", "%s.jpg" % p_idx)
                    saveFiles['pos'].write(save_file + ' 1 %.2f %.2f %.2f %.2f\n' %
                                           (offset_x1, offset_y1, offset_x2, offset_y2))
                    cv2.imwrite(save_file, resized_im)
                    p_idx += 1
                elif np.max(Iou) >= 0.4:
                    save_file = os.path.join(saveFolder, "part", "%s.jpg" % d_idx)
                    saveFiles['part'].write(save_file + ' -1 %.2f %.2f %.2f %.2f\n' %
                                            (offset_x1, offset_y1, offset_x2, offset_y2))
                    cv2.imwrite(save_file, resized_im)
                    d_idx += 1
        printStr = "\r[{}] pos: {} neg: {} part:{}".format(total_idx, p_idx, n_idx, d_idx)
        sys.stdout.write(printStr)
        sys.stdout.flush()
    for f in saveFiles.values():
        f.close()
    print('\n')
def test_net(batch_size, stage, thresh, min_size, stride):
    """Run the earlier trained MTCNN stages over the WIDER training set,
    pickle the raw detections, then mine hard samples via __save_data.

    :param batch_size: detector batch size (used by the RNet Detector).
    :param stage: "rnet" or "onet" -- which stage data is generated for.
    :param thresh: per-stage classification thresholds.
    :param min_size: minimum face size passed to the detector.
    :param stride: detection stride.
    """
    print(">>>>>> Detect bbox for %s..." % (stage))
    detectors = [None, None, None]
    if stage in ["rnet", "onet"]:
        modelPath = os.path.join(rootPath, 'tmp/model/pnet/')
        # Pick the checkpoint with the highest epoch number.
        a = [b[5:-6] for b in os.listdir(modelPath) if b.startswith('pnet-') and b.endswith('.index')]
        maxEpoch = max(map(int, a))
        modelPath = os.path.join(modelPath, "pnet-%d" % (maxEpoch))
        print("Use PNet model: %s" % (modelPath))
        PNet = FcnDetector(P_Net, modelPath)
        detectors[0] = PNet
    if stage in ["onet"]:
        modelPath = os.path.join(rootPath, 'tmp/model/rnet/')
        # Same highest-epoch selection for the RNet checkpoint.
        a = [b[5:-6] for b in os.listdir(modelPath) if b.startswith('rnet-') and b.endswith('.index')]
        maxEpoch = max(map(int, a))
        modelPath = os.path.join(modelPath, "rnet-%d" % (maxEpoch))
        print("Use RNet model: %s" % (modelPath))
        RNet = Detector(R_Net, 24, batch_size, modelPath)
        detectors[1] = RNet
    # read annotation(type:dict)
    widerImagesPath = os.path.join(rootPath, "dataset", "WIDER_train", "images")
    annoTxtPath = os.path.join(rootPath, "dataset", "wider_face_train_bbx_gt.txt")
    data = read_wider_annotation(widerImagesPath, annoTxtPath)
    mtcnn_detector = MtcnnDetector(detectors=detectors, min_size=min_size,
                                   stride=stride, threshold=thresh)
    test_data = TestLoader(data['images'])
    # do detect
    detections, _ = mtcnn_detector.detect_plate(test_data)
    # save detect result
    save_path = os.path.join(rootPath, "tmp/data", stage)
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    save_file = os.path.join(save_path, "detections.pkl")
    with open(save_file, 'wb') as f:
        pickle.dump(detections, f, 1)
    print("\nDone! Start to do OHEM...")
    __save_data(stage, data, save_path)
def test_net_plate(batch_size, stage, thresh, min_size, stride):
    """Plate-dataset variant of test_net: detect with the trained earlier
    stages, pickle the detections, then mine hard samples via
    __save_plate_data.

    :param batch_size: detector batch size (used by the RNet Detector).
    :param stage: "rnet" or "onet" -- which stage data is generated for.
    :param thresh: per-stage classification thresholds.
    :param min_size: minimum plate size passed to the detector.
    :param stride: detection stride.
    """
    print(">>>>>> Detect bbox for %s..." % (stage))
    detectors = [None, None, None]
    if stage in ["rnet", "onet"]:
        modelPath = os.path.join(rootPath, 'tmp/model/pnet/')
        # Pick the checkpoint with the highest epoch number.
        a = [b[5:-6] for b in os.listdir(modelPath) if b.startswith('pnet-') and b.endswith('.index')]
        maxEpoch = max(map(int, a))
        modelPath = os.path.join(modelPath, "pnet-%d" % (maxEpoch))
        print("Use PNet model: %s" % (modelPath))
        PNet = FcnDetector(P_Net, modelPath)
        detectors[0] = PNet
    if stage in ["onet"]:
        modelPath = os.path.join(rootPath, 'tmp/model/rnet/')
        a = [b[5:-6] for b in os.listdir(modelPath) if b.startswith('rnet-') and b.endswith('.index')]
        maxEpoch = max(map(int, a))
        modelPath = os.path.join(modelPath, "rnet-%d" % (maxEpoch))
        print("Use RNet model: %s" % (modelPath))
        RNet = Detector(R_Net, 24, batch_size, modelPath)
        detectors[1] = RNet
    # read annotation(type:dict)
    plateImagesPath = os.path.join(rootPath, "dataset")
    annoTxtPath = os.path.join(rootPath, "dataset", "anno_file.txt")
    data = read_plate_annotation(plateImagesPath, annoTxtPath)
    mtcnn_detector = MtcnnDetector_plate(detectors=detectors, min_size=min_size,
                                         stride=stride, threshold=thresh)
    test_data = TestLoader(data['images'])
    print("shape of test data:", np.shape(test_data))
    # do detect
    detections, _ = mtcnn_detector.detect_plate(test_data)
    # save detect result
    save_path = os.path.join(rootPath, "tmp/data", stage)
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    save_file = os.path.join(save_path, "detections.pkl")
    with open(save_file, 'wb') as f:
        pickle.dump(detections, f, 1)
    print("\nDone! Start to do OHEM...")
    __save_plate_data(stage, data, save_path)
def parse_args():
    """Parse command-line arguments for hard-sample generation.

    :return: argparse.Namespace with stage, gpus, mydata and lmnum.
    """
    def _str2bool(value):
        # Fixes the original `type=bool` bug: bool("False") is True, so
        # "--mydata False" used to parse as True.  Parse the text instead.
        if isinstance(value, bool):
            return value
        if value.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        if value.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError('boolean value expected, got %r' % value)

    parser = argparse.ArgumentParser(description='Create hard bbox sample...',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--stage', dest='stage', help='working stage, can be rnet, onet',
                        default='unknow', type=str)
    # NOTE(review): the default '0 ' has a trailing space (exported verbatim
    # into CUDA_VISIBLE_DEVICES) -- kept as-is for behavioral compatibility.
    parser.add_argument('--gpus', dest='gpus', help='specify gpu to run. eg: --gpus=0,1',
                        default='0 ', type=str)
    parser.add_argument('--mydata', dest='mydata', help='data type(default training data or my data)',
                        default=False, type=_str2bool)
    parser.add_argument('--lmnum', dest='lmnum', help='number of landmarks in one bounding box',
                        default=5, type=int)
    args = parser.parse_args()
    return args
if __name__ == '__main__':
    args = parse_args()
    stage = args.stage
    gpus = args.gpus
    # Restrict visible GPUs before any framework initialisation.
    if gpus:
        os.environ["CUDA_VISIBLE_DEVICES"] = gpus
    # Per-stage hyper-parameters (crop size selected downstream is 24px for
    # rnet, 48px otherwise).
    if stage == "rnet":
        batchSize, threshold, minSize, stride = 1, [0.4, 0.05], 24, 2
    elif stage == "onet":
        batchSize, threshold, minSize, stride = 1, [0.4, 0.05], 48, 2
    else:
        raise Exception("Invaild stage...Please use --stage")
    if not args.mydata:
        # Default WIDER-face pipeline.
        test_net(batchSize, stage, threshold, minSize, stride)
    elif args.lmnum == 4:
        # Custom plate dataset (4 corner landmarks per box).
        test_net_plate(batchSize, stage, threshold, minSize, stride)
| 44.243836
| 115
| 0.557434
| 2,074
| 16,149
| 4.183703
| 0.150434
| 0.027659
| 0.038032
| 0.024893
| 0.745649
| 0.719027
| 0.703123
| 0.69909
| 0.691944
| 0.679037
| 0
| 0.020449
| 0.318658
| 16,149
| 364
| 116
| 44.365385
| 0.768154
| 0.076228
| 0
| 0.717532
| 0
| 0
| 0.093272
| 0.001861
| 0
| 0
| 0
| 0
| 0.006494
| 1
| 0.022727
| false
| 0
| 0.045455
| 0
| 0.077922
| 0.055195
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
727fcea222787de95cba2a0eabfdcf5cc6d0f6f0
| 35,021
|
py
|
Python
|
phasing/io/VariantPhaser.py
|
Zuhayr-PacBio/cDNA_Cupcake
|
2cbbcf91f363e8a385a485b2c832b466cce2a323
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
phasing/io/VariantPhaser.py
|
Zuhayr-PacBio/cDNA_Cupcake
|
2cbbcf91f363e8a385a485b2c832b466cce2a323
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
phasing/io/VariantPhaser.py
|
Zuhayr-PacBio/cDNA_Cupcake
|
2cbbcf91f363e8a385a485b2c832b466cce2a323
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
__author__ = 'etseng@pacb.com'
import pdb
from collections import defaultdict, namedtuple, Counter
from csv import DictReader
import vcf
import pysam
from Bio.Seq import Seq
from Bio import SeqIO
from cupcake.io.BioReaders import GMAPSAMReader
from .coordinate_mapper import get_base_to_base_mapping_from_sam
__VCF_EXAMPLE__ = \
"""
##fileformat=VCFv4.2
##INFO=<ID=DP,Number=1,Type=Integer,Description="Total Depth">
##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency">
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
##FORMAT=<ID=HQ,Number=2,Type=Integer,Description="Haplotype Quality">
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT
20 1 . G A,T . PASS AF=0.5;DB GT
"""
def type_fa_or_fq(file):
    """Guess the SeqIO format from a filename: 'fasta' for .fa/.fasta, else 'fastq'."""
    # endswith accepts a tuple of candidate suffixes; compare case-insensitively.
    return 'fasta' if file.upper().endswith(('.FA', '.FASTA')) else 'fastq'
class VariantPhaser(object):
    """Phase CCS reads into haplotypes from previously called variants.

    Wraps an MPileUPVariant instance (``vc``), normalizes its per-position
    variant calls, then assigns each aligned CCS read to a haplotype via
    phase_variant()/match_haplotype().
    """
    def __init__(self, vc):
        """
        :param vc: MPileUPVariant instance.
        """
        self.vc = vc
        self.min_var_pos = min(vc.variant) # minimum 0-based position of a called variant
        self.max_var_pos = max(vc.variant) # maximum 0-based position of a called variant
        self.accepted_vars_by_pos = {} # 0-based pos --> list of accepted, (NOT strand sense) base
        self.count_of_vars_by_pos = {} # 0-based pos --> (NOT strand sense, but ref-based) base --> count
        self.accepted_pos = [] # sorted list of variant positions (0-based, ref)
        # process vc.variant which is
        # dict of 0-based pos --> desc list of (base, count)
        # ex: {1565: [('a', 49), ('g', 36)]}
        # lower case means at pos 1565, we expect - strand mapping and
        # seq base is 'T' on the sense strand
        # this converts to self.accepted_vars_by_pos[1565] = ['A', 'G']
        # later, when we are matching back to transcript seq, need to watch for strand!
        for pos, vars in vc.variant.items():
            self.accepted_vars_by_pos[pos] = [_base.upper() for _base,_count in vars]
            self.count_of_vars_by_pos[pos] = dict((_base.upper(), _count) for _base,_count in vars)
        self.accepted_pos = list(self.accepted_vars_by_pos.keys())
        self.accepted_pos.sort()
        self.haplotypes = Haplotypes(self.accepted_pos, self.vc.ref_base, self.count_of_vars_by_pos)
        self.seq_hap_info = {} # haplotype assignment, key: (CCS) seqid, value: haplotype index
    def phase_variant(self, sam_filename, input_fa_or_fq, output_prefix, partial_ok=False):
        """
        :param sam_filename: CCS SAM filename. Can be unsorted.
        :param input_fa_or_fq: Input CCS fasta/fastq filename.
        :param output_prefix: Output prefix. Writes to xxx.log.
        :param partial_ok: default False. if True, (CCS) reads don't need to cover all SNP positions.

        For each alignment:
        1. discard if did not map to the strand expected
        2. discard if did not map to the full range of variants (unless <partial_ok> is True)
        3. discard if at var positions have non-called bases (outliers)
        """
        # NOTE(review): f_log and the open() handed to SeqIO.parse are never
        # explicitly closed.
        f_log = open(output_prefix+'.log', 'w')
        seq_dict = SeqIO.to_dict(SeqIO.parse(open(input_fa_or_fq), type_fa_or_fq(input_fa_or_fq)))
        for r in GMAPSAMReader(sam_filename, True, query_len_dict=dict((k, len(seq_dict[k].seq)) for k in seq_dict)):
            if r.sID == '*':
                f_log.write("Ignore {0} because: unmapped.\n".format(r.qID))
                continue
            if r.flag.strand != self.vc.expected_strand:
                f_log.write("Ignore {0} because: strand is {1}.\n".format(r.qID, r.flag.strand))
                continue # ignore
            if not partial_ok and (r.sStart > self.min_var_pos or r.sEnd < self.max_var_pos):
                f_log.write("Ignore {0} because: aln too short, from {1}-{2}.\n".format(r.qID, r.sStart+1, r.sEnd))
                continue
            i, msg = self.match_haplotype(r, str(seq_dict[r.qID].seq).upper(), partial_ok)
            if i is None: # read is rejected for reason listed in <msg>
                f_log.write("Ignore {0} because: {1}.\n".format(r.qID, msg))
                continue
            else:
                f_log.write("{0} phased: haplotype {1}={2}\n".format(r.qID, i, self.haplotypes[i]))
                print("{0} has haplotype {1}:{2}".format(r.qID, i, self.haplotypes[i]))
                self.seq_hap_info[r.qID] = i
    def match_haplotype(self, r, s, partial_ok=False):
        """
        Match an alignment record to existing haplotypes or create a new one.
        Helper function for self.phase_variant()

        :param r: CCS alignment (SAM record)
        :param s: CCS sequence (in strand), must be plain str and every base is upper case
        :param partial_ok: default False. if True, (CCS) reads don't need to cover all SNP positions.
        :return: (haplotype_index, msg) or (None, msg) if variants don't match w/ called SNPs
        """
        assert type(s) is str and str.isupper(s)
        assert r.flag.strand == self.vc.expected_strand
        # m: mapping of 0-based seq --> 0-based ref position
        # rev_map: mapping of 0-based ref position --> 0-based seq
        m = get_base_to_base_mapping_from_sam(r.segments, r.cigar, r.qStart, r.qEnd, r.flag.strand)
        ref_m = dict((v,k) for k,v in m.items())
        # go through each variant
        # <hap> to represent the concatenated string of all variant positions for this seq
        # ex: if there are three var positions, a hap would be "ATG" or "A?G" (if partial_ok is True), etc.
        hap = ''
        impute_later = False
        for ref_pos in self.accepted_pos:
            if ref_pos not in ref_m:
                if partial_ok: # read does not cover one of the SNP positions, so use "?"
                    hap += "?"
                else:
                    return None, "Does not have base at ref_pos {0}.\n".format(ref_pos)
            else:
                base = s[ref_m[ref_pos]]
                if self.vc.expected_strand == '-': # must convert the base to the rev comp
                    base = str(Seq(base).reverse_complement()).upper()
                if base in self.accepted_vars_by_pos[ref_pos]:
                    hap += base
                else: # contains a base at a variant position that is not called. Try to impute.
                    hap += base
                    impute_later = True
        if all(b=='?' for b in hap):
            return None, "Does not cover any variant base."
        if impute_later:
            impute_i = self.haplotypes.impute_haplotype(hap, min_score=3)
            if impute_i is None:
                return None, "Seq {0} contained non-called variant. Impute failed.\n".format(hap)
            else:
                return impute_i, "IMPUTED"
        return self.haplotypes.match_or_add_haplotype(hap_string=hap)
def phase_isoforms(read_stat_filename, seqids, phaser):
    """
    :param read_stat_filename: the .read_stat file that has columns <id> and <pbid>, where <id> is CCS id and <pbid> is PB.X.Y
    :param seqids: CCS IDs that were used to create the haplotypes.
    :param phaser: VariantPhaser object that contains the haplotype and seqid->haplotype information.
    :return: dict of isoform --> dict of haplotype count, ex: {'PB.45.1': {0:10, 1:20}}
         which means PB.45.1 has haplotype 0 supported by 10 CCS reads and hap 1 supported by 20 CCS reads.

    *NOTE* currently uses FL CCS reads only (even if the SNPs may have been called by FL+nFL CCS SAM)
    """
    result = {}  # isoform --> (haplotype index --> CCS count supporting it)
    # from read stat, gather which isoforms have which (CCS) seq members.
    isoforms = defaultdict(list)  # key: PB.X.Y, value: list of seqid members
    # `with` closes the file handle (the original left it open).
    with open(read_stat_filename) as stat_f:
        for r in DictReader(stat_f, delimiter='\t'):
            if r['id'] in seqids and r['is_fl'] == 'Y':
                isoforms[r['pbid']].append(r['id'])
    # for each isoform, look at the CCS membership to know which haplotypes are expressed
    for _iso, _seqids in isoforms.items():
        tally = Counter()  # haplotype index --> count (of CCS)
        for seqid in _seqids:
            if seqid in phaser.seq_hap_info:  # some CCS (seqids) may not have been used by the phaser, so account for that
                tally[phaser.seq_hap_info[seqid]] += 1
        if len(tally) > 0:
            result[_iso] = dict(tally)
    return result
class Haplotypes(object):
"""
Storing haplotypes for a loci.
self.haplotype[i] is the i-th haplotype.
if N = len(self.haplotype[i]), then there are N variants along the loci.
self.hap_var_positions[j] means that the j-th variant corressponds to (0-based) position on the ref genome.
"""
    def __init__(self, var_positions, ref_at_pos, count_of_vars_by_pos):
        """
        :param var_positions: sorted list of (0-based) variant positions
        :param ref_at_pos: dict of (0-based) variant position --> ref base at this position
        :param count_of_vars_by_pos: 0-based pos --> (NOT strand sense, but ref-based) base --> count
        """
        self.haplotypes = [] # haplotypes, where haplotypes[i] is the i-th distinct haplotype of all var concat
        self.hap_var_positions = var_positions # j-th variant <--> (0-based) ref position
        self.ref_at_pos = ref_at_pos # dict of (0-based) pos --> ref base
        self.alt_at_pos = None # init: None, later: dict of (0-based) pos --> unique list of alt bases
        self.count_of_vars_by_pos = count_of_vars_by_pos # per-position observed base counts
        self.haplotype_vcf_index = None # init: None, later: dict of (hap index) --> (0-based) var pos --> phase (0 for ref, 1+ for alt)
        # sanity check: all variant positions must be present
        self.sanity_check()
    def __getitem__(self, ith):
        """
        Returns the <ith>-th haplotype string
        """
        return self.haplotypes[ith]
    def __str__(self):
        """Human-readable summary: variant positions plus one haplotype per line."""
        # NOTE: the newlines/indentation inside the literal below are part of
        # the returned string itself; do not reformat.
        return """
        var positions: {pp}
        haplotypes: \n{h}
        """.format(pp=",".join(map(str,self.hap_var_positions)),
                   h="\n".join(self.haplotypes))
    def sanity_check(self):
        """
        Sanity check the following:
        -- variant positions are properly recorded and concordant
        -- alt bases are truly alt and unique
        -- all haplotypes are the same length
        """
        for pos in self.hap_var_positions:
            assert pos in self.ref_at_pos
        # alt_at_pos is None until get_haplotype_vcf_assignment() fills it in
        if self.alt_at_pos is not None:
            for pos in self.alt_at_pos:
                # ref base must not be in alt
                assert self.ref_at_pos[pos] not in self.alt_at_pos[pos]
                # alt bases must be unique
                assert len(self.alt_at_pos[pos]) == len(set(self.alt_at_pos[pos]))
        if len(self.haplotypes) >= 1:
            # every haplotype string covers exactly one base per variant position
            n = len(self.haplotypes[0])
            assert n == len(self.hap_var_positions)
            for hap_str in self.haplotypes[1:]:
                assert len(hap_str) == n
def match_or_add_haplotype(self, hap_string):
"""
If <hap_string> is an existing haplotype, return the index.
Otherwise, add to known haplotypes and return the new index.
:return: <index>, "FOUND" or "NEW"
"""
if hap_string in self.haplotypes:
i = self.haplotypes.index(hap_string)
return i, "FOUND"
else:
i = len(self.haplotypes)
self.haplotypes.append(hap_string)
return i, "NEW"
def impute_haplotype(self, hap_string, min_score):
"""
:param hap_string: a hap string with '?'s.
:param min_sim: minimum similarity with existing haplotype to accept assignment
:return: <index> of an existing haplotype, or None if not sufficiently matched
Impute haplotype and only return a match if:
(a) score (similarity) is >= min_score
(b) the matching score for the best one is higher than the second best match
"""
sim_tuple = namedtuple('sim_tuple', 'index score')
sims = [] # list of sim_tuple
hap_str_len = len(hap_string)
for i in range(len(self.haplotypes)):
# Liz note: currently NOT checking whether existing haplotypes have '?'. I'm assuming no '?'.
score = sum((hap_string[k]==self.haplotypes[i][k]) for k in range(hap_str_len))
if score > 0:
sims.append(sim_tuple(index=i, score=score))
if len(sims) == 0:
return None
sims.sort(key=lambda x: x.score, reverse=True)
if sims[0].score >= min_score and (len(sims)==1 or sims[0].score > sims[1].score):
return sims[0].index
else:
return None
def get_haplotype_vcf_assignment(self):
    """
    Must be called before self.write_haplotype_to_vcf()
    This is preparing for writing out VCF. We need to know, for each variant position,
    the ref base (already filled in self.ref_at_pos) and the alt bases (self.alt_at_pos).
    For each haplotype in (self.haplotype), we need to know whether the i-th variant is the
    ref (index 0), or some alt base (index 1 and onwards).
    Propagates two variables:
    self.haplotype_vcf_index: hap index --> pos --> phase index (0 for ref, 1+ for alt)
    self.alt_at_pos: dict of <0-based pos> --> alt bases (that are not ref) at this position
    """
    self.haplotype_vcf_index = [{} for i in range(len(self.haplotypes))]
    self.alt_at_pos = {}
    # what happens in the case of partial phasing
    # ex: self.haplotypes[0] = "A?G", this means when it comes to the second pos, pos2,
    # in the VCF we would want to write out .|. for diploid, . for haploid, etc
    # so let's set self.haplotype_vcf_index[0][pos2] = '.' to indicate that
    for i,pos in enumerate(self.hap_var_positions):
        ref = self.ref_at_pos[pos]
        # need to go through the haplotype bases, if ref is already represented, then don't put it in alt
        self.alt_at_pos[pos] = []
        for hap_i, hap_str in enumerate(self.haplotypes):
            base = hap_str[i]
            if base=='?': # means this haplotype does not cover this position!
                self.haplotype_vcf_index[hap_i][pos] = '.'
            elif base==ref: # is the ref base
                self.haplotype_vcf_index[hap_i][pos] = 0
            else: # is an alt base, see if it's already there
                if base in self.alt_at_pos[pos]:
                    j = self.alt_at_pos[pos].index(base)
                    self.haplotype_vcf_index[hap_i][pos] = j + 1 # always +1, buz alt starts at 1 (0 is ref)
                else:
                    j = len(self.alt_at_pos[pos])
                    self.alt_at_pos[pos].append(base)
                    self.haplotype_vcf_index[hap_i][pos] = j + 1 # always +1, buz alt starts at 1 (0 is ref)
        # in the case where partial_ok=False, it's possible some alt are never presented by a haplotype
        # we must check that all variants are presented here
        for _base in self.count_of_vars_by_pos[pos]:
            # NOTE(review): `not in` on ref_at_pos[pos] is a substring test; it is
            # equivalent to != only when the ref entry is a single base — confirm.
            if (_base not in self.ref_at_pos[pos]) and (_base not in self.alt_at_pos[pos]):
                self.alt_at_pos[pos].append(_base)
def write_haplotype_to_vcf(self, fake_genome_mapping_filename, isoform_tally, output_prefix):
    """
    Write the phased haplotypes out as <output_prefix>.vcf plus a
    human-readable summary <output_prefix>.human_readable.txt.

    The following function must first be called:
    -- self.get_haplotype_vcf_assignment

    :param fake_genome_mapping_filename: CSV lines of <fake pos>,<ref chr>,<ref pos>
        mapping 0-based positions on the fake genome to real ref coordinates
    :param isoform_tally: dict of isoform name --> (dict of haplotype index --> count)
    :param output_prefix: prefix for the two output files
    """
    if self.haplotype_vcf_index is None or self.alt_at_pos is None:
        raise Exception("Must call self.get_haplotype_vcf_assignment() first!")
    self.sanity_check()
    name_isoforms = list(isoform_tally.keys())
    name_isoforms.sort()
    # write a fake VCF example so we can read the headers in
    # NOTE(review): 'template.vcf' is created in the current working directory
    # and never removed afterwards.
    with open('template.vcf', 'w') as f:
        f.write(__VCF_EXAMPLE__)
    reader = vcf.VCFReader(open('template.vcf'))
    reader.samples = name_isoforms
    f_vcf = vcf.Writer(open(output_prefix+'.vcf', 'w'), reader)
    # human readable text:
    # first line: assoc VCF filename
    # second line: haplotype, list of sorted isoforms
    # third line onwards: haplotype and assoc count
    f_human = open(output_prefix+'.human_readable.txt', 'w')
    f_human.write("Associated VCF file: {0}.vcf\n".format(output_prefix))
    f_human.write("haplotype\t{samples}\n".format(samples="\t".join(name_isoforms)))
    for hap_index,hap_str in enumerate(self.haplotypes):
        f_human.write(hap_str)
        # one column per isoform: count of reads of this isoform with this haplotype
        for _iso in name_isoforms:
            if hap_index in isoform_tally[_iso]:
                f_human.write("\t{0}".format(isoform_tally[_iso][hap_index]))
            else:
                f_human.write("\t0")
        f_human.write('\n')
    f_human.close()
    # read fake genome mapping file
    fake_map = {} # 0-based position on fake --> (chr, 0-based ref position)
    with open(fake_genome_mapping_filename) as f:
        for line in f:
            fake_pos, ref_chr, ref_pos = line.strip().split(',')
            fake_map[int(fake_pos)] = (ref_chr, int(ref_pos))
    # for each position, write out the ref and alt bases
    # then fill in for each isoform (aka "sample"):
    # if this isoform only shows one allele, then it's just that allele (0 for ref, 1+ otherwise)
    # if this isoform shows 2+ allele, then the first allele is indicated by self.haplotypes[0]
    for i,pos in enumerate(self.hap_var_positions):
        ref_chr, ref_pos = fake_map[pos]
        total_count = sum(self.count_of_vars_by_pos[pos].values())
        # allele frequency of each alt base at this position, as 2-decimal strings
        alt_freq = ["{0:.2f}".format(self.count_of_vars_by_pos[pos][b]*1./total_count) for b in self.alt_at_pos[pos]]
        rec = vcf.model._Record(CHROM=ref_chr,
                                POS=ref_pos+1,
                                ID='.',
                                REF=self.ref_at_pos[pos],
                                ALT=[vcf.model._Substitution(b) for b in self.alt_at_pos[pos]],
                                QUAL='.',
                                FILTER='PASS',
                                INFO={'AF':alt_freq, 'DP':total_count},
                                FORMAT="GT:HQ",
                                sample_indexes=None)
        samp_ft = vcf.model.make_calldata_tuple(['GT', 'HQ'])
        rec.samples = []
        for _iso in name_isoforms:
            # isoform_tally[_iso] is a dict of haplotype index --> count
            # the index for the base at this pos would thus be haplotype_vcf_index[hap_index][pos]
            # we always need to show the phases in haplotype index order sorted
            hap_indices = list(isoform_tally[_iso].keys())
            hap_indices.sort()
            genotype = "|".join(str(self.haplotype_vcf_index[hap_index][pos]) for hap_index in hap_indices)
            counts = ",".join(str(isoform_tally[_iso][hap_index]) for hap_index in hap_indices)
            rec.samples.append(vcf.model._Call(rec, _iso, samp_ft(*[genotype, counts])))
        f_vcf.write_record(rec)
    f_vcf.close()
def get_base_to_base_mapping_from_aligned_pairs(reftuple, qLen, strand):
    """
    Build a query-to-reference base mapping from pysam-style aligned pairs.

    :param reftuple: list of (qpos, rpos) tuples as returned by pysam's
        AlignedSegment.get_aligned_pairs(); either member may be None
        (insertion, deletion, or clipping).
    :param qLen: length of the query sequence.
    :param strand: '+' or '-'; for '-' the query coordinates are flipped
        (qLen-1-qpos) so keys refer to the reverse-complemented read.
    :return: dict of 0-based query position --> 0-based ref position.
        A query base that aligns to a gap is anchored to the most recent
        reference position seen (or to the first pair's rpos before any
        aligned base). Returns {} for an empty pair list.
    """
    # BUGFIX: reftuple[0] raised IndexError on an empty alignment.
    if not reftuple:
        return {}
    # Most recent reference coordinate seen; inserted query bases anchor to it.
    cur_genome_loc = reftuple[0][1]
    mapping = {}
    for qpos, rpos in reftuple:
        if qpos is not None and rpos is not None:
            mapping[qpos] = (rpos, True)  # true base-to-base alignment
        elif qpos is not None:
            mapping[qpos] = (cur_genome_loc, None)  # insertion: anchor to last ref pos
        if rpos is not None:
            cur_genome_loc = rpos
    if strand == '-':
        # flip query coordinates for reverse-strand alignments
        mapping = dict((qLen-1-k, v) for k, v in mapping.items())
    # strip the match/insertion flag, keeping only the ref coordinate
    for k in mapping:
        mapping[k] = mapping[k][0]
    return mapping
class MagVariantPhaser(object):
    """
    Phase (CCS) reads over the variant positions called by an MPileUPVariant
    instance, collecting distinct haplotypes and per-read assignments.
    """
    def __init__(self, vc):
        """
        :param vc: MPileUPVariant instance.
        """
        self.vc = vc
        self.min_var_pos = min(vc.variant)  # minimum 0-based position of a called variant
        self.max_var_pos = max(vc.variant)  # maximum 0-based position of a called variant
        self.accepted_vars_by_pos = {}  # 0-based pos --> list of accepted, (NOT strand sense) base
        self.count_of_vars_by_pos = {}  # 0-based pos --> (NOT strand sense, but ref-based) base --> count
        self.accepted_pos = []  # sorted list of variant positions (0-based, ref)
        # process vc.variant which is
        # dict of 0-based pos --> desc list of (base, count)
        # ex: {1565: [('a', 49), ('g', 36)]}
        # lower case means at pos 1565, we expect - strand mapping and
        # seq base is 'T' on the sense strand
        # this converts to self.accepted_vars_by_pos[1565] = ['A', 'G']
        # later, when we are matching back to transcript seq, need to watch for strand!
        for pos, vars in vc.variant.items():
            self.accepted_vars_by_pos[pos] = [_base.upper() for _base, _count in vars]
            self.count_of_vars_by_pos[pos] = dict((_base.upper(), _count) for _base, _count in vars)
        self.accepted_pos = sorted(self.accepted_vars_by_pos.keys())
        self.haplotypes = MagHaplotypes(self.accepted_pos,
                                        [self.vc.ref_name[p] for p in self.accepted_pos],
                                        self.vc.ref_base,
                                        self.count_of_vars_by_pos)
        self.seq_hap_info = {}  # haplotype assignment, key: (CCS) seqid, value: haplotype index

    def phase_variant(self, sam_filename, coordstr, output_prefix, partial_ok=False):
        """
        :param sam_filename: CCS BAM/SAM filename. Can be unsorted (must support fetch).
        :param coordstr: list of [contig, start, end]
        :param output_prefix: Output prefix. Appends to <output_prefix>.log.
        :param partial_ok: default False. if True, (CCS) reads don't need to cover all SNP positions.

        For each alignment:
        1. discard if unmapped
        2. discard if did not map to the full range of variants (unless <partial_ok> is True)
        3. discard if at var positions have non-called bases (outliers)
        """
        contig, start, end = coordstr
        secondary_align_counts = 0
        tot_align_counts = 0
        # BUGFIX: the log file handle was previously never closed; use a context manager.
        with open(output_prefix + '.log', 'a+') as f_log:
            with pysam.AlignmentFile(sam_filename, 'rb') as samfile:
                for s in samfile.fetch(contig, start, end):
                    tot_align_counts += 1
                    if s.reference_name == '*':
                        f_log.write("Ignore {0} because: unmapped.\n".format(s.query_name))
                        continue
                    if not partial_ok and (s.reference_start > self.min_var_pos or s.reference_end < self.max_var_pos):
                        # BUGFIX: was `s.referenc_start` (AttributeError whenever this branch ran)
                        f_log.write("Ignore {0} because: aln too short, from {1}-{2}.\n".format(
                            s.query_name, s.reference_start + 1, s.reference_end))
                        continue
                    if s.is_secondary:
                        secondary_align_counts += 1
                        continue
                    seqstr = s.query_sequence.upper()
                    i, msg = self.match_haplotype(s, seqstr, partial_ok)
                    if i is None:  # read is rejected for reason listed in <msg>
                        f_log.write("Ignore {0} because: {1}.\n".format(s.query_name, msg))
                        continue
                    else:
                        f_log.write("{0} phased: haplotype {1}={2}\n".format(s.query_name, i, self.haplotypes[i]))
                        print("{0} has haplotype {1}:{2}".format(s.query_name, i, self.haplotypes[i]))
                        self.seq_hap_info[s.query_name] = i
            f_log.write(f'Encountered {secondary_align_counts} out of {tot_align_counts} read alignments')

    def match_haplotype(self, r, s, partial_ok=False):
        """
        Match an alignment record to existing haplotypes or create a new one.
        Helper function for self.phase_variant()
        :param r: CCS alignment (pysam record)
        :param s: CCS sequence (in strand), must be plain str and every base is upper case
        :param partial_ok: default False. if True, (CCS) reads don't need to cover all SNP positions.
        :return: (haplotype_index, msg) or (None, msg) if variants don't match w/ called SNPs
        """
        try:
            assert type(s) is str and str.isupper(s)
        except Exception:
            # NOTE(review): the precondition violation is only reported, not enforced;
            # execution deliberately continues, matching the original behavior.
            print(f'exception: {s}')
        # m: mapping of 0-based seq position --> 0-based ref position
        # ref_m: the inverse, 0-based ref position --> 0-based seq position
        strand = '-' if r.is_reverse else '+'
        m = get_base_to_base_mapping_from_aligned_pairs(r.get_aligned_pairs(), len(r.query_sequence), strand)
        ref_m = dict((v, k) for k, v in m.items())
        # <hap> is the concatenated string of this read's bases at all variant positions
        # ex: with three var positions a hap could be "ATG" or "A?G" (if partial_ok is True), etc.
        hap = ''
        impute_later = False
        for ref_pos in self.accepted_pos:
            if ref_pos not in ref_m:
                if partial_ok:  # read does not cover one of the SNP positions, so use "?"
                    hap += "?"
                else:
                    return None, "Does not have base at ref_pos {0}.\n".format(ref_pos)
            else:
                base = s[ref_m[ref_pos]]
                hap += base
                if base not in self.accepted_vars_by_pos[ref_pos]:
                    # contains a base at a variant position that is not called -> try to impute
                    impute_later = True
        if all(b == '?' for b in hap):
            return None, "Does not cover any variant base."
        if impute_later:
            impute_i = self.haplotypes.impute_haplotype(hap, min_score=3)
            if impute_i is None:
                return None, "Seq {0} contained non-called variant. Impute failed.\n".format(hap)
            else:
                return impute_i, "IMPUTED"
        return self.haplotypes.match_or_add_haplotype(hap_string=hap)
class MagHaplotypes(object):
    """
    Storing haplotypes for a locus.
    self.haplotypes[i] is the i-th haplotype.
    if N = len(self.haplotypes[i]), then there are N variants along the locus.
    self.hap_var_positions[j] means that the j-th variant corresponds to a (0-based) position on the ref genome.
    """
    def __init__(self, var_positions, chrs, ref_at_pos, count_of_vars_by_pos):
        """
        :param var_positions: sorted list of (0-based) variant positions
        :param chrs: contig names where chrs[i] is the i-th contig name
        :param ref_at_pos: dict of (0-based) variant position --> ref base at this position
        :param count_of_vars_by_pos: 0-based pos --> (NOT strand sense, but ref-based) base --> count
        """
        self.haplotypes = [] # haplotypes, where haplotypes[i] is the i-th distinct haplotype of all var concat
        self.hap_var_positions = var_positions
        self.ref_at_pos = ref_at_pos # dict of (0-based) pos --> ref base
        self.alt_at_pos = None # init: None, later: dict of (0-based) pos --> unique list of alt bases
        self.count_of_vars_by_pos = count_of_vars_by_pos
        self.haplotype_vcf_index = None # init: None, later: dict of (hap index) --> (0-based) var pos --> phase (0 for ref, 1+ for alt)
        self.chrs = chrs # contig names where chrs[i] is the i-th contig name
        # sanity check: all variant positions must be present
        self.sanity_check()
    def __getitem__(self, ith):
        """
        Returns the <i>-th haplotype
        """
        return self.haplotypes[ith]
    def __str__(self):
        return """
var positions: {pp}
haplotypes: \n{h}
""".format(pp=",".join(map(str,self.hap_var_positions)),
           h="\n".join(self.haplotypes))
    def sanity_check(self):
        """
        Sanity check the following:
        -- variant positions are properly recorded and concordant
        -- alt bases are truly alt and unique
        -- all haplotypes are the same length
        """
        for pos in self.hap_var_positions:
            assert pos in self.ref_at_pos
        if self.alt_at_pos is not None:
            for pos in self.alt_at_pos:
                # ref base must not be in alt
                assert self.ref_at_pos[pos] not in self.alt_at_pos[pos]
                # alt bases must be unique
                assert len(self.alt_at_pos[pos]) == len(set(self.alt_at_pos[pos]))
        if len(self.haplotypes) >= 1:
            # every haplotype string must cover exactly one base per variant position
            n = len(self.haplotypes[0])
            assert n == len(self.hap_var_positions)
            for hap_str in self.haplotypes[1:]:
                assert len(hap_str) == n
    def match_or_add_haplotype(self, hap_string):
        """
        If <hap_string> is an existing haplotype, return the index.
        Otherwise, add to known haplotypes and return the new index.
        :return: <index>, "FOUND" or "NEW"
        """
        if hap_string in self.haplotypes:
            i = self.haplotypes.index(hap_string)
            return i, "FOUND"
        else:
            i = len(self.haplotypes)
            self.haplotypes.append(hap_string)
            return i, "NEW"
    def impute_haplotype(self, hap_string, min_score):
        """
        :param hap_string: a hap string with '?'s.
        :param min_score: minimum similarity with existing haplotype to accept assignment
        :return: <index> of an existing haplotype, or None if not sufficiently matched
        Impute haplotype and only return a match if:
        (a) score (similarity) is >= min_score
        (b) the matching score for the best one is strictly higher than the second best match
            (a tie between the two best candidates is rejected)
        """
        sim_tuple = namedtuple('sim_tuple', 'index score')
        sims = [] # list of sim_tuple
        hap_str_len = len(hap_string)
        for i in range(len(self.haplotypes)):
            # Liz note: currently NOT checking whether existing haplotypes have '?'. I'm assuming no '?'.
            # score = number of positions where the two hap strings agree
            score = sum((hap_string[k]==self.haplotypes[i][k]) for k in range(hap_str_len))
            if score > 0:
                sims.append(sim_tuple(index=i, score=score))
        if len(sims) == 0:
            return None
        sims.sort(key=lambda x: x.score, reverse=True)
        if sims[0].score >= min_score and (len(sims)==1 or sims[0].score > sims[1].score):
            return sims[0].index
        else:
            return None
    def get_haplotype_vcf_assignment(self):
        """
        Must be called before self.write_haplotype_to_vcf()
        This is preparing for writing out VCF. We need to know, for each variant position,
        the ref base (already filled in self.ref_at_pos) and the alt bases (self.alt_at_pos).
        For each haplotype in (self.haplotypes), we need to know whether the i-th variant is the
        ref (index 0), or some alt base (index 1 and onwards).
        Propagates two variables:
        self.haplotype_vcf_index: hap index --> pos --> phase index (0 for ref, 1+ for alt)
        self.alt_at_pos: dict of <0-based pos> --> alt bases (that are not ref) at this position
        """
        self.haplotype_vcf_index = [{} for i in range(len(self.haplotypes))]
        self.alt_at_pos = {}
        # what happens in the case of partial phasing
        # ex: self.haplotypes[0] = "A?G", this means when it comes to the second pos, pos2,
        # in the VCF we would want to write out .|. for diploid, . for haploid, etc
        # so let's set self.haplotype_vcf_index[0][pos2] = '.' to indicate that
        for i,pos in enumerate(self.hap_var_positions):
            ref = self.ref_at_pos[pos]
            # need to go through the haplotype bases, if ref is already represented, then don't put it in alt
            self.alt_at_pos[pos] = []
            for hap_i, hap_str in enumerate(self.haplotypes):
                base = hap_str[i]
                if base=='?': # means this haplotype does not cover this position!
                    self.haplotype_vcf_index[hap_i][pos] = '.'
                elif base==ref: # is the ref base
                    self.haplotype_vcf_index[hap_i][pos] = 0
                else: # is an alt base, see if it's already there
                    if base in self.alt_at_pos[pos]:
                        j = self.alt_at_pos[pos].index(base)
                        self.haplotype_vcf_index[hap_i][pos] = j + 1 # always +1, buz alt starts at 1 (0 is ref)
                    else:
                        j = len(self.alt_at_pos[pos])
                        self.alt_at_pos[pos].append(base)
                        self.haplotype_vcf_index[hap_i][pos] = j + 1 # always +1, buz alt starts at 1 (0 is ref)
            # in the case where partial_ok=False, it's possible some alt are never presented by a haplotype
            # we must check that all variants are presented here
            for _base in self.count_of_vars_by_pos[pos]:
                # NOTE(review): `not in` on ref_at_pos[pos] is a substring test; it is
                # equivalent to != only when the ref entry is a single base — confirm.
                if (_base not in self.ref_at_pos[pos]) and (_base not in self.alt_at_pos[pos]):
                    self.alt_at_pos[pos].append(_base)
    def write_haplotype_to_humanreadable(self, contig, f_human1, f_human2, seq_hap_info):
        """
        The following function must first be called:
        -- self.get_haplotype_vcf_assignment
        :param contig: contig name written into every output row
        :param f_human1: human readable tab file handle, one SNP per line
        :param f_human2: human readable tab file handle, one allele per line
        :param seq_hap_info: dict of (CCS) seq id --> assigned haplotype index
        """
        if self.haplotype_vcf_index is None or self.alt_at_pos is None:
            raise Exception("Must call self.get_haplotype_vcf_assignment() first!")
        self.sanity_check()
        # number of reads assigned to each haplotype
        hap_count = Counter()
        for ccs_id, hap_index in seq_hap_info.items():
            hap_count[hap_index] += 1
        # expected column headers (written by the caller):
        # f_human1.write("haplotype\thapIdx\tcontig\tpos\tvarIdx\tbase\tcount\n")
        # f_human2.write("haplotype\thapIdx\tcontig\tcount\n")
        for hap_index,hap_str in enumerate(self.haplotypes):
            f_human2.write(f'{hap_str}\t{hap_index}\t{contig}\t')
            f_human2.write(str(hap_count[hap_index]) + '\n')
            for pos_index,pos in enumerate(self.hap_var_positions):
                i = self.haplotype_vcf_index[hap_index][pos]
                if i == '.': # means this haplotype does not include this position, skip!
                    continue
                assert type(i) is int
                f_human1.write(f'{hap_str}\t{hap_index}\t{contig}\t')
                f_human1.write(str(pos+1)+'\t')        # 1-based position
                f_human1.write(str(pos_index+1)+'\t')  # 1-based variant index
                if i == 0:
                    base = self.ref_at_pos[pos]
                    f_human1.write("REF\t")
                else:
                    base = self.alt_at_pos[pos][i-1]
                    f_human1.write("ALT" + str(i-1) + '\t')
                #if i>0: pdb.set_trace()
                f_human1.write(str(self.count_of_vars_by_pos[pos][base]) + '\n')
| 47.518318
| 153
| 0.602696
| 5,048
| 35,021
| 4.016442
| 0.098059
| 0.01455
| 0.016424
| 0.021899
| 0.753835
| 0.732676
| 0.722367
| 0.707275
| 0.697707
| 0.690999
| 0
| 0.009762
| 0.29508
| 35,021
| 736
| 154
| 47.58288
| 0.81152
| 0.357271
| 0
| 0.61165
| 0
| 0
| 0.062742
| 0.008806
| 0
| 0
| 0
| 0
| 0.033981
| 1
| 0.06068
| false
| 0.002427
| 0.021845
| 0.004854
| 0.15534
| 0.007282
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
729240850edac5cd8418657f0002acc9a4f8d5d5
| 11,946
|
py
|
Python
|
fedot/core/operations/evaluation/operation_implementations/models/ts_implementations.py
|
bahia14/Fedot_Times_Series_Forecast
|
995751068733541ba2f546065082709ce0fb63ae
|
[
"BSD-3-Clause"
] | null | null | null |
fedot/core/operations/evaluation/operation_implementations/models/ts_implementations.py
|
bahia14/Fedot_Times_Series_Forecast
|
995751068733541ba2f546065082709ce0fb63ae
|
[
"BSD-3-Clause"
] | null | null | null |
fedot/core/operations/evaluation/operation_implementations/models/ts_implementations.py
|
bahia14/Fedot_Times_Series_Forecast
|
995751068733541ba2f546065082709ce0fb63ae
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import Optional
import numpy as np
from scipy import stats
from statsmodels.tsa.api import STLForecast
from statsmodels.tsa.ar_model import AutoReg
from statsmodels.tsa.arima.model import ARIMA
from fedot.core.log import Log
from fedot.core.operations.evaluation.operation_implementations.data_operations.ts_transformations import _ts_to_table
from fedot.core.operations.evaluation. \
operation_implementations.implementation_interfaces import ModelImplementation
from fedot.core.repository.dataset_types import DataTypesEnum
class ARIMAImplementation(ModelImplementation):
    """ARIMA model (with Box-Cox preprocessing) for time series forecasting."""

    def __init__(self, log: Log = None, **params: Optional[dict]):
        super().__init__(log)
        self.params = params
        self.arima = None          # fitted statsmodels ARIMA results
        self.lmbda = None          # Box-Cox lambda found during fit
        self.scope = None          # shift applied to make the series positive (None if not needed)
        self.actual_ts_len = None  # length of the training series
        self.sts = None            # source training series

    # TODO for some configuration of p,d,q got ValueError
    def fit(self, input_data):
        """ Fit ARIMA model on data

        :param input_data: data with features, target and ids to process
        """
        source_ts = np.array(input_data.features)
        # Save actual time series length
        self.actual_ts_len = len(source_ts)
        self.sts = source_ts

        # Box-Cox requires strictly positive values; shift the series if needed.
        # (Fixes the original `if min_value > 0: pass / else:` empty-branch anti-pattern.)
        min_value = np.min(source_ts)
        if min_value <= 0:
            # Making a shift to positive values
            self.scope = abs(min_value) + 1
            source_ts = source_ts + self.scope

        transformed_ts, self.lmbda = stats.boxcox(source_ts)

        if not self.params:
            # Default parameters
            self.params = {'p': 2, 'd': 0, 'q': 2}
        p = int(self.params.get('p'))
        d = int(self.params.get('d'))
        q = int(self.params.get('q'))
        params = {'order': (p, d, q)}
        self.arima = ARIMA(transformed_ts, **params).fit()
        return self.arima

    def predict(self, input_data, is_fit_pipeline_stage: bool):
        """ Method for time series prediction on forecast length

        :param input_data: data with features, target and ids to process
        :param is_fit_pipeline_stage: is this fit or predict stage for pipeline
        :return output_data: output data with forecasted time series
        """
        parameters = input_data.task.task_params
        forecast_length = parameters.forecast_length
        old_idx = input_data.idx
        target = input_data.target
        # For training pipeline get fitted data
        if is_fit_pipeline_stage:
            fitted_values = self.arima.fittedvalues
            fitted_values = self._inverse_boxcox(predicted=fitted_values,
                                                 lmbda=self.lmbda)
            # Undo shift operation
            fitted_values = self._inverse_shift(fitted_values)
            diff = int(self.actual_ts_len - len(fitted_values))
            # If first elements were skipped by the model
            if diff != 0:
                # Fill nans with first values
                first_element = fitted_values[0]
                first_elements = [first_element] * diff
                first_elements.extend(list(fitted_values))
                fitted_values = np.array(first_elements)
            _, predict = _ts_to_table(idx=old_idx,
                                      time_series=fitted_values,
                                      window_size=forecast_length)
            new_idx, target_columns = _ts_to_table(idx=old_idx,
                                                   time_series=target,
                                                   window_size=forecast_length)
            # Update idx and target
            input_data.idx = new_idx
            input_data.target = target_columns
        # For predict stage we can make prediction
        else:
            start_id = old_idx[-1] - forecast_length + 1
            end_id = old_idx[-1]
            predicted = self.arima.predict(start=start_id,
                                           end=end_id)
            predicted = self._inverse_boxcox(predicted=predicted,
                                             lmbda=self.lmbda)
            # Undo shift operation
            predict = self._inverse_shift(predicted)
            # Convert one-dim array as column
            predict = np.array(predict).reshape(1, -1)
            new_idx = np.arange(start_id, end_id + 1)
            # Update idx
            input_data.idx = new_idx
        # Update idx and features
        output_data = self._convert_to_output(input_data,
                                              predict=predict,
                                              data_type=DataTypesEnum.table)
        return output_data

    def get_params(self):
        """Return model hyperparameters."""
        return self.params

    @staticmethod
    def _inverse_boxcox(predicted, lmbda):
        """ Apply inverse Box-Cox transformation """
        if lmbda == 0:
            return np.exp(predicted)
        else:
            return np.exp(np.log(lmbda * predicted + 1) / lmbda)

    def _inverse_shift(self, values):
        """ Undo the positivity shift applied before Box-Cox (no-op if no shift) """
        # Fixes the original `if self.scope is None: pass / else:` empty-branch anti-pattern.
        if self.scope is not None:
            values = values - self.scope
        return values
class AutoRegImplementation(ModelImplementation):
    """Autoregressive (AutoReg) model for time series forecasting."""

    def __init__(self, log: Log = None, **params: Optional[dict]):
        super().__init__(log)
        self.params = params
        self.actual_ts_len = None  # length of the training series
        self.autoreg = None        # fitted statsmodels AutoReg results

    def fit(self, input_data):
        """ Fit AutoReg model on data

        :param input_data: data with features, target and ids to process
        """
        history = np.array(input_data.features)
        self.actual_ts_len = len(history)
        if not self.params:
            # Default lags
            self.params = {'lag_1': 12, 'lag_2': 60}
        lags = [int(self.params.get('lag_1')), int(self.params.get('lag_2'))]
        self.autoreg = AutoReg(history, **{'lags': lags}).fit()
        return self.autoreg

    def predict(self, input_data, is_fit_pipeline_stage: bool):
        """ Method for time series prediction on forecast length

        :param input_data: data with features, target and ids to process
        :param is_fit_pipeline_stage: is this fit or predict stage for pipeline
        :return output_data: output data with forecasted time series
        """
        forecast_length = input_data.task.task_params.forecast_length
        old_idx = input_data.idx
        target = input_data.target
        if is_fit_pipeline_stage:
            in_sample = self.autoreg.predict(start=old_idx[0], end=old_idx[-1])
            # First n elements in time series are skipped by the model;
            # pad the front with the first fitted value.
            padded = [in_sample[0]] * (self.actual_ts_len - len(in_sample))
            padded.extend(list(in_sample))
            in_sample = np.array(padded)
            _, predict = _ts_to_table(idx=old_idx,
                                      time_series=in_sample,
                                      window_size=forecast_length)
            new_idx, target_columns = _ts_to_table(idx=old_idx,
                                                   time_series=target,
                                                   window_size=forecast_length)
            # Update idx and target
            input_data.idx = new_idx
            input_data.target = target_columns
        else:
            # Out-of-sample forecast over the last forecast_length indices
            start_id = old_idx[-1] - forecast_length + 1
            end_id = old_idx[-1]
            out_of_sample = self.autoreg.predict(start=start_id,
                                                 end=end_id)
            # Convert one-dim array to a single row
            predict = np.array(out_of_sample).reshape(1, -1)
            # Update idx
            input_data.idx = np.arange(start_id, end_id + 1)
        # Update idx and features
        return self._convert_to_output(input_data,
                                       predict=predict,
                                       data_type=DataTypesEnum.table)

    def get_params(self):
        """Return model hyperparameters."""
        return self.params
class STLForecastARIMAImplementation(ModelImplementation):
    """STL decomposition combined with an ARIMA model for forecasting."""

    def __init__(self, log: Log = None, **params: Optional[dict]):
        super().__init__(log)
        self.params = params
        self.model = None          # fitted STLForecast results
        self.lmbda = None
        self.scope = None
        self.actual_ts_len = None  # length of the training series
        self.sts = None            # source training series

    def fit(self, input_data):
        """ Fit STLForecast arima model on data

        :param input_data: data with features, target and ids to process
        """
        history = np.array(input_data.features)
        # Save actual time series length
        self.actual_ts_len = len(history)
        self.sts = history
        if not self.params:
            # Default parameters
            self.params = {'p': 2, 'd': 0, 'q': 2, 'period': 365}
        order = (int(self.params.get('p')),
                 int(self.params.get('d')),
                 int(self.params.get('q')))
        period = int(self.params.get('period'))
        model_settings = {'period': period, 'model_kwargs': {'order': order}}
        self.model = STLForecast(history, ARIMA, **model_settings).fit()
        return self.model

    def predict(self, input_data, is_fit_pipeline_stage: bool):
        """ Method for time series prediction on forecast length

        :param input_data: data with features, target and ids to process
        :param is_fit_pipeline_stage: is this fit or predict stage for pipeline
        :return output_data: output data with forecasted time series
        """
        forecast_length = input_data.task.task_params.forecast_length
        old_idx = input_data.idx
        target = input_data.target
        # For training pipeline get fitted data
        if is_fit_pipeline_stage:
            fitted_values = self.model.get_prediction(start=old_idx[0], end=old_idx[-1]).predicted_mean
            diff = int(self.actual_ts_len) - len(fitted_values)
            if diff != 0:
                # Leading elements were skipped; pad with the first fitted value
                padded = [fitted_values[0]] * diff
                padded.extend(list(fitted_values))
                fitted_values = np.array(padded)
            _, predict = _ts_to_table(idx=old_idx,
                                      time_series=fitted_values,
                                      window_size=forecast_length)
            new_idx, target_columns = _ts_to_table(idx=old_idx,
                                                   time_series=target,
                                                   window_size=forecast_length)
            # Update idx and target
            input_data.idx = new_idx
            input_data.target = target_columns
        else:
            # Out-of-sample forecast over the last forecast_length indices
            start_id = old_idx[-1] - forecast_length + 1
            end_id = old_idx[-1]
            forecast = self.model.get_prediction(start=start_id, end=end_id).predicted_mean
            # Convert one-dim array to a single row
            predict = np.array(forecast).reshape(1, -1)
            # Update idx
            input_data.idx = np.arange(start_id, end_id + 1)
        # Update idx and features
        return self._convert_to_output(input_data,
                                       predict=predict,
                                       data_type=DataTypesEnum.table)

    def get_params(self):
        """Return model hyperparameters."""
        return self.params
| 36.2
| 118
| 0.575674
| 1,395
| 11,946
| 4.700358
| 0.118996
| 0.049413
| 0.016471
| 0.020589
| 0.77871
| 0.757359
| 0.737075
| 0.702913
| 0.685222
| 0.679122
| 0
| 0.006669
| 0.347313
| 11,946
| 329
| 119
| 36.31003
| 0.834295
| 0.172192
| 0
| 0.676768
| 0
| 0
| 0.007869
| 0
| 0
| 0
| 0
| 0.00304
| 0
| 1
| 0.070707
| false
| 0.010101
| 0.050505
| 0.015152
| 0.19697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
72e834e553061b8aebab53754e351698630f6498
| 609
|
py
|
Python
|
scripts/tests/setup.py
|
NDevTK/cel
|
e97226416b6e12245564bfc1c3631d610d62f052
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/tests/setup.py
|
NDevTK/cel
|
e97226416b6e12245564bfc1c3631d610d62f052
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/tests/setup.py
|
NDevTK/cel
|
e97226416b6e12245564bfc1c3631d610d62f052
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Installs all dependencies required to run test.py.
'''
import subprocess

# TODO: Add Windows support
PIP_PACKAGES = [
    'absl-py',
    'google-api-python-client',
    'grpc-google-iam-admin-v1',
    'grpc-google-iam-v1',
]

# Install pip itself, then each required package (same call sequence as before).
subprocess.check_call(['apt-get', 'install', 'python-pip'])
for package in PIP_PACKAGES:
    subprocess.check_call(['pip', 'install', package])
| 33.833333
| 72
| 0.727422
| 89
| 609
| 4.921348
| 0.640449
| 0.171233
| 0.216895
| 0.200913
| 0.324201
| 0.191781
| 0.191781
| 0.191781
| 0
| 0
| 0
| 0.01105
| 0.108374
| 609
| 17
| 73
| 35.823529
| 0.79558
| 0.415435
| 0
| 0
| 0
| 0
| 0.398256
| 0.139535
| 0
| 0
| 0
| 0.058824
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
f402cceedb38ed018e632aec19ad1a850b040623
| 1,124
|
py
|
Python
|
tests/test_events.py
|
jvanbrug/cs143sim
|
4e510a7669a534b55b606cbe175142104ae4a92c
|
[
"MIT"
] | null | null | null |
tests/test_events.py
|
jvanbrug/cs143sim
|
4e510a7669a534b55b606cbe175142104ae4a92c
|
[
"MIT"
] | null | null | null |
tests/test_events.py
|
jvanbrug/cs143sim
|
4e510a7669a534b55b606cbe175142104ae4a92c
|
[
"MIT"
] | null | null | null |
from simpy.core import Environment
from cs143sim.events import FlowStart
from cs143sim.events import LinkAvailable
from cs143sim.events import PacketReceipt
from cs143sim.events import RoutingTableOutdated
from test_actors import basic_flow
from test_actors import basic_link
from test_actors import basic_packet
from test_actors import basic_router
def basic_environment():
    """Return a fresh simpy Environment for use as a test fixture."""
    return Environment()
def basic_flow_start():
    """Construct a FlowStart event from basic test fixtures."""
    env = basic_environment()
    FlowStart(env=env, delay=1.0, flow=basic_flow())
def basic_link_available():
    """Construct a LinkAvailable event from basic test fixtures."""
    env = basic_environment()
    LinkAvailable(env=env, delay=1.0, link=basic_link())
def basic_packet_receipt():
    """Construct a PacketReceipt event from basic test fixtures."""
    env = basic_environment()
    receiver = basic_router()
    PacketReceipt(env=env, delay=1.0, receiver=receiver,
                  packet=basic_packet())
def basic_update_routing_table():
    """Construct a RoutingTableOutdated event from basic test fixtures."""
    env = basic_environment()
    RoutingTableOutdated(env=env, delay=1.0,
                         router=basic_router())
def test_flow_start():
    """Smoke test: constructing a FlowStart event must not raise."""
    basic_flow_start()
def test_link_available():
    """Smoke test: constructing a LinkAvailable event must not raise."""
    basic_link_available()
def test_packet_receipt():
    """Smoke test: constructing a PacketReceipt event must not raise."""
    basic_packet_receipt()
def test_update_routing_table():
    """Smoke test: a ``RoutingTableOutdated`` event can be constructed without raising."""
    basic_update_routing_table()
| 22.938776
| 78
| 0.764235
| 144
| 1,124
| 5.652778
| 0.215278
| 0.04914
| 0.088452
| 0.117936
| 0.250614
| 0.127764
| 0
| 0
| 0
| 0
| 0
| 0.020986
| 0.152135
| 1,124
| 48
| 79
| 23.416667
| 0.833158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.310345
| true
| 0
| 0.310345
| 0.034483
| 0.655172
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
f41e1355b328be6cadaf3784932fcdefc9064e64
| 303
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_MISC/misc-examples/03_csBinarySearchTreeInOrderSuccessor.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/03_csBinarySearchTreeInOrderSuccessor.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/03_csBinarySearchTreeInOrderSuccessor.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
"""
What does the phrase "in-order successor" mean when we are talking about a node in a binary search tree?
A - the node that has the next lowest value
B - the node that has the maximum value
C - the node that has the minimuin value
D - the node that has the next highest value
answer is :
"""
| 18.9375
| 104
| 0.719472
| 56
| 303
| 3.892857
| 0.553571
| 0.12844
| 0.201835
| 0.256881
| 0.348624
| 0.192661
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231023
| 303
| 15
| 105
| 20.2
| 0.935622
| 0.957096
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
f424c41dabf2730fc770b299307eb3c76316defe
| 135
|
py
|
Python
|
send_email/apps.py
|
AthmanZiri/django-site
|
04c6e0967b628b8ebef1ca1caae8cee83c1a2f07
|
[
"MIT"
] | null | null | null |
send_email/apps.py
|
AthmanZiri/django-site
|
04c6e0967b628b8ebef1ca1caae8cee83c1a2f07
|
[
"MIT"
] | null | null | null |
send_email/apps.py
|
AthmanZiri/django-site
|
04c6e0967b628b8ebef1ca1caae8cee83c1a2f07
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.apps import AppConfig
class SendEmailConfig(AppConfig):
    """Django application configuration for the ``send_email`` app."""
    name = 'send_email'
| 16.875
| 39
| 0.8
| 16
| 135
| 6.375
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 135
| 7
| 40
| 19.285714
| 0.886957
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
f47f90ec352390a446cb134ed76b2f6da04ebc27
| 209
|
py
|
Python
|
nngen/onnx/shape.py
|
m1kit/nngen
|
c96457aa0f1681a8ba7c12881ea3d455eccffde0
|
[
"Apache-2.0"
] | 7
|
2020-06-08T13:36:13.000Z
|
2021-12-24T06:55:30.000Z
|
nngen/onnx/shape.py
|
m1kit/nngen
|
c96457aa0f1681a8ba7c12881ea3d455eccffde0
|
[
"Apache-2.0"
] | null | null | null |
nngen/onnx/shape.py
|
m1kit/nngen
|
c96457aa0f1681a8ba7c12881ea3d455eccffde0
|
[
"Apache-2.0"
] | 1
|
2021-03-12T03:51:56.000Z
|
2021-03-12T03:51:56.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def Shape(visitor, node):
    """Handle an ONNX ``Shape`` operator node.

    Visits the node's first input via *visitor* and returns that value's
    ``shape`` attribute as a tuple.

    :param visitor: object whose ``visit(name)`` resolves an input name to a value.
    :param node: ONNX node; only ``node.input[0]`` is used.
    :return: tuple of the input value's shape.
    """
    # Renamed from `input`, which shadowed the builtin of the same name.
    value = visitor.visit(node.input[0])
    return tuple(value.shape)
| 20.9
| 40
| 0.784689
| 28
| 209
| 5.357143
| 0.571429
| 0.2
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005618
| 0.148325
| 209
| 9
| 41
| 23.222222
| 0.837079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0.166667
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
be6ed34db6e4f27b954b3ede51144930fffd1e0e
| 11,518
|
py
|
Python
|
echopype/tests/visualize/test_plot.py
|
mbdunn/echopype
|
a53290801d1ca062d45c00ca2c541d54682dd40a
|
[
"Apache-2.0"
] | null | null | null |
echopype/tests/visualize/test_plot.py
|
mbdunn/echopype
|
a53290801d1ca062d45c00ca2c541d54682dd40a
|
[
"Apache-2.0"
] | null | null | null |
echopype/tests/visualize/test_plot.py
|
mbdunn/echopype
|
a53290801d1ca062d45c00ca2c541d54682dd40a
|
[
"Apache-2.0"
] | null | null | null |
import echopype
import echopype.visualize
from echopype.testing import TEST_DATA_FOLDER
import pytest
from xarray.plot.facetgrid import FacetGrid
from matplotlib.collections import QuadMesh
import xarray as xr
import numpy as np
ek60_path = TEST_DATA_FOLDER / "ek60"
ek80_path = TEST_DATA_FOLDER / "ek80_new"
azfp_path = TEST_DATA_FOLDER / "azfp"
ad2cp_path = TEST_DATA_FOLDER / "ad2cp"
param_args = ("filepath", "sonar_model", "azfp_xml_path", "range_kwargs")
param_testdata = [
(
ek60_path / "ncei-wcsd" / "Summer2017-D20170719-T211347.raw",
"EK60",
None,
{},
),
(
ek60_path / "DY1002_EK60-D20100318-T023008_rep_freq.raw",
"EK60",
None,
{},
),
(
ek80_path / "echopype-test-D20211004-T235930.raw",
"EK80",
None,
{'waveform_mode': 'BB', 'encode_mode': 'complex'},
),
(
ek80_path / "D20211004-T233354.raw",
"EK80",
None,
{'waveform_mode': 'CW', 'encode_mode': 'power'},
),
(
ek80_path / "D20211004-T233115.raw",
"EK80",
None,
{'waveform_mode': 'CW', 'encode_mode': 'complex'},
),
(
azfp_path / "17082117.01A",
"AZFP",
azfp_path / "17041823.XML",
{},
), # Will always need env variables
pytest.param(
ad2cp_path / "raw" / "090" / "rawtest.090.00001.ad2cp",
"AD2CP",
None,
{},
marks=pytest.mark.xfail(
run=False,
reason="Not supported at the moment",
),
),
]
@pytest.mark.parametrize(param_args, param_testdata)
def test_plot_multi(
    filepath,
    sonar_model,
    azfp_xml_path,
    range_kwargs,
):
    """Echograms built from a raw file are a list of ``FacetGrid`` plots."""
    # TODO: Need to figure out how to compare the actual rendered plots
    ed = echopype.open_raw(filepath, sonar_model, azfp_xml_path)
    plots = echopype.visualize.create_echogram(ed)
    # `assert x is True` replaced with the idiomatic plain assert; both
    # expressions are bools, so behavior is unchanged.
    assert isinstance(plots, list)
    assert all(isinstance(plot, FacetGrid) for plot in plots)
@pytest.mark.parametrize(param_args, param_testdata)
def test_plot_single(
    filepath,
    sonar_model,
    azfp_xml_path,
    range_kwargs,
):
    """Plotting a single channel yields FacetGrids for EK80-complex data,
    QuadMeshes otherwise."""
    # TODO: Need to figure out how to compare the actual rendered plots
    ed = echopype.open_raw(filepath, sonar_model, azfp_xml_path)
    plots = echopype.visualize.create_echogram(
        ed, channel=ed.beam.channel[0].values
    )
    # `assert x is True` replaced with idiomatic plain asserts (no behavior change).
    assert isinstance(plots, list)
    if (
        sonar_model.lower() == 'ek80'
        and range_kwargs['encode_mode'] == 'complex'
    ):
        assert all(isinstance(plot, FacetGrid) for plot in plots)
    else:
        assert all(isinstance(plot, QuadMesh) for plot in plots)
@pytest.mark.parametrize(param_args, param_testdata)
def test_plot_multi_get_range(
    filepath,
    sonar_model,
    azfp_xml_path,
    range_kwargs,
):
    """Echograms with ``get_range=True`` have the expected beam/channel shapes."""
    # TODO: Need to figure out how to compare the actual rendered plots
    ed = echopype.open_raw(filepath, sonar_model, azfp_xml_path)
    if ed.sonar_model.lower() == 'azfp':
        # AZFP range computation always needs environmental parameters.
        avg_temperature = (
            ed.environment['temperature'].mean('time1').values
        )
        env_params = {
            'temperature': avg_temperature,
            'salinity': 27.9,
            'pressure': 59,
        }
        range_kwargs['env_params'] = env_params
    plots = echopype.visualize.create_echogram(
        ed, get_range=True, range_kwargs=range_kwargs
    )
    # `assert x is True` replaced with idiomatic plain asserts (no behavior change).
    assert isinstance(plots, list)
    assert all(isinstance(plot, FacetGrid) for plot in plots)
    # Beam shape check
    if (
        sonar_model.lower() == 'ek80'
        and range_kwargs['encode_mode'] == 'complex'
    ):
        assert plots[0].axes.shape[-1] > 1
    else:
        assert plots[0].axes.shape[-1] == 1
    # Channel shape check
    assert ed.beam.channel.shape[0] == len(plots)
@pytest.mark.parametrize(param_args, param_testdata)
def test_plot_Sv(
    filepath,
    sonar_model,
    azfp_xml_path,
    range_kwargs,
):
    """Echograms created from a computed Sv dataset are FacetGrid plots."""
    # TODO: Need to figure out how to compare the actual rendered plots
    ed = echopype.open_raw(filepath, sonar_model, azfp_xml_path)
    if ed.sonar_model.lower() == 'azfp':
        # AZFP calibration always needs environmental parameters.
        avg_temperature = (
            ed.environment['temperature'].mean('time1').values
        )
        env_params = {
            'temperature': avg_temperature,
            'salinity': 27.9,
            'pressure': 59,
        }
        range_kwargs['env_params'] = env_params
        if 'azfp_cal_type' in range_kwargs:
            range_kwargs.pop('azfp_cal_type')
    Sv = echopype.calibrate.compute_Sv(ed, **range_kwargs)
    plots = echopype.visualize.create_echogram(Sv)
    # `assert x is True` replaced with idiomatic plain asserts (no behavior change).
    assert isinstance(plots, list)
    assert all(isinstance(plot, FacetGrid) for plot in plots)
@pytest.mark.parametrize(param_args, param_testdata)
def test_plot_mvbs(
    filepath,
    sonar_model,
    azfp_xml_path,
    range_kwargs,
):
    """Echograms of MVBS data plot as FacetGrids, or raise the expected
    ValueError when too few ping times remain after binning."""
    # TODO: Need to figure out how to compare the actual rendered plots
    ed = echopype.open_raw(filepath, sonar_model, azfp_xml_path)
    if ed.sonar_model.lower() == 'azfp':
        # AZFP calibration always needs environmental parameters.
        avg_temperature = (
            ed.environment['temperature'].mean('time1').values
        )
        env_params = {
            'temperature': avg_temperature,
            'salinity': 27.9,
            'pressure': 59,
        }
        range_kwargs['env_params'] = env_params
        if 'azfp_cal_type' in range_kwargs:
            range_kwargs.pop('azfp_cal_type')
    Sv = echopype.calibrate.compute_Sv(ed, **range_kwargs)
    mvbs = echopype.preprocess.compute_MVBS(Sv, ping_time_bin='10S')
    plots = []
    try:
        plots = echopype.visualize.create_echogram(mvbs)
    except Exception as e:
        # Only a ValueError with this exact message is an acceptable failure.
        assert isinstance(e, ValueError)
        assert str(e) == "Ping time must have a length that is greater or equal to 2"  # noqa
    if len(plots) > 0:
        # `assert x is True` replaced with the idiomatic plain assert.
        assert all(isinstance(plot, FacetGrid) for plot in plots)
@pytest.mark.parametrize(
    ("water_level", "expect_warning"),
    [
        (True, False),
        ([True], True),
        (False, True),
        (xr.DataArray(np.array(50.0)).expand_dims({'channel': 3}), False),
        (xr.DataArray(np.array(50.0)), False),
        (10, False),
        (30.5, False),
    ],
)
def test_water_level_echodata(water_level, expect_warning):
    """Check that ``_add_water_level`` offsets ranges by the given water level
    when called with an ``EchoData``-typed input, warning or raising as expected.

    The [True] case means: drop the platform's water_level variable first,
    then ask for it (expects a warning).
    """
    from echopype.echodata import EchoData
    from echopype.visualize.api import _add_water_level
    filepath = ek60_path / "ncei-wcsd" / "Summer2017-D20170719-T211347.raw"
    sonar_model = "EK60"
    range_kwargs = {}
    echodata = echopype.open_raw(
        sonar_model=sonar_model, raw_file=filepath, xml_path=None
    )
    range_in_meter = echodata.compute_range(
        env_params=range_kwargs.get('env_params', {}),
        azfp_cal_type=range_kwargs.get('azfp_cal_type', None),
        ek_waveform_mode=range_kwargs.get('waveform_mode', 'CW'),
        ek_encode_mode=range_kwargs.get('encode_mode', 'power'),
    )
    # Reference range profile for one fixed channel/ping used throughout.
    single_array = range_in_meter.sel(channel='GPT 18 kHz 009072058c8d 1-1 ES18-11',
                                      ping_time='2017-07-19T21:13:47.984999936').values
    no_input_water_level = False
    if isinstance(water_level, list):
        # [True] case: remove the platform water_level, then unwrap the flag.
        water_level = water_level[0]
        echodata.platform = echodata.platform.drop_vars('water_level')
        no_input_water_level = True
    # Build the expected (original) array for each water_level flavor.
    # NOTE(review): a DataArray *without* a 'channel' dim sets no
    # `original_array` here (the inner if has no else and the elif chain is
    # skipped), so the final comparison would hit a NameError for the
    # scalar-DataArray parametrize case — confirm intended coverage.
    if isinstance(water_level, xr.DataArray):
        if 'channel' in water_level.dims:
            original_array = single_array + water_level.isel(channel=0).values
    elif isinstance(water_level, bool) and water_level is True:
        if no_input_water_level is False:
            original_array = (
                single_array
                + echodata.platform.water_level.sel(channel='GPT 18 kHz 009072058c8d 1-1 ES18-11',
                                                    time3='2017-07-19T21:13:47.984999936').values
            )
        else:
            original_array = single_array
    elif water_level is not False and isinstance(water_level, (int, float)):
        original_array = single_array + water_level
    else:
        original_array = single_array
    results = None
    try:
        if expect_warning:
            with pytest.warns(UserWarning):
                results = _add_water_level(
                    range_in_meter=range_in_meter,
                    water_level=water_level,
                    data_type=EchoData,
                    platform_data=echodata.platform,
                )
        else:
            results = _add_water_level(
                range_in_meter=range_in_meter,
                water_level=water_level,
                data_type=EchoData,
                platform_data=echodata.platform,
            )
    except Exception as e:
        # A raise is acceptable only as this specific ValueError.
        assert isinstance(e, ValueError)
        assert str(e) == 'Water level must have any of these dimensions: channel, ping_time, range_sample'  # noqa
    if isinstance(results, xr.DataArray):
        final_array = results.sel(channel='GPT 18 kHz 009072058c8d 1-1 ES18-11',
                                  ping_time='2017-07-19T21:13:47.984999936').values
        print(f"original_array = {original_array}")
        print(f"results = {results}")
        assert np.array_equal(original_array, final_array)
@pytest.mark.parametrize(
    ("water_level", "expect_warning"),
    [
        (True, True),
        (False, True),
        (xr.DataArray(np.array(50.0)).expand_dims({'channel': 3}), False),
        (xr.DataArray(np.array(50.0)), False),
        (10, False),
        (30.5, False),
    ],
)
def test_water_level_Sv_dataset(water_level, expect_warning):
    """Check ``_add_water_level`` against an Sv ``xr.Dataset``: booleans warn
    (no platform data is supplied here), numeric/DataArray levels offset the range.
    """
    from echopype.visualize.api import _add_water_level
    filepath = ek60_path / "ncei-wcsd" / "Summer2017-D20170719-T211347.raw"
    sonar_model = "EK60"
    range_kwargs = {}
    echodata = echopype.open_raw(
        sonar_model=sonar_model, raw_file=filepath, xml_path=None
    )
    Sv = echopype.calibrate.compute_Sv(echodata, **range_kwargs)
    ds = Sv.set_coords('echo_range')
    range_in_meter = ds.echo_range
    # Reference range profile for one fixed channel/ping used throughout.
    single_array = range_in_meter.sel(channel='GPT 18 kHz 009072058c8d 1-1 ES18-11',
                                      ping_time='2017-07-19T21:13:47.984999936').values
    # NOTE(review): a DataArray *without* a 'channel' dim sets no
    # `original_array` (inner if has no else; elif/else are skipped), so the
    # final comparison would hit a NameError for the scalar-DataArray
    # parametrize case — confirm intended coverage.
    if isinstance(water_level, xr.DataArray):
        if 'channel' in water_level.dims:
            original_array = single_array + water_level.isel(channel=0).values
    elif not isinstance(water_level, bool) and isinstance(water_level, (int, float)):
        original_array = single_array + water_level
    else:
        original_array = single_array
    results = None
    try:
        if expect_warning:
            with pytest.warns(UserWarning):
                results = _add_water_level(
                    range_in_meter=range_in_meter,
                    water_level=water_level,
                    data_type=xr.Dataset,
                )
        else:
            results = _add_water_level(
                range_in_meter=range_in_meter,
                water_level=water_level,
                data_type=xr.Dataset,
            )
    except Exception as e:
        # A raise is acceptable only as this specific ValueError.
        assert isinstance(e, ValueError)
        assert str(e) == 'Water level must have any of these dimensions: channel, ping_time, range_sample'  # noqa
    if isinstance(results, xr.DataArray):
        final_array = results.sel(channel='GPT 18 kHz 009072058c8d 1-1 ES18-11',
                                  ping_time='2017-07-19T21:13:47.984999936').values
        assert np.array_equal(original_array, final_array)
| 33.002865
| 114
| 0.622504
| 1,406
| 11,518
| 4.863442
| 0.15505
| 0.064346
| 0.021059
| 0.03539
| 0.765867
| 0.736326
| 0.717754
| 0.707517
| 0.662474
| 0.662474
| 0
| 0.054044
| 0.272269
| 11,518
| 348
| 115
| 33.097701
| 0.761751
| 0.03577
| 0
| 0.655738
| 0
| 0
| 0.138543
| 0.034523
| 0
| 0
| 0
| 0.002874
| 0.068852
| 1
| 0.022951
| false
| 0
| 0.036066
| 0
| 0.059016
| 0.006557
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
be909d3398cbd0bc40faa3b82a76c4c1dbe5bc23
| 132
|
py
|
Python
|
bracketology/__init__.py
|
stahl085/bracketology
|
3311241fa82eb41a5529ac5e126171850f715032
|
[
"MIT"
] | 2
|
2020-02-26T07:02:04.000Z
|
2020-03-09T19:21:37.000Z
|
Bracketology/bracketology/__init__.py
|
andrewargeros/minnemudac-2021
|
2e96b598d3b231937f1fda871fecaeb44d236a7e
|
[
"MIT"
] | 1
|
2020-03-09T02:39:25.000Z
|
2021-03-16T02:56:58.000Z
|
Bracketology/bracketology/__init__.py
|
andrewargeros/minnemudac-2021
|
2e96b598d3b231937f1fda871fecaeb44d236a7e
|
[
"MIT"
] | 4
|
2020-02-26T03:35:39.000Z
|
2021-04-09T00:46:33.000Z
|
__version__ = '0.0.8'
from bracketology.brackets import Team, Game, SubBracket16, FinalFour, Bracket
import bracketology.simulators
| 33
| 78
| 0.818182
| 16
| 132
| 6.5
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042017
| 0.098485
| 132
| 3
| 79
| 44
| 0.831933
| 0
| 0
| 0
| 0
| 0
| 0.037879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
fe3c1f9a570d1238ee435cb1cfdbb65470ae8432
| 565
|
py
|
Python
|
facebook_lite_app/views.py
|
B339r1p/fb-lite
|
2930ef3ef487250e74664df6642b2c398f256de9
|
[
"MIT"
] | null | null | null |
facebook_lite_app/views.py
|
B339r1p/fb-lite
|
2930ef3ef487250e74664df6642b2c398f256de9
|
[
"MIT"
] | null | null | null |
facebook_lite_app/views.py
|
B339r1p/fb-lite
|
2930ef3ef487250e74664df6642b2c398f256de9
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from rest_framework import viewsets
from .serializers import PostSerializer,CommentSerializer,LikeSerializer
from .models import Post,Comment,Like
# Create your views here.
class PostViewSet(viewsets.ModelViewSet):
    """DRF CRUD endpoints over all ``Post`` objects."""
    queryset = Post.objects.all()
    serializer_class = PostSerializer
class CommentViewSet(viewsets.ModelViewSet):
    """DRF CRUD endpoints over all ``Comment`` objects."""
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer
class LikeViewSet(viewsets.ModelViewSet):
    """DRF CRUD endpoints over all ``Like`` objects."""
    queryset = Like.objects.all()
    serializer_class = LikeSerializer
| 28.25
| 72
| 0.8
| 59
| 565
| 7.59322
| 0.491525
| 0.133929
| 0.1875
| 0.167411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130973
| 565
| 20
| 73
| 28.25
| 0.912424
| 0.040708
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.307692
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
fe41b910ef8a962de3f0c491a4edbf423b0dc5cd
| 172
|
py
|
Python
|
Python/242valid_anagram.py
|
Apocrypse/LeetCode
|
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
|
[
"MIT"
] | 4
|
2020-03-17T03:08:51.000Z
|
2022-03-14T17:33:28.000Z
|
Python/242valid_anagram.py
|
Apocrypse/LeetCode
|
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
|
[
"MIT"
] | null | null | null |
Python/242valid_anagram.py
|
Apocrypse/LeetCode
|
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
|
[
"MIT"
] | 3
|
2021-04-29T16:51:02.000Z
|
2022-03-19T17:37:56.000Z
|
class Solution:
    """LeetCode 242: valid anagram."""

    def isAnagram(self, s, t):
        """Return True when *t* is an anagram of *s*.

        Two strings are anagrams iff their sorted character sequences match.

        :type s: str
        :type t: str
        :rtype: bool
        """
        canonical_s = sorted(s)
        canonical_t = sorted(t)
        return canonical_s == canonical_t
| 19.111111
| 37
| 0.44186
| 20
| 172
| 3.8
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.418605
| 172
| 8
| 38
| 21.5
| 0.76
| 0.22093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
fe4b9404ba9f3a23c8d01ee95ddce3f6a7b0ffc9
| 105
|
py
|
Python
|
IotApp/test.py
|
greenSyntax/AWS-RaspberryPi
|
ac09fa90eb61c525be94e44f0580a669782c221e
|
[
"MIT"
] | null | null | null |
IotApp/test.py
|
greenSyntax/AWS-RaspberryPi
|
ac09fa90eb61c525be94e44f0580a669782c221e
|
[
"MIT"
] | null | null | null |
IotApp/test.py
|
greenSyntax/AWS-RaspberryPi
|
ac09fa90eb61c525be94e44f0580a669782c221e
|
[
"MIT"
] | null | null | null |
import threading
def printit():
    """Print "Hello World" now and reschedule itself every 2 seconds.

    Each call starts a new ``threading.Timer`` that re-invokes ``printit``,
    so the loop runs until the process exits.  Note the timers are never
    cancelled and the threads are non-daemon.
    """
    threading.Timer(2.0, printit).start()
    # Parenthesized call form: the original `print "Hello World"` statement
    # is Python-2-only syntax (SyntaxError on Python 3); print("Hello World")
    # behaves identically on both.
    print("Hello World")
printit()
| 11.666667
| 38
| 0.714286
| 14
| 105
| 5.357143
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.142857
| 105
| 8
| 39
| 13.125
| 0.811111
| 0
| 0
| 0
| 0
| 0
| 0.105769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.8
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
fe6bfa3bd83df9e7ddc136154e1615f0e3edc7dd
| 94
|
py
|
Python
|
tests/test_ansible_task_worker.py
|
benthomasson/ansible-task-worker
|
33189b503e010df93adf486fde8c0eec9c436e18
|
[
"Apache-2.0"
] | null | null | null |
tests/test_ansible_task_worker.py
|
benthomasson/ansible-task-worker
|
33189b503e010df93adf486fde8c0eec9c436e18
|
[
"Apache-2.0"
] | 10
|
2020-01-05T19:08:49.000Z
|
2021-11-15T17:47:59.000Z
|
tests/test_ansible_task_worker.py
|
benthomasson/ansible-task-worker
|
33189b503e010df93adf486fde8c0eec9c436e18
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ansible_task_worker` package."""
| 18.8
| 46
| 0.62766
| 13
| 94
| 4.384615
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.12766
| 94
| 4
| 47
| 23.5
| 0.682927
| 0.882979
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fe6dff8becd7dd38f4f1f68d27c8e924eb891859
| 9,016
|
py
|
Python
|
tests/test_run_mypy.py
|
bsamseth/jsonschema-typed
|
54d2f8c250fdba14422e44545c7cd1254218045e
|
[
"MIT"
] | 14
|
2020-02-26T17:48:33.000Z
|
2022-02-04T13:25:24.000Z
|
tests/test_run_mypy.py
|
bsamseth/jsonschema-typed
|
54d2f8c250fdba14422e44545c7cd1254218045e
|
[
"MIT"
] | 5
|
2020-08-10T20:37:59.000Z
|
2021-07-08T08:55:15.000Z
|
tests/test_run_mypy.py
|
bsamseth/jsonschema-typed
|
54d2f8c250fdba14422e44545c7cd1254218045e
|
[
"MIT"
] | 3
|
2020-07-26T18:57:58.000Z
|
2021-09-30T18:41:04.000Z
|
"""
This type of test attempts to run mypy on selected sources and asserts that
the output is consistent. This does not (yet) test sufficient edge cases,
and more testing should be done.
# TODO: Add more test cases.
"""
import os
import pytest
from mypy import api
from typing import List, Tuple, TypedDict
class Expect(TypedDict):
    """Expected mypy outcome for one test case.

    ``normal`` and ``error`` hold lines that must appear in mypy's normal
    and error reports respectively; ``exit_status`` is the expected exit code.
    """
    normal: str
    error: str
    exit_status: int
case_directory = os.path.join(os.path.dirname(__file__), "cases")
cases: List[Tuple[str, Expect]] = [
(
"from_readme.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('FooSchema', {'title'?: builtins.str, 'awesome'?: builtins.int})'
error: TypedDict "FooSchema" has no key 'description'
error: Argument 2 has incompatible type "None"; expected "int"
""",
error="",
exit_status=1,
),
),
(
"check_required.py",
Expect(
normal="""
error: Key 'awesome' missing for TypedDict "FooSchema"
note: Revealed type is 'TypedDict('FooSchema', {'title'?: builtins.str, 'awesome': builtins.int})'
error: TypedDict "FooSchema" has no key 'description'
error: Argument 2 has incompatible type "None"; expected "int"
""",
error="",
exit_status=1,
),
),
(
"alias.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('FooSchema', {'title'?: builtins.str, 'awesome'?: builtins.int})'
error: TypedDict "FooSchema" has no key 'description'
error: Argument 2 has incompatible type "None"; expected "int"
""",
error="",
exit_status=1,
),
),
(
"nonetype.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('NoneSchema', {'title'?: builtins.str, 'awesome'?: Union[builtins.list[Any], None]})'
error: Argument 2 has incompatible type "int"; expected "Optional[List[Any]]"
""",
error="",
exit_status=1,
),
),
(
"nested.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('NestedFooSchema', {'title': builtins.str, 'awesome'?: TypedDict({'nested'?: TypedDict({'thing': builtins.str}), 'thing': builtins.int})})'
note: Revealed type is 'TypedDict('NestedFooSchemaAwesome', {'nested'?: TypedDict({'thing': builtins.str}), 'thing': builtins.int})'
note: Revealed type is 'TypedDict('NestedFooSchemaAwesomeNested', {'thing': builtins.str})'
""",
error="",
exit_status=1,
),
),
(
"hard.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('ComplicatedJson', {'num': builtins.int, 'status': builtins.list[TypedDict({'code'?: Union[Literal['success'], Literal['failure']], 'diagnostics'?: builtins.list[TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})], 'message'?: builtins.str, 'module': Union[Literal['m1'], Literal['m2']]})]})'
note: Revealed type is 'builtins.int'
note: Revealed type is 'builtins.list[TypedDict({'code'?: Union[Literal['success'], Literal['failure']], 'diagnostics'?: builtins.list[TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})], 'message'?: builtins.str, 'module': Union[Literal['m1'], Literal['m2']]})]'
note: Revealed type is 'TypedDict({'code'?: Union[Literal['success'], Literal['failure']], 'diagnostics'?: builtins.list[TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})], 'message'?: builtins.str, 'module': Union[Literal['m1'], Literal['m2']]})'
note: Revealed type is 'builtins.list[TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})]'
note: Revealed type is 'TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})'
note: Revealed type is 'TypedDict('ComplicatedJsonStatusDiagnostics', {'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})'
""",
error="",
exit_status=1,
),
),
(
"optional_typed_dict.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('FooSchema', {'title'?: builtins.str, 'awesome'?: builtins.int})'
error: TypedDict "FooSchema" has no key 'description'
error: Argument 2 has incompatible type "None"; expected "int"
""",
error="",
exit_status=1,
),
),
(
"optional_typed_dict_hard_mode.py",
Expect(
normal="""
note: Revealed type is 'TypedDict('ComplicatedJson', {'num'?: builtins.int, 'status'?: builtins.list[TypedDict({'code'?: Union[Literal['success'], Literal['failure']], 'diagnostics'?: builtins.list[TypedDict({'field'?: builtins.str, 'illegal_value'?: builtins.str, 'level': Union[Literal['info'], Literal['warn'], Literal['error']], 'mismatch_fields'?: builtins.list[builtins.str], 'ids'?: builtins.list[TypedDict({'id': builtins.int, 'thing_type'?: Union[Literal['A'], Literal['B']]})]})], 'message'?: builtins.str, 'module': Union[Literal['m1'], Literal['m2']]})]})'
error: TypedDict "ComplicatedJson" has no key 'description'
error: Argument 2 has incompatible type "None"; expected "int"
""",
error="",
exit_status=1,
),
),
(
"outer_array.py",
Expect(
normal="""
note: Revealed type is 'builtins.list[TypedDict({'a_number'?: builtins.int, 'a_string': builtins.str, 'nested_array_of_numbers'?: builtins.list[builtins.list[builtins.float]]})]'
note: Revealed type is 'TypedDict('ArrayOfObjects', {'a_number'?: builtins.int, 'a_string': builtins.str, 'nested_array_of_numbers'?: builtins.list[builtins.list[Union[builtins.int, builtins.float]]]})'
""",
error="",
exit_status=1,
),
),
(
"tuple.py",
Expect(
normal="""
tests/cases/tuple.py:9: error: List item 0 has incompatible type "List[str]"; expected "Optional[Tuple[Optional[str], Optional[str]]]"
tests/cases/tuple.py:10: error: List item 0 has incompatible type "Tuple[str, str, str]"; expected "Optional[Tuple[Optional[str], Optional[str]]]"
tests/cases/tuple.py:11: error: List item 0 has incompatible type "int"; expected "Optional[Tuple[Optional[str], Optional[str]]]"
tests/cases/tuple.py:12: error: List item 0 has incompatible type "Tuple[int, int]"; expected "Optional[Tuple[Optional[str], Optional[str]]]"
tests/cases/tuple.py:15: note: Revealed type is 'builtins.list[Union[Tuple[Union[builtins.str, None], Union[builtins.str, None]], None]]'
Found 4 errors in 1 file (checked 1 source file)
""",
error="",
exit_status=1,
),
),
]
@pytest.mark.parametrize("case_file, expected", cases)
def test_cases(case_file: str, expected: Expect):
normal_report, error_report, exit_status = api.run(
["--show-traceback", os.path.join(case_directory, case_file)]
)
for line in expected["normal"].strip().splitlines():
assert line.strip() in normal_report
for line in expected["error"].strip().splitlines():
assert line.strip() in error_report
assert exit_status == expected["exit_status"]
| 54.313253
| 584
| 0.594388
| 985
| 9,016
| 5.371574
| 0.148223
| 0.079002
| 0.057456
| 0.064638
| 0.764317
| 0.759214
| 0.731053
| 0.69571
| 0.665848
| 0.665848
| 0
| 0.005755
| 0.229148
| 9,016
| 165
| 585
| 54.642424
| 0.75554
| 0.023514
| 0
| 0.557823
| 0
| 0.142857
| 0.774102
| 0.216007
| 0
| 0
| 0
| 0.006061
| 0.020408
| 1
| 0.006803
| false
| 0
| 0.027211
| 0
| 0.061224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fe6f7534b411ea5987584e0cec59e6f82308cf33
| 1,898
|
py
|
Python
|
test/pytest/test_bech32.py
|
kcorlidy/importaddress
|
e63aeb2e288df29435d9321a0a90e4c905b851fa
|
[
"Apache-2.0"
] | null | null | null |
test/pytest/test_bech32.py
|
kcorlidy/importaddress
|
e63aeb2e288df29435d9321a0a90e4c905b851fa
|
[
"Apache-2.0"
] | null | null | null |
test/pytest/test_bech32.py
|
kcorlidy/importaddress
|
e63aeb2e288df29435d9321a0a90e4c905b851fa
|
[
"Apache-2.0"
] | 1
|
2021-05-03T23:42:18.000Z
|
2021-05-03T23:42:18.000Z
|
from importaddress.segwit_addr import encode, decode
import pytest
valid = [
["BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4", "0014751e76e8199196d454941c45d1b3a323f1433bd6"],
["tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7", "00201863143c14c5166804bd19203356da136c985678cd4d27a1b8c6329604903262"],
["bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx", "5128751e76e8199196d454941c45d1b3a323f1433bd6751e76e8199196d454941c45d1b3a323f1433bd6"],
["BC1SW50QA3JX3S", "6002751e"],
["bc1zw508d6qejxtdg4y5r3zarvaryvg6kdaj", "5210751e76e8199196d454941c45d1b3a323"],
["tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy", "0020000000c4a5cad46221b2a187905e5266362b99d5e91c6ce24d165dab93e86433"]
]
# Invalid bech32 address test vectors (BIP-0173).
# BUG FIX: the original list had no commas between the string literals, so
# implicit string concatenation collapsed all ten vectors into a single
# element and only that one concatenated (meaningless) string was tested.
invalid = [
    "tc1qw508d6qejxtdg4y5r3zarvary0c5xw7kg3g4ty",  # Invalid human-readable part
    "bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t5",  # Invalid checksum
    "BC13W508D6QEJXTDG4Y5R3ZARVARY0C5XW7KN40WF2",  # Invalid witness version
    "bc1rw5uspcuh",  # Invalid program length
    "bc10w508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kw5rljs90",  # Invalid program length
    "BC1QR508D6QEJXTDG4Y5R3ZARVARYV98GJ9P",  # Invalid program length for witness version 0 (per BIP141)
    "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sL5k7",  # Mixed case
    "bc1zw508d6qejxtdg4y5r3zarvaryvqyzf3du",  # zero padding of more than 4 bits
    "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3pjxtptv",  # Non-zero padding in 8-to-5 conversion
    "bc1gmk9yu",  # Empty data section
]
def test_encode():
    """Re-encoding each valid (address, scriptPubKey) pair must reproduce the address."""
    for address, script_hex in valid:
        script = bytes.fromhex(script_hex)
        # Witness version: script opcodes OP_1..OP_16 are 0x51..0x60; OP_0 is 0x00.
        witness_version = script[0] - 0x50 if script[0] else 0
        hrp = address[:2].lower()
        assert address.lower() == encode(hrp, witness_version, script[2:])
def test_decode():
    """Decoding each valid address must yield the witness program bytes."""
    for address, script_hex in valid:
        lowered = address.lower()
        witness_program = bytes.fromhex(script_hex)[2:]
        assert bytes(decode(lowered[:2], lowered)[1]) == witness_program
def test_decode_invalid():
    """Every invalid address must make decode() raise."""
    for address in invalid:
        lowered = address.lower()
        with pytest.raises(Exception):
            bytes(decode(lowered[:2], lowered)[1])
| 42.177778
| 167
| 0.818757
| 153
| 1,898
| 10.124183
| 0.509804
| 0.027114
| 0.038735
| 0.009038
| 0.034861
| 0.019367
| 0
| 0
| 0
| 0
| 0
| 0.26331
| 0.089568
| 1,898
| 44
| 168
| 43.136364
| 0.633102
| 0.143836
| 0
| 0.108108
| 0
| 0
| 0.631122
| 0.604464
| 0
| 0
| 0.00248
| 0
| 0.054054
| 1
| 0.081081
| false
| 0
| 0.054054
| 0
| 0.135135
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
fe70feb29bf8d55c18b23df33560a15791015eca
| 161
|
py
|
Python
|
pywind/lib/filter.py
|
augustand/fdslight
|
f3d82465aaa27160438b22f9b474be8c5dc100cc
|
[
"BSD-2-Clause"
] | null | null | null |
pywind/lib/filter.py
|
augustand/fdslight
|
f3d82465aaa27160438b22f9b474be8c5dc100cc
|
[
"BSD-2-Clause"
] | null | null | null |
pywind/lib/filter.py
|
augustand/fdslight
|
f3d82465aaa27160438b22f9b474be8c5dc100cc
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
class _drop_html_event(object):
pass
def drop_html_event(sts):
    """Filter event attributes out of HTML.

    Currently an unimplemented stub: it ignores its input and returns None.

    :param sts: HTML text to filter.
    :return: None
    """
    pass
| 12.384615
| 31
| 0.608696
| 20
| 161
| 4.65
| 0.75
| 0.172043
| 0.27957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008403
| 0.26087
| 161
| 12
| 32
| 13.416667
| 0.773109
| 0.354037
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
fe7e2b32b5faa9daaf437a5ad761058903432372
| 646
|
py
|
Python
|
secateur/forms.py
|
funkybob/secateur
|
9bda9ab7b9ddd8c1c43e5d2081342222f28eaaaf
|
[
"MIT"
] | null | null | null |
secateur/forms.py
|
funkybob/secateur
|
9bda9ab7b9ddd8c1c43e5d2081342222f28eaaaf
|
[
"MIT"
] | null | null | null |
secateur/forms.py
|
funkybob/secateur
|
9bda9ab7b9ddd8c1c43e5d2081342222f28eaaaf
|
[
"MIT"
] | null | null | null |
from django import forms
class Disconnect(forms.Form):
    """Fieldless form; submitting it carries the intent on its own."""
class BlockAccountsForm(forms.Form):
    """Collects a Twitter screen name, a block duration in weeks, and which
    block/mute actions to apply to the account and/or its followers."""
    screen_name = forms.CharField(help_text="The Twitter username.")
    # Duration is bounded to 1-52 weeks (at most one year), defaulting to 6.
    duration = forms.IntegerField(
        min_value=1,
        max_value=52,
        initial=6,
        help_text="How long to block the accounts (in weeks)",
    )
    # Action toggles: all optional, so any combination (including none) is valid.
    block_account = forms.BooleanField(required=False)
    mute_account = forms.BooleanField(required=False)
    block_followers = forms.BooleanField(required=False)
    mute_followers = forms.BooleanField(required=False)
class Search(forms.Form):
    """Single-field form for looking up a user by screen name."""
    screen_name = forms.CharField(help_text="Username")
| 26.916667
| 68
| 0.716718
| 78
| 646
| 5.794872
| 0.512821
| 0.150442
| 0.221239
| 0.265487
| 0.535398
| 0.181416
| 0.181416
| 0.181416
| 0
| 0
| 0
| 0.007605
| 0.185759
| 646
| 23
| 69
| 28.086957
| 0.851711
| 0
| 0
| 0
| 0
| 0
| 0.108359
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.058824
| 0.058824
| 0
| 0.647059
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
feb3d82764064b9c5ad416fba4e417a1542bf938
| 150
|
py
|
Python
|
old/application/__init__.py
|
UNSW-CEEM/market-control-room
|
a64228084b50e1eb662a20f939c02bc01a9c195a
|
[
"MIT"
] | 1
|
2021-07-28T03:07:55.000Z
|
2021-07-28T03:07:55.000Z
|
old/application/__init__.py
|
luke-marshall/market-control-room
|
a64228084b50e1eb662a20f939c02bc01a9c195a
|
[
"MIT"
] | 6
|
2021-03-09T01:07:34.000Z
|
2022-02-26T09:59:28.000Z
|
old/application/__init__.py
|
UNSW-CEEM/market-control-room
|
a64228084b50e1eb662a20f939c02bc01a9c195a
|
[
"MIT"
] | null | null | null |
from flask import Flask, request
# Serve static assets from the package root with no '/static' URL prefix.
app = Flask(__name__, static_url_path='')
@app.route('/')
def root():
    """Serve the monitor page (static 'monitor.html') at the site root."""
    return app.send_static_file('monitor.html')
| 18.75
| 44
| 0.726667
| 22
| 150
| 4.590909
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113333
| 150
| 7
| 45
| 21.428571
| 0.759399
| 0
| 0
| 0
| 0
| 0
| 0.087248
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
feb5b11dc902ec538bd96c1ad4a739b143dbc66c
| 26,199
|
py
|
Python
|
cvat/apps/engine/migrations/0001_initial.py
|
irisradgroup/cvat
|
cc427b122a0382e501154f4caf4afe813ee7e3b1
|
[
"Intel",
"MIT"
] | null | null | null |
cvat/apps/engine/migrations/0001_initial.py
|
irisradgroup/cvat
|
cc427b122a0382e501154f4caf4afe813ee7e3b1
|
[
"Intel",
"MIT"
] | null | null | null |
cvat/apps/engine/migrations/0001_initial.py
|
irisradgroup/cvat
|
cc427b122a0382e501154f4caf4afe813ee7e3b1
|
[
"Intel",
"MIT"
] | null | null | null |
# Generated by Django 3.1.13 on 2021-09-01 22:47
import cvat.apps.engine.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial database schema for the CVAT engine app.

    Auto-generated by Django 3.1.13. Creates the core annotation models
    (tasks, jobs, labels, labeled images/shapes/tracks and their attribute
    tables), media/data models, cloud storage, reviews/issues/comments, and
    the training-project integration tables.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='AttributeSpec',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('mutable', models.BooleanField()),
                ('input_type', models.CharField(choices=[('checkbox', 'CHECKBOX'), ('radio', 'RADIO'), ('number', 'NUMBER'), ('text', 'TEXT'), ('select', 'SELECT')], max_length=16)),
                ('default_value', models.CharField(max_length=128)),
                ('values', models.CharField(max_length=4096)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='CloudStorage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('provider_type', models.CharField(choices=[('AWS_S3_BUCKET', 'AWS_S3'), ('AZURE_CONTAINER', 'AZURE_CONTAINER'), ('GOOGLE_DRIVE', 'GOOGLE_DRIVE')], max_length=20)),
                ('resource', models.CharField(max_length=63)),
                ('display_name', models.CharField(max_length=63)),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('credentials', models.CharField(max_length=500)),
                ('credentials_type', models.CharField(choices=[('TEMP_KEY_SECRET_KEY_TOKEN_SET', 'TEMP_KEY_SECRET_KEY_TOKEN_SET'), ('ACCOUNT_NAME_TOKEN_PAIR', 'ACCOUNT_NAME_TOKEN_PAIR'), ('ANONYMOUS_ACCESS', 'ANONYMOUS_ACCESS')], max_length=29)),
                ('specific_attributes', models.CharField(blank=True, max_length=50)),
                ('description', models.TextField(blank=True)),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='cloud_storages', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
                'unique_together': {('provider_type', 'resource', 'credentials')},
            },
        ),
        migrations.CreateModel(
            name='Data',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('chunk_size', models.PositiveIntegerField(null=True)),
                ('size', models.PositiveIntegerField(default=0)),
                ('image_quality', models.PositiveSmallIntegerField(default=50)),
                ('start_frame', models.PositiveIntegerField(default=0)),
                ('stop_frame', models.PositiveIntegerField(default=0)),
                ('frame_filter', models.CharField(blank=True, default='', max_length=256)),
                ('compressed_chunk_type', models.CharField(choices=[('video', 'VIDEO'), ('imageset', 'IMAGESET'), ('list', 'LIST')], default=cvat.apps.engine.models.DataChoice['IMAGESET'], max_length=32)),
                ('original_chunk_type', models.CharField(choices=[('video', 'VIDEO'), ('imageset', 'IMAGESET'), ('list', 'LIST')], default=cvat.apps.engine.models.DataChoice['IMAGESET'], max_length=32)),
                ('storage_method', models.CharField(choices=[('cache', 'CACHE'), ('file_system', 'FILE_SYSTEM')], default=cvat.apps.engine.models.StorageMethodChoice['FILE_SYSTEM'], max_length=15)),
                ('storage', models.CharField(choices=[('cloud_storage', 'CLOUD_STORAGE'), ('local', 'LOCAL'), ('share', 'SHARE')], default=cvat.apps.engine.models.StorageChoice['LOCAL'], max_length=15)),
                ('cloud_storage', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='data', to='engine.cloudstorage')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(default='', max_length=1024)),
                ('frame', models.PositiveIntegerField()),
                ('width', models.PositiveIntegerField()),
                ('height', models.PositiveIntegerField()),
                ('data', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='images', to='engine.data')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice['ANNOTATION'], max_length=32)),
                ('assignee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('reviewer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='review_job_set', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Label',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', cvat.apps.engine.models.SafeCharField(max_length=64)),
                ('color', models.CharField(default='', max_length=8)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledImage',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
                ('source', models.CharField(choices=[('auto', 'AUTO'), ('manual', 'MANUAL')], default='manual', max_length=16, null=True)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.job')),
                ('label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.label')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledShape',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
                ('source', models.CharField(choices=[('auto', 'AUTO'), ('manual', 'MANUAL')], default='manual', max_length=16, null=True)),
                ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS'), ('cuboid', 'CUBOID')], max_length=16)),
                ('occluded', models.BooleanField(default=False)),
                ('z_order', models.IntegerField(default=0)),
                ('points', cvat.apps.engine.models.FloatArrayField()),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.job')),
                ('label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.label')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledTrack',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
                ('source', models.CharField(choices=[('auto', 'AUTO'), ('manual', 'MANUAL')], default='manual', max_length=16, null=True)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.job')),
                ('label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.label')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', cvat.apps.engine.models.SafeCharField(max_length=256)),
                ('bug_tracker', models.CharField(blank=True, default='', max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now_add=True)),
                ('status', models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice['ANNOTATION'], max_length=32)),
                ('assignee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', cvat.apps.engine.models.SafeCharField(max_length=256)),
                ('mode', models.CharField(max_length=32)),
                ('bug_tracker', models.CharField(blank=True, default='', max_length=2000)),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('overlap', models.PositiveIntegerField(null=True)),
                ('segment_size', models.PositiveIntegerField(default=0)),
                ('status', models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice['ANNOTATION'], max_length=32)),
                ('dimension', models.CharField(choices=[('3d', 'DIM_3D'), ('2d', 'DIM_2D')], default=cvat.apps.engine.models.DimensionType['DIM_2D'], max_length=2)),
                ('subset', models.CharField(blank=True, default='', max_length=64)),
                ('assignee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assignees', to=settings.AUTH_USER_MODEL)),
                ('data', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='engine.data')),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='owners', to=settings.AUTH_USER_MODEL)),
                ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasks', related_query_name='task', to='engine.project')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='TrackedShape',
            fields=[
                ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS'), ('cuboid', 'CUBOID')], max_length=16)),
                ('occluded', models.BooleanField(default=False)),
                ('z_order', models.IntegerField(default=0)),
                ('points', cvat.apps.engine.models.FloatArrayField()),
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('outside', models.BooleanField(default=False)),
                ('track', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.labeledtrack')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='TrainingProject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('host', models.CharField(max_length=256)),
                ('username', models.CharField(max_length=256)),
                ('password', models.CharField(max_length=256)),
                ('training_id', models.CharField(max_length=64)),
                ('enabled', models.BooleanField(null=True)),
                ('project_class', models.CharField(blank=True, choices=[('OD', 'Object Detection')], max_length=2, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Video',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(default='', max_length=1024)),
                ('width', models.PositiveIntegerField()),
                ('height', models.PositiveIntegerField()),
                ('data', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='video', to='engine.data')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='TrainingProjectLabel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('training_label_id', models.CharField(max_length=64)),
                ('cvat_label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='training_project_label', to='engine.label')),
            ],
        ),
        migrations.CreateModel(
            name='TrainingProjectImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('idx', models.PositiveIntegerField()),
                ('training_image_id', models.CharField(max_length=64)),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.task')),
            ],
        ),
        migrations.CreateModel(
            name='TrackedShapeAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=4096)),
                ('shape', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.trackedshape')),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.attributespec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='ServerFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.CharField(max_length=1024)),
                ('data', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='server_files', to='engine.data')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Segment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_frame', models.IntegerField()),
                ('stop_frame', models.IntegerField()),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.task')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('estimated_quality', models.FloatField()),
                ('status', models.CharField(choices=[('accepted', 'ACCEPTED'), ('rejected', 'REJECTED'), ('review_further', 'REVIEW_FURTHER')], max_length=16)),
                ('assignee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='reviewed', to=settings.AUTH_USER_MODEL)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.job')),
                ('reviewer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='reviews', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='RemoteFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.CharField(max_length=1024)),
                ('data', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='remote_files', to='engine.data')),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.AddField(
            model_name='project',
            name='training_project',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='engine.trainingproject'),
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rating', models.FloatField(default=0.0)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='LabeledTrackAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=4096)),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.attributespec')),
                ('track', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.labeledtrack')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledShapeAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=4096)),
                ('shape', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.labeledshape')),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.attributespec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledImageAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=4096)),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.labeledimage')),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.attributespec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.AddField(
            model_name='label',
            name='project',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='engine.project'),
        ),
        migrations.AddField(
            model_name='label',
            name='task',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='engine.task'),
        ),
        migrations.CreateModel(
            name='JobCommit',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('version', models.PositiveIntegerField(default=0)),
                ('timestamp', models.DateTimeField(auto_now=True)),
                ('message', models.CharField(default='', max_length=4096)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='commits', to='engine.job')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.AddField(
            model_name='job',
            name='segment',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.segment'),
        ),
        migrations.CreateModel(
            name='Issue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('frame', models.PositiveIntegerField()),
                ('position', cvat.apps.engine.models.FloatArrayField()),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('resolved_date', models.DateTimeField(blank=True, null=True)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.job')),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issues', to=settings.AUTH_USER_MODEL)),
                ('resolver', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_issues', to=settings.AUTH_USER_MODEL)),
                ('review', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='engine.review')),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('message', models.TextField(default='')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.issue')),
            ],
        ),
        migrations.AddField(
            model_name='attributespec',
            name='label',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.label'),
        ),
        migrations.CreateModel(
            name='RelatedFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.FileField(max_length=1024, storage=cvat.apps.engine.models.MyFileSystemStorage(), upload_to=cvat.apps.engine.models.upload_path_handler)),
                ('data', models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_files', to='engine.data')),
                ('primary_image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_files', to='engine.image')),
            ],
            options={
                'default_permissions': (),
                'unique_together': {('data', 'path')},
            },
        ),
        migrations.AlterUniqueTogether(
            name='label',
            unique_together={('task', 'name')},
        ),
        migrations.CreateModel(
            name='ClientFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(max_length=1024, storage=cvat.apps.engine.models.MyFileSystemStorage(), upload_to=cvat.apps.engine.models.upload_path_handler)),
                ('data', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='client_files', to='engine.data')),
            ],
            options={
                'default_permissions': (),
                'unique_together': {('data', 'file')},
            },
        ),
        migrations.AlterUniqueTogether(
            name='attributespec',
            unique_together={('label', 'name')},
        ),
    ]
| 56.954348
| 246
| 0.580824
| 2,494
| 26,199
| 5.944266
| 0.103448
| 0.02914
| 0.050051
| 0.078651
| 0.787251
| 0.747791
| 0.705565
| 0.696863
| 0.668668
| 0.646948
| 0
| 0.008811
| 0.263598
| 26,199
| 459
| 247
| 57.078431
| 0.759602
| 0.001756
| 0
| 0.637168
| 1
| 0
| 0.150358
| 0.010133
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.002212
| 0.00885
| 0
| 0.017699
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
feb5dc7aa6e32742e66af2f8c2cc1c87bac34007
| 25,331
|
py
|
Python
|
google/cloud/aiplatform/matching_engine/matching_engine_index.py
|
nayaknishant/python-aiplatform
|
309b3b9d1688a62b0c60aada1e7de1d131fb163e
|
[
"Apache-2.0"
] | 1
|
2022-03-30T05:23:29.000Z
|
2022-03-30T05:23:29.000Z
|
google/cloud/aiplatform/matching_engine/matching_engine_index.py
|
xxxtrillionarie/GCP_MLOps_VertexAI_Workshop
|
d0d719c0bf557b908eb63f3a245db2f47b136eb3
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/aiplatform/matching_engine/matching_engine_index.py
|
xxxtrillionarie/GCP_MLOps_VertexAI_Workshop
|
d0d719c0bf557b908eb63f3a245db2f47b136eb3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Dict, List, Optional, Sequence, Tuple
from google.auth import credentials as auth_credentials
from google.protobuf import field_mask_pb2
from google.cloud.aiplatform import base
from google.cloud.aiplatform.compat.types import (
matching_engine_deployed_index_ref as gca_matching_engine_deployed_index_ref,
matching_engine_index as gca_matching_engine_index,
)
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform.matching_engine import matching_engine_index_config
from google.cloud.aiplatform import utils
_LOGGER = base.Logger(__name__)
class MatchingEngineIndex(base.VertexAiResourceNounWithFutureManager):
"""Matching Engine index resource for Vertex AI."""
client_class = utils.IndexClientWithOverride
_resource_noun = "indexes"
_getter_method = "get_index"
_list_method = "list_indexes"
_delete_method = "delete_index"
_parse_resource_name_method = "parse_index_path"
_format_resource_name_method = "index_path"
    def __init__(
        self,
        index_name: str,
        project: Optional[str] = None,
        location: Optional[str] = None,
        credentials: Optional[auth_credentials.Credentials] = None,
    ):
        """Retrieves an existing index given an index name or ID.
        Example Usage:
            my_index = aiplatform.MatchingEngineIndex(
                index_name='projects/123/locations/us-central1/indexes/my_index_id'
            )
            or
            my_index = aiplatform.MatchingEngineIndex(
                index_name='my_index_id'
            )
        Args:
            index_name (str):
                Required. A fully-qualified index resource name or a index ID.
                Example: "projects/123/locations/us-central1/indexes/my_index_id"
                or "my_index_id" when project and location are initialized or passed.
            project (str):
                Optional. Project to retrieve index from. If not set, project
                set in aiplatform.init will be used.
            location (str):
                Optional. Location to retrieve index from. If not set, location
                set in aiplatform.init will be used.
            credentials (auth_credentials.Credentials):
                Optional. Custom credentials to use to retrieve this Index. Overrides
                credentials set in aiplatform.init.
        """
        super().__init__(
            project=project,
            location=location,
            credentials=credentials,
            resource_name=index_name,
        )
        # Eagerly fetch the backing Index resource so the wrapper is usable
        # (and validates the name) immediately after construction.
        self._gca_resource = self._get_gca_resource(resource_name=index_name)
@property
def description(self) -> str:
"""Description of the index."""
self._assert_gca_resource_is_available()
return self._gca_resource.description
    @classmethod
    @base.optional_sync()
    def _create(
        cls,
        display_name: str,
        contents_delta_uri: str,
        config: matching_engine_index_config.MatchingEngineIndexConfig,
        description: Optional[str] = None,
        labels: Optional[Dict[str, str]] = None,
        project: Optional[str] = None,
        location: Optional[str] = None,
        credentials: Optional[auth_credentials.Credentials] = None,
        request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
        sync: bool = True,
    ) -> "MatchingEngineIndex":
        """Creates a MatchingEngineIndex resource.
        Args:
            display_name (str):
                Required. The display name of the Index.
                The name can be up to 128 characters long and
                can be consist of any UTF-8 characters.
            contents_delta_uri (str):
                Required. Allows inserting, updating or deleting the contents of the Matching Engine Index.
                The string must be a valid Google Cloud Storage directory path. If this
                field is set when calling IndexService.UpdateIndex, then no other
                Index field can be also updated as part of the same call.
                The expected structure and format of the files this URI points to is
                described at
                https://docs.google.com/document/d/12DLVB6Nq6rdv8grxfBsPhUA283KWrQ9ZenPBp0zUC30
            config (matching_engine_index_config.MatchingEngineIndexConfig):
                Required. The configuration with regard to the algorithms used for efficient search.
            description (str):
                Optional. The description of the Index.
            labels (Dict[str, str]):
                Optional. The labels with user-defined
                metadata to organize your Index.
                Label keys and values can be no longer than 64
                characters (Unicode codepoints), can only
                contain lowercase letters, numeric characters,
                underscores and dashes. International characters
                are allowed.
                See https://goo.gl/xmQnxf for more information
                on and examples of labels. No more than 64 user
                labels can be associated with one
                Index(System labels are excluded)."
                System reserved label keys are prefixed with
                "aiplatform.googleapis.com/" and are immutable.
            project (str):
                Optional. Project to create EntityType in. If not set, project
                set in aiplatform.init will be used.
            location (str):
                Optional. Location to create EntityType in. If not set, location
                set in aiplatform.init will be used.
            credentials (auth_credentials.Credentials):
                Optional. Custom credentials to use to create EntityTypes. Overrides
                credentials set in aiplatform.init.
            request_metadata (Sequence[Tuple[str, str]]):
                Optional. Strings which should be sent along with the request as metadata.
            sync (bool):
                Optional. Whether to execute this creation synchronously. If False, this method
                will be executed in concurrent Future and any downstream object will
                be immediately returned and synced when the Future has completed.
        Returns:
            MatchingEngineIndex - Index resource object
        """
        # Build the Index proto; algorithm config and the GCS contents URI are
        # carried in the free-form 'metadata' field expected by the service.
        gapic_index = gca_matching_engine_index.Index(
            display_name=display_name,
            description=description,
            metadata={
                "config": config.as_dict(),
                "contentsDeltaUri": contents_delta_uri,
            },
        )
        if labels:
            # Validate label keys/values before sending the request.
            utils.validate_labels(labels)
            gapic_index.labels = labels
        api_client = cls._instantiate_client(location=location, credentials=credentials)
        # create_index returns a long-running operation (LRO).
        create_lro = api_client.create_index(
            parent=initializer.global_config.common_location_path(
                project=project, location=location
            ),
            index=gapic_index,
            metadata=request_metadata,
        )
        _LOGGER.log_create_with_lro(cls, create_lro)
        # Block until the LRO completes, then wrap the created resource.
        created_index = create_lro.result()
        _LOGGER.log_create_complete(cls, created_index, "index")
        index_obj = cls(
            index_name=created_index.name,
            project=project,
            location=location,
            credentials=credentials,
        )
        return index_obj
def update_metadata(
    self,
    display_name: Optional[str] = None,
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
) -> "MatchingEngineIndex":
    """Updates display name, description and/or labels of this index.

    Args:
        display_name (str):
            Optional. New display name of the Index; up to 128 UTF-8
            characters.
        description (str):
            Optional. New description of the Index.
        labels (Dict[str, str]):
            Optional. User-defined labels to organize your Indexes.
            Keys and values may be at most 64 Unicode codepoints and may
            contain only lowercase letters, numeric characters,
            underscores and dashes (international characters allowed).
            At most 64 user labels per Index; keys prefixed with
            "aiplatform.googleapis.com/" are system-reserved and
            immutable. See https://goo.gl/xmQnxf for details.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request as metadata.

    Returns:
        MatchingEngineIndex - The updated index resource object.
    """
    self.wait()

    # Only fields the caller actually supplied go into the update mask.
    mask_paths = []
    if labels:
        utils.validate_labels(labels)
        mask_paths.append("labels")
    if display_name is not None:
        mask_paths.append("display_name")
    if description is not None:
        mask_paths.append("description")

    update_mask = field_mask_pb2.FieldMask(paths=mask_paths)

    gapic_index = gca_matching_engine_index.Index(
        name=self.resource_name,
        display_name=display_name,
        description=description,
        labels=labels,
    )

    _LOGGER.log_action_start_against_resource(
        "Updating", "index", self,
    )

    update_lro = self.api_client.update_index(
        index=gapic_index,
        update_mask=update_mask,
        metadata=request_metadata,
    )

    _LOGGER.log_action_started_against_resource_with_lro(
        "Update", "index", self.__class__, update_lro
    )

    # Block on the long-running operation and cache the fresh resource.
    self._gca_resource = update_lro.result()

    _LOGGER.log_action_completed_against_resource("index", "Updated", self)

    return self
def update_embeddings(
    self,
    contents_delta_uri: str,
    is_complete_overwrite: Optional[bool] = None,
    request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
) -> "MatchingEngineIndex":
    """Updates the embeddings for this index.

    Args:
        contents_delta_uri (str):
            Required. Google Cloud Storage directory whose files describe
            the contents to insert, update or delete in the Index. If this
            field is set when calling IndexService.UpdateIndex, no other
            Index field may be updated in the same call. Expected file
            structure/format:
            https://docs.google.com/document/d/12DLVB6Nq6rdv8grxfBsPhUA283KWrQ9ZenPBp0zUC30
        is_complete_overwrite (str):
            Optional. When set together with contentsDeltaUri, the existing
            content of the Index is replaced by the data at contentsDeltaUri.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request as metadata.

    Returns:
        MatchingEngineIndex - The updated index resource object.
    """
    self.wait()

    # Both knobs live under the single "metadata" proto field, so the
    # mask carries at most that one path.
    mask_paths = []
    if contents_delta_uri or is_complete_overwrite:
        mask_paths.append("metadata")

    update_mask = field_mask_pb2.FieldMask(paths=mask_paths)

    gapic_index = gca_matching_engine_index.Index(
        name=self.resource_name,
        metadata={
            "contentsDeltaUri": contents_delta_uri,
            "isCompleteOverwrite": is_complete_overwrite,
        },
    )

    _LOGGER.log_action_start_against_resource(
        "Updating", "index", self,
    )

    update_lro = self.api_client.update_index(
        index=gapic_index,
        update_mask=update_mask,
        metadata=request_metadata,
    )

    _LOGGER.log_action_started_against_resource_with_lro(
        "Update", "index", self.__class__, update_lro
    )

    # Block on the long-running operation and cache the fresh resource.
    self._gca_resource = update_lro.result()

    _LOGGER.log_action_completed_against_resource("index", "Updated", self)

    return self
@property
def deployed_indexes(
    self,
) -> List[gca_matching_engine_deployed_index_ref.DeployedIndexRef]:
    """Returns a list of deployed index references that originate from this index.

    Returns:
        List[gca_matching_engine_deployed_index_ref.DeployedIndexRef] - Deployed index references
    """
    # Block until any in-flight LRO on this resource completes so the
    # returned references reflect the latest known server state.
    self.wait()
    return self._gca_resource.deployed_indexes
@classmethod
def create_tree_ah_index(
    cls,
    display_name: str,
    contents_delta_uri: str,
    dimensions: int,
    approximate_neighbors_count: int,
    leaf_node_embedding_count: Optional[int] = None,
    leaf_nodes_to_search_percent: Optional[float] = None,
    distance_measure_type: Optional[
        matching_engine_index_config.DistanceMeasureType
    ] = None,
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
    sync: bool = True,
) -> "MatchingEngineIndex":
    """Creates a MatchingEngineIndex resource that uses the tree-AH algorithm.

    Example Usage:
        my_index = aiplatform.Index.create_tree_ah_index(
            display_name="my_display_name",
            contents_delta_uri="gs://my_bucket/embeddings",
            dimensions=1,
            approximate_neighbors_count=150,
            distance_measure_type="SQUARED_L2_DISTANCE",
            leaf_node_embedding_count=100,
            leaf_nodes_to_search_percent=50,
            description="my description",
            labels={ "label_name": "label_value" },
        )

    Args:
        display_name (str):
            Required. Display name of the Index; up to 128 UTF-8 characters.
        contents_delta_uri (str):
            Required. Google Cloud Storage directory whose files describe
            the contents to insert, update or delete in the Index.
            Expected file structure/format:
            https://docs.google.com/document/d/12DLVB6Nq6rdv8grxfBsPhUA283KWrQ9ZenPBp0zUC30
        dimensions (int):
            Required. The number of dimensions of the input vectors.
        approximate_neighbors_count (int):
            Required. Default number of neighbors found via approximate
            search before exact reordering (a more expensive distance
            computation over the approximate results).
        leaf_node_embedding_count (int):
            Optional. Number of embeddings on each leaf node; the service
            defaults to 1000 if not set.
        leaf_nodes_to_search_percent (float):
            Optional. Default percentage of leaf nodes any query may
            search, in 1-100 inclusive; the service defaults to 10.
        distance_measure_type (matching_engine_index_config.DistanceMeasureType):
            Optional. The distance measure used in nearest neighbor search.
        description (str):
            Optional. The description of the Index.
        labels (Dict[str, str]):
            Optional. User-defined labels to organize your Indexes. Keys
            and values may be at most 64 Unicode codepoints and may
            contain only lowercase letters, numeric characters,
            underscores and dashes (international characters allowed).
            At most 64 user labels per Index; keys prefixed with
            "aiplatform.googleapis.com/" are system-reserved and
            immutable. See https://goo.gl/xmQnxf for details.
        project (str):
            Optional. Project to create the Index in; falls back to the
            project set in aiplatform.init.
        location (str):
            Optional. Location to create the Index in; falls back to the
            location set in aiplatform.init.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials; overrides those set in
            aiplatform.init.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request as metadata.
        sync (bool):
            Optional. Whether to execute this creation synchronously. If
            False, it runs in a concurrent Future and downstream objects
            are returned immediately and synced once the Future completes.

    Returns:
        MatchingEngineIndex - Index resource object
    """
    tree_ah_config = matching_engine_index_config.TreeAhConfig(
        leaf_node_embedding_count=leaf_node_embedding_count,
        leaf_nodes_to_search_percent=leaf_nodes_to_search_percent,
    )
    index_config = matching_engine_index_config.MatchingEngineIndexConfig(
        dimensions=dimensions,
        algorithm_config=tree_ah_config,
        approximate_neighbors_count=approximate_neighbors_count,
        distance_measure_type=distance_measure_type,
    )
    # Delegate the actual LRO handling to the shared _create helper.
    return cls._create(
        display_name=display_name,
        contents_delta_uri=contents_delta_uri,
        config=index_config,
        description=description,
        labels=labels,
        project=project,
        location=location,
        credentials=credentials,
        request_metadata=request_metadata,
        sync=sync,
    )
@classmethod
def create_brute_force_index(
    cls,
    display_name: str,
    contents_delta_uri: str,
    dimensions: int,
    distance_measure_type: Optional[
        matching_engine_index_config.DistanceMeasureType
    ] = None,
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
    sync: bool = True,
) -> "MatchingEngineIndex":
    """Creates a MatchingEngineIndex resource that uses the brute force algorithm.

    Example Usage:
        my_index = aiplatform.Index.create_brute_force_index(
            display_name="my_display_name",
            contents_delta_uri="gs://my_bucket/embeddings",
            dimensions=1,
            distance_measure_type="SQUARED_L2_DISTANCE",
            description="my description",
            labels={ "label_name": "label_value" },
        )

    Args:
        display_name (str):
            Required. Display name of the Index; up to 128 UTF-8 characters.
        contents_delta_uri (str):
            Required. Google Cloud Storage directory whose files describe
            the contents to insert, update or delete in the Index.
            Expected file structure/format:
            https://docs.google.com/document/d/12DLVB6Nq6rdv8grxfBsPhUA283KWrQ9ZenPBp0zUC30
        dimensions (int):
            Required. The number of dimensions of the input vectors.
        distance_measure_type (matching_engine_index_config.DistanceMeasureType):
            Optional. The distance measure used in nearest neighbor search.
        description (str):
            Optional. The description of the Index.
        labels (Dict[str, str]):
            Optional. User-defined labels to organize your Indexes. Keys
            and values may be at most 64 Unicode codepoints and may
            contain only lowercase letters, numeric characters,
            underscores and dashes (international characters allowed).
            At most 64 user labels per Index; keys prefixed with
            "aiplatform.googleapis.com/" are system-reserved and
            immutable. See https://goo.gl/xmQnxf for details.
        project (str):
            Optional. Project to create the Index in; falls back to the
            project set in aiplatform.init.
        location (str):
            Optional. Location to create the Index in; falls back to the
            location set in aiplatform.init.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials; overrides those set in
            aiplatform.init.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request as metadata.
        sync (bool):
            Optional. Whether to execute this creation synchronously. If
            False, it runs in a concurrent Future and downstream objects
            are returned immediately and synced once the Future completes.

    Returns:
        MatchingEngineIndex - Index resource object
    """
    # Brute force has no tunables beyond the shared config fields.
    index_config = matching_engine_index_config.MatchingEngineIndexConfig(
        dimensions=dimensions,
        algorithm_config=matching_engine_index_config.BruteForceConfig(),
        distance_measure_type=distance_measure_type,
    )
    return cls._create(
        display_name=display_name,
        contents_delta_uri=contents_delta_uri,
        config=index_config,
        description=description,
        labels=labels,
        project=project,
        location=location,
        credentials=credentials,
        request_metadata=request_metadata,
        sync=sync,
    )
| 42.288815
| 116
| 0.623268
| 2,753
| 25,331
| 5.572466
| 0.135125
| 0.009126
| 0.026009
| 0.014862
| 0.775243
| 0.748517
| 0.711166
| 0.706408
| 0.690763
| 0.690763
| 0
| 0.007357
| 0.318503
| 25,331
| 598
| 117
| 42.359532
| 0.881307
| 0.551301
| 0
| 0.604348
| 0
| 0
| 0.036213
| 0
| 0
| 0
| 0
| 0
| 0.004348
| 1
| 0.034783
| false
| 0
| 0.034783
| 0
| 0.134783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
227da6004fc76cd422c9164eb2afc40bcf2e2618
| 63
|
py
|
Python
|
panopticon/django/__init__.py
|
mobify/python-panopticon
|
081f341c5fcb6d7d1c438d66cd1dd8a8083ea8a1
|
[
"MIT"
] | 3
|
2016-04-16T05:08:13.000Z
|
2017-06-20T19:10:06.000Z
|
panopticon/django/__init__.py
|
elbaschid/python-panopticon
|
081f341c5fcb6d7d1c438d66cd1dd8a8083ea8a1
|
[
"MIT"
] | 4
|
2016-05-26T22:57:45.000Z
|
2016-06-23T21:40:57.000Z
|
panopticon/django/__init__.py
|
elbaschid/python-panopticon
|
081f341c5fcb6d7d1c438d66cd1dd8a8083ea8a1
|
[
"MIT"
] | 2
|
2016-03-25T12:44:31.000Z
|
2018-02-14T22:16:26.000Z
|
# Legacy Django (<3.2) application auto-discovery hook: names the
# AppConfig subclass to use for this package.
default_app_config = "panopticon.django.apps.PanopticonConfig"
| 31.5
| 62
| 0.857143
| 7
| 63
| 7.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 63
| 1
| 63
| 63
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.619048
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
228051f708c9ce73e4ea5fed3e1eb55ca03f21e0
| 91
|
py
|
Python
|
dropdown/apps.py
|
earthpyy/django-rest-dropdown
|
5222753f0e5b9ec5b589d6576cc9860f23a54e6d
|
[
"MIT"
] | 2
|
2022-01-11T03:23:22.000Z
|
2022-01-21T08:18:27.000Z
|
dropdown/apps.py
|
earthpyy/django-rest-dropdown
|
5222753f0e5b9ec5b589d6576cc9860f23a54e6d
|
[
"MIT"
] | null | null | null |
dropdown/apps.py
|
earthpyy/django-rest-dropdown
|
5222753f0e5b9ec5b589d6576cc9860f23a54e6d
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig


class DropdownConfig(AppConfig):
    """Django application configuration for the ``dropdown`` app."""
    name = 'dropdown'
| 15.166667
| 33
| 0.758242
| 10
| 91
| 6.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 5
| 34
| 18.2
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
2292624cde27e72457034e44e1127862ef316d4f
| 462
|
py
|
Python
|
src/kemokrw/client.py
|
Kemok-Repos/kemokrw
|
bfe2a82e2ef5d3580ed5dfe65129b30bd3fc4971
|
[
"MIT"
] | null | null | null |
src/kemokrw/client.py
|
Kemok-Repos/kemokrw
|
bfe2a82e2ef5d3580ed5dfe65129b30bd3fc4971
|
[
"MIT"
] | null | null | null |
src/kemokrw/client.py
|
Kemok-Repos/kemokrw
|
bfe2a82e2ef5d3580ed5dfe65129b30bd3fc4971
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class ApiClient(ABC):
    """Abstract API client.

    Encapsulates the handling of an API to simplify authentication
    workflows and calls to its endpoints.
    (Docstring translated from the original Spanish.)

    Methods
    -------
    get():
        Performs a request against an endpoint using the GET verb.
    """
    @abstractmethod
    def get(self):
        pass

    # NOTE(review): post/put/delete are concrete no-ops rather than
    # abstract methods — subclasses are not forced to implement them.
    def post(self):
        pass

    def put(self):
        pass

    def delete(self):
        pass
| 18.48
| 112
| 0.612554
| 57
| 462
| 4.964912
| 0.649123
| 0.113074
| 0.116608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.311688
| 462
| 24
| 113
| 19.25
| 0.889937
| 0.458874
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.363636
| false
| 0.363636
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
229297867251239fe769ebd85fa258512c95d035
| 30,702
|
py
|
Python
|
bayes_opt/functions.py
|
ntienvu/KnowingOptimumValue_BO
|
42225cb9d61c1225bd757fe9dd02834a0bc7a3e6
|
[
"MIT"
] | 14
|
2020-06-30T00:36:14.000Z
|
2022-01-11T13:15:53.000Z
|
bayes_opt/functions.py
|
ntienvu/KnowingOptimumValue_BO
|
42225cb9d61c1225bd757fe9dd02834a0bc7a3e6
|
[
"MIT"
] | null | null | null |
bayes_opt/functions.py
|
ntienvu/KnowingOptimumValue_BO
|
42225cb9d61c1225bd757fe9dd02834a0bc7a3e6
|
[
"MIT"
] | 2
|
2020-10-17T15:27:06.000Z
|
2021-02-27T10:34:04.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
from collections import OrderedDict
from scipy.stats import multivariate_normal
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def reshape(x, input_dim):
    '''
    Reshapes x into a matrix with input_dim columns
    '''
    arr = np.array(x)
    # A flat vector holding exactly `input_dim` values is promoted to a
    # single-row matrix; any other input is returned as-converted.
    if arr.size == input_dim:
        arr = arr.reshape((1, input_dim))
    return arr
class functions:
    """Shared helpers mixed into every benchmark-function class below."""
    def plot(self):
        # Contour-plots self.func over self.bounds (assumes a 2-D domain).
        bounds=self.bounds
        if isinstance(bounds,dict):
            # Get the name of the parameters
            keys = bounds.keys()
            arr_bounds = []
            for key in keys:
                arr_bounds.append(bounds[key])
            arr_bounds = np.asarray(arr_bounds)
        else:
            arr_bounds=np.asarray(bounds)
        X=np.array([np.arange(x[0], x[1], 0.01) for x in arr_bounds])
        X=X.reshape(-1,2)
        X1=np.array([X[:,0]])
        X2=np.array([X[:,1]])
        X1, X2 = np.meshgrid(X1, X2)
        y=np.zeros([X1.shape[1],X2.shape[1]])
        #print(y.shape)
        #print(X1.shape)
        #print(X2.shape)
        for ii in range(0,X1.shape[1]):
            for jj in range(0,X2.shape[1]):
                # NOTE(review): indexes the meshgrid diagonally
                # (X1[ii,ii], X2[jj,jj]) rather than X1[ii,jj] —
                # confirm this is intended.
                Xij=np.array([X1[ii,ii],X2[jj,jj]])
                #print(Xij)
                y[ii,jj]=self.func(Xij)
        # f1=plt.figure(1)
        # ax=plt.axes(projection='3d')
        # ax.plot_surface(X1,X2,y)
        plt.contourf(X1,X2,y,levels=np.arange(0,35,1))
        plt.colorbar()
    def findSdev(self):
        # Estimates the standard deviation of self.func from uniform
        # random samples drawn inside self.bounds.
        num_points_per_dim=100
        bounds=self.bounds
        if isinstance(bounds,dict):
            # Get the name of the parameters
            keys = bounds.keys()
            arr_bounds = []
            for key in keys:
                arr_bounds.append(bounds[key])
            arr_bounds = np.asarray(arr_bounds)
        else:
            arr_bounds=np.asarray(bounds)
        X=np.array([np.random.uniform(x[0], x[1], size=num_points_per_dim) for x in arr_bounds])
        X=X.reshape(num_points_per_dim,-1)
        y=self.func(X)
        sdv=np.std(y)
        #maxima=np.max(y)
        #minima=np.min(y)
        return sdv
class saddlepoint(functions):
    """Saddle-point benchmark f(x) = x1^2 - x2^2 on [-1, 1]^2."""
    def __init__(self):
        self.input_dim=2
        self.bounds=OrderedDict({'x1':(-1,1),'x2':(-1,1)})
        self.fstar=0
        self.min=0
        self.ismax=1
        self.name='saddlepoint'
    def func(self,X):
        X = reshape(X,self.input_dim)
        n = X.shape[0]  # NOTE(review): unused
        fval=X[:,0]*X[:,0]-X[:,1]*X[:,1]
        return fval*self.ismax
class sin(functions):
    """1-D sin(x) benchmark on [-1, 15]."""
    def __init__(self,sd=None):
        self.input_dim=1
        self.bounds={'x':(-1,15)}
        #self.bounds={'x':(0,1)}
        self.fstar=11  # NOTE(review): inconsistent with max(sin)=1 — confirm
        self.min=0
        self.ismax=1
        self.name='sin'
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
    def func(self,x):
        x=np.asarray(x)
        fval=np.sin(x)
        return fval*self.ismax
class sincos(functions):
    """1-D benchmark f(x) = x*sin(x) + x*cos(2x) on [-1, 2]."""
    def __init__(self,sd=None):
        self.input_dim=1
        self.bounds={'x':(-1,2)}
        #self.bounds={'x':(0,1)}
        self.fstar=11  # NOTE(review): looks inconsistent with these bounds — confirm
        self.min=0
        self.ismax=1
        self.name='sincos'
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
    def func(self,x):
        x=np.asarray(x)
        fval=x*np.sin(x)+x*np.cos(2*x)
        return fval*self.ismax
class fourier(functions):
    '''
    Fourier benchmark f(x) = x*sin(x) + x*cos(2x), minimized on [0, 10].
    (The original docstring said "Forrester function" — mislabeled.)
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,sd=None):
        self.bounds = {'x':(0,10)}
        self.sd=0
        self.input_dim = 1
        self.ismax=-1
        self.min = 4.795 ## approx
        self.fstar = -9.5083483926941064*self.ismax ## approx
        self.name='fourier'
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
    def func(self,X):
        X=np.asarray(X)
        X = X.reshape((len(X),1))
        n = X.shape[0]
        fval = X*np.sin(X)+X*np.cos(2*X)
        if self.sd ==0:
            noise = np.zeros(n).reshape(n,1)
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
        return self.ismax*fval.reshape(n,1) + noise
class branin(functions):
    """Branin benchmark on [-5,10] x [0,15]; minimized via ismax=-1."""
    def __init__(self,sd=None):
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
        self.input_dim=2
        #if sd==None: self.sd = 0
        #else: self.sd=sd
        self.bounds=OrderedDict([('x1',(-5,10)),('x2',(0,15))])
        #self.bounds=OrderedDict([('x1',(-20,70)),('x2',(-50,50))])
        self.ismax=-1
        self.fstar=0.397887*self.ismax
        self.min=[9.424,2.475]
        self.name='branin'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        n=X.shape[0]
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        # Standard Branin coefficients.
        a=1
        b=5.1/(4*np.pi**2)
        c=5/np.pi
        r=6
        s=10
        t=1/(8*np.pi)
        fx=a*(x2-b*x1*x1+c*x1-r)**2+s*(1-t)*np.cos(x1)+s
        if self.sd==0:
            return fx*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fx*self.ismax+np.ravel(noise)
class forrester(functions):
    '''
    Forrester function f(x) = (6x-2)^2 * sin(12x-4), minimized on [0, 1].
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self, sd=None):
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
        self.ismax=-1
        self.input_dim = 1
        self.min = 0.78 ## approx
        self.fstar = -6.03*self.ismax ## approx
        self.bounds = {'x':(0,1)}
        self.name='forrester'
        #self.sd=0
    def func(self,X):
        X=np.asarray(X)
        X = X.reshape((len(X),1))
        n = X.shape[0]
        fval = ((6*X -2)**2)*np.sin(12*X-4)
        if self.sd!=0:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
        else:
            return fval*self.ismax
class rosenbrock(functions):
    '''
    rosenbrock function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=0):
        if sd==0:
            self.sd=0
        else:
            # NOTE(review): ignores the passed `sd` and calls findSdev(),
            # which reads self.bounds before it is assigned below —
            # would raise AttributeError; confirm intended.
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = OrderedDict([('x1',(-2.048,2.048)),('x2',(-2.048,2.048))])
        else: self.bounds = bounds
        self.min = [(0, 0)]
        self.ismax=-1
        self.fstar = 0
        self.name = 'Rosenbrock'
        #self.sd=self.findSdev()
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        n=X.shape[0]
        n=1
        if len(X.shape)==1:# one observation
            x1=X[0]
            x2=X[1]
        else:# multiple observations
            x1=X[:,0]
            x2=X[:,1]
            n=X.shape[0]
        fx = 100*(x2-x1**2)**2 + (x1-1)**2
        if self.sd==0:
            return fx*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fx*self.ismax+np.ravel(noise)
class beale(functions):
    '''
    beale function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=None):
        if sd==None:
            self.sd=0
        else:
            # NOTE(review): findSdev() reads self.bounds, which is not yet
            # set at this point — would raise AttributeError; confirm.
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = OrderedDict({'x1':(-1,1),'x2':(-1,1)})
        else: self.bounds = bounds
        self.min = [(3, 0.5)]
        self.ismax=-1
        self.fstar = 0
        self.name = 'Beale'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        fval = (1.5-x1+x1*x2)**2+(2.25-x1+x1*x2**2)**2+(2.625-x1+x1*x2**3)**2
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class dropwave(functions):
    '''
    dropwave function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=None):
        if sd==None:
            self.sd=0
        else:
            # NOTE(review): findSdev() reads self.bounds before it is set
            # below — would raise AttributeError; confirm.
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = OrderedDict([('x1',(-5.12,5.12)),('x2',(-5.12,5.12))])
        else: self.bounds = bounds
        self.min = [(0, 0)]
        self.ismax=-1
        self.fstar = -1*self.ismax
        self.name = 'dropwave'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        n=1  # NOTE(review): overwritten below
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        fval = - (1+np.cos(12*np.sqrt(x1**2+x2**2))) / (0.5*(x1**2+x2**2)+2)
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class cosines(functions):
    '''
    Cosines function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=None):
        if sd==None or sd==0:
            self.sd=0
        else:
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = OrderedDict([('x1',(0,1)),('x2',(0,1))])
        else: self.bounds = bounds
        self.min = [(0.31426205, 0.30249864)]
        self.ismax=1
        self.fstar = -1.59622468*self.ismax
        self.name = 'Cosines'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        #X = reshape(X,self.input_dim)
        #n = X.shape[0]
        # Affine rescaling into the function's canonical domain.
        u = 1.6*x1-0.5
        v = 1.6*x2-0.5
        fval = 1-(u**2 + v**2 - 0.3*np.cos(3*np.pi*u) - 0.3*np.cos(3*np.pi*v) )
        return self.ismax*fval
class goldstein(functions):
    '''
    Goldstein function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=None):
        if sd==None or sd==0:
            self.sd=0
        else:
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = {'x1':(-2,2),'x2':(-2,2)}
        else: self.bounds = bounds
        self.ismax=-1
        self.min = [(0,-1)]
        self.fstar = 3*self.ismax
        self.name = 'Goldstein'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        # Goldstein-Price expressed as the product of its two factors.
        fact1a = (x1 + x2 + 1)**2
        fact1b = 19 - 14*x1 + 3*x1**2 - 14*x2 + 6*x1*x2 + 3*x2**2
        fact1 = 1 + fact1a*fact1b
        fact2a = (2*x1 - 3*x2)**2
        fact2b = 18 - 32*x1 + 12*x1**2 + 48*x2 - 36*x1*x2 + 27*x2**2
        fact2 = 30 + fact2a*fact2b
        fval = fact1*fact2
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class sixhumpcamel(functions):
    '''
    Six hump camel function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
        self.input_dim = 2
        if bounds == None: self.bounds = OrderedDict([('x1',(-3,3)),('x2',(-2,2))])
        else: self.bounds = bounds
        self.min = [(0.0898,-0.7126),(-0.0898,0.7126)]
        self.ismax=-1
        self.fstar = -1.0316*self.ismax
        self.name = 'Six-hump camel'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        n=1  # NOTE(review): overwritten below
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        term1 = (4-2.1*x1**2+(x1**4)/3) * x1**2
        term2 = x1*x2
        term3 = (-4+4*x2**2) * x2**2
        fval = term1 + term2 + term3
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class mccormick(functions):
    '''
    Mccormick function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
        self.input_dim = 2
        if bounds == None: self.bounds = [(-1.5,4),(-3,4)]
        else: self.bounds = bounds
        self.min = [(-0.54719,-1.54719)]
        self.ismax=-1
        self.fstar = -1.9133*self.ismax
        self.name = 'Mccormick'
    def func(self,X):
        # NOTE(review): unlike siblings, assumes a 2-D batch — a flat
        # 2-vector that is not promoted by reshape() would fail indexing.
        X = reshape(X,self.input_dim)
        x1=X[:,0]
        x2=X[:,1]
        term1 = np.sin(x1 + x2)
        term2 = (x1 - x2)**2
        term3 = -1.5*x1
        term4 = 2.5*x2
        fval = term1 + term2 + term3 + term4 + 1
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class powers(functions):
    '''
    Powers function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            self.sd=self.findSdev()
        self.input_dim = 2
        if bounds == None: self.bounds = [(-1,1),(-1,1)]
        else: self.bounds = bounds
        self.min = [(0,0)]
        self.fstar = 0
        #if sd==None: self.sd = 0
        #else: self.sd=sd
        # NOTE(review): no self.ismax is set here, unlike siblings;
        # func() below does not use it either — confirm intended.
        self.name = 'Sum of Powers'
    def func(self,x):
        x = reshape(x,self.input_dim)
        n = x.shape[0]
        if x.shape[1] != self.input_dim:
            # NOTE(review): returns an error string instead of raising —
            # callers doing arithmetic on the result would fail oddly.
            return 'wrong input dimension'
        else:
            x1 = x[:,0]
            x2 = x[:,1]
            fval = abs(x1)**2 + abs(x2)**3
            if self.sd ==0:
                noise = np.zeros(n).reshape(n,1)
            else:
                noise = np.random.normal(0,self.sd,n).reshape(n,1)
            return fval.reshape(n,1) + noise
class eggholder(functions):
    """Egg-holder benchmark on [-512, 512]^2; minimized via ismax=-1."""
    def __init__(self,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            #self.sd=self.findSdev()
            self.sd=sd
        self.input_dim = 2
        #self.bounds = {'x1':(-512,512),'x2':(-512,512)}
        self.bounds = [(-512,512),(-512,512)]
        # NOTE(review): the `bounds` argument is accepted but never used.
        self.min = [(512,404.2319)]
        self.ismax=-1
        self.fstar = -959.6407*self.ismax
        self.name = 'Egg-holder'
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        if len(X.shape)==1:
            x1=X[0]
            x2=X[1]
        else:
            x1=X[:,0]
            x2=X[:,1]
        fval = -(x2+47) * np.sin(np.sqrt(abs(x2+x1/2+47))) + -x1 * np.sin(np.sqrt(abs(x1-(x2+47))))
        n=X.shape[0]
        if self.sd==0:
            return fval*self.ismax
        else:
            noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
            return fval*self.ismax+np.ravel(noise)
class alpine1(functions):
    '''
    Alpine1 function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,input_dim,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            self.sd=self.findSdev()
        if bounds == None:
            self.bounds = bounds =[(-10,10)]*input_dim
        else:
            self.bounds = bounds
        self.min = [(0)]*input_dim
        self.input_dim = input_dim
        self.ismax=-1
        self.fstar = -46*self.ismax
        self.name='alpine1'
    def func(self,X):
        X = reshape(X,self.input_dim)
        #n = X.shape[0]
        temp=(X*np.sin(X) + 0.1*X)
        # Sum the per-dimension terms; axis handling differs for 1-D input.
        if len(temp.shape)<=1:
            fval=np.sum(temp)
        else:
            fval = np.sum(temp,axis=1)
        n=X.shape[0]
        if self.sd ==0:
            noise = np.zeros(n).reshape(n,1)
        else:
            noise = np.random.normal(0,self.sd,n).reshape(n,1)
        return self.ismax*fval.reshape(n,1) + noise
class alpine2(functions):
    '''
    Alpine2 function
    :param bounds: the box constraints to define the domain in which the function is optimized.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self,input_dim,bounds=None,sd=0):
        if sd==None or sd==0:
            self.sd=0
        else:
            self.sd=self.findSdev()
        if bounds == None:
            self.bounds = bounds =[(1,10)]*input_dim
        else:
            self.bounds = bounds
        self.min = [(7.917)]*input_dim
        self.ismax=-1
        self.fstar = self.ismax*(-2.808**input_dim)
        self.input_dim = input_dim
        self.name='Alpine2'
    def internal_func(self,X):
        # Product over dimensions of sqrt(x_i)*sin(x_i) for one point.
        fval = np.cumprod(np.sqrt(X))[self.input_dim-1]*np.cumprod(np.sin(X))[self.input_dim-1]
        #fval = np.cumprod(np.sqrt(X)*np.sin(X))
        return fval
    def func(self,X):
        X=np.asarray(X)
        X = reshape(X,self.input_dim)
        #n = X.shape[0]
        #fval = np.cumprod(np.sqrt(X),axis=1)[:,self.input_dim-1]*np.cumprod(np.sin(X),axis=1)[:,self.input_dim-1]
        #fval = np.cumprod(np.sqrt(X))[:,self.input_dim-1]*np.cumprod(np.sin(X))[:,self.input_dim-1]
        fval=[self.ismax*self.internal_func(val) for idx, val in enumerate(X)]
        fval=np.asarray(fval)
        #noise = np.random.normal(0,0.1*self.sd,n).reshape(n,1)
        n=X.shape[0]
        if self.sd ==0:
            noise = np.zeros(n).reshape(n,1)
        else:
            noise = np.random.normal(0,self.sd,n).reshape(n,1)
        # NOTE(review): fval already carries ismax, then ismax is applied
        # again here — double negation yields the raw value; confirm.
        return self.ismax*fval.reshape(n,1) + noise
class gSobol(functions):
    '''
    gSobol function.

    :param a: one-dimensional array of positive coefficients of the function.
    :param bounds: box constraints defining the optimisation domain.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    :raises ValueError: if any coefficient in ``a`` is not positive.
    '''
    def __init__(self, a, bounds=None, sd=None):
        self.a = a
        self.input_dim = len(self.a)
        if bounds is None:
            self.bounds = [(-4, 6)] * self.input_dim
        else:
            self.bounds = bounds
        # Fixed: the original *returned* an error string from __init__, which
        # raises an unhelpful TypeError at instantiation; raise explicitly.
        if not (np.asarray(self.a) > 0).all():
            raise ValueError('Wrong vector of coefficients, they all should be positive')
        # Sobol sensitivity coefficient of each input dimension.
        self.S_coef = (1 / (3 * ((1 + self.a) ** 2))) / (np.prod(1 + 1 / (3 * ((1 + self.a) ** 2))) - 1)
        self.sd = 0 if sd is None else sd
        self.ismax = -1
        self.fstar = 0  # incorrect (kept from original)
        self.name = 'gSobol'

    def func(self, X):
        '''Evaluate gSobol row-wise, optionally adding Gaussian noise.'''
        X = reshape(X, self.input_dim)
        n = X.shape[0]
        aux = (abs(4 * X - 2) + np.ones(n).reshape(n, 1) * self.a) / (1 + np.ones(n).reshape(n, 1) * self.a)
        # Product over dimensions = last column of the row-wise cumulative product.
        fval = np.cumprod(aux, axis=1)[:, self.input_dim - 1]
        if self.sd == 0:
            noise = np.zeros(n).reshape(n, 1)
        else:
            noise = np.random.normal(0, self.sd, n).reshape(n, 1)
        return self.ismax * fval.reshape(n, 1) + noise
#####
class ackley(functions):
    '''
    Ackley function.

    :param input_dim: dimensionality of the search space.
    :param bounds: box constraints defining the optimisation domain.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self, input_dim, bounds=None, sd=None):
        self.input_dim = input_dim
        if sd is None or sd == 0:
            self.sd = 0
        else:
            self.sd = sd
        if bounds is None:
            self.bounds = [(-32.768, 32.768)] * self.input_dim
        else:
            self.bounds = bounds
        # Fixed: was [(0.)*self.input_dim], which evaluates to the single
        # value [0.0] instead of a per-dimension list of zeros.
        self.min = [0.] * self.input_dim
        self.fstar = 0
        self.ismax = -1
        self.name = 'ackley'

    def func(self, X):
        '''Evaluate the (negated) Ackley function, optionally with Gaussian noise.'''
        X = reshape(X, self.input_dim)
        fval = (20 + np.exp(1)
                - 20 * np.exp(-0.2 * np.sqrt((X ** 2).sum(1) / self.input_dim))
                - np.exp(np.cos(2 * np.pi * X).sum(1) / self.input_dim))
        n = X.shape[0]
        if self.sd == 0:
            noise = np.zeros(n).reshape(n, 1)
        else:
            noise = np.random.normal(0, self.sd, n).reshape(n, 1)
        return self.ismax * fval.reshape(n, 1) + noise
#####
class hartman_6d:
    '''
    Hartmann 6-d function (rescaled variant), defined on [0, 1]^6.

    :param bounds: box constraints defining the optimisation domain.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self, bounds=None, sd=None):
        if sd is None or sd == 0:
            self.sd = 0
        else:
            self.sd = sd
        self.input_dim = 6
        if bounds is None:
            self.bounds = [(0, 1)] * self.input_dim
        else:
            self.bounds = bounds
        # Fixed: was [(0.)*self.input_dim], which evaluates to the single
        # value [0.0] instead of a per-dimension list.
        # NOTE(review): the true Hartmann-6 minimiser is not the origin -- confirm.
        self.min = [0.] * self.input_dim
        self.ismax = -1
        self.fstar = -3.05 * self.ismax
        self.name = 'hartman_6d'

    def func(self, X):
        '''Evaluate (rescaled) Hartmann-6 row-wise, adding Gaussian noise of sd.'''
        X = reshape(X, self.input_dim)
        n = X.shape[0]
        alpha = [1.0, 1.2, 3.0, 3.2]
        A = np.asarray([[10, 3, 17, 3.5, 1.7, 8],
                        [0.05, 10, 17, 0.1, 8, 14],
                        [3, 3.5, 1.7, 10, 17, 8],
                        [17, 8, 0.05, 10, 0.1, 14]])
        P = (10 ** -4) * np.asarray([[1312, 1696, 5569, 124, 8283, 5886],
                                     [2329, 4135, 8307, 3736, 1004, 9991],
                                     [2348, 1451, 3522, 2883, 3047, 6650],
                                     [4047, 8828, 8732, 5743, 1091, 381]])
        fval = np.zeros((n, 1))
        for idx in range(n):
            outer = 0
            for ii in range(4):
                inner = 0
                for jj in range(6):
                    inner += A[ii, jj] * (X[idx, jj] - P[ii, jj]) ** 2
                outer += alpha[ii] * np.exp(-inner)
            fval[idx] = -(2.58 + outer) / 1.94
        # With sd == 0 this draws an all-zero sample (normal with zero scale).
        noise = np.random.normal(0, self.sd, n).reshape(n, 1)
        if n == 1:
            return self.ismax * (fval[0][0]) + noise
        else:
            return self.ismax * (fval) + noise
"""
class hartman_4d:
'''
Ackley function
:param sd: standard deviation, to generate noisy evaluations of the function.
'''
def __init__(self, bounds=None,sd=None):
self.input_dim = 4
if bounds == None:
self.bounds =[(0,1)]*self.input_dim
else:
self.bounds = bounds
self.min = [(0.)*self.input_dim]
self.fstar = -3.32237
self.ismax=-1
self.name='hartman_4d'
def func(self,X):
X = reshape(X,self.input_dim)
n = X.shape[0]
alpha = [1.0, 1.2, 3.0, 3.2];
A = [[10, 3, 17, 3.5, 1.7, 8],
[0.05, 10, 17, 0.1, 8, 14],
[3, 3.5, 1.7, 10, 17, 8],
[17, 8, 0.05, 10, 0.1, 14]]
A=np.asarray(A)
P = [[1312, 1696, 5569, 124, 8283, 5886],
[2329, 4135, 8307, 3736, 1004, 9991],
[2348, 1451, 3522, 2883, 3047, 6650],
[4047, 8828, 8732, 5743, 1091, 381]]
P=np.asarray(P)
c=10**(-4)
P=np.multiply(P,c)
outer = 0;
fval =np.zeros((n,1))
for idx in range(n):
X_idx=X[idx,:]
outer = 0;
for ii in range(4):
inner = 0;
for jj in range(4):
xj = X_idx[jj]
Aij = A[ii, jj]
Pij = P[ii, jj]
inner = inner + Aij*(xj-Pij)**2
new = alpha[ii] * np.exp(-inner)
outer = outer + new
fval[idx] = (1.1 - outer) / 0.839;
if n==1:
return self.ismax*(fval[0][0])
else:
return self.ismax*(fval)
"""
class hartman_3d(functions):
    '''
    Hartmann 3-d function, defined on [0, 1]^3.

    :param bounds: box constraints defining the optimisation domain.
    :param sd: standard deviation, to generate noisy evaluations of the function.
    '''
    def __init__(self, bounds=None, sd=None):
        if sd is None or sd == 0:
            self.sd = 0
        else:
            self.sd = sd
        self.input_dim = 3
        if bounds is None:
            self.bounds = [(0, 1)] * self.input_dim
        else:
            self.bounds = bounds
        # Known global minimiser of Hartmann-3.
        self.min = [0.114614, 0.555649, 0.852547]
        self.ismax = -1
        self.fstar = -3.86278 * self.ismax
        self.name = 'hartman_3d'

    def func(self, X):
        '''Evaluate Hartmann-3 row-wise, adding Gaussian noise of sd.'''
        X = reshape(X, self.input_dim)
        n = X.shape[0]
        alpha = [1.0, 1.2, 3.0, 3.2]
        A = np.asarray([[3.0, 10, 30],
                        [0.1, 10, 35],
                        [3.0, 10, 30],
                        [0.1, 10, 35]])
        P = (10 ** -4) * np.asarray([[3689, 1170, 2673],
                                     [4699, 4387, 7470],
                                     [1091, 8732, 5547],
                                     [381, 5743, 8828]])
        fval = np.zeros((n, 1))
        for idx in range(n):
            outer = 0
            for ii in range(4):
                inner = 0
                for jj in range(3):
                    inner += A[ii, jj] * (X[idx, jj] - P[ii, jj]) ** 2
                outer += alpha[ii] * np.exp(-inner)
            fval[idx] = -outer
        # With sd == 0 this draws an all-zero sample (normal with zero scale).
        noise = np.random.normal(0, self.sd, n).reshape(n, 1)
        if n == 1:
            return self.ismax * (fval[0][0]) + noise
        else:
            return self.ismax * (fval) + noise
class mixture(functions):
    '''
    A scalable Gaussian-mixture test function on [0, 1]^2.

    :param bounds: box constraints defining the optimisation domain.
    :param peaks: number of Gaussian peaks used (the first peak is always
        included; up to 9 are defined).
    '''
    def __init__(self, bounds=None, peaks=3):
        self.input_dim = 2
        self.peaks = peaks
        self.sd = 0
        if bounds is None:
            self.bounds = [(0, 1)] * self.input_dim
        else:
            self.bounds = bounds
        # Fixed: was [(0.)*self.input_dim], which evaluates to the single
        # value [0.0] instead of a per-dimension list.
        self.min = [0.] * self.input_dim
        self.fstar = -1
        self.ismax = -1
        self.name = "mixture"
        # Noise level derived from the function itself (base class helper).
        self.sd = self.findSdev()

    def func(self, X):
        '''Sum the first ``peaks`` weighted Gaussian densities at X.'''
        X = reshape(X, self.input_dim)
        n = X.shape[0]
        # (weight, mean, covariance scale) of each peak, in activation order.
        components = [
            (2.0, [0.5, 0.5], 0.07),
            (1.8, [0.2, 0.2], 0.03),
            (1.7, [0.7, 0.7], 0.07),
            (1.0, [0.8, 0.5], 0.02),
            (1.7, [0.4, 0.6], 0.005),
            (1.75, [0.3, 0.4], 0.0012),
            (1.75, [0.9, 0.8], 0.01),
            (1.75, [0.2, 0.6], 0.01),
            (1.75, [0.9, 0.3], 0.01),
        ]
        # max(..., 1): the original added the first peak unconditionally.
        y = 0.0
        for weight, mean, cov_scale in components[:max(self.peaks, 1)]:
            y = y + weight * multivariate_normal.pdf(X, mean=mean, cov=cov_scale * np.eye(2))
        return y
class gaussian(functions):
    '''
    A single isotropic Gaussian density centred at 0.5 in every dimension.

    :param bounds: box constraints defining the optimisation domain.
    :param dim: dimensionality of the search space.
    '''
    def __init__(self, bounds=None, dim=3):
        self.input_dim = dim
        self.sd = 0
        if bounds is None:
            self.bounds = [(0, 1)] * self.input_dim
        else:
            self.bounds = bounds
        # Fixed: was [(0.)*self.input_dim], which evaluates to the single
        # value [0.0] instead of a per-dimension list.
        self.min = [0.] * self.input_dim
        self.fstar = -1
        self.ismax = -1
        self.name = "gaussian"
        # Noise level derived from the function itself (base class helper).
        self.sd = self.findSdev()

    def func(self, X):
        '''Evaluate the Gaussian density at X (noise-free return).'''
        X = reshape(X, self.input_dim)
        n = X.shape[0]
        # NOTE(review): this noise is drawn (advancing the global RNG) but was
        # never added to the return value in the original; kept for RNG-stream
        # compatibility -- confirm whether it should be added to y.
        noise = np.random.normal(0, self.sd, n).reshape(n, 1)
        y = multivariate_normal.pdf(X, mean=0.5 * np.ones(self.input_dim), cov=np.eye(self.input_dim))
        return y
| 28.506964
| 131
| 0.495245
| 4,419
| 30,702
| 3.389681
| 0.072415
| 0.040457
| 0.057681
| 0.01442
| 0.791441
| 0.75953
| 0.72882
| 0.708525
| 0.68062
| 0.650511
| 0
| 0.076764
| 0.353788
| 30,702
| 1,077
| 132
| 28.506964
| 0.678226
| 0.138949
| 0
| 0.668571
| 0
| 0
| 0.012481
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.007143
| 0
| 0.167143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
22aaa82a2864215312f103824e22eea0738b7ea9
| 208
|
py
|
Python
|
args_kwargs.py
|
jjberg83/python_eksperimenter
|
ea26a6bd4a0cf71e69cbf5015a06db30de811b45
|
[
"MIT"
] | null | null | null |
args_kwargs.py
|
jjberg83/python_eksperimenter
|
ea26a6bd4a0cf71e69cbf5015a06db30de811b45
|
[
"MIT"
] | null | null | null |
args_kwargs.py
|
jjberg83/python_eksperimenter
|
ea26a6bd4a0cf71e69cbf5015a06db30de811b45
|
[
"MIT"
] | null | null | null |
"""
https://edabit.com/challenge/ogjDWJAT2kTXEzkD5
https://www.programiz.com/python-programming/args-and-kwargs#:~:text=Python%20has%20*args%20which%20allow,to%20pass%20variable%20length%20arguments.
"""
| 29.714286
| 149
| 0.778846
| 26
| 208
| 6.230769
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09
| 0.038462
| 208
| 6
| 150
| 34.666667
| 0.72
| 0.942308
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
22c01daa2fa85b8fe837a2ec1d3ac93b1b19ddaf
| 143
|
py
|
Python
|
src/pcc_data_exchange/plugins/sinopia/__init__.py
|
LD4P/pcc-data-exchange
|
bb0de1c85928e582ab51f469740b06cc0870d413
|
[
"Apache-2.0"
] | null | null | null |
src/pcc_data_exchange/plugins/sinopia/__init__.py
|
LD4P/pcc-data-exchange
|
bb0de1c85928e582ab51f469740b06cc0870d413
|
[
"Apache-2.0"
] | null | null | null |
src/pcc_data_exchange/plugins/sinopia/__init__.py
|
LD4P/pcc-data-exchange
|
bb0de1c85928e582ab51f469740b06cc0870d413
|
[
"Apache-2.0"
] | null | null | null |
from airflow.plugins_manager import AirflowPlugin
from flask import Blueprint
class SinopiaPlugin(AirflowPlugin):
name = "sinopia_plugin"
| 23.833333
| 49
| 0.825175
| 16
| 143
| 7.25
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125874
| 143
| 6
| 50
| 23.833333
| 0.928
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
22c72b9f198ab9f1bc3185a4d6e873e252d1aef5
| 195
|
py
|
Python
|
cegs_portal/search/views/index.py
|
ReddyLab/cegs-portal
|
a83703a3557167be328c24bfb866b6aa019ba059
|
[
"MIT"
] | null | null | null |
cegs_portal/search/views/index.py
|
ReddyLab/cegs-portal
|
a83703a3557167be328c24bfb866b6aa019ba059
|
[
"MIT"
] | null | null | null |
cegs_portal/search/views/index.py
|
ReddyLab/cegs-portal
|
a83703a3557167be328c24bfb866b6aa019ba059
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from cegs_portal.search.forms import SearchForm
def index(request):
form = SearchForm()
return render(request, "search/index.html", {"form": form})
| 21.666667
| 63
| 0.738462
| 25
| 195
| 5.72
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148718
| 195
| 8
| 64
| 24.375
| 0.861446
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a3a6a16c4407acc39270d40d3c50a5347af46f25
| 7,323
|
py
|
Python
|
sdl2/test/sdlgfx_test.py
|
papagiannakis/py-sdl2
|
c8ff267761ce19d7714e72a4a3eb97a375c06fc6
|
[
"CC0-1.0"
] | 222
|
2017-08-19T00:51:59.000Z
|
2022-02-05T19:39:33.000Z
|
sdl2/test/sdlgfx_test.py
|
Sahil-pixel/py-sdl2
|
e5c8cbaccfda4f20f35f58bc8d00e0f533b30c3b
|
[
"CC0-1.0"
] | 103
|
2017-08-20T17:13:05.000Z
|
2022-02-05T20:20:01.000Z
|
sdl2/test/sdlgfx_test.py
|
Sahil-pixel/py-sdl2
|
e5c8cbaccfda4f20f35f58bc8d00e0f533b30c3b
|
[
"CC0-1.0"
] | 54
|
2017-08-20T17:13:00.000Z
|
2022-01-14T23:51:13.000Z
|
import os
import sys
import pytest
from sdl2 import SDL_Init, SDL_Quit, SDL_INIT_VIDEO
from sdl2 import surface
sdlgfx = pytest.importorskip("sdl2.sdlgfx")
class TestSDLGFX(object):
__tags__ = ["sdl", "sdlgfx"]
@classmethod
def setup_class(cls):
if SDL_Init(SDL_INIT_VIDEO) != 0:
raise pytest.skip('Video subsystem not supported')
@classmethod
def teardown_class(cls):
SDL_Quit()
@pytest.mark.skip("not implemented")
def test_FPSManager(self):
pass
@pytest.mark.skip("not implemented")
def test_SDL_initFramerate(self):
pass
@pytest.mark.skip("not implemented")
def test_SDL_getFramerate(self):
pass
@pytest.mark.skip("not implemented")
def test_SDL_setFramerate(self):
pass
@pytest.mark.skip("not implemented")
def test_SDL_getFramecount(self):
pass
@pytest.mark.skip("not implemented")
def test_SDL_framerateDelay(self):
pass
@pytest.mark.skip("not implemented")
def test_pixelColor(self):
pass
@pytest.mark.skip("not implemented")
def test_pixelRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_hlineColor(self):
pass
@pytest.mark.skip("not implemented")
def test_hlineRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_vlineColor(self):
pass
@pytest.mark.skip("not implemented")
def test_vlineRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_rectangleColor(self):
pass
@pytest.mark.skip("not implemented")
def test_rectangleRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_roundedRectangleColor(self):
pass
@pytest.mark.skip("not implemented")
def test_roundedRectangleRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_boxColor(self):
pass
@pytest.mark.skip("not implemented")
def test_boxRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_roundedBoxColor(self):
pass
@pytest.mark.skip("not implemented")
def test_roundedBoxRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_lineColor(self):
pass
@pytest.mark.skip("not implemented")
def test_lineRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_aalineColor(self):
pass
@pytest.mark.skip("not implemented")
def test_aalineRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_thickLineColor(self):
pass
@pytest.mark.skip("not implemented")
def test_thickLineRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_circleColor(self):
pass
@pytest.mark.skip("not implemented")
def test_circleRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_arcColor(self):
pass
@pytest.mark.skip("not implemented")
def test_arcRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_aacircleColor(self):
pass
@pytest.mark.skip("not implemented")
def test_aacircleRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_filledCircleColor(self):
pass
@pytest.mark.skip("not implemented")
def test_filledCircleRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_ellipseColor(self):
pass
@pytest.mark.skip("not implemented")
def test_ellipseRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_aaellipseColor(self):
pass
@pytest.mark.skip("not implemented")
def test_aaellipseRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_filledEllipseColor(self):
pass
@pytest.mark.skip("not implemented")
def test_filledEllipseRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_pieColor(self):
pass
@pytest.mark.skip("not implemented")
def test_pieRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_filledPieColor(self):
pass
@pytest.mark.skip("not implemented")
def test_filledPieRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_trigonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_trigonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_aatrigonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_aatrigonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_filledTrigonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_filledTrigonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_polygonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_polygonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_aapolygonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_aapolygonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_filledPolygonColor(self):
pass
@pytest.mark.skip("not implemented")
def test_filledPolygonRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_texturedPolygon(self):
pass
@pytest.mark.skip("not implemented")
def test_bezierColor(self):
pass
@pytest.mark.skip("not implemented")
def test_bezierRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_gfxPrimitivesSetFont(self):
pass
@pytest.mark.skip("not implemented")
def test_gfxPrimitivesSetFontRotation(self):
pass
@pytest.mark.skip("not implemented")
def test_characterColor(self):
pass
@pytest.mark.skip("not implemented")
def test_characterRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_stringColor(self):
pass
@pytest.mark.skip("not implemented")
def test_stringRGBA(self):
pass
@pytest.mark.skip("not implemented")
def test_rotozoomSurface(self):
pass
@pytest.mark.skip("not implemented")
def test_rotozoomSurfaceXY(self):
pass
@pytest.mark.skip("not implemented")
def test_rotozoomSurfaceSize(self):
pass
@pytest.mark.skip("not implemented")
def test_rotozoomSurfaceSizeXY(self):
pass
@pytest.mark.skip("not implemented")
def test_zoomSurface(self):
pass
@pytest.mark.skip("not implemented")
def test_zoomSurfaceSize(self):
pass
@pytest.mark.skip("not implemented")
def test_shrinkSurface(self):
pass
def test_rotateSurface90Degrees(self):
w, h = 470, 530
sf = surface.SDL_CreateRGBSurface(0, w, h, 32, 0, 0, 0, 0)
assert isinstance(sf.contents, surface.SDL_Surface)
rotsf = sdlgfx.rotateSurface90Degrees(sf, 1)
assert isinstance(rotsf.contents, surface.SDL_Surface)
assert rotsf.contents.w == h
assert rotsf.contents.h == w
surface.SDL_FreeSurface(rotsf)
surface.SDL_FreeSurface(sf)
| 22.742236
| 66
| 0.644954
| 839
| 7,323
| 5.513707
| 0.143027
| 0.110463
| 0.217899
| 0.264591
| 0.670774
| 0.670774
| 0.670774
| 0.663208
| 0.663208
| 0.049719
| 0
| 0.003979
| 0.244982
| 7,323
| 321
| 67
| 22.813084
| 0.8327
| 0
| 0
| 0.605809
| 0
| 0
| 0.154172
| 0
| 0
| 0
| 0
| 0
| 0.016598
| 1
| 0.311203
| false
| 0.298755
| 0.024896
| 0
| 0.344398
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
a3c60fff8cfc39e43dade01a5dc17b43609ec2d6
| 326
|
py
|
Python
|
leaderboard_entries/pytorch_likelihood.py
|
philomenec/reco-gym
|
f8553d197f42ec2f415aefce48525d0e9b10ddaa
|
[
"Apache-2.0"
] | 413
|
2018-09-18T17:49:44.000Z
|
2022-03-23T12:25:41.000Z
|
leaderboard_entries/pytorch_likelihood.py
|
aliang-rec/reco-gym
|
f8553d197f42ec2f415aefce48525d0e9b10ddaa
|
[
"Apache-2.0"
] | 15
|
2018-11-08T17:04:21.000Z
|
2021-11-30T19:20:27.000Z
|
leaderboard_entries/pytorch_likelihood.py
|
aliang-rec/reco-gym
|
f8553d197f42ec2f415aefce48525d0e9b10ddaa
|
[
"Apache-2.0"
] | 81
|
2018-09-22T02:28:55.000Z
|
2022-03-30T14:03:01.000Z
|
from recogym import build_agent_init
from recogym.agents import PyTorchMLRAgent, pytorch_mlr_args
pytorch_mlr_args['n_epochs'] = 30
pytorch_mlr_args['learning_rate'] = 0.01,
pytorch_mlr_args['ll_IPS'] = False,
pytorch_mlr_args['alpha'] = 1.0
agent = build_agent_init('PyTorchMLRAgent', PyTorchMLRAgent, {**pytorch_mlr_args})
| 36.222222
| 82
| 0.803681
| 48
| 326
| 5.0625
| 0.479167
| 0.246914
| 0.345679
| 0.238683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023411
| 0.082822
| 326
| 8
| 83
| 40.75
| 0.789298
| 0
| 0
| 0
| 0
| 0
| 0.144172
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a3c68b80a3ed2cfc8cf74f7679a0fb5d0fcf63c3
| 284
|
py
|
Python
|
movielist_app/modelsv1.py
|
rhedwan/BuildingDjangoAPI
|
09c7513ef43390435c7de78e8812083796b9a0fe
|
[
"MIT"
] | null | null | null |
movielist_app/modelsv1.py
|
rhedwan/BuildingDjangoAPI
|
09c7513ef43390435c7de78e8812083796b9a0fe
|
[
"MIT"
] | null | null | null |
movielist_app/modelsv1.py
|
rhedwan/BuildingDjangoAPI
|
09c7513ef43390435c7de78e8812083796b9a0fe
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Movie(models.Model):
name = models.CharField(max_length=50)
description = models.CharField(max_length=200)
active= models.BooleanField(default = True)
def __str__(self):
return self.name
| 23.666667
| 50
| 0.697183
| 36
| 284
| 5.333333
| 0.722222
| 0.15625
| 0.1875
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022321
| 0.211268
| 284
| 12
| 51
| 23.666667
| 0.834821
| 0.084507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
a3cc0b790418490fcaefb984a7158ac9ce6c761f
| 74
|
py
|
Python
|
websauna/depot/models.py
|
ooduor/websauna.depot
|
4992c28a8e35d4b22f7ff3a8b042fa74fca4ede4
|
[
"MIT"
] | null | null | null |
websauna/depot/models.py
|
ooduor/websauna.depot
|
4992c28a8e35d4b22f7ff3a8b042fa74fca4ede4
|
[
"MIT"
] | null | null | null |
websauna/depot/models.py
|
ooduor/websauna.depot
|
4992c28a8e35d4b22f7ff3a8b042fa74fca4ede4
|
[
"MIT"
] | null | null | null |
"""Websauna Depot models.
Place your SQLAlchemy models in this file.
"""
| 14.8
| 42
| 0.72973
| 10
| 74
| 5.4
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 4
| 43
| 18.5
| 0.870968
| 0.891892
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a3eec4439daefdc979cf5f3a43952191078a58e0
| 73
|
py
|
Python
|
test.py
|
188806879/TBD44
|
f30fdf95d09b788383fe41218b1ebf9237e6ed0f
|
[
"MIT"
] | null | null | null |
test.py
|
188806879/TBD44
|
f30fdf95d09b788383fe41218b1ebf9237e6ed0f
|
[
"MIT"
] | null | null | null |
test.py
|
188806879/TBD44
|
f30fdf95d09b788383fe41218b1ebf9237e6ed0f
|
[
"MIT"
] | null | null | null |
num = 10
print(num)
print("手下修")
print("我是经理谁谁敢动我的东西?")
ni shi sha bi
| 9.125
| 22
| 0.657534
| 12
| 73
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.178082
| 73
| 7
| 23
| 10.428571
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0.219178
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.6
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
a3ef74c5d4110df90cf0f4fc463e4fbe85c5f274
| 154
|
py
|
Python
|
myvenv/bin/django-admin.py
|
qq565999484/django_learn
|
520bc9ddfb6a4d78e85e0a29871838bb7fb66c80
|
[
"Apache-2.0"
] | null | null | null |
myvenv/bin/django-admin.py
|
qq565999484/django_learn
|
520bc9ddfb6a4d78e85e0a29871838bb7fb66c80
|
[
"Apache-2.0"
] | null | null | null |
myvenv/bin/django-admin.py
|
qq565999484/django_learn
|
520bc9ddfb6a4d78e85e0a29871838bb7fb66c80
|
[
"Apache-2.0"
] | null | null | null |
#!/Users/ios2/django_learn/myvenv/bin/python3.6
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 25.666667
| 47
| 0.785714
| 21
| 154
| 5.190476
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021583
| 0.097403
| 154
| 5
| 48
| 30.8
| 0.76259
| 0.298701
| 0
| 0
| 0
| 0
| 0.074766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
43366a63035d965e61ae290aa23425ac6be0c20d
| 162
|
py
|
Python
|
vizdoomaze/envs/vizdoomazethree7.py
|
fanyuzeng/Vizdoomaze
|
5b444f2d861c908c4d96ae374bcce660d364f22e
|
[
"MIT"
] | 3
|
2020-09-25T16:00:49.000Z
|
2020-10-29T10:32:30.000Z
|
vizdoomaze/envs/vizdoomazethree7.py
|
fanyuzeng/Vizdoomaze
|
5b444f2d861c908c4d96ae374bcce660d364f22e
|
[
"MIT"
] | null | null | null |
vizdoomaze/envs/vizdoomazethree7.py
|
fanyuzeng/Vizdoomaze
|
5b444f2d861c908c4d96ae374bcce660d364f22e
|
[
"MIT"
] | 1
|
2021-12-17T07:50:47.000Z
|
2021-12-17T07:50:47.000Z
|
from vizdoomaze.envs.vizdoomenv import VizdoomEnv
class vizdoomazeThree7(VizdoomEnv):
def __init__(self):
super(vizdoomazeThree7, self).__init__(60)
| 27
| 50
| 0.771605
| 17
| 162
| 6.882353
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028777
| 0.141975
| 162
| 6
| 50
| 27
| 0.81295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
433d637616dae001912b5dba289fae6e35c542e6
| 191
|
py
|
Python
|
_service/utils/isValidUrl.py
|
Psyconical/msdocs-python-flask-webapp-quickstart
|
70d31722b9b3ccafc19d2e2089fa05d99fa2514b
|
[
"MIT"
] | null | null | null |
_service/utils/isValidUrl.py
|
Psyconical/msdocs-python-flask-webapp-quickstart
|
70d31722b9b3ccafc19d2e2089fa05d99fa2514b
|
[
"MIT"
] | null | null | null |
_service/utils/isValidUrl.py
|
Psyconical/msdocs-python-flask-webapp-quickstart
|
70d31722b9b3ccafc19d2e2089fa05d99fa2514b
|
[
"MIT"
] | null | null | null |
from urllib.parse import urlparse
def is_valid(url): # Function to check the provided link whether it works
parsed = urlparse(url)
return bool(parsed.netloc) and bool(parsed.scheme)
| 38.2
| 74
| 0.759162
| 29
| 191
| 4.965517
| 0.827586
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167539
| 191
| 5
| 75
| 38.2
| 0.90566
| 0.272251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
433e24cde0a8783bbd3ad046738396dd5049fe1d
| 243
|
py
|
Python
|
foreignform/models.py
|
uditagarwal/django-foreignform
|
45916b7e2413b38e2d33ce263913a4d01e932ddf
|
[
"MIT"
] | 14
|
2018-06-27T04:44:14.000Z
|
2021-10-05T17:55:01.000Z
|
foreignform/models.py
|
uditagarwal/django-foreignform
|
45916b7e2413b38e2d33ce263913a4d01e932ddf
|
[
"MIT"
] | 15
|
2018-04-16T13:50:04.000Z
|
2021-10-05T13:37:07.000Z
|
foreignform/models.py
|
uditagarwal/django-foreignform
|
45916b7e2413b38e2d33ce263913a4d01e932ddf
|
[
"MIT"
] | 9
|
2018-07-02T09:40:49.000Z
|
2021-10-05T11:35:39.000Z
|
from django.db import models
from .fields import JSONField
class ForeignFormBaseModel(models.Model):
jsonSchema = JSONField(blank=True, null=True)
uiSchema = JSONField(blank=True, null=True)
class Meta:
abstract = True
| 20.25
| 49
| 0.72428
| 29
| 243
| 6.068966
| 0.586207
| 0.159091
| 0.204545
| 0.25
| 0.295455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193416
| 243
| 11
| 50
| 22.090909
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.