hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8ed07a3409d0e714bb4dfd56e354ff0f6396ac95 | 2,394 | py | Python | Important_data/Thesis figure scripts/arccos.py | haakonvt/LearningTensorFlow | 6988a15af2ac916ae1a5e23b2c5bde9630cc0519 | [
"MIT"
] | 5 | 2018-09-06T12:52:12.000Z | 2020-05-09T01:40:12.000Z | Important_data/Thesis figure scripts/arccos.py | haakonvt/LearningTensorFlow | 6988a15af2ac916ae1a5e23b2c5bde9630cc0519 | [
"MIT"
] | null | null | null | Important_data/Thesis figure scripts/arccos.py | haakonvt/LearningTensorFlow | 6988a15af2ac916ae1a5e23b2c5bde9630cc0519 | [
"MIT"
] | 4 | 2018-02-06T08:42:06.000Z | 2019-04-16T11:23:06.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from matplotlib import rc
# Global matplotlib rc setup: LaTeX text rendering, serif font, 2pt lines.
rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
rc('text', usetex=True)
rc('lines', linewidth=2)
rc('font', family='serif')
rc('legend',**{'fontsize':14}) # Font size for legend
# rc('text.latex', unicode=True)
import os,sys
import matplotlib.pyplot as plt
import numpy as np
from math import pi
# General plot settings:
plot_resolution = 2373
x = np.linspace(0,1,plot_resolution)
N = 3 # Should be an even number because middle color is nearly invisible
colormap = plt.cm.Spectral #nipy_spectral # Other possible colormaps: Set1, Accent, nipy_spectral, Paired
colors = [colormap(i) for i in np.linspace(0, 1, N)]
plt.figure(figsize=(8,3))
# plt.subplots_adjust(left = 0.175, right = 0.85)
# Plot theta(v) = arccos(1 - 2v) for v in [0, 1].
plt.plot(x, np.arccos(1-2*x), color=colors[0], label=r"$\arccos(1-2v)$")
plt.xlim([-0.05,1.05])
plt.ylim([-0.15,pi+0.15])
plt.xlabel(r"$v$", fontsize=16)
plt.ylabel(r"$\theta(v)$", fontsize=16)
plt.legend(loc="best", fancybox=True) # Show only labels for G4
plt.title(r"Mapping of $v$ to $\theta$", fontsize=18)
# The bare triple-quoted string below is a no-op statement used only as a
# visual separator in the script.
"""
###########################
END OF PLOT COMMANDS
###########################
"""
# Figure-saving boilerplate. Python 2 only (print statements, raw_input).
# Pass "-replace" as the first CLI argument to overwrite the existing PDF
# without prompting.
if len(sys.argv) > 1:
    if sys.argv[1] == "-replace":
        replace = True
        autoSave = True
# NOTE(review): if an argument other than "-replace" is given, replace and
# autoSave are never assigned and the code below raises NameError.
else:
    replace = False
    autoSave = False
maybe = ""
if not autoSave:
    maybe = raw_input("\nEnter 'YES' to save figure as a copy: ")
if maybe == "YES" or autoSave:
    filename = "arccos.pdf"
    directory = "/Users/haakonvt/Dropbox/uio/master/latex-master/Illustrations/"
    filenameWithPath = directory + filename
    # Find a non-clashing name by appending a counter before the ".pdf" suffix.
    i = 1; file_test = filename
    while True:
        if file_test in os.listdir(directory):
            file_test = filename[:-4] + str(i) + filename[-4:]
            i += 1
            continue
        else:
            newFilenameWithPath = directory + file_test
            break
    if replace:
        plt.savefig(filenameWithPath, bbox_inches='tight')
        print '\nFigure replaced previous with filename:\n"%s"\n' %filename
    else:
        plt.savefig(newFilenameWithPath, bbox_inches='tight')
        if i != 1:
            print '\nFigure saved as a copy with filename:\n"%s"\n' %file_test
else:
    if replace:
        print "Argument '-replace' has no purpose when fig not saved, just FYI."
plt.show()
| 31.090909 | 105 | 0.624478 | 336 | 2,394 | 4.395833 | 0.479167 | 0.027082 | 0.016249 | 0.016249 | 0.020311 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028526 | 0.209273 | 2,394 | 76 | 106 | 31.5 | 0.751717 | 0.129908 | 0 | 0.103448 | 0 | 0 | 0.21267 | 0.031171 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.103448 | null | null | 0.051724 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed0f8f6ac849f4f689b939cad24cf9545498937 | 1,957 | py | Python | src/aws_environments/migrations/0019_environmentvariable.py | chiliseed/hub | 83f29fbdd12e2260397e18e635f508459fa4990e | [
"Apache-2.0"
] | null | null | null | src/aws_environments/migrations/0019_environmentvariable.py | chiliseed/hub | 83f29fbdd12e2260397e18e635f508459fa4990e | [
"Apache-2.0"
] | 4 | 2021-04-08T20:10:15.000Z | 2021-06-10T20:18:17.000Z | src/aws_environments/migrations/0019_environmentvariable.py | chiliseed/hub | 83f29fbdd12e2260397e18e635f508459fa4990e | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.0.2 on 2020-03-18 18:28
from django.db import migrations, models
import django.db.models.deletion
import fernet_fields.fields
class Migration(migrations.Migration):
    # Auto-generated migration: creates the EnvironmentVariable model — an
    # encrypted key/value pair linked to an Organization and a Service,
    # unique per (service_id, key_name). Do not hand-edit the operations.
    dependencies = [
        ("organizations", "0001_initial"),
        ("aws_environments", "0018_auto_20200302_2128"),
    ]
    operations = [
        migrations.CreateModel(
            name="EnvironmentVariable",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("slug", models.SlugField(max_length=20, null=True, unique=True)),
                ("key_name", models.CharField(max_length=140)),
                (
                    "key_value",
                    fernet_fields.fields.EncryptedCharField(default="", max_length=250),
                ),
                ("is_secret", models.BooleanField(default=True)),
                (
                    "organization",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="env_vars",
                        to="organizations.Organization",
                    ),
                ),
                (
                    "service",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="env_vars",
                        to="aws_environments.Service",
                    ),
                ),
            ],
            options={"unique_together": {("service_id", "key_name")},},
        ),
    ]
| 34.333333 | 88 | 0.459888 | 151 | 1,957 | 5.754967 | 0.503311 | 0.036824 | 0.048331 | 0.075949 | 0.232451 | 0.168009 | 0.168009 | 0.168009 | 0.168009 | 0.168009 | 0 | 0.038844 | 0.434338 | 1,957 | 56 | 89 | 34.946429 | 0.746161 | 0.022994 | 0 | 0.28 | 1 | 0 | 0.133508 | 0.03822 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.06 | 0 | 0.12 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed34f23fbe9ce6bd24aa540de5a5778018f11cc | 916 | py | Python | src/exploration.py | nikhilnrng/german-credit-risk | da535662f1531a6097696b8367e3d687155f0804 | [
"MIT"
] | 1 | 2020-12-07T10:32:08.000Z | 2020-12-07T10:32:08.000Z | src/exploration.py | nikhilnrng/german-credit-risk | da535662f1531a6097696b8367e3d687155f0804 | [
"MIT"
] | null | null | null | src/exploration.py | nikhilnrng/german-credit-risk | da535662f1531a6097696b8367e3d687155f0804 | [
"MIT"
] | 2 | 2019-05-25T18:35:31.000Z | 2020-03-24T16:20:42.000Z | import pandas
import preprocessing
from defines import Types, Metadata
def print_pivot_tables(data, metadata, numerical=False):
    """Print a value-count table for each categorical column in the data.

    Python 2 only (print statement; pandas' deprecated reindex_axis).
    With numerical=True, numerical columns that have categories are
    included as well.
    """
    for column in metadata.COLUMNS:
        # NOTE(review): parses as (not numerical and NUMERICAL) or no-categories
        # due to and/or precedence — confirm that is the intended filter.
        if not numerical and column.TYPE is Types.NUMERICAL or column.CATEGORIES is None:
            continue
        df_column = pandas.DataFrame(data[column.HEADER])
        count = df_column.apply(pandas.value_counts).T
        # Sort the count columns so categories print in a stable order.
        count = count.reindex_axis(sorted(count.columns), axis=1)
        print count, '\n'
def print_statistics(data, metadata):
    """Print describe() summary statistics for each purely numerical column.

    Python 2 only (print statement). Columns that have categories are skipped.
    """
    for column in metadata.COLUMNS:
        if column.TYPE is not Types.NUMERICAL or column.CATEGORIES is not None:
            continue
        print pandas.DataFrame(data[column.HEADER]).describe(), '\n'
# Script entry point: load the preprocessed dataset and dump exploratory
# tables/statistics to stdout.
if __name__ == '__main__':
    metadata = Metadata()
    data, _ = preprocessing.load(metadata)
    print_pivot_tables(data, metadata, numerical=True)
    print_statistics(data, metadata)
| 35.230769 | 89 | 0.70524 | 116 | 916 | 5.405172 | 0.396552 | 0.076555 | 0.051037 | 0.063796 | 0.414673 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0.001372 | 0.204148 | 916 | 25 | 90 | 36.64 | 0.858711 | 0 | 0 | 0.190476 | 0 | 0 | 0.0131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.142857 | null | null | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed501444f9851627d8e7d9fa767659afb78f197 | 3,137 | py | Python | src/python/search/search.py | adolphlwq/java-algorithms | 43983a5583479e44e7a9b9d7fbeb495896e8f205 | [
"Apache-2.0"
] | 4 | 2017-07-07T04:38:41.000Z | 2019-02-23T04:48:01.000Z | src/python/search/search.py | adolphlwq/java-algorithms | 43983a5583479e44e7a9b9d7fbeb495896e8f205 | [
"Apache-2.0"
] | 13 | 2017-05-23T15:37:29.000Z | 2020-02-24T14:12:28.000Z | src/python/search/search.py | adolphlwq/algorithms | 43983a5583479e44e7a9b9d7fbeb495896e8f205 | [
"Apache-2.0"
] | null | null | null | import pytest
def sequence_search(alist, item):
    """Unordered linear search: return True iff ``item`` occurs in ``alist``."""
    for candidate in alist:
        if candidate == item:
            return True
    return False
# Table-driven cases for sequence_search: (haystack, needle, expected).
@pytest.mark.parametrize("test_input, item, expected", [
    ([1,4,5,6,7], 1, True),
    ([1,4,5,6,7], 2, False),
    ([1,2,3,4,6], 4, True),
    ([1,4,5,8,10], 6, False)
])
def test_sequence_search(test_input, item, expected):
    assert sequence_search(test_input, item) is expected
def ordered_sequence_search(alist, item):
    """Linear search over an ascending list with an early bail-out.

    Returns True as soon as ``item`` is seen, and False once a value larger
    than ``item`` appears (it cannot occur later) or the list is exhausted.
    """
    for candidate in alist:
        if candidate == item:
            return True
        if candidate > item:
            return False
    return False
# Same table-driven cases, applied to the sorted-list variant.
@pytest.mark.parametrize("test_input, item, expected", [
    ([1,4,5,6,7], 1, True),
    ([1,4,5,6,7], 2, False),
    ([1,2,3,4,6], 4, True),
    ([1,4,5,8,10], 6, False)
])
def test_ordered_sequence_search(test_input, item, expected):
    assert ordered_sequence_search(test_input, item) is expected
def binary_search(alist, item):
    """Iterative binary search on a sorted list; returns True/False."""
    lo, hi = 0, len(alist) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = alist[mid]
        if probe == item:
            return True
        if probe > item:
            hi = mid - 1   # item, if present, is in the lower half
        else:
            lo = mid + 1   # item, if present, is in the upper half
    return False
# Same table-driven cases, applied to the iterative binary search.
@pytest.mark.parametrize("test_input, item, expected", [
    ([1,4,5,6,7], 1, True),
    ([1,4,5,6,7], 2, False),
    ([1,2,3,4,6], 4, True),
    ([1,4,5,8,10], 6, False)
])
def test_binary_search(test_input, item, expected):
    assert binary_search(test_input, item) is expected
# NOTE: each recursive call slices the list, which copies elements and costs
# O(n) extra time/space compared with the index-based variant below.
def recursion_binary_search(alist, item):
    """Recursive binary search via list slicing; returns True/False."""
    if not alist:
        return False
    mid = len(alist) // 2
    pivot = alist[mid]
    if pivot == item:
        return True
    if pivot > item:
        return recursion_binary_search(alist[:mid], item)
    if pivot < item:
        return recursion_binary_search(alist[mid + 1:], item)
    return False
def recursion_binary_searchi(alist, item, first=None, last=None):
    """Recursive binary search on index bounds (no list slicing).

    Searches the sorted list ``alist`` for ``item`` between indices
    ``first`` and ``last`` (both inclusive) and returns True/False.
    Callers normally omit first/last to search the whole list.

    Fixes two defects of the previous version:
    * ``first``/``last`` defaulted to None, which raises TypeError on
      Python 3 when compared; they now default to the full index range.
    * the termination test was ``first >= last``, which skipped the last
      remaining candidate (e.g. searching 7 in [1,4,5,6,7] returned False).
    The stray debug print has been removed.
    """
    if first is None:
        first = 0
    if last is None:
        last = len(alist) - 1
    if first > last:   # empty range: item is not present
        return False
    mid = (first + last)//2
    if alist[mid] == item:
        return True
    elif alist[mid] > item:
        return recursion_binary_searchi(alist, item, first, mid-1)
    else:
        return recursion_binary_searchi(alist, item, mid+1, last)
# Same table-driven cases, applied to the slicing recursive search.
@pytest.mark.parametrize("test_input, item, expected", [
    ([1,4,5,6,7], 1, True),
    ([1,4,5,6,7], 2, False),
    ([1,2,3,4,6], 4, True),
    ([1,4,5,8,10], 6, False)
])
def test_recursion_binary_search(test_input, item, expected):
    assert recursion_binary_search(test_input, item) is expected
# Same table-driven cases for the index-based recursive search.
# NOTE(review): these calls rely on the first/last defaults of
# recursion_binary_searchi, which are None in the implementation above —
# verify that the function handles omitted bounds correctly.
@pytest.mark.parametrize("test_input, item, expected", [
    ([1,4,5,6,7], 1, True),
    ([1,4,5,6,7], 2, False),
    ([1,2,3,4,6], 4, True),
    ([1,4,5,8,10], 6, False)
])
def test_recursion_binary_searchi(test_input, item, expected):
    assert recursion_binary_searchi(test_input, item) is expected
| 26.361345 | 66 | 0.59643 | 479 | 3,137 | 3.797495 | 0.110647 | 0.074217 | 0.107202 | 0.115448 | 0.788895 | 0.757559 | 0.683343 | 0.496976 | 0.396372 | 0.396372 | 0 | 0.059423 | 0.248964 | 3,137 | 118 | 67 | 26.584746 | 0.712649 | 0.007332 | 0 | 0.604167 | 0 | 0 | 0.048843 | 0 | 0 | 0 | 0 | 0 | 0.052083 | 1 | 0.104167 | false | 0 | 0.010417 | 0 | 0.239583 | 0.010417 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed59c5d83f2b5cee099d13f26f97acdad78252a | 5,352 | py | Python | villaProductSdk/products.py | thanakijwanavit/villa-product-sdk | d30074daa08c4f9b0175df7578193bb802bd5dc3 | [
"Apache-2.0"
] | 2 | 2020-10-19T07:58:48.000Z | 2020-10-29T02:30:07.000Z | villaProductSdk/products.py | thanakijwanavit/villa-product-sdk | d30074daa08c4f9b0175df7578193bb802bd5dc3 | [
"Apache-2.0"
] | 1 | 2020-10-22T11:10:14.000Z | 2020-10-22T11:10:14.000Z | villaProductSdk/products.py | thanakijwanavit/villa-product-sdk | d30074daa08c4f9b0175df7578193bb802bd5dc3 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: product-sdk.ipynb (unless otherwise specified).
__all__ = ['FunctionNames', 'ProductSdk', 'querySingleProduct', 'ProductsFromList', 'queryList']
# Cell
from botocore.config import Config
from s3bz.s3bz import S3, Requests
from lambdasdk.lambdasdk import Lambda, InvocationType
from .schema import Event, Response
from typing import Optional, List
import ujson as json
from nicHelper.wrappers import add_method
from nicHelper.exception import errorString
from awsSchema.apigateway import Event, Response
from dataclasses_json import dataclass_json
from dataclasses import dataclass
import bz2, boto3, base64, logging
# Cell
class FunctionNames:
  '''determine function and resources name based on branchName

  Each method returns the branch-qualified name of one lambda function or
  S3 bucket, e.g. ``FunctionNames('dev').updateS3()`` -> ``'product-update-s3-dev'``.
  '''
  def __init__(self, branchName:str = 'dev-manual'):
    self.branchName = branchName
  # Plain methods instead of lambda-valued class attributes (PEP 8 E731);
  # call sites are unchanged.
  def dumpToS3(self):
    return f'product-dump-s3-{self.branchName}'
  def updateProduct(self):
    return f'product-update-{self.branchName}'
  def updateS3(self):
    return f'product-update-s3-{self.branchName}'
  def singleQuery(self):
    return f'product-get-{self.branchName}'
  def allQuery(self):
    return f'product-get-all-{self.branchName}'
  def inputBucket(self):
    return f'input-product-bucket-{self.branchName}'
  def inventoryBucket(self):
    return f'product-bucket-{self.branchName}'
  def listQuery(self):
    return f'product-get-list-{self.branchName}'
# Cell
class ProductSdk:
  '''
    the main class for interacting with product endpoint
    user/pw are optional
  '''
  def __init__(self,
               branch:str = 'dev-manual',
               user:Optional[str] = None,
               pw:Optional[str] = None,
               region:str = 'ap-southeast-1'):
    # Resolve branch-specific resource names and build the lambda client.
    self.branchName = branch
    self.functionNames = FunctionNames(branchName = branch)
    self.lambdaClient = Lambda(user =user, pw=pw, region = region)
    self.user = user; self.pw = pw; self.region = region
  @staticmethod
  def returnLambdaResponse(lambdaResponse:dict):
    '''Extract the body from an API-gateway-style lambda response.

    Logs and returns None when the response has no parsable body.
    '''
    try:
      return Response.fromDict(lambdaResponse).body
    except:
      logging.exception(f'error parsing body, perhaps there is no body in response\
                        response is {lambdaResponse}')
      logging.error(lambdaResponse)
  @staticmethod
  def printFirst(inputDict:dict):
    # Return the first (key, value) pair of a dict (insertion order).
    return next(iter(inputDict.items()))
  def generalInvoke(self, functionName, payload):
    '''Invoke a lambda and return the parsed response body.

    Raises when the response cannot be parsed; on a non-200 status the
    body is still returned (after printing 'error').
    '''
    lambdaResponse = self.lambdaClient.invoke(
        functionName = functionName, input = payload
    )
    try:
      response:Response = Response.fromDict(lambdaResponse)
    except:
      print('unable to parse response')
      print(lambdaResponse)
      raise Exception(errorString())
    if response.statusCode == 200:
      return response.body
    else:
      print('error')
      return response.body
    # NOTE(review): unreachable — both branches above return.
    return self.returnLambdaResponse(lambdaResponse)
  def updateWithS3(self, data,
                   inputKeyName = 'input-data-name',
                   invocationType = InvocationType.event,
                   user= None, pw= None):
    '''Upload data to the input bucket and trigger the ingestion lambda.

    With an event invocation the call is fire-and-forget.
    '''
    # put users if not specified
    user = user or self.user; pw = pw or self.pw
    # extract function name and inputbucket name
    inputBucketName = self.functionNames.inputBucket()
    functionName = self.functionNames.updateS3()
    logging.info(f'bucket is {inputBucketName}')
    # save data to s3
    S3.save(key = inputKeyName,
            objectToSave = data ,
            bucket = inputBucketName,
            user=user, pw=pw)
    logging.info(f'data is saved to s3, invoking ingestion function')
    # call lambda function
    inputValue = Event(body = json.dumps({ 'key': inputKeyName })).to_dict()
    logging.info(f'input to lambda is {inputValue}')
    lambdaResponse = self.lambdaClient.invoke(
        functionName= functionName ,
        input=inputValue,
        invocationType= invocationType )
    logging.info(f'lambdaResponse is {lambdaResponse}')
    # NOTE(review): compares against the string 'Event' while the default
    # argument is InvocationType.event — confirm the enum compares equal
    # to its string value, otherwise this branch never fires.
    if invocationType == 'Event': return "successfully sent event, please watch your slack"
    if lambdaResponse: return self.returnLambdaResponse(lambdaResponse)
  def allQuery(self):
    # Fetch all products: the lambda returns a URL whose content is
    # downloaded and returned to the caller.
    functionName = self.functionNames.allQuery()
    lambdaResponse = self.lambdaClient.invoke(
        functionName = functionName, input = {}
    )
    url = Response.fromDict(lambdaResponse).body['url']
    result = Requests.getContentFromUrl(url)
    return result
  def syncS3(self):
    '''force s3 to sync with the newly input data'''
    functionName = self.functionNames.dumpToS3()
    lambdaResponse = self.lambdaClient.invoke(
        functionName = functionName, input = {}
    )
    return self.returnLambdaResponse(lambdaResponse)
# Cell
# Attached to ProductSdk as an instance method via the add_method decorator.
@add_method(ProductSdk)
def querySingleProduct(self, iprcode = '0171670', user=None, pw=None):
  '''query a single product

  Fetches one product by iprcode through the single-query lambda.
  NOTE(review): the user/pw parameters are accepted but never used here.
  '''
  #extract function name
  functionName = self.functionNames.singleQuery()
  query = {'iprcode': iprcode}
  try:
    inputValue = Event.getInput(query)
    lambdaResponse = self.lambdaClient.invoke( functionName = functionName , input = inputValue )
    return self.returnLambdaResponse(lambdaResponse)
  except:
    # NOTE(review): bare except hides the real failure and returns None.
    print('calling lambda failed')
# Cell
@dataclass_json
@dataclass
class ProductsFromList:
  # Product codes (iprcodes) to fetch in one batch request.
  iprcodes: List[str]
# Cell
# Attached to ProductSdk as an instance method via the add_method decorator.
@add_method(ProductSdk)
def queryList(self,iprcodes:List[str])->List[dict]:
  '''Batch-fetch products for a list of iprcodes via the list-query lambda.'''
  return self.generalInvoke(functionName=self.functionNames.listQuery() ,payload=Event.getInput({'iprcodes': iprcodes}))
| 34.980392 | 120 | 0.710949 | 587 | 5,352 | 6.449744 | 0.289608 | 0.040676 | 0.023244 | 0.033281 | 0.159007 | 0.091125 | 0.091125 | 0.03962 | 0 | 0 | 0 | 0.006904 | 0.188154 | 5,352 | 152 | 121 | 35.210526 | 0.864442 | 0.083333 | 0 | 0.184211 | 1 | 0 | 0.134101 | 0.05471 | 0 | 0 | 0 | 0 | 0 | 1 | 0.087719 | false | 0 | 0.105263 | 0.017544 | 0.377193 | 0.04386 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed9836ae359b2eb9a17640f939c2e067533cf97 | 325 | py | Python | temapi/api/loaders/loader.py | Leviosar/temapi | 4db795d903f3840228e934aa53abecfd6db5f540 | [
"MIT"
] | 9 | 2020-02-01T12:55:41.000Z | 2020-12-02T17:42:22.000Z | temapi/api/loaders/loader.py | Leviosar/temapi | 4db795d903f3840228e934aa53abecfd6db5f540 | [
"MIT"
] | 11 | 2020-01-26T02:38:45.000Z | 2020-01-26T21:07:31.000Z | temapi/api/loaders/loader.py | archipelagolabs/temtemapi | 4db795d903f3840228e934aa53abecfd6db5f540 | [
"MIT"
] | 1 | 2020-01-26T16:43:51.000Z | 2020-01-26T16:43:51.000Z | import json
from temapi.commons.paths import OUTPUTS_DIR
class Loader:
    """Base class that loads a JSON file from OUTPUTS_DIR on construction.

    Subclasses set ``file`` to a filename and override ``setup`` to
    consume the decoded payload.
    """
    # Filename (relative to OUTPUTS_DIR) — must be overridden by subclasses.
    file = None

    def __init__(self):
        # NOTE(review): assert is stripped under ``python -O``; raising an
        # explicit error would be more robust for this required override.
        assert self.file is not None

        _file = OUTPUTS_DIR / self.file

        with _file.open() as f:
            data = json.load(f)

        self.setup(data)

    def setup(self, data):
        # Hook for subclasses to process the loaded JSON data.
        pass
| 15.47619 | 44 | 0.590769 | 44 | 325 | 4.181818 | 0.590909 | 0.108696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.329231 | 325 | 20 | 45 | 16.25 | 0.844037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.166667 | false | 0.083333 | 0.166667 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8ed9875e81e8d637268c1cd0dee2cbf7907d4dd1 | 1,092 | py | Python | virtual/lib/python3.6/site-packages/djreservation/migrations/0002_auto_20160903_0030.py | igihozo-stella/smart-parking | 92c5dcd3eb08b8fccfddd34bb3291a240c563ec8 | [
"MIT"
] | null | null | null | virtual/lib/python3.6/site-packages/djreservation/migrations/0002_auto_20160903_0030.py | igihozo-stella/smart-parking | 92c5dcd3eb08b8fccfddd34bb3291a240c563ec8 | [
"MIT"
] | null | null | null | virtual/lib/python3.6/site-packages/djreservation/migrations/0002_auto_20160903_0030.py | igihozo-stella/smart-parking | 92c5dcd3eb08b8fccfddd34bb3291a240c563ec8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-03 06:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Observation model, alters the
    # Reservation.status choices, and links Observation -> Reservation.
    # (Do not fix the historical 'Acepted' typo here — migrations are frozen.)

    dependencies = [
        ('djreservation', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Observation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
            ],
        ),
        migrations.AlterField(
            model_name='reservation',
            name='status',
            field=models.SmallIntegerField(choices=[(0, 'building'), (1, 'Requested'), (2, 'Acepted'), (3, 'Denied'), (4, 'Borrowed'), (5, 'Returned')], default=0),
        ),
        migrations.AddField(
            model_name='observation',
            name='reservation',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='djreservation.Reservation'),
        ),
    ]
| 32.117647 | 164 | 0.589744 | 106 | 1,092 | 5.962264 | 0.660377 | 0.037975 | 0.044304 | 0.06962 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033582 | 0.263736 | 1,092 | 33 | 165 | 33.090909 | 0.752488 | 0.06044 | 0 | 0.115385 | 1 | 0 | 0.150538 | 0.024438 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.115385 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8eddf6bf734a186f7506d4dcffbdd68420a33313 | 5,633 | py | Python | bluebottle/bb_tasks/views.py | maykinmedia/bluebottle | 355d4729662b5e9a03398efb4fe882e0f8cfa28d | [
"BSD-3-Clause"
] | null | null | null | bluebottle/bb_tasks/views.py | maykinmedia/bluebottle | 355d4729662b5e9a03398efb4fe882e0f8cfa28d | [
"BSD-3-Clause"
] | null | null | null | bluebottle/bb_tasks/views.py | maykinmedia/bluebottle | 355d4729662b5e9a03398efb4fe882e0f8cfa28d | [
"BSD-3-Clause"
] | null | null | null | from django.db.models.query_utils import Q
from rest_framework import generics
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from bluebottle.bluebottle_drf2.permissions import IsAuthorOrReadOnly
from bluebottle.utils.serializers import DefaultSerializerMixin
from bluebottle.bb_projects.permissions import IsProjectOwnerOrReadOnly
from .permissions import IsMemberOrAuthorOrReadOnly
from .serializers import (
BaseTaskMemberSerializer, TaskFileSerializer, TaskPreviewSerializer,
MyTaskMemberSerializer, SkillSerializer, MyTasksSerializer)
from bluebottle.utils.model_dispatcher import get_task_model, get_taskmember_model, get_taskfile_model, \
get_task_skill_model
# Resolve the concrete model classes via the project's model dispatcher,
# once at import time; all views below reference these.
BB_TASK_MODEL = get_task_model()
BB_TASKMEMBER_MODEL = get_taskmember_model()
BB_TASKFILE_MODEL = get_taskfile_model()
BB_SKILL_MODEL = get_task_skill_model()
class TaskPreviewList(generics.ListAPIView):
    """List task previews on viewable projects.

    Optional query params: ``project`` (slug), ``country``, ``text``
    (matched against title / end goal / description) and ``ordering``
    ('newest' or 'deadline'). Closed tasks are always excluded.
    """
    model = BB_TASK_MODEL
    serializer_class = TaskPreviewSerializer
    paginate_by = 8
    filter_fields = ('status', 'skill', )

    def get_queryset(self):
        qs = super(TaskPreviewList, self).get_queryset()
        # Optional narrowing by project slug / project country.
        project_slug = self.request.QUERY_PARAMS.get('project', None)
        if project_slug:
            qs = qs.filter(project__slug=project_slug)
        country = self.request.QUERY_PARAMS.get('country', None)
        if country:
            qs = qs.filter(project__country=country)
        # Free-text search over three text fields.
        text = self.request.QUERY_PARAMS.get('text', None)
        if text:
            qs = qs.filter(Q(title__icontains=text) |
                           Q(end_goal__icontains=text) |
                           Q(description__icontains=text))
        ordering = self.request.QUERY_PARAMS.get('ordering', None)
        if ordering == 'newest':
            qs = qs.order_by('-created')
        elif ordering == 'deadline':
            qs = qs.order_by('deadline')
        qs = qs.exclude(status=BB_TASK_MODEL.TaskStatuses.closed)
        return qs.filter(project__status__viewable=True)
class TaskList(DefaultSerializerMixin, generics.ListCreateAPIView):
    """List tasks or create one (creation limited to the project owner).

    Supports the same ``project`` / ``text`` / ``ordering`` query params
    as TaskPreviewList; closed tasks are excluded from the listing.
    """
    model = BB_TASK_MODEL
    paginate_by = 8
    permission_classes = (IsProjectOwnerOrReadOnly,)
    filter_fields = ('status', 'author')

    def get_queryset(self):
        qs = super(TaskList, self).get_queryset()
        project_slug = self.request.QUERY_PARAMS.get('project', None)
        if project_slug:
            qs = qs.filter(project__slug=project_slug)
        # Free-text search over three text fields.
        text = self.request.QUERY_PARAMS.get('text', None)
        if text:
            qs = qs.filter(Q(title__icontains=text) |
                           Q(end_goal__icontains=text) |
                           Q(description__icontains=text))
        ordering = self.request.QUERY_PARAMS.get('ordering', None)
        if ordering == 'newest':
            qs = qs.order_by('-created')
        elif ordering == 'deadline':
            qs = qs.order_by('deadline')
        qs = qs.exclude(status=BB_TASK_MODEL.TaskStatuses.closed)
        return qs

    def pre_save(self, obj):
        # The task author is always the requesting user.
        obj.author = self.request.user
class MyTaskList(generics.ListAPIView):
    """Tasks authored by the current user (empty for anonymous requests)."""
    model = BB_TASK_MODEL
    paginate_by = 8
    filter_fields = ('author',)
    serializer_class = MyTasksSerializer

    def get_queryset(self):
        if self.request.user.is_authenticated():
            return BB_TASK_MODEL.objects.filter(author=self.request.user)
        return BB_TASK_MODEL.objects.none()
class TaskDetail(DefaultSerializerMixin, generics.RetrieveUpdateAPIView):
    """Retrieve a task; updates are restricted to its author."""
    model = BB_TASK_MODEL
    permission_classes = (IsAuthorOrReadOnly, )
class TaskMemberList(generics.ListCreateAPIView):
    """List task members, or apply to a task (authenticated users only)."""
    model = BB_TASKMEMBER_MODEL
    serializer_class = BaseTaskMemberSerializer
    paginate_by = 50
    filter_fields = ('task', 'status', )
    permission_classes = (IsAuthenticatedOrReadOnly, )
    queryset = model.objects.all()

    def pre_save(self, obj):
        # When creating a task member it should always be by the request.user and have status 'applied'
        obj.member = self.request.user
        obj.status = BB_TASKMEMBER_MODEL.TaskMemberStatuses.applied
class MyTaskMemberList(generics.ListAPIView):
    """Task memberships of the current user."""
    model = BB_TASKMEMBER_MODEL
    serializer_class = MyTaskMemberSerializer

    def get_queryset(self):
        queryset = super(MyTaskMemberList, self).get_queryset()
        # valid_statuses = [TaskMember.TaskMemberStatuses.accepted, TaskMember.TaskMemberStatuses.realized]
        return queryset.filter(member=self.request.user)#, status__in=valid_statuses)
class TaskMemberDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one membership; writable by the member or the task author."""
    model = BB_TASKMEMBER_MODEL
    serializer_class = BaseTaskMemberSerializer
    permission_classes = (IsMemberOrAuthorOrReadOnly, )
class TaskFileList(generics.ListCreateAPIView):
    """List task file attachments, or upload one (authenticated users only)."""
    model = BB_TASKFILE_MODEL
    serializer_class = TaskFileSerializer
    paginate_by = 50
    filter_fields = ('task', )
    permission_classes = (IsAuthenticatedOrReadOnly, )

    def pre_save(self, obj):
        # When creating a task file the author should always be by the request.user
        obj.author = self.request.user
class TaskFileDetail(generics.RetrieveUpdateAPIView):
    """Retrieve a task file; updates are restricted to its author."""
    model = BB_TASKFILE_MODEL
    serializer_class = TaskFileSerializer
    permission_classes = (IsAuthorOrReadOnly, )
class SkillList(generics.ListAPIView):
    """Read-only listing of all skills."""
    model = BB_SKILL_MODEL
    serializer_class = SkillSerializer
class UsedSkillList(SkillList):
    """Skills that are referenced by at least one task."""
    def get_queryset(self):
        qs = super(UsedSkillList, self).get_queryset()
        skill_ids = BB_TASK_MODEL.objects.values_list('skill', flat=True).distinct()
        return qs.filter(id__in=skill_ids)
| 33.730539 | 107 | 0.711699 | 613 | 5,633 | 6.293638 | 0.203915 | 0.025402 | 0.028512 | 0.039917 | 0.42846 | 0.384655 | 0.312079 | 0.223432 | 0.223432 | 0.205806 | 0 | 0.001783 | 0.203444 | 5,633 | 166 | 108 | 33.933735 | 0.858034 | 0.052015 | 0 | 0.512821 | 0 | 0 | 0.028679 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068376 | false | 0 | 0.076923 | 0 | 0.589744 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8ee090db50f40f1d36abd60d67541a1d01238621 | 664 | py | Python | schema/Dimension1/Advection/equation_type.py | pylbm/pylbm_ui | 0a7202ee6ee5424486ce6ade1d3b18d8139d4ffb | [
"BSD-3-Clause"
] | 3 | 2021-05-17T20:38:32.000Z | 2021-11-16T17:54:26.000Z | schema/Dimension1/Advection/equation_type.py | pylbm/pylbm_ui | 0a7202ee6ee5424486ce6ade1d3b18d8139d4ffb | [
"BSD-3-Clause"
] | 32 | 2021-04-29T13:27:13.000Z | 2021-07-01T07:22:58.000Z | schema/Dimension1/Advection/equation_type.py | pylbm/pylbm_ui | 0a7202ee6ee5424486ce6ade1d3b18d8139d4ffb | [
"BSD-3-Clause"
] | 1 | 2021-04-30T06:40:21.000Z | 2021-04-30T06:40:21.000Z | # Authors:
# Loic Gouarin <loic.gouarin@polytechnique.edu>
# Benjamin Graille <benjamin.graille@universite-paris-saclay.fr>
# Thibaut Van Hoof <thibaut.vanhoof@cenaero.be>
#
# License: BSD 3 clause
# from pydantic import BaseModel
import sympy as sp
from ...symbol import Symbol
from ...equation_type import EquationType
class Transport1D(EquationType):
    """One-dimensional advection (transport) with a constant velocity."""

    name = 'Advection with constant velocity'
    # Unknown field and the constant advection velocity.
    u = Symbol('u')
    c = Symbol('c')
    # Names of the boundary conditions supported by this equation.
    NonReflexiveOutlet = 'NonReflexiveOutlet'
    Neumann = 'Neumann'
    Dirichlet_u = 'Dirichlet_u'

    def get_fields(self):
        """Return the mapping of physical field names to their symbols."""
        return {'mass': self.u}
| 23.714286 | 68 | 0.668675 | 75 | 664 | 5.866667 | 0.666667 | 0.05 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003906 | 0.228916 | 664 | 27 | 69 | 24.592593 | 0.855469 | 0.343373 | 0 | 0 | 0 | 0 | 0.173302 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.2 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8ee21dfaeee39762d5805013e154a52298242afc | 499 | py | Python | setup.py | nkzmsb/logtools | b091fe83eb0ed9010e85c865d23b47e093b12f4d | [
"MIT"
] | null | null | null | setup.py | nkzmsb/logtools | b091fe83eb0ed9010e85c865d23b47e093b12f4d | [
"MIT"
] | 12 | 2022-01-04T12:32:48.000Z | 2022-03-07T10:07:00.000Z | setup.py | nkzmsb/logtools | b091fe83eb0ed9010e85c865d23b47e093b12f4d | [
"MIT"
] | null | null | null | """
To make dist folder
$ python setup.py sdist
"""
from setuptools import setup, find_packages
# Read the long description up front so the file handle is closed promptly
# (the original left an open file object dangling inside the setup() call).
with open("README.md", encoding="utf-8") as readme:
    _long_description = readme.read()

setup(
    name="logtools",
    version="0.0.12",
    packages=find_packages(),
    zip_safe=False,
    author="nkzmsb",
    url="https://github.com/nkzmsb/logtools",
    description="This is a wrapper for logging",
    # FIX: the keyword was misspelled `long_descriptoin`, so setuptools
    # ignored it and the README never reached the package metadata.
    long_description=_long_description,
    long_description_content_type="text/markdown",
    python_requires=">=3.7",
)
8ee799fdfce62783830341605039f1e6fa322831 | 301 | py | Python | dd_invitation/admin.py | datadealer/dd_auth | 5a183d64035059e88dd83c1ef5dfaf083faa362c | [
"Artistic-2.0"
] | null | null | null | dd_invitation/admin.py | datadealer/dd_auth | 5a183d64035059e88dd83c1ef5dfaf083faa362c | [
"Artistic-2.0"
] | null | null | null | dd_invitation/admin.py | datadealer/dd_auth | 5a183d64035059e88dd83c1ef5dfaf083faa362c | [
"Artistic-2.0"
] | 1 | 2021-06-06T22:29:12.000Z | 2021-06-06T22:29:12.000Z | # -*- coding: utf-8 -*-
from dd_invitation import models
from django.contrib import admin
class TokenAdmin(admin.ModelAdmin):
    # Columns shown on the token changelist page.
    list_display = ('value', 'consumed', 'created')
    # Newest tokens first.
    ordering = ('-created',)

    class Media:
        # Extra JavaScript loaded on the token admin pages.
        js = ('dd_invitation.js',)

admin.site.register(models.Token, TokenAdmin)
| 20.066667 | 49 | 0.694352 | 36 | 301 | 5.722222 | 0.694444 | 0.116505 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003922 | 0.152824 | 301 | 14 | 50 | 21.5 | 0.803922 | 0.069767 | 0 | 0 | 0 | 0 | 0.158273 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8eee82d2505a2e4ee34bd5c0db201a387b44f4cc | 967 | py | Python | util/undervolt/setup.py | haller218/MyDotFiles | 81ab1604f97a7aeb38681a4f43ad5a2fed3677bf | [
"MIT"
] | 2 | 2020-07-24T22:04:17.000Z | 2020-08-29T23:46:44.000Z | util/undervolt/setup.py | haller33/MyDotFiles | 81ab1604f97a7aeb38681a4f43ad5a2fed3677bf | [
"MIT"
] | null | null | null | util/undervolt/setup.py | haller33/MyDotFiles | 81ab1604f97a7aeb38681a4f43ad5a2fed3677bf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import dirname, join
from setuptools import setup
import doctest
def test_suite():
    # Collect the doctests embedded in the undervolt module; referenced by
    # name via `test_suite='setup.test_suite'` in the setup() call below.
    return doctest.DocTestSuite('undervolt')
# Package metadata for the undervolt distribution; the long description is
# read from the README that sits next to this setup.py.
setup(
    name='undervolt',
    version='0.2.9',
    description='Undervolt Intel CPUs under Linux',
    long_description=open(
        join(dirname(__file__), 'README.rst')).read(),
    url='http://github.com/georgewhewell/undervolt',
    author='George Whewell',
    author_email='georgerw@gmail.com',
    license='GPL',
    py_modules=['undervolt'],
    test_suite='setup.test_suite',
    entry_points={
        'console_scripts': [
            'undervolt=undervolt:main',
        ],
    },
    keywords=['undervolt', 'intel', 'linux'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
    ],
)
| 26.135135 | 70 | 0.62151 | 104 | 967 | 5.663462 | 0.682692 | 0.04584 | 0.08489 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009321 | 0.223371 | 967 | 36 | 71 | 26.861111 | 0.774967 | 0.043433 | 0 | 0.066667 | 0 | 0 | 0.416035 | 0.026002 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | true | 0 | 0.1 | 0.033333 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8eee92f6366b1b5c1894109454a732d12596d3d6 | 3,692 | py | Python | tests/test_cli.py | jwilges/monocat | 1f5debe505509f90f3cb948b8324b528614dd7b3 | [
"BSD-3-Clause"
] | 1 | 2020-08-16T00:04:40.000Z | 2020-08-16T00:04:40.000Z | tests/test_cli.py | jwilges/monocat | 1f5debe505509f90f3cb948b8324b528614dd7b3 | [
"BSD-3-Clause"
] | 5 | 2021-09-09T15:11:37.000Z | 2021-09-09T15:11:39.000Z | tests/test_cli.py | jwilges/monocat | 1f5debe505509f90f3cb948b8324b528614dd7b3 | [
"BSD-3-Clause"
] | 1 | 2020-06-17T12:01:01.000Z | 2020-06-17T12:01:01.000Z | import logging
import sys
from contextlib import ExitStack
from unittest import TestCase
from unittest.mock import DEFAULT as DEFAULT_MOCK
from unittest.mock import MagicMock, patch
from monocat import cli
class TestConfigureLogging(TestCase):
    # TODO: Reduce `_configure_logging` test complexity by decoupling parsed verbosity <-> log level mapping (among other things).

    def test_verbosity_none_configures_null_handler(self):
        """With no verbosity, logging is configured with only a NullHandler."""
        expected_verbosity_argument = None
        with ExitStack() as _scope:
            mock_logging = _scope.enter_context(
                patch.multiple('logging', basicConfig=DEFAULT_MOCK, getLogger=DEFAULT_MOCK))
            cli._configure_logging(expected_verbosity_argument)
            mock_logging['basicConfig'].assert_called_once()
            mock_basic_config_kwargs = mock_logging['basicConfig'].mock_calls[0][2]
            assert isinstance(mock_basic_config_kwargs['handlers'][0], logging.NullHandler)

    def test_verbosity_nonzero_configures_logging(self):
        """Stdout/stderr stream handlers are configured at the mapped levels."""
        # Expectation:
        # verbosity argument = 1 (i.e. `-v`) should yield a console log level of: INFO (20) - (10 * 1) = 10
        expected_verbosity_argument = 1
        expected_verbosity = 10
        with ExitStack() as _scope:
            mock_logging = _scope.enter_context(
                patch.multiple('logging', basicConfig=DEFAULT_MOCK, getLogger=DEFAULT_MOCK))
            cli._configure_logging(expected_verbosity_argument)
            mock_logging['basicConfig'].assert_called_once()
            mock_basic_config_kwargs = mock_logging['basicConfig'].mock_calls[0][2]
            assert mock_basic_config_kwargs['level'] == expected_verbosity

            # FIX: the original bound these to *generator expressions* and then
            # did `assert stdout_handlers` -- a generator is always truthy, so
            # that assertion could never fail (and a generator can only be
            # consumed once). Materialize to lists so emptiness is detected.
            stdout_handlers = [
                handler for handler in mock_basic_config_kwargs['handlers']
                if isinstance(handler, logging.StreamHandler) and handler.stream.name == sys.stdout.name
            ]
            assert stdout_handlers
            assert all(handler.level == expected_verbosity for handler in stdout_handlers)

            stderr_handlers = [
                handler for handler in mock_basic_config_kwargs['handlers']
                if isinstance(handler, logging.StreamHandler) and handler.stream.name == sys.stderr.name
            ]
            assert stderr_handlers
            assert all(handler.level == logging.WARNING for handler in stderr_handlers)
class TestMain(TestCase):
    # TODO: `main` test complexity by decoupling `Action`-related mappings.

    def test_log_configuration(self):
        """main() must forward the parsed verbosity to _configure_logging."""
        expected_verbosity = 1
        # _parse_arguments returns a (parser, namespace) pair; only the
        # namespace attributes main() reads are faked here.
        mock_parse_arguments = MagicMock(
            return_value=(
                MagicMock(),
                MagicMock(verbosity=expected_verbosity, action='get-release')
            ))
        mock_configure_logging = MagicMock()
        with ExitStack() as _scope:
            # Stub sys.exit so main() cannot terminate the test process.
            _scope.enter_context(
                patch.object(cli.sys, 'exit'))
            _scope.enter_context(
                patch.object(cli, '_parse_arguments', mock_parse_arguments))
            _scope.enter_context(
                patch.object(cli, '_configure_logging', mock_configure_logging))
            # Neutralize the side-effecting collaborators main() constructs.
            _scope.enter_context(
                patch.object(cli, 'ReleaseManager'))
            _scope.enter_context(
                patch.object(cli.GetReleaseAction, '__call__')
            )
            cli.main()
        mock_parse_arguments.assert_called()
        mock_configure_logging.assert_called_with(expected_verbosity)
| 42.436782 | 130 | 0.666306 | 388 | 3,692 | 6.028351 | 0.260309 | 0.087217 | 0.050876 | 0.06584 | 0.48183 | 0.441642 | 0.375374 | 0.375374 | 0.375374 | 0.375374 | 0 | 0.006198 | 0.257042 | 3,692 | 86 | 131 | 42.930233 | 0.846518 | 0.083153 | 0 | 0.358209 | 0 | 0 | 0.046746 | 0 | 0 | 0 | 0 | 0.011628 | 0.149254 | 1 | 0.044776 | false | 0 | 0.104478 | 0 | 0.179104 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9020898c1f7c0c64f68856e8924389487cbb16e | 771 | py | Python | python/toolkit.py | rjlasko/pst | 1b2b4286524e12e13a3eacd0297272a3a2bfc4aa | [
"MIT"
] | null | null | null | python/toolkit.py | rjlasko/pst | 1b2b4286524e12e13a3eacd0297272a3a2bfc4aa | [
"MIT"
] | null | null | null | python/toolkit.py | rjlasko/pst | 1b2b4286524e12e13a3eacd0297272a3a2bfc4aa | [
"MIT"
] | null | null | null | #!/usr/bin/env python
def wakeHost(hostname, nmap_file):
    # Send a Wake-on-LAN magic packet for each MAC address associated with
    # `hostname`. Presumably nmap.getMac parses the given nmap scan file for
    # the host's MAC address(es) -- project helper, TODO confirm.
    from Hacks import nmap
    from wakeonlan import wol
    for mac in nmap.getMac(hostname, nmap_file):
        wol.send_magic_packet(mac)
def getLocalIps():
    """Print every non-loopback IPv4 address bound to a local interface."""
    # .items() (instead of the Python 2-only .iteritems()) and print() keep
    # this working under both Python 2 and 3; the interface name itself is
    # unused here, so only the address lists are iterated.
    for addresses in getInterfaceIpDict().values():
        for addy in addresses:
            # getInterfaceIpDict uses None as the placeholder for
            # address-less interfaces; skip it along with loopback.
            if addy not in (None, '127.0.0.1'):
                print(addy)
def getInterfaceIPs():
    """Print each interface name with its comma-separated IPv4 addresses."""
    # .items()/print(...) instead of Python 2-only .iteritems()/print stmt,
    # so the function runs under both Python 2 and 3 with identical output.
    for (ifaceName, addys) in getInterfaceIpDict().items():
        # getInterfaceIpDict guarantees at least one entry per interface,
        # with None marking an interface that has no IPv4 address.
        print('%s: %s' % (ifaceName, ', '.join(addys) if addys[0] is not None else ''))
def getInterfaceIpDict():
    """Map each network interface name to its list of IPv4 addresses.

    Interfaces without an IPv4 address map to ``[None]``.
    """
    from netifaces import interfaces, ifaddresses, AF_INET
    return {
        name: [entry['addr']
               for entry in ifaddresses(name).setdefault(AF_INET, [{'addr': None}])]
        for name in interfaces()
    }
| 25.7 | 102 | 0.709468 | 106 | 771 | 5.103774 | 0.481132 | 0.066543 | 0.05915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010753 | 0.155642 | 771 | 29 | 103 | 26.586207 | 0.820277 | 0.02594 | 0 | 0 | 0 | 0 | 0.033333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.15 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d906370ccd34943d6d1deac1511c603e61a66724 | 3,860 | py | Python | src/xleapp/log/__init__.py | flamusdiu/xleapp | 27379a0a617cb31f12fad63dabcefd8793116c67 | [
"MIT"
] | 10 | 2021-09-25T20:52:39.000Z | 2022-02-24T15:29:09.000Z | src/xleapp/log/__init__.py | flamusdiu/xleapp | 27379a0a617cb31f12fad63dabcefd8793116c67 | [
"MIT"
] | 1 | 2021-11-24T15:06:51.000Z | 2021-11-24T15:10:16.000Z | src/xleapp/log/__init__.py | flamusdiu/xleapp | 27379a0a617cb31f12fad63dabcefd8793116c67 | [
"MIT"
] | null | null | null | import importlib.util
import logging
import logging.config
import os
import typing as t
from pathlib import Path
import yaml
import xleapp.globals as g
from ..helpers.utils import generate_program_header
StrPath = t.Union[str, os.PathLike[str]]
class ProcessFileFilter(logging.Filter):
    """Admit only records from the ``xleapp.process`` logger at INFO (20) or above."""

    def filter(self, record):
        # Guard clause: drop anything from other loggers outright, then
        # apply the INFO-level floor.
        if record.name != "xleapp.process":
            return False
        return record.levelno >= 20
class InfoLogFileFilter(logging.Filter):
    """Admit only records from the ``xleapp.logfile`` logger at INFO (20) or above."""

    def filter(self, record):
        from_logfile_logger = record.name == "xleapp.logfile"
        return from_logfile_logger and record.levelno >= 20
class DebugFileFilter(logging.Filter):
    """Pass records only while the application-wide debug flag is set."""

    def filter(self, record):
        # Gate on global app state rather than the record, so the debug log
        # can be switched on/off at runtime for all records at once.
        return g.app.debug
class StreamHandler(logging.StreamHandler):
    """StreamHandler that visually indents sub-step messages starting with "->"."""

    def emit(self, record: logging.LogRecord):
        # Prefix "->" messages so they nest under the preceding top-level
        # message in console output. NOTE(review): mutating record.msg here
        # affects every later handler that sees this record.
        if record.msg.startswith("->"):
            record.msg = f" {record.msg}"
        logging.StreamHandler.emit(self, record)
class FileHandler(logging.FileHandler):
    """logging.FileHandler that visually indents messages starting with "->"."""

    def __init__(
        self,
        filename: StrPath,
        mode: str = "a",
        encoding: t.Union[str, None] = None,
        delay: bool = False,
        errors: t.Union[str, None] = None,
    ) -> None:
        # Mirrors logging.FileHandler's signature (including the `errors`
        # parameter); all arguments are forwarded unchanged.
        super().__init__(
            filename,
            mode=mode,
            encoding=encoding,
            delay=delay,
            errors=errors,
        )

    def emit(self, record: logging.LogRecord):
        # Prefix "->" messages so they nest under the preceding top-level
        # message in the log file (same convention as StreamHandler above).
        if record.msg.startswith("->"):
            record.msg = f" {record.msg}"
        logging.FileHandler.emit(self, record)
class FileHandlerWithHeader(logging.FileHandler):
    """A ``logging.FileHandler`` that writes a one-line header to the file.

    The header is written immediately when the stream is opened eagerly
    (``delay`` falsy), or on the first ``emit`` when ``delay`` is set and
    the target file did not already exist.
    """

    def __init__(self, filename, header, mode="a", encoding=None, delay=0):
        self.header = header
        # FIX: record whether the file existed *before* FileHandler opens it.
        # The original stored `Path(filename)` here -- a Path object, which is
        # always truthy -- so `if not self.file_pre_exists` in emit() could
        # never fire and delayed handlers never wrote the header at all.
        self.file_pre_exists = Path(filename).exists()
        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
        # Eager open: write the header right away (unchanged behavior).
        if not delay and self.stream is not None:
            self.stream.write("%s\n" % self.header)

    def emit(self, record: logging.LogRecord):
        # Delayed open: open on first record and write the header exactly
        # once for files this handler created.
        if self.stream is None:
            self.stream = self._open()
            if not self.file_pre_exists:
                self.stream.write("%s\n" % self.header)
        # Indent sub-step messages (those beginning with "->").
        message = record.msg
        if message.startswith("->"):
            message = f" {message}"
        record.msg = message
        logging.FileHandler.emit(self, record)
def init() -> None:
    """Configure logging from the packaged ``log_config.yaml``.

    Loads the YAML dictConfig that ships alongside this module, redirects
    the info/process/debug file handlers into ``g.app.log_folder`` (creating
    it if needed), injects the generated program header into the info log
    handler, and applies the configuration.

    Raises:
        FileNotFoundError: if the package spec or its config file cannot
            be located.
    """
    mod = importlib.util.find_spec(__name__)
    if not mod:
        raise FileNotFoundError("Missing package 'log_config.yaml' to configure logging!")
    if mod.origin:
        # The YAML config lives next to this module inside the package.
        logConfig = Path(mod.origin).parent / "log_config.yaml"
        with open(logConfig, "r") as file:
            config = yaml.safe_load(file.read())
        if not g.app.log_folder.exists():
            g.app.log_folder.mkdir(parents=True, exist_ok=True)
        # Rebase each file handler's relative filename onto the log folder.
        info_log_file = config["handlers"]["info_file_handler"]["filename"]
        config["handlers"]["info_file_handler"]["filename"] = (
            g.app.log_folder / info_log_file
        )
        # The info handler is a FileHandlerWithHeader; give it the banner.
        config["handlers"]["info_file_handler"]["header"] = generate_program_header(
            project_version=f"{g.app.project} v{g.app.version}",
            input_path=g.app.input_path,
            output_path=g.app.output_path,
            num_to_process=g.app.num_to_process,
            num_of_categories=g.app.num_of_categories,
        )
        process_log_file = config["handlers"]["process_file_handler"]["filename"]
        config["handlers"]["process_file_handler"]["filename"] = (
            g.app.log_folder / process_log_file
        )
        debug_log_file = config["handlers"]["debug_file_handler"]["filename"]
        config["handlers"]["debug_file_handler"]["filename"] = (
            g.app.log_folder / debug_log_file
        )
        logging.config.dictConfig(config)
    else:
        raise FileNotFoundError(
            "Package found! Missing 'log_config.yaml' to "
            "configure logging! Reinstall package.",
        )
| 30.393701 | 90 | 0.621503 | 453 | 3,860 | 5.121413 | 0.249448 | 0.02069 | 0.036207 | 0.028017 | 0.417241 | 0.343534 | 0.222845 | 0.15 | 0.115517 | 0.115517 | 0 | 0.001746 | 0.25829 | 3,860 | 126 | 91 | 30.634921 | 0.808592 | 0 | 0 | 0.125 | 1 | 0 | 0.132124 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.104167 | 0.03125 | 0.291667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d913b95c3ba94ba3e42c6be9063bb4fc4710f196 | 2,346 | py | Python | secretupdater/secretupdater/config.py | matthope/k8s-secret-updater | cf2687079a25b4afdb19385457cf578dcea34431 | [
"Apache-2.0"
] | 5 | 2019-01-22T03:42:30.000Z | 2021-04-10T02:32:58.000Z | secretupdater/secretupdater/config.py | matthope/k8s-secret-updater | cf2687079a25b4afdb19385457cf578dcea34431 | [
"Apache-2.0"
] | 2 | 2019-01-31T03:34:03.000Z | 2021-12-13T11:29:48.000Z | secretupdater/secretupdater/config.py | matthope/k8s-secret-updater | cf2687079a25b4afdb19385457cf578dcea34431 | [
"Apache-2.0"
] | 2 | 2019-04-10T00:14:15.000Z | 2021-08-19T01:34:07.000Z | # Copyright 2019 Nine Entertainment Co.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import getenv
def bool_env(var_name, default=False):
    """Get an environment variable coerced to a boolean value.

    The spellings ``'False'``, ``'false'`` and ``'0'`` map to ``False`` even
    though, as non-empty strings, they would normally coerce to ``True``.
    """
    falsy_spellings = ('False', 'false', '0')
    raw = getenv(var_name, default)
    return bool(raw) and raw not in falsy_spellings
def float_env(var_name, default=0.0):
    """Get an environment variable coerced to a float value.

    Raises ``ValueError`` if the variable is set to a non-numeric string.
    """
    raw_value = getenv(var_name, default)
    return float(raw_value)
def int_env(var_name, default=0):
    """Get an environment variable coerced to an integer value.

    Raises ``ValueError`` if the variable is set to a non-integer string.
    """
    raw_value = getenv(var_name, default)
    return int(raw_value)
def str_env(var_name, default=''):
    """Get an environment variable as a string, or *default* when unset."""
    value = getenv(var_name, default)
    return value
class Config():
    """Application settings; every value can be overridden via environment variables."""

    # Basic setup
    # Whether or not the app is run in debug mode. Never run in debug
    # mode outside of development!
    DEBUG = bool_env('DEBUG', False)
    # The host the WSGI app should use.
    HOST = str_env('HOST', '0.0.0.0')
    # The port the WSGI app should use.
    PORT = int_env('PORT', 8080)

    # User auth (HTTP basic auth credentials for this service).
    BASIC_AUTH_USERNAME = str_env('USERNAME')
    BASIC_AUTH_PASSWORD = str_env('PASSWORD')
    BASIC_AUTH_REALM = str_env('AUTH_REALM', 'Kubernetes Secret Updater')

    # Confidant server details
    AUTH_METHOD = str_env('AUTH_METHOD', 'saml')  # How to access confidant from app: 'saml' or 'header'
    CONFIDANT_SERVER_URL = str_env('CONFIDANT_SERVER_URL', 'http://localhost')  # ARN
    CONFIDANT_SERVER_AUTH_KEY = str_env(
        'CONFIDANT_SERVER_AUTH_KEY', 'auth-key')
    CONFIDANT_SERVER_AWS_REGION = str_env(
        'CONFIDANT_SERVER_AWS_REGION', 'ap-southeast-2')
| 31.28 | 104 | 0.693947 | 343 | 2,346 | 4.600583 | 0.408163 | 0.034221 | 0.070976 | 0.043093 | 0.140684 | 0.065273 | 0.065273 | 0 | 0 | 0 | 0 | 0.011834 | 0.207587 | 2,346 | 74 | 105 | 31.702703 | 0.837009 | 0.483376 | 0 | 0 | 0 | 0 | 0.183511 | 0.046099 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16 | false | 0.04 | 0.04 | 0 | 0.84 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d914574a42c517f5d3aea0ddfc7d81cb3c43e9ec | 253 | py | Python | flask_demo/config.py | banzhuanl/full_stack | 2b139a1602693a97f0eb8f8c6b9ab2fa74432156 | [
"Apache-2.0"
] | null | null | null | flask_demo/config.py | banzhuanl/full_stack | 2b139a1602693a97f0eb8f8c6b9ab2fa74432156 | [
"Apache-2.0"
] | null | null | null | flask_demo/config.py | banzhuanl/full_stack | 2b139a1602693a97f0eb8f8c6b9ab2fa74432156 | [
"Apache-2.0"
] | null | null | null | import multiprocessing
import tlib.conf as conf
# Gunicorn server settings (loaded via `gunicorn -c config.py ...`).

# Listen on all interfaces; the port comes from the tlib configuration.
bind='0.0.0.0:%s' % conf.get("port")
# workers=multiprocessing.cpu_count() * 2 + 1
# workers=multiprocessing.cpu_count()
# Fixed pool of 10 workers (the CPU-based formulas above kept for reference).
workers=10
# Maximum number of pending connections.
backlog=2048
# gevent's cooperative workers, suited to I/O-bound request handling.
worker_class="gevent"
debug=False
# Run in the foreground; process supervisors expect non-daemonized workers.
daemon=False
# Workers silent for more than 30 seconds are killed and restarted.
timeout=30
| 18.071429 | 45 | 0.762846 | 39 | 253 | 4.871795 | 0.666667 | 0.031579 | 0.031579 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.061404 | 0.098814 | 253 | 13 | 46 | 19.461538 | 0.77193 | 0.312253 | 0 | 0 | 0 | 0 | 0.116959 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9265dcf81c41805c5f7a99bca81b0fb9dd04eb8 | 737 | py | Python | hier/models.py | jtdub/prod2lab | 054c922f731ad377b83714194ef806325f79336a | [
"MIT"
] | 11 | 2019-11-20T02:05:30.000Z | 2021-08-22T13:15:14.000Z | hier/models.py | jtdub/prod2lab | 054c922f731ad377b83714194ef806325f79336a | [
"MIT"
] | 12 | 2019-11-20T02:07:54.000Z | 2019-12-11T14:57:59.000Z | hier/models.py | jtdub/prod2lab | 054c922f731ad377b83714194ef806325f79336a | [
"MIT"
] | 2 | 2019-11-20T02:05:33.000Z | 2019-11-28T01:29:20.000Z | from django.db import models
# Match operators available for Lineage.key; stored value equals the label.
LINEAGE_CHOICES = (
    ('startswith', 'startswith'),
    ('endswith', 'endswith'),
    ('contains', 'contains'),
    ('equals', 'equals')
)
class Lineage(models.Model):
    """A hierarchy-matching rule; rules chain via ``parent`` to express nesting."""

    # Optional parent rule; deleting the parent cascades to its children.
    parent = models.ForeignKey("self", blank=True, null=True, on_delete=models.CASCADE)
    # Match operator, one of LINEAGE_CHOICES.
    key = models.CharField(choices=LINEAGE_CHOICES, blank=False, null=False, max_length=255)
    # Value the operator is applied against.
    value = models.CharField(max_length=500, blank=False, null=False)
    # Operating system this rule applies to.
    os = models.CharField(max_length=255, blank=False, null=False)

    def __str__(self):
        # Include the parent rule (one level up only) when present.
        if self.parent:
            return f"{self.os}: {self.parent.key}:{self.parent.value} > {self.key}:{self.value}"
        else:
            return f"{self.os}: {self.key}:{self.value}"
| 32.043478 | 96 | 0.651289 | 93 | 737 | 5.053763 | 0.397849 | 0.095745 | 0.089362 | 0.121277 | 0.07234 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.181818 | 737 | 22 | 97 | 33.5 | 0.764511 | 0 | 0 | 0 | 0 | 0.058824 | 0.238806 | 0.112619 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.058824 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d9275745388d0b0a3a8bfd25bb03902760e66473 | 7,803 | py | Python | tests/regressiontests/admin_validation/models.py | Smarsh/django | ffb738e0f56027e16564a79b709cbf44596c2335 | [
"BSD-3-Clause"
] | 19 | 2015-05-01T19:59:03.000Z | 2021-12-09T08:03:16.000Z | tests/regressiontests/admin_validation/models.py | aprefontaine/TMScheduler | 298a332532b9df1d3f6a80b1334630bc106d3b78 | [
"BSD-3-Clause"
] | 1 | 2018-01-03T15:26:49.000Z | 2018-01-03T15:26:49.000Z | tests/regressiontests/admin_validation/models.py | aprefontaine/TMScheduler | 298a332532b9df1d3f6a80b1334630bc106d3b78 | [
"BSD-3-Clause"
] | 30 | 2015-03-25T19:40:07.000Z | 2021-05-28T22:59:26.000Z | """
Tests of ModelAdmin validation logic.
"""
from django.db import models
class Album(models.Model):
    # Minimal model used as a ForeignKey target by the validation doctests.
    title = models.CharField(max_length=150)
class Song(models.Model):
    title = models.CharField(max_length=150)
    album = models.ForeignKey(Album)
    # Non-editable, so it is excluded from generated ModelForms.
    original_release = models.DateField(editable=False)

    class Meta:
        ordering = ('title',)

    def __unicode__(self):
        # Python 2-style string representation.
        return self.title

    def readonly_method_on_model(self):
        # Intentionally empty: referenced by name from
        # ModelAdmin.readonly_fields in the doctests below.
        pass
class TwoAlbumFKAndAnE(models.Model):
    # Two ForeignKeys to the same model: inlines for this model must set
    # fk_name explicitly (exercised by the doctests below).
    album1 = models.ForeignKey(Album, related_name="album1_set")
    album2 = models.ForeignKey(Album, related_name="album2_set")
    e = models.CharField(max_length=1)
class Author(models.Model):
    # Related to Book through the explicit AuthorsBooks m2m table.
    name = models.CharField(max_length=100)
class Book(models.Model):
    name = models.CharField(max_length=100)
    subtitle = models.CharField(max_length=100)
    price = models.FloatField()
    # Explicit `through` model: admin 'fields'/'fieldsets' may not include
    # this field (exercised by the doctests below).
    authors = models.ManyToManyField(Author, through='AuthorsBooks')
class AuthorsBooks(models.Model):
    # Intermediate (through) table for Book.authors.
    author = models.ForeignKey(Author)
    book = models.ForeignKey(Book)
__test__ = {'API_TESTS':"""
>>> from django import forms
>>> from django.contrib import admin
>>> from django.contrib.admin.validation import validate, validate_inline
# Regression test for #8027: custom ModelForms with fields/fieldsets
>>> class SongForm(forms.ModelForm):
... pass
>>> class ValidFields(admin.ModelAdmin):
... form = SongForm
... fields = ['title']
>>> class InvalidFields(admin.ModelAdmin):
... form = SongForm
... fields = ['spam']
>>> validate(ValidFields, Song)
>>> validate(InvalidFields, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: 'InvalidFields.fields' refers to field 'spam' that is missing from the form.
# Tests for basic validation of 'exclude' option values (#12689)
>>> class ExcludedFields1(admin.ModelAdmin):
... exclude = ('foo')
>>> validate(ExcludedFields1, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'ExcludedFields1.exclude' must be a list or tuple.
>>> class ExcludedFields2(admin.ModelAdmin):
... exclude = ('name', 'name')
>>> validate(ExcludedFields2, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: There are duplicate field(s) in ExcludedFields2.exclude
>>> class ExcludedFieldsInline(admin.TabularInline):
... model = Song
... exclude = ('foo')
>>> class ExcludedFieldsAlbumAdmin(admin.ModelAdmin):
... model = Album
... inlines = [ExcludedFieldsInline]
>>> validate(ExcludedFieldsAlbumAdmin, Album)
Traceback (most recent call last):
...
ImproperlyConfigured: 'ExcludedFieldsInline.exclude' must be a list or tuple.
# Regression test for #9932 - exclude in InlineModelAdmin
# should not contain the ForeignKey field used in ModelAdmin.model
>>> class SongInline(admin.StackedInline):
... model = Song
... exclude = ['album']
>>> class AlbumAdmin(admin.ModelAdmin):
... model = Album
... inlines = [SongInline]
>>> validate(AlbumAdmin, Album)
Traceback (most recent call last):
...
ImproperlyConfigured: SongInline cannot exclude the field 'album' - this is the foreign key to the parent model Album.
# Regression test for #11709 - when testing for fk excluding (when exclude is
# given) make sure fk_name is honored or things blow up when there is more
# than one fk to the parent model.
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
... exclude = ("e",)
... fk_name = "album1"
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
# Ensure inlines validate that they can be used correctly.
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
Traceback (most recent call last):
...
Exception: <class 'regressiontests.admin_validation.models.TwoAlbumFKAndAnE'> has more than 1 ForeignKey to <class 'regressiontests.admin_validation.models.Album'>
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
... fk_name = "album1"
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title",)
>>> validate(SongAdmin, Song)
>>> def my_function(obj):
... # does nothing
... pass
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = (my_function,)
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("readonly_method_on_modeladmin",)
...
... def readonly_method_on_modeladmin(self, obj):
... # does nothing
... pass
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("readonly_method_on_model",)
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title", "nonexistant")
>>> validate(SongAdmin, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: SongAdmin.readonly_fields[1], 'nonexistant' is not a callable or an attribute of 'SongAdmin' or found in the model 'Song'.
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title", "awesome_song")
... fields = ("album", "title", "awesome_song")
>>> validate(SongAdmin, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: SongAdmin.readonly_fields[1], 'awesome_song' is not a callable or an attribute of 'SongAdmin' or found in the model 'Song'.
>>> class SongAdmin(SongAdmin):
... def awesome_song(self, instance):
... if instance.title == "Born to Run":
... return "Best Ever!"
... return "Status unknown."
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = (lambda obj: "test",)
>>> validate(SongAdmin, Song)
# Regression test for #12203/#12237 - Fail more gracefully when a M2M field that
# specifies the 'through' option is included in the 'fields' or the 'fieldsets'
# ModelAdmin options.
>>> class BookAdmin(admin.ModelAdmin):
... fields = ['authors']
>>> validate(BookAdmin, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'BookAdmin.fields' can't include the ManyToManyField field 'authors' because 'authors' manually specifies a 'through' model.
>>> class FieldsetBookAdmin(admin.ModelAdmin):
... fieldsets = (
... ('Header 1', {'fields': ('name',)}),
... ('Header 2', {'fields': ('authors',)}),
... )
>>> validate(FieldsetBookAdmin, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'FieldsetBookAdmin.fieldsets[1][1]['fields']' can't include the ManyToManyField field 'authors' because 'authors' manually specifies a 'through' model.
>>> class NestedFieldsetAdmin(admin.ModelAdmin):
... fieldsets = (
... ('Main', {'fields': ('price', ('name', 'subtitle'))}),
... )
>>> validate(NestedFieldsetAdmin, Book)
# Regression test for #12209 -- If the explicitly provided through model
# is specified as a string, the admin should still be able use
# Model.m2m_field.through
>>> class AuthorsInline(admin.TabularInline):
... model = Book.authors.through
>>> class BookAdmin(admin.ModelAdmin):
... inlines = [AuthorsInline]
# If the through model is still a string (and hasn't been resolved to a model)
# the validation will fail.
>>> validate(BookAdmin, Book)
# Regression for ensuring ModelAdmin.fields can contain non-model fields
# that broke with r11737
>>> class SongForm(forms.ModelForm):
... extra_data = forms.CharField()
... class Meta:
... model = Song
>>> class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
... form = SongForm
... fields = ['title', 'extra_data']
>>> validate(FieldsOnFormOnlyAdmin, Song)
"""}
| 29.445283 | 173 | 0.694605 | 856 | 7,803 | 6.26285 | 0.244159 | 0.050364 | 0.035441 | 0.042902 | 0.419511 | 0.361686 | 0.296586 | 0.211341 | 0.151091 | 0.127215 | 0 | 0.01154 | 0.167115 | 7,803 | 264 | 174 | 29.556818 | 0.813356 | 0.006536 | 0 | 0.413043 | 0 | 0.032609 | 0.86609 | 0.243673 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01087 | false | 0.021739 | 0.021739 | 0.005435 | 0.152174 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9275eff44ff89f6a57123da99b8d5c5eef45359 | 3,284 | py | Python | examples/firesat/manager/main_manager.py | code-lab-org/nost-tools | 8b325eb8e7798b71e121de0d2bf6ffc09f7b0bc0 | [
"BSD-3-Clause"
] | 1 | 2022-01-28T15:20:58.000Z | 2022-01-28T15:20:58.000Z | examples/firesat/manager/main_manager.py | code-lab-org/nost-tools | 8b325eb8e7798b71e121de0d2bf6ffc09f7b0bc0 | [
"BSD-3-Clause"
] | null | null | null | examples/firesat/manager/main_manager.py | code-lab-org/nost-tools | 8b325eb8e7798b71e121de0d2bf6ffc09f7b0bc0 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
*This application demonstrates a manager synchronizing a test case between disaggregated applications*
This manager application leverages the manager template in the NOS-T tools library. The manager template is designed to publish information to specific topics, and any applications using the :obj:`ManagedApplication` object class will subscribe to these topics to know when to start and stop simulations, as well as the resolution and time scale factor of the simulation steps.
.. literalinclude:: /../../firesat/manager/main_manager.py
:lines: 12-
"""
import logging
from datetime import datetime, timedelta, timezone
from dotenv import dotenv_values
from nost_tools.application_utils import ConnectionConfig, ShutDownObserver
from nost_tools.manager import Manager
# client credentials should be saved to config.py file in manager_config_files directory
from manager_config_files.config import (
PREFIX,
SCALE,
UPDATE,
)
logging.basicConfig(level=logging.INFO)
# name guard used to ensure script only executes if it is run as the __main__
if __name__ == "__main__":
# Note that these are loaded from a .env file in current working directory
credentials = dotenv_values(".env")
HOST, PORT = credentials["SMCE_HOST"], int(credentials["SMCE_PORT"])
USERNAME, PASSWORD = credentials["SMCE_USERNAME"], credentials["SMCE_PASSWORD"]
# set the client credentials from the config file
config = ConnectionConfig(USERNAME, PASSWORD, HOST, PORT, True)
# create the manager application from the template in the tools library
manager = Manager()
# add a shutdown observer to shut down after a single test case
manager.simulator.add_observer(ShutDownObserver(manager))
# start up the manager on PREFIX from config file
manager.start_up(PREFIX, config, True)
# execute a test plan
manager.execute_test_plan(
datetime(2020, 1, 1, 7, 20, 0, tzinfo=timezone.utc), # scenario start datetime
datetime(2020, 1, 1, 10, 20, 0, tzinfo=timezone.utc), # scenario stop datetime
start_time=None, # optionally specify a wallclock start datetime for synchronization
time_step=timedelta(seconds=1), # wallclock time resolution for simulation
time_scale_factor=SCALE, # initial scale between wallclock and scenario clock (e.g. if SCALE = 60.0 then 1 wallclock second = 1 scenario minute)
time_scale_updates=UPDATE, # optionally schedule changes to the time_scale_factor at a specified scenario time
time_status_step=timedelta(seconds=1)
* SCALE, # optional duration between time status 'heartbeat' messages
time_status_init=datetime(
2020, 1, 1, 7, 21, 0, tzinfo=timezone.utc
), # optional initial scenario datetime to start publishing time status 'heartbeat' messages
command_lead=timedelta(
seconds=5
), # lead time before a scheduled update or stop command
)
| 51.3125 | 381 | 0.671133 | 398 | 3,284 | 5.429648 | 0.419598 | 0.01851 | 0.020824 | 0.019435 | 0.039796 | 0.025914 | 0 | 0 | 0 | 0 | 0 | 0.017442 | 0.266748 | 3,284 | 63 | 382 | 52.126984 | 0.879983 | 0.492692 | 0 | 0.057143 | 0 | 0 | 0.034675 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.057143 | 0.171429 | 0 | 0.171429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
d93c452f540d6485a4e8b95f0fec1e2f27b16d74 | 753 | py | Python | glyphrepository/comment/forms.py | BDAthlon/2017-Triple_Helix-1 | 3aa17be9a69af5d97e151a0be7ce7a1d8ad4f473 | [
"BSD-3-Clause"
] | 1 | 2018-04-15T14:33:58.000Z | 2018-04-15T14:33:58.000Z | glyphrepository/comment/forms.py | BDAthlon/2017-Triple_Helix-1 | 3aa17be9a69af5d97e151a0be7ce7a1d8ad4f473 | [
"BSD-3-Clause"
] | 5 | 2017-08-16T16:40:32.000Z | 2017-08-22T16:30:21.000Z | glyphrepository/comment/forms.py | BDAthlon/2017-Triple_Helix-1 | 3aa17be9a69af5d97e151a0be7ce7a1d8ad4f473 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Comment forms."""
from flask_wtf import Form
from wtforms import StringField, SelectField, TextAreaField
from wtforms.validators import DataRequired
class CommentForm(Form):
    """Form for adding a comment (with an optional rating) to a glyph."""

    name = StringField('Comment Title', validators=[DataRequired()])
    # Rating choices: a "-" placeholder followed by the scores 1..5.
    options = [('-1', '-')] + [(str(score), str(score)) for score in range(1, 6)]
    rating = SelectField('Rating', choices=options)
    comment = TextAreaField('Comment', validators=[])

    def __init__(self, *args, **kwargs):
        """Create instance."""
        super(CommentForm, self).__init__(*args, **kwargs)

    def validate(self):
        """Validate the form."""
        return super(CommentForm, self).validate()
| 26.892857 | 68 | 0.63745 | 83 | 753 | 5.674699 | 0.566265 | 0.046709 | 0.084926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006634 | 0.199203 | 753 | 27 | 69 | 27.888889 | 0.774461 | 0.130146 | 0 | 0 | 0 | 0 | 0.045741 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.214286 | 0 | 0.785714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d95084d6051673a22c55e7fcd3013695f902af6f | 437 | py | Python | settings/plugins_cfg.py | thitta/Someone.tw-Blog | b38669877f269006fcbeb5544ec3054acfef5128 | [
"Apache-2.0"
] | 3 | 2019-05-04T01:30:40.000Z | 2019-10-15T03:21:29.000Z | settings/plugins_cfg.py | thitta/Someone.tw-Blog | b38669877f269006fcbeb5544ec3054acfef5128 | [
"Apache-2.0"
] | 8 | 2020-02-12T00:09:35.000Z | 2022-02-10T08:40:10.000Z | settings/plugins_cfg.py | thitta/Someone.tw-Blog | b38669877f269006fcbeb5544ec3054acfef5128 | [
"Apache-2.0"
] | null | null | null | class Author:
enable = True
name = "Someone"
description = (
"台灣/台北/高雄人。現暫居美國,於伊利諾香檳大學(UIUC)就讀資管碩士。近年內的工作與興趣都是軟體工程。"
)
image_url = "https://storage.googleapis.com/blog-someone-tw-static/post/author.png"
url = "/post/1/about"
class Facebook:
    """Facebook integration settings."""
    enable = True
    # Facebook app id used by the integration.
    app_id = "344199519780600"
class Comment:
    """Feature toggle for the comment plugin."""
    enable = True
class GoogleTagManager:
    """Google Tag Manager settings."""
    enable = True
    # GTM container id.
    app_id = "GTM-T95PGM6"
| 19.863636 | 87 | 0.640732 | 51 | 437 | 5.431373 | 0.705882 | 0.144404 | 0.093863 | 0.108303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057229 | 0.240275 | 437 | 21 | 88 | 20.809524 | 0.777108 | 0 | 0 | 0.25 | 0 | 0.0625 | 0.384439 | 0.121281 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0.8125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d950aa076644a7239928f3f04ea00a67aa2acbf0 | 1,043 | py | Python | python/model/webrequest_tor.py | fulanull/public_pentest | eab51332ffe579f5cf31d306d9bd2a13fe01ed0d | [
"MIT"
] | null | null | null | python/model/webrequest_tor.py | fulanull/public_pentest | eab51332ffe579f5cf31d306d9bd2a13fe01ed0d | [
"MIT"
] | null | null | null | python/model/webrequest_tor.py | fulanull/public_pentest | eab51332ffe579f5cf31d306d9bd2a13fe01ed0d | [
"MIT"
] | null | null | null | import requests
import urllib3
adddress = "http://businesscorp.com.br"
def webrequest():
    """Issue GET and OPTIONS requests against the hard-coded URL and print the results."""
    response = requests.get(adddress)
    response.content  # touch the body so it is fetched and cached (kept from the original)
    print(response.content)
    print("Status code:", response.status_code)
    print("Headers:", response.headers)
    print("Server:", response.headers['Server'])
    # OPTIONS request to inspect which HTTP methods the server allows.
    response = requests.options("http://businesscorp.com.br")
    print(response.headers)
    print(response.headers['Allow'])
#https://urllib3.readthedocs.io/en/latest/user-guide.html
def urllibExample():
    """Fetch the hard-coded URL via a urllib3 PoolManager and print response metadata."""
    # See https://urllib3.readthedocs.io/en/latest/user-guide.html
    pool = urllib3.PoolManager()
    reply = pool.request('GET', adddress)
    print("Response: ", type(reply), "Content: ", reply)
    print("r.data: ", type(reply.data), "Content: ", reply.data)
    print("r.status(): ", reply.status)
    print("r.headers(): ", reply.headers)
    print("Server: ", reply.headers['Server'])
    print("r.headers(): ", reply.headers)
#r.
#print(site.content)
#print("Status code:", site.status_code)
#print("Headers:", site.headers)
#print("Server:", site.headers['Server'])
#urllib3.
#webrequest()
urllibExample() | 24.255814 | 57 | 0.627996 | 125 | 1,043 | 5.224 | 0.28 | 0.101072 | 0.073507 | 0.064319 | 0.358346 | 0.294028 | 0.294028 | 0.294028 | 0.294028 | 0.294028 | 0 | 0.004673 | 0.179291 | 1,043 | 43 | 58 | 24.255814 | 0.758178 | 0.198466 | 0 | 0.086957 | 0 | 0 | 0.218335 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0 | 0.086957 | 0 | 0.173913 | 0.521739 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
d951c0cbb81063ca9c91afc7ee273fca6235590f | 15,453 | py | Python | src/api/comment.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 216 | 2015-01-05T12:48:10.000Z | 2022-03-08T00:12:23.000Z | src/api/comment.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 55 | 2015-02-28T12:10:26.000Z | 2020-11-18T17:45:16.000Z | src/api/comment.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 34 | 2015-05-02T15:15:10.000Z | 2020-06-15T19:20:37.000Z | # -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api
class CommentError(api.APIError):
    """Base class for all errors raised by this module."""
    pass
class InvalidCommentId(CommentError):
    """Raised when an invalid comment id is used."""

    def __init__(self, comment_id):
        """Record the offending id and build the error message."""
        message = "Invalid comment id: %d" % comment_id
        super(InvalidCommentId, self).__init__(message)
        # Keep the id around so callers can inspect it.
        self.comment_id = comment_id
class InvalidCommentIds(CommentError):
    """Raised by fetchMany() when invalid comment ids are used."""

    def __init__(self, comment_ids):
        """Record the offending ids and build the error message."""
        formatted = ", ".join(str(comment_id) for comment_id in comment_ids)
        super(InvalidCommentIds, self).__init__(
            "Invalid comment ids: %s" % formatted)
        # Keep the ids around so callers can inspect them.
        self.comment_ids = comment_ids
class InvalidLocation(CommentError):
    """Raised when attempting to specify an invalid comment location"""
    # No extra state; the docstring is the whole contract.
    pass
class Comment(api.APIObject):
    """A review comment, either an issue or a note.

    All state is read through the backing implementation object
    (self._impl); this class is a thin read-only accessor layer."""

    # Possible values of the |type| property.
    TYPE_VALUES = frozenset(["issue", "note"])

    @property
    def id(self):
        """The comment's unique id"""
        return self._impl.id

    @property
    def type(self):
        """The comment's type

        The type is one of "issue" and "note"."""
        # Abstract: overridden by Issue and Note; returns None here.
        pass

    @property
    def is_draft(self):
        """True if the comment is not yet published

        Unpublished comments are not displayed to other users."""
        return self._impl.is_draft

    @property
    def review(self):
        """The review to which the comment belongs

        The review is returned as an api.review.Review object."""
        return self._impl.getReview(self.critic)

    @property
    def author(self):
        """The comment's author

        The author is returned as an api.user.User object."""
        return self._impl.getAuthor(self.critic)

    @property
    def timestamp(self):
        """The comment's timestamp

        The return value is a datetime.datetime object."""
        return self._impl.timestamp

    @property
    def location(self):
        """The location of the comment, or None

        If the comment was made against lines in a commit message, the return
        value is a api.comment.CommitMessageLocation object. If the comment
        was made against lines in a file version, the return value is
        api.comment.FileVersionLocation object. Otherwise, the return value
        is None."""
        return self._impl.getLocation(self.critic)

    @property
    def text(self):
        """The comment's text"""
        return self._impl.text

    @property
    def replies(self):
        """The replies to the comment

        The replies are returned as a list of api.reply.Reply objects."""
        return self._impl.getReplies(self.critic)

    class DraftChanges(object):
        """Draft changes to the comment"""

        def __init__(self, author, is_draft, reply, new_type):
            self.__author = author
            self.__is_draft = is_draft
            self.__reply = reply
            self.__new_type = new_type

        @property
        def author(self):
            """The author of these draft changes

            The author is returned as an api.user.User object."""
            return self.__author

        @property
        def is_draft(self):
            """True if the comment itself is a draft (not published)"""
            return self.__is_draft

        @property
        def reply(self):
            """The current unpublished reply

            The reply is returned as an api.reply.Reply object, or None if
            there is no current unpublished reply."""
            return self.__reply

        @property
        def new_type(self):
            """The new type of an unpublished type change

            The type is returned as a string. Comment.TYPE_VALUES defines the
            set of possible return values."""
            return self.__new_type

    @property
    def draft_changes(self):
        """The comment's current draft changes

        The draft changes are returned as a Comment.DraftChanges object, or
        None if the current user has no unpublished changes to this comment.
        If the comment is currently an issue, or the current user has an
        unpublished change of the comment's type to issue, the returned
        object will be an Issue.DraftChanges instead."""
        return self._impl.getDraftChanges(self.critic)
class Issue(Comment):
    """A comment of type "issue", which carries a state."""

    # Possible values of the |state| property.
    STATE_VALUES = frozenset(["open", "addressed", "resolved"])

    @property
    def type(self):
        return "issue"

    @property
    def state(self):
        """The issue's state

        The state is one of the strings "open", "addressed" or "resolved"."""
        return self._impl.state

    @property
    def addressed_by(self):
        """The commit that addressed the issue, or None

        The value is an api.commit.Commit object, or None if the issue's
        state is not "addressed"."""
        return self._impl.getAddressedBy(self.critic)

    @property
    def resolved_by(self):
        """The user that resolved the issue, or None

        The value is an api.user.User object, or None if the issue's state is
        not "resolved"."""
        return self._impl.getResolvedBy(self.critic)

    class DraftChanges(Comment.DraftChanges):
        """Draft changes to the issue"""

        def __init__(self, author, is_draft, reply, new_type, new_state,
                     new_location):
            super(Issue.DraftChanges, self).__init__(
                author, is_draft, reply, new_type)
            self.__new_state = new_state
            self.__new_location = new_location

        @property
        def new_state(self):
            """The issue's new state

            The new state is returned as a string, or None if the current
            user has not resolved or reopened the issue. Issue.STATE_VALUES
            defines the set of possible return values."""
            return self.__new_state

        @property
        def new_location(self):
            """The issue's new location

            The new location is returned as a FileVersionLocation objects, or
            None if the issue has not been reopened, or if it was manually
            resolved rather than addressed and did not need to be relocated
            when being reopened.

            Since only issues in file version locations can be addressed,
            that is the only possible type of new location."""
            return self.__new_location
class Note(Comment):
    """A comment of type "note"."""

    @property
    def type(self):
        return "note"
class Location(api.APIObject):
    """Base class for comment locations (a one-based, inclusive line range)."""

    # Possible values of the |type| property.
    TYPE_VALUES = frozenset(["general", "commit-message", "file-version"])

    def __len__(self):
        """Return the length of the location, in lines"""
        # Inclusive range, hence the +1.
        return (self.last_line - self.first_line) + 1

    @property
    def type(self):
        """The location's type

        The type is one of "commit-message" and "file-version"."""
        # Abstract: overridden by subclasses; returns None here.
        pass

    @property
    def first_line(self):
        """The line number of the first commented line

        Note that line numbers are one-based."""
        return self._impl.first_line

    @property
    def last_line(self):
        """The line number of the last commented line

        Note that line numbers are one-based."""
        return self._impl.last_line
class CommitMessageLocation(Location):
    """Location of a comment made against lines in a commit's message."""

    @property
    def type(self):
        return "commit-message"

    @property
    def commit(self):
        """The commit whose message was commented"""
        return self._impl.getCommit(self.critic)

    @staticmethod
    def make(critic, first_line, last_line, commit):
        # Factory: construction is delegated to the implementation layer.
        return api.impl.comment.makeCommitMessageLocation(
            critic, first_line, last_line, commit)
class FileVersionLocation(Location):
    """Location of a comment made against lines in a version of a file."""

    @property
    def type(self):
        return "file-version"

    @property
    def changeset(self):
        """The changeset containing the comment

        The changeset is returned as an api.changeset.Changeset object.

        If the comment was created while looking at a diff, this will
        initially be that changeset. As additional commits are added to the
        review, this changeset may be "extended" to contain those added
        commits.

        This is the ideal changeset to use to display the comment, unless it
        is an issue that has been addressed, in which case a better changeset
        would be the diff of the commit returned by Issue.addressed_by.

        If the user did not make the comment while looking at a diff but
        rather while looking at a single version of the file, then this
        attribute returns None.

        If this is an object returned by translateTo() called with a
        changeset argument, then this will be that changeset."""
        return self._impl.getChangeset(self.critic)

    @property
    def side(self):
        """The commented side ("old" or "new") of the changeset

        If the user did not make the comment while looking at a changeset
        (i.e. a diff) but rather while looking at a single version of the
        file, then this attribute returns None."""
        return self._impl.side

    @property
    def commit(self):
        """The commit whose version of the file this location references

        The commit is returned as an api.commit.Commit object.

        If this is an object returned by translateTo() called with a commit
        argument, then this is the commit that was given as an argument to
        it. If this is the primary location of the comment (returned from
        Comment.location) then this is the commit whose version of the file
        the comment was originally made against, or None if the comment was
        made while looking at a diff."""
        return self._impl.getCommit(self.critic)

    @property
    def file(self):
        """The commented file"""
        return self._impl.getFile(self.critic)

    @property
    def is_translated(self):
        """True if this is a location returned by |translateTo()|"""
        return self._impl.is_translated

    def translateTo(self, changeset=None, commit=None):
        """Return a translated file version location, or None

        The location is translated to the version of the file in a certain
        commit. If |changeset| is not None, that commit is the changeset's
        |to_commit|, unless the comment is not present there, and otherwise
        the changeset's |from_commit|. If |commit| is not None, that's the
        commit.

        If the comment is not present in the commit, None is returned.

        The returned object's |is_translated| will be True.

        If the |changeset| argument is not None, then the returned object's
        |changeset| will be that changeset, and its |side| will reflect which
        of its |from_commit| and |to_commit| ended up being used. The
        returned object's |commit| will be None.

        If the |commit| argument is not None, the returned object's |commit|
        will be that commit, and its |changeset| and |side| will be None."""
        # Exactly one of |changeset| and |commit| must be provided.
        assert changeset is None \
            or isinstance(changeset, api.changeset.Changeset)
        assert commit is None or isinstance(commit, api.commit.Commit)
        assert (changeset is None) != (commit is None)
        return self._impl.translateTo(self.critic, changeset, commit)

    @staticmethod
    def make(critic, first_line, last_line, file, changeset=None, side=None,
             commit=None):
        # Factory: construction is delegated to the implementation layer.
        # File is required.
        assert isinstance(file, api.file.File)
        # Changeset and side go together.
        assert (changeset is None) == (side is None)
        assert (changeset is None) \
            or isinstance(changeset, api.changeset.Changeset)
        # Commit conflicts with changeset, but one is required.
        assert (commit is None) != (changeset is None)
        assert (commit is None) or isinstance(commit, api.commit.Commit)
        return api.impl.comment.makeFileVersionLocation(
            critic, first_line, last_line, file, changeset, side, commit)
def fetch(critic, comment_id):
    """Fetch the Comment object with the given id"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert isinstance(comment_id, int)
    # Delegates to the implementation layer.
    return api.impl.comment.fetch(critic, comment_id)
def fetchMany(critic, comment_ids):
    """Fetch multiple Comment objects with the given ids"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    # Materialize the ids so the argument may be any iterable.
    comment_ids = list(comment_ids)
    assert all(isinstance(comment_id, int) for comment_id in comment_ids)
    # Delegates to the implementation layer.
    return api.impl.comment.fetchMany(critic, comment_ids)
def fetchAll(critic, review=None, author=None, comment_type=None, state=None,
             location_type=None, changeset=None, commit=None):
    """Fetch all Comment objects

    If |review| is not None, only comments in the specified review are
    returned.

    If |author| is not None, only comments created by the specified user are
    returned.

    If |comment_type| is not None, only comments of the specified type are
    returned.

    If |state| is not None, only issues in the specified state are returned.
    This implies type="issue".

    If |location_type| is not None, only issues in the specified type of
    location are returned.

    If |changeset| is not None, only comments against file versions that are
    referenced by the specified changeset are returned. Must be combined with
    |review|, and can not be combined with |commit|.

    If |commit| is not None, only comments against the commit's message or
    file versions referenced by the commit are returned. Must be combined
    with |review|, and can not be combined with |changeset|."""
    import api.impl
    # Validate argument types and the combination rules described above.
    assert isinstance(critic, api.critic.Critic)
    assert review is None or isinstance(review, api.review.Review)
    assert author is None or isinstance(author, api.user.User)
    assert comment_type is None or comment_type in Comment.TYPE_VALUES
    assert state is None or state in Issue.STATE_VALUES
    assert state is None or comment_type in (None, "issue")
    assert location_type is None or location_type in Location.TYPE_VALUES
    assert changeset is None or isinstance(changeset, api.changeset.Changeset)
    assert changeset is None or review is not None
    assert commit is None or isinstance(commit, api.commit.Commit)
    assert commit is None or review is not None
    assert changeset is None or commit is None
    return api.impl.comment.fetchAll(critic, review, author, comment_type,
                                     state, location_type, changeset, commit)
| 36.020979 | 80 | 0.649583 | 2,043 | 15,453 | 4.832599 | 0.132159 | 0.035653 | 0.029778 | 0.014585 | 0.353489 | 0.267902 | 0.231338 | 0.187886 | 0.167831 | 0.135015 | 0 | 0.000894 | 0.276386 | 15,453 | 428 | 81 | 36.10514 | 0.882043 | 0.46237 | 0 | 0.336957 | 0 | 0 | 0.021103 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 1 | 0.233696 | false | 0.021739 | 0.021739 | 0.027174 | 0.538043 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d95b1747164effd126c842c9873588e37601ded6 | 4,401 | py | Python | tools/misc/FixTestCasePlacement.py | v-weiguo/test262 | a66c978c5f1faafebf90d1bb13774b7f0643e2c9 | [
"BSD-3-Clause"
] | null | null | null | tools/misc/FixTestCasePlacement.py | v-weiguo/test262 | a66c978c5f1faafebf90d1bb13774b7f0643e2c9 | [
"BSD-3-Clause"
] | 1 | 2020-07-28T04:46:04.000Z | 2020-07-28T04:46:04.000Z | exhibitor/node_modules/6to5/vendor/test262/tools/misc/FixTestCasePlacement.py | scharissis/liberator | a5a65ec3254b638078470b72aadef928c09e8fdc | [
"MIT"
] | null | null | null | # Copyright (c) 2012 Ecma International. All rights reserved.
# Ecma International makes this code available under the terms and conditions set
# forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
# "Use Terms"). Any redistribution of this code must retain the above
# copyright and this notice and otherwise comply with the Use Terms.
#--Imports---------------------------------------------------------------------
import argparse
import os
import sys
import re
#--Globals---------------------------------------------------------------------
PRE_PATH = "TestCases/"
#------------------------------------------------------------------------------
def getAllJSFiles(dirName):
    """Recursively collect the paths of all *.js files under dirName.

    dirName may itself be a .js file, in which case a single-element list is
    returned. Anything that is neither a .js file nor a directory contributes
    nothing to the result.
    """
    if os.path.isfile(dirName) and dirName.endswith(".js"):
        return [dirName]
    collected = []
    if os.path.isdir(dirName):
        for entry in os.listdir(dirName):
            collected += getAllJSFiles(os.path.join(dirName, entry))
    return collected
#------------------------------------------------------------------------------
def handleFile(filePath, partialPath):
    """Rewrite the test case at filePath in place, normalizing its metadata.

    Leading blank lines before the first "test: function testcase()" line are
    dropped. The first "path: ..." line is replaced with PRE_PATH plus the
    path of filePath relative to partialPath (with forward slashes), and the
    first "id: ..." line with the file's base name minus its ".js" extension.
    The first "description: ..." line is rewritten stripped of surrounding
    whitespace. Each replacement happens at most once (guarded by the *Hit
    flags).
    """
    # NOTE(review): 'global' is unnecessary for read-only access to PRE_PATH.
    global PRE_PATH
    tempPath = filePath.replace(partialPath + os.path.sep, "", 1)
    tempPath = tempPath.replace(os.path.sep, "/")
    # Base file name without the trailing ".js".
    tempId = tempPath.rsplit("/", 1)[1][:-3]
    with open(filePath, "r") as f:
        origLines = f.readlines()
    with open(filePath, "w") as f:
        pathHit = False
        idHit = False
        testHit = False
        descriptHit = False
        for line in origLines:
            if (not testHit) and re.match("^$", line)!=None:
                #Throw away empty lines until we hit the first test function
                continue
            elif (not testHit) and re.search("test\s*:\s*function\s+testcase\(\)", line)!=None:
                testHit = True
                line = line.rstrip() + os.linesep
            elif (not pathHit) and re.search("path\s*:\s*\"", line)!=None:
                pathHit = True
                line = "path: \"%s\",%s" % (PRE_PATH + tempPath, os.linesep)
            elif (not idHit) and re.search("id\s*:\s*\"", line)!=None:
                idHit = True
                line = "id: \"%s\",%s" % (tempId, os.linesep)
            elif (not descriptHit) and re.search("description\s*:\s*\"", line)!=None:
                descriptHit = True
                line = line.strip() + os.linesep
            f.write(line)
def getPartialPath(tc):
    """Derive the chapter-based relative path for a test case file name.

    The base name (minus extension) must contain "-"; otherwise the process
    exits. "X-..." without a "." is normalized to "X.0-...". The dotted
    section prefix (e.g. "10.4.3") is expanded into nested directories
    ("10/10.4/10.4.3/..."), prefixed with "chapter"/"chapter0" for
    single/double digit chapters, ending in "<tc>.js".

    NOTE(review): Python 2 only — uses a print statement and xrange.
    """
    tc = os.path.splitext(os.path.basename(tc))[0]
    if not ("-" in tc):
        print "'-' not detected in '%s'; cannot continue!" % tc
        sys.exit(1)
    elif not ("." in tc):
        tc = tc.replace("-", ".0-", 1)
    #Generate the partial path of the test case
    tempList = tc.split("-",1)[0].split(".")
    partialPath = ""
    for i in xrange(1, len(tempList)+1):
        partialPath += ".".join(tempList[0:i]) + os.path.sep
    partialPath = os.path.join(partialPath, tc + ".js")
    # "1/..." -> "chapter01/...", "12/..." -> "chapter12/..."
    if partialPath.index(os.path.sep)==1:
        partialPath = "chapter0" + partialPath
    elif partialPath.index(os.path.sep)==2:
        partialPath = "chapter" + partialPath
    return partialPath
#--Main------------------------------------------------------------------------
if __name__=="__main__":
__parser = argparse.ArgumentParser(description='Tool used to fix the id and path properties of test case objects')
__parser.add_argument('path', action='store',
help='Full path to test cases. E.g., C:\repos\test262-msft\test\suite\ietestcenter')
__parser.add_argument('add', action='store',
help='Command used to add a test file to source control')
__parser.add_argument('del', action='store',
help='Command used to remove a test file from source control')
__parser.add_argument('tc', action='store',
help='test case to move')
ARGS = __parser.parse_args()
if not os.path.exists(ARGS.path):
print "Cannot fix tests in '%s' when it doesn't exist!" % ARGS.path
sys.exit(1)
elif not os.path.isfile(ARGS.tc):
print "Cannot move '%s' when it doesn't exist!" % ARGS.tc
partialPath = getPartialPath(ARGS.tc)
print "Done!", partialPath
| 39.648649 | 118 | 0.547148 | 519 | 4,401 | 4.585742 | 0.354528 | 0.032773 | 0.018908 | 0.020168 | 0.10084 | 0.042017 | 0.018487 | 0 | 0 | 0 | 0 | 0.00812 | 0.24449 | 4,401 | 110 | 119 | 40.009091 | 0.707669 | 0.210407 | 0 | 0.025974 | 0 | 0.025974 | 0.167742 | 0.023167 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.051948 | null | null | 0.051948 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d95d28ed8153e7a22c49ae9c8faab051f1c3f07a | 2,308 | py | Python | lib/_types.py | GaLaXy102/Vacationing | e476f1047deeca8f68897a497716319afab3e7f0 | [
"MIT"
] | null | null | null | lib/_types.py | GaLaXy102/Vacationing | e476f1047deeca8f68897a497716319afab3e7f0 | [
"MIT"
] | null | null | null | lib/_types.py | GaLaXy102/Vacationing | e476f1047deeca8f68897a497716319afab3e7f0 | [
"MIT"
] | null | null | null | from enum import Enum, IntEnum
from dataclasses import dataclass
from typing import List, Dict, Tuple, Set
class Importance(IntEnum):
    """Ordinal importance levels, from HIGH (5) down to NONE (0)."""
    HIGH = 5
    MHI = 4   # presumably "medium-high" — TODO confirm
    MID = 3
    MLO = 2   # presumably "medium-low" — TODO confirm
    LOW = 1
    NONE = 0
class Equipment(Enum):
    """Gear an itinerary step may require."""
    TREKKING_SHOES = "Trekking Shoes"
    CAMERA = "Camera"
    WATER_SHOES = "Water-proof Shoes"
    SWIMMING = "Swimming Gear"

    def __str__(self):
        # Rendered as a bullet-style line, e.g. "+ Camera".
        return "+ {}".format(self.value)
@dataclass
class ItineraryStep:
    """Base class for itinerary entries: duration, price and required gear."""
    # Duration in hours (Itinerary.__str__ sums durations and labels them "h").
    duration: float
    # May be None (Ride passes None; Itinerary.__str__ treats None as 0).
    price: float
    equipment: Set[Equipment]
@dataclass
class Attraction(ItineraryStep):
    """An attraction: a named itinerary step with an importance rating.

    Hashing and equality are based on the name alone."""
    name: str
    importance: Importance

    def __init__(self, name: str, duration: float, equipment: Set[Equipment], importance: Importance,
                 price: float = None):
        # Delegate the shared step fields to the base class.
        super().__init__(duration, price, equipment)
        self.name = name
        self.importance = importance

    def __hash__(self) -> int:
        return hash(self.name)

    def __eq__(self, other) -> bool:
        try:
            return self.name == other.name
        except AttributeError:
            # Anything without a .name attribute can never be equal.
            return False

    def __str__(self):
        return "> A {}".format(self.name)
@dataclass
class Ride(ItineraryStep):
    """A transfer between attractions: takes time, has no price, needs no gear."""

    def __init__(self, duration: float):
        # Bug fix: the original passed {} — an empty *dict* — for the
        # Set[Equipment] field; use an actual empty set. Iteration behaves the
        # same (both are empty), but the field now matches its declared type.
        super().__init__(duration, None, set())

    def __str__(self):
        return "> R {:.2f} h".format(self.duration)
@dataclass
class Dataset:
    """The attractions of an area, their pairwise distances, and a base."""
    attractions: frozenset[Attraction]
    # Keyed by a pair of strings — presumably attraction names; unit of the
    # float value is not stated here. TODO confirm against callers.
    distances: Dict[Tuple[str, str], float]
    base: Attraction
@dataclass
class Region:
    """A dataset together with a manual grouping of its attractions."""
    dataset: Dataset
    clusters: Set[frozenset[Attraction]]  # By now, clusters are selected by hand
@dataclass
class Itinerary:
    """An ordered sequence of itinerary steps; str() renders a summary."""
    steps: List[ItineraryStep]

    def __iter__(self):
        return iter(self.steps)

    def __str__(self):
        # Union of the equipment required across every step.
        required = {item for step in self for item in step.equipment}
        return ("Steps:\n"
                "{}\n"
                "\n"
                "Duration : {:5.2f} h\n"
                "Cost : {:5.2f} €\n"
                "Equipment:\n"
                "{}").format(
            "\n".join(str(step) for step in self),
            sum(step.duration for step in self),
            sum(step.price or 0 for step in self),
            "\n".join(str(item) for item in required)
        )
| 22.407767 | 101 | 0.579289 | 267 | 2,308 | 4.838951 | 0.325843 | 0.065015 | 0.03096 | 0.037152 | 0.040248 | 0.040248 | 0.02322 | 0 | 0 | 0 | 0 | 0.007514 | 0.308059 | 2,308 | 102 | 102 | 22.627451 | 0.800877 | 0.016031 | 0 | 0.12987 | 0 | 0 | 0.065227 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.116883 | false | 0 | 0.090909 | 0.064935 | 0.688312 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d9618cffcbe6a16a37c4558bb3fc6f6916594eba | 7,713 | py | Python | wikipedia_workload/de/uni-stuttgart/iaas/experiments_results/summaryFilePlotAnalysis.py | sgomezsaez/SCARF-Evaluation | a118039ddd62798ca93b78cb968d6ee8b15ec6f2 | [
"Apache-2.0"
] | null | null | null | wikipedia_workload/de/uni-stuttgart/iaas/experiments_results/summaryFilePlotAnalysis.py | sgomezsaez/SCARF-Evaluation | a118039ddd62798ca93b78cb968d6ee8b15ec6f2 | [
"Apache-2.0"
] | null | null | null | wikipedia_workload/de/uni-stuttgart/iaas/experiments_results/summaryFilePlotAnalysis.py | sgomezsaez/SCARF-Evaluation | a118039ddd62798ca93b78cb968d6ee8b15ec6f2 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
import utils as ut
import constants as cs
import matplotlib.pyplot as plt
import numpy as np
import datetime
import calendar
import time
import math
# Create list of files to analyze
def createFileList(results_path, scenario_list, scenario_file):
fileList = []
for i in scenario_list:
fileList.append(results_path + i[0] + '/' + scenario_file)
return fileList
def line_plot_summary_hourly_analysis(file_list, scenario_list, plot_parameter, plot_output_file, plot_title):
    """Plot plot_parameter per hourly interval as one line per scenario.

    Reads each CSV in file_list with pandas, plots the plot_parameter column
    against its row index (1-based), and saves the figure as a PDF to
    plot_output_file. file_list and scenario_list must be index-aligned.

    NOTE(review): Python 2 only — uses print statements.
    """
    scenario_count = 0
    # One color per scenario, assigned in order; supports up to 8 scenarios.
    scenario_colors = ['lightblue', 'lightgreen', 'lightgrey', 'cyan', 'magenta', 'darkgrey', 'khaki', 'limegreen']
    # Creating Plot
    fig1 = plt.figure(figsize=(7, 6))
    plt.suptitle(plot_title, fontsize=20)
    ax1 = plt.subplot(111)
    for i in file_list:
        scenario = scenario_list[scenario_count][0]
        df = pd.read_csv(i, delimiter=',')
        x_axis_data = range(1, len(df) + 1)
        y_axis_data = df[plot_parameter].tolist()
        print y_axis_data
        l1 = ax1.plot(x_axis_data, y_axis_data, color=scenario_colors[scenario_count], label=scenario, linewidth=1.5)
        scenario_count += 1
    ax1.grid(True)
    handles, labels = ax1.get_legend_handles_labels()
    # ut.flip reorders legend entries for the two-row legend layout.
    plt.legend(ut.flip(handles, 2), ut.flip(labels, 2), bbox_to_anchor=(0.5, 1.12), loc=9, ncol=4, prop={'size':15})
    ax1.set_xlabel('Hourly Interval', fontsize=20)
    ax1.set_ylabel(plot_parameter, fontsize=20)
    fig1.savefig(plot_output_file, format='pdf')
    print "Saving to %s" %(plot_output_file)
    #plt.show()
def bar_plot_average_hourly_analysis(file_list, scenario_list, plot_parameter, plot_output_file, plot_title):
    """Aggregate one hourly-summary metric per scenario and draw a bar chart.

    Depending on *plot_parameter*, the per-scenario value is the column mean
    (successful requests / latency), the column sum (bytes, duration, HTTP
    500s) or a derived error rate.  Several hard-coded comparisons between
    fixed scenario indices (T1..T8) are printed for the reported analyses.

    :param list file_list: CSV summary paths, one per scenario (parallel to
        *scenario_list*)
    :param list scenario_list: tuples whose first element is the scenario label
    :param str plot_parameter: which cs.HOUR_SUMMARY_* column to aggregate
    :param str plot_output_file: destination PDF path (saving is currently
        disabled at the bottom of this function)
    :param str plot_title: figure title
    """
    # Creating Plot
    fig1 = plt.figure(figsize=(7, 6))
    plt.suptitle(plot_title, fontsize=20)
    ax1 = fig1.add_subplot(111)
    # X axis: one bar per scenario label.
    x_axis_list = [scenario[0] for scenario in scenario_list]
    y_axis_list = []
    for summary_file in file_list:
        df = pd.read_csv(summary_file, delimiter=',')
        # NOTE: y_axis_data is only bound for the parameters handled below;
        # an unsupported parameter raises NameError (unchanged behaviour).
        if plot_parameter in (cs.HOUR_SUMMARY_SUM_REQS_SUCCESS, cs.HOUR_SUMMARY_AVG_LATENCY):
            # describe().iloc[1] is the column mean.
            y_axis_data = df[plot_parameter].describe().iloc[1]
            if plot_parameter == cs.HOUR_SUMMARY_AVG_LATENCY:
                # Divided by 1000 to match the 'Average Latency (s)' axis
                # label below — presumably ms -> s.
                y_axis_data = y_axis_data / 1000
        if plot_parameter in (cs.HOUR_SUMMARY_SUM_BYTES_TRANSFERRED, cs.HOUR_SUMMARY_TIMESTAMP_DURATION, cs.HOUR_SUMMARY_SUM_RESP_500):
            y_axis_data = df[plot_parameter].sum()
            if plot_parameter == cs.HOUR_SUMMARY_TIMESTAMP_DURATION:
                print(df[plot_parameter].tolist())
                y_axis_data = df[plot_parameter].sum() / 3600  # seconds -> hours
        if plot_parameter == cs.HOUR_SUMMARY_ERROR_RATE:
            y_axis_data = (1 - (float(df[cs.HOUR_SUMMARY_SUM_REQS_SUCCESS].sum()) / float(df[cs.HOUR_SUMMARY_SUM_ALL_REQS].sum())))
        y_axis_list.append(y_axis_data)
    # Calculating Percentages (fixed scenario indices from the experiment
    # set-up; assumes at least 8 scenarios in file_list).
    if plot_parameter == cs.HOUR_SUMMARY_ERROR_RATE:
        min_error_rate = min(y_axis_list)
        percent_diff_t4 = (1 - (min_error_rate / y_axis_list[3])) * 100
        percent_diff_t5 = (1 - (min_error_rate / y_axis_list[4])) * 100
        print(min_error_rate)
        print(y_axis_list)
        print(percent_diff_t4)
        print(percent_diff_t5)
    if plot_parameter == cs.HOUR_SUMMARY_AVG_LATENCY:
        print(y_axis_list)
        # Average when distributing the stack in two VMs and one VM
        avg_t16 = (y_axis_list[0] + y_axis_list[5]) / 2  # avg T1,T6
        avg_t27 = (y_axis_list[1] + y_axis_list[6]) / 2  # avg T2,T7
        print("Analysis between full stack vm and separate vms")
        print(1 - (avg_t27 / avg_t16))
        # Analysis between separate vms and database as a service
        print("Analysis separate vms and database as a service")
        print(1 - (y_axis_list[0] / y_axis_list[2]))  # T1 vs T3
        # Analysis between container (azure) and separate vms and DBaaS
        print("Analysis between container (azure) and separate vms and DBaaS")
        print(1 - (y_axis_list[5] / y_axis_list[7]))  # T6 vs T8
        print("Provider Comparison")
        print("VMs distributed")
        print(1 - (y_axis_list[0] / y_axis_list[5]))  # T1 vs T6
        print("VMs full stack")
        print(1 - (y_axis_list[1] / y_axis_list[6]))  # T2 vs T7
    if plot_parameter == cs.HOUR_SUMMARY_SUM_BYTES_TRANSFERRED:
        print(y_axis_list)
        gb_list = [value / math.pow(2, 30) for value in y_axis_list]  # bytes -> GiB
        print(gb_list)
    x_axis_num = np.arange(1, len(x_axis_list) + 1)
    ax1.bar(x_axis_num, y_axis_list, align='center', color='grey', edgecolor='black', alpha=0.85)
    ax1.set_xticks(x_axis_num)
    ax1.set_xticklabels(x_axis_list, fontsize=20)
    # Axis label depends on what was aggregated.
    if plot_parameter == cs.HOUR_SUMMARY_TIMESTAMP_DURATION:
        ax1.set_ylabel('Experiment Duration (h)', fontsize=20)
    elif plot_parameter == cs.HOUR_SUMMARY_AVG_LATENCY:
        ax1.set_ylabel('Average Latency (s)', fontsize=20)
    elif plot_parameter == cs.HOUR_SUMMARY_ERROR_RATE:
        ax1.set_ylabel('Average Error Rate', fontsize=20)
    elif plot_parameter == cs.HOUR_SUMMARY_SUM_BYTES_TRANSFERRED:
        ax1.set_ylabel('Total Bytes Transferred', fontsize=20)
    else:
        ax1.set_ylabel(plot_parameter, fontsize=20)
    ax1.grid(True)
    # Saving deliberately disabled in the original:
    #fig1.savefig(plot_output_file, format='pdf')
    #print("Saving to %s" % (plot_output_file))
    #plt.show()
# --- Script body: build per-scenario summary-file paths, then render the
# --- bar-chart analyses.  Several plots are intentionally commented out.
file_list = createFileList(cs.EXP_RESULTS_DATA_PATH, cs.EXP_RESULTS_DATA_SCENARIOS, cs.SUMMARY_HOURLY_RESULTS_OUTPUT_FILE_NAME + '.csv')
print file_list
#line_plot_summary_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_SUM_REQS_SUCCESS,
#                                  cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_REQ_SUCCESS_FILE, '')
#bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_SUM_REQS_SUCCESS,
#                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_REQ_SUCCESS_FILE, cs.PLOT_RESULTS_TITLE_REQ_SUCCESS)
# Average latency per scenario.
bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_AVG_LATENCY,
                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_LATENCY_FILE, cs.PLOT_RESULTS_TITLE_LATENCY)
# Total experiment duration per scenario.
bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_TIMESTAMP_DURATION,
                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_EXP_DURATION_FILE, cs.PLOT_RESULTS_TITLE_DURATION)
# Total bytes transferred per scenario.
bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_SUM_BYTES_TRANSFERRED,
                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_BYTES_TRANSFERRED_FILE, cs.PLOT_RESULTS_TITLE_BYTES_TRANSFERRED)
#bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_SUM_RESP_500,
#                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_SERVER_FAILURE_FILE, cs.PLOT_RESULTS_TITLE_REQ_SERVER_FAILURE)
# Derived transaction error rate per scenario.
bar_plot_average_hourly_analysis(file_list, cs.EXP_RESULTS_DATA_SCENARIOS_LABELS, cs.HOUR_SUMMARY_ERROR_RATE,
                                 cs.PLOT_RESULTS_DATA_PATH + cs.PLOT_RESULTS_TRANSACTION_ERROR_RATE_FILE, cs.PLOT_RESULTS_TITLE_TRANSACTION_ERROR_RATE)
| 38.758794 | 175 | 0.696227 | 1,129 | 7,713 | 4.360496 | 0.181577 | 0.032501 | 0.063376 | 0.057892 | 0.600244 | 0.550274 | 0.505992 | 0.424335 | 0.32013 | 0.270567 | 0 | 0.026651 | 0.207053 | 7,713 | 198 | 176 | 38.954545 | 0.778286 | 0.147802 | 0 | 0.239669 | 0 | 0 | 0.063196 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.07438 | null | null | 0.181818 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d962c2acb32dee12eda10a25944b61b813dcd7d7 | 411 | py | Python | market/migrations/0005_fiat_details_account_name.py | vuyelwadr/crypto_marketplace | cec2e61d5f7b409801b9f5751b75c12df0def750 | [
"MIT"
] | null | null | null | market/migrations/0005_fiat_details_account_name.py | vuyelwadr/crypto_marketplace | cec2e61d5f7b409801b9f5751b75c12df0def750 | [
"MIT"
] | null | null | null | market/migrations/0005_fiat_details_account_name.py | vuyelwadr/crypto_marketplace | cec2e61d5f7b409801b9f5751b75c12df0def750 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2021-10-30 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the ``account_name`` CharField
    # to the ``fiat_details`` model.

    dependencies = [
        ('market', '0004_auto_20211029_1856'),
    ]

    operations = [
        migrations.AddField(
            model_name='fiat_details',
            name='account_name',
            # default='None' is the *string* 'None', not Python None —
            # presumably a sentinel for pre-existing rows; verify intent.
            field=models.CharField(default='None', max_length=50),
        ),
    ]
| 21.631579 | 66 | 0.610706 | 46 | 411 | 5.304348 | 0.826087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.110368 | 0.272506 | 411 | 18 | 67 | 22.833333 | 0.705686 | 0.109489 | 0 | 0 | 1 | 0 | 0.156593 | 0.063187 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d965de8ac0f83fe2951978c50cee1e3f5ee3ffb3 | 391 | py | Python | covid_19_review/format_data.py | jutzca/Corona-Virus-Meta-Analysis-2020 | e8a44f354b0aa81bf42c12dfc95e4cd50fa21b29 | [
"BSD-3-Clause"
] | 1 | 2021-06-22T20:19:52.000Z | 2021-06-22T20:19:52.000Z | covid_19_review/format_data.py | jutzca/Corona-Virus-Meta-Analysis-2020 | e8a44f354b0aa81bf42c12dfc95e4cd50fa21b29 | [
"BSD-3-Clause"
] | null | null | null | covid_19_review/format_data.py | jutzca/Corona-Virus-Meta-Analysis-2020 | e8a44f354b0aa81bf42c12dfc95e4cd50fa21b29 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import pandas as pd
# Merge each "<name>_Mean" column with its "<name>_SD" partner into a single
# "mean (+- sd)" text column, then write the edited table back out.
df = pd.read_csv('../data/Corona_review_labor.csv')
mean_columns = [c for c in df.columns if c.endswith('_Mean')]
for mean_col in mean_columns:
    sd_col = mean_col.replace('_Mean', '') + '_SD'
    df[mean_col] = [
        f'{m} (+- {s})' for m, s in zip(df[mean_col], df[sd_col])
    ]
df.to_csv('../data/Corona_review_labor_edited.csv', index=False)
| 23 | 75 | 0.611253 | 60 | 391 | 3.816667 | 0.533333 | 0.061135 | 0.113537 | 0.165939 | 0.209607 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022801 | 0.214834 | 391 | 16 | 76 | 24.4375 | 0.723127 | 0 | 0 | 0 | 0 | 0 | 0.253197 | 0.176471 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d965e94766b0c827ffb3e210cf77299cf5db72e5 | 239 | py | Python | sims/__init__.py | jsrehak/sims | b4aa956dcc1996334ba914763e97ebd346b5aefd | [
"BSD-3-Clause"
] | null | null | null | sims/__init__.py | jsrehak/sims | b4aa956dcc1996334ba914763e97ebd346b5aefd | [
"BSD-3-Clause"
] | null | null | null | sims/__init__.py | jsrehak/sims | b4aa956dcc1996334ba914763e97ebd346b5aefd | [
"BSD-3-Clause"
] | null | null | null | from sims.sims import *
__version__ = '1.0.0'
__name__ = 'sims'
__author__ = 'Zan Peeters'
__url__ = 'https://github.com/zanpeeters/sims'
__license__ = 'BSD 3-Clause Clear'
__copyright__ = '(c) 2018 Zan Peeters'
__description__ = __doc__
| 23.9 | 46 | 0.732218 | 31 | 239 | 4.612903 | 0.806452 | 0.13986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038647 | 0.133891 | 239 | 9 | 47 | 26.555556 | 0.652174 | 0 | 0 | 0 | 0 | 0 | 0.384937 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9698cb3a0ac7d01d79e605cabcecf9b02767f8b | 1,192 | py | Python | BookExample/Chapter 08/python-shell-code_08.py | godong9/Spark-Study | a3faf6160164b4040f1a0db69d737757d56de4cd | [
"MIT"
] | 55 | 2016-11-08T11:18:35.000Z | 2022-02-19T20:45:56.000Z | BookExample/Chapter 08/python-shell-code_08.py | godong9/Spark-Study | a3faf6160164b4040f1a0db69d737757d56de4cd | [
"MIT"
] | null | null | null | BookExample/Chapter 08/python-shell-code_08.py | godong9/Spark-Study | a3faf6160164b4040f1a0db69d737757d56de4cd | [
"MIT"
] | 66 | 2016-10-07T20:43:08.000Z | 2022-03-08T07:48:58.000Z | '''
This code is intended to be run in the IPython shell.
You can enter each line in the shell and see the result immediately.
The expected output in the Python console is presented as commented lines following the
relevant code.
'''
%pylab inline
# Populating the interactive namespace from numpy and matplotlib
# NOTE: the lines below are an IPython transcript, not importable Python;
# imread/imshow/plot/cumsum come from the %pylab namespace.
path = "/PATH/lfw/Aaron_Eckhart/Aaron_Eckhart_0001.jpg"
ae = imread(path)
imshow(ae)
# Load and display the pre-computed grayscale version of the same image.
tmpPath = "/tmp/aeGray.jpg"
aeGary = imread(tmpPath)
imshow(aeGary, cmap=plt.cm.gray)
# Principal components exported earlier from Spark; one column per component.
pc = np.loadtxt("/tmp/pc.csv", delimiter=",")
print(pc.shape)
# (2500, 10)
def plot_gallery(images, h, w, n_row=2, n_col=5):
    """Render an n_row x n_col grid of h*w images.

    Column ``i`` of *images* holds one flattened h*w picture; each cell is
    titled "Eigenface <i+1>" and drawn in grayscale with no tick marks.
    """
    plt.figure(figsize=(1.8 * n_col, 2.4 * n_row))
    plt.subplots_adjust(bottom=0, left=.01, right=.99, top=.90, hspace=.35)
    cells = n_row * n_col
    for idx in range(cells):
        plt.subplot(n_row, n_col, idx + 1)
        plt.imshow(images[:, idx].reshape((h, w)), cmap=plt.cm.gray)
        plt.title("Eigenface %d" % (idx + 1), size=12)
        plt.xticks(())
        plt.yticks(())
# Visualise the principal components as a gallery of 50x50 eigenfaces.
plot_gallery(pc, 50, 50)
# Singular values exported earlier from Spark.
s = np.loadtxt("/tmp/s.csv", delimiter=",")
print(s.shape)
plot(s)
# (300,)
# Cumulative singular values on a log scale.
plot(cumsum(s))
plt.yscale('log')
d973628ab6ee40c22b58223cd9fd55fa2a4f5ffd | 974 | py | Python | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | GmailWrapper_JE/venv/Lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | try:
from urlparse import urlparse, parse_qs
except ImportError:
from urllib.parse import urlparse, parse_qs
from oauthlib.common import add_params_to_uri
def instagram_compliance_fix(session):
    """Register a hook that supplies the OAuth token via Instagram's
    non-compliant ``access_token`` query parameter and return *session*."""

    def _non_compliant_param_name(url, headers, data):
        # Respect a token the caller already placed in the URL: if an
        # ``access_token`` query parameter is present — even one differing
        # from ``session.access_token`` — treat it as a deliberate override.
        query_params = dict(parse_qs(urlparse(url).query))
        if query_params.get("access_token"):
            # Nothing to do, just return.
            return url, headers, data
        # Otherwise append the session's token as a query parameter.
        url = add_params_to_uri(url, [("access_token", session.access_token)])
        return url, headers, data

    session.register_compliance_hook("protected_request", _non_compliant_param_name)
    return session
| 36.074074 | 85 | 0.673511 | 128 | 974 | 4.90625 | 0.4375 | 0.08758 | 0.066879 | 0.066879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.256674 | 974 | 26 | 86 | 37.461538 | 0.867403 | 0.240246 | 0 | 0.125 | 0 | 0 | 0.05791 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
d98398329f8f48b015878503c75be7e53518e52e | 1,390 | py | Python | Python/sha256_file_readfile_chunk.py | Suraj-Rajesh/code | 3d554c4d1d5cf4bd9d084b8034641c1f6c2a47c9 | [
"MIT"
] | null | null | null | Python/sha256_file_readfile_chunk.py | Suraj-Rajesh/code | 3d554c4d1d5cf4bd9d084b8034641c1f6c2a47c9 | [
"MIT"
] | null | null | null | Python/sha256_file_readfile_chunk.py | Suraj-Rajesh/code | 3d554c4d1d5cf4bd9d084b8034641c1f6c2a47c9 | [
"MIT"
] | null | null | null | #
# Pythonic way of iterating over a large file, by reading it chunk by chunk
#
from hashlib import sha256
from functools import partial
#
# KEY POINT
#
# To call 'iter' over a function, the function needs to take no args.
# But, in our case, we need to iter over the function, f.read(block_size),
# which takes one argument. What do we do?
#
# SOLUTION 1
#
# Use lambda function, and call f.read(block_size) inside the body
# of the lambda function, so,
#
# lambda : f.read(block_size)
#
# ^
# / \
# |||
# |||
#
# def anonymous_lambda_function():
# return f.read(block_size)
#
# Now, we can iterate over it. Cool !!
def sha256_file(filename, block_size = 64*1024):
    """Hash *filename* with SHA-256, reading it in *block_size* chunks.

    Uses ``iter(callable, sentinel)`` over a zero-argument lambda so large
    files are digested without loading them fully into memory.

    :param str filename: path of the file to hash
    :param int block_size: bytes read per chunk (default 64 KiB)
    :return: hex digest string (also printed, preserving the original
        script's console output)
    """
    digest = sha256()
    with open(filename, 'rb') as f:
        # The lambda is called repeatedly until it returns b'' (EOF).
        for chunk in iter(lambda : f.read(block_size), b''):
            digest.update(chunk)
    hexdigest = digest.hexdigest()
    # print(...) with one argument works on both Python 2 and 3, unlike the
    # original bare print statement.
    print(hexdigest)
    return hexdigest
# Demo run: hashes ./dummy.txt (must exist in the working directory).
sha256_file('dummy.txt')
#
# Solution 2:
#
# To convert a function with more args to none or lesser arguments, we can
# use 'partial'.
#
# f.read(block_size) <==> partial(f.read, block_size)
#
def new_sha256_file(filename, block_size=64*1024):
    """Hash *filename* with SHA-256, reading it in *block_size* chunks.

    Variant of the lambda-based version above: ``functools.partial`` turns
    ``f.read(block_size)`` into a zero-argument callable for ``iter``.

    :param str filename: path of the file to hash
    :param int block_size: bytes read per chunk (default 64 KiB)
    :return: hex digest string (also printed, preserving the original
        script's console output)
    """
    digest = sha256()
    with open(filename, 'rb') as f:
        for chunk in iter(partial(f.read, block_size), b''):
            digest.update(chunk)
    hexdigest = digest.hexdigest()
    # Parenthesized print is valid on both Python 2 and 3 for a single arg.
    print(hexdigest)
    return hexdigest
# Demo run: hashes ./dummy.txt (must exist in the working directory).
new_sha256_file('dummy.txt')
| 21.060606 | 75 | 0.631655 | 201 | 1,390 | 4.278607 | 0.412935 | 0.104651 | 0.093023 | 0.130233 | 0.401163 | 0.306977 | 0.306977 | 0.306977 | 0.306977 | 0.306977 | 0 | 0.033557 | 0.24964 | 1,390 | 65 | 76 | 21.384615 | 0.790988 | 0.527338 | 0 | 0.5 | 0 | 0 | 0.03537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.125 | null | null | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9934b5b31dd1ab6a965f3c1c5def23d62e6d7e1 | 2,714 | py | Python | iss.py | WesleySalesberry/q3-ISS_Location | fec7d8e3eda510ef3d9dadb671dae07faf3bbcfb | [
"MIT"
] | null | null | null | iss.py | WesleySalesberry/q3-ISS_Location | fec7d8e3eda510ef3d9dadb671dae07faf3bbcfb | [
"MIT"
] | null | null | null | iss.py | WesleySalesberry/q3-ISS_Location | fec7d8e3eda510ef3d9dadb671dae07faf3bbcfb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
__author__ = 'Wesley Salesberry'
import turtle
from datetime import datetime
import requests
import json
# Cleans up the json so that it is readable
def jprint(obj):
    """Pretty-print *obj* as indented, key-sorted JSON."""
    print(json.dumps(obj, sort_keys=True, indent=4))
# gets the number and name of each astronaught
def get_astronauts_information(URL):
    """Fetch and pretty-print the people currently in space.

    :param str URL: base URL of the Open Notify API (no trailing slash part
        for the endpoint; "/astros.json" is appended here)
    :return: list of person dicts from the ``people`` field of the response
    """
    res = requests.get(URL + "/astros.json")
    people = res.json()["people"]
    jprint(people)
    # Returning the list (instead of only printing) lets callers reuse it;
    # previous callers that ignored the None return are unaffected.
    return people
# Get the ISS current geographic coordinates and a timestamp
def get_ISS_information(URL):
    """Return ``[timestamp, latitude, longitude]`` for the ISS.

    *timestamp* is a datetime; latitude/longitude are returned exactly as
    the API provides them in ``iss_position``.
    """
    payload = requests.get(URL).json()
    position = payload["iss_position"]
    return [
        datetime.fromtimestamp(payload["timestamp"]),
        position["latitude"],
        position["longitude"],
    ]
def create_world(shape, lat, long):
    """Open the turtle world-map window and place the ISS sprite on it.

    :param str shape: turtle shape (gif path) registered for the ISS sprite
    :param lat: current ISS latitude
    :param long: current ISS longitude
    """
    screen = turtle.Screen()
    screen.title("ISS Location")
    screen.setup(720, 360)
    # World coordinates map directly to longitude (x) / latitude (y) degrees.
    screen.setworldcoordinates(-180, -90, 180, 90)
    screen.bgpic("map.gif")
    screen.register_shape(shape)
    create_ISS(shape, lat, long)
    pass_over_Indy()
    # Blocks until the window is closed.
    turtle.mainloop()
def create_ISS(shape, lat, long):
    """Draw the ISS sprite at (long, lat) on the already-open map."""
    iss = turtle.Turtle()
    iss.shape(shape)
    iss.setheading(90)  # orient the sprite pointing up/north
    iss.penup()         # move without drawing a trail
    iss.goto(long, lat)
def pass_over_Indy(lat=39.7684, long=-86.1581):
    """Mark a location on the map and annotate it with the next ISS
    pass-over time.

    Generalized from hard-coded Indianapolis coordinates: callers may now
    supply any location; the defaults preserve the original behaviour.

    :param float lat: latitude of the marked location (default Indianapolis)
    :param float long: longitude of the marked location (default Indianapolis)
    """
    location = turtle.Turtle()
    location.penup()
    location.color("yellow")
    location.goto(long, lat)
    location.dot(5)
    time = pass_over_info("http://api.open-notify.org/iss-pass.json",
                          lat, long)
    style = ('Arial', 10, "bold")
    location.write(time, font=style)
    location.hideturtle()
def pass_over_info(URL, lat, long):
    """Query the iss-pass endpoint and return one upcoming pass-over time
    (as a naive local datetime) for the given coordinates."""
    request_url = f"{URL}?lat={lat}&lon={long}"
    payload = requests.get(request_url).json()
    # NOTE(review): index 1 picks the *second* listed rise time — confirm
    # whether skipping the first entry is intentional.
    return datetime.fromtimestamp(payload["response"][1]["risetime"])
def main():
    """Entry point: print the current astronauts, then draw the ISS on the
    world map and report its position."""
    get_astronauts_information("http://api.open-notify.org/")
    iss = "iss.gif"
    # Fetch the ISS position once instead of issuing three identical HTTP
    # requests (the original called get_ISS_information three times).
    time_stamp, latitude, longitude = get_ISS_information(
        "http://api.open-notify.org/iss-now.json")
    latitude = float(latitude)
    longitude = float(longitude)
    create_world(iss, latitude, longitude)
    print(f"Latitude: {latitude}\nLongitude: {longitude}\nTime :{time_stamp}")
# Standard script entry point.
if __name__ == '__main__':
    main()
| 24.899083 | 78 | 0.653279 | 358 | 2,714 | 4.807263 | 0.354749 | 0.024404 | 0.03835 | 0.059268 | 0.156886 | 0.12086 | 0.107496 | 0.08774 | 0.08774 | 0.08774 | 0 | 0.017512 | 0.200442 | 2,714 | 108 | 79 | 25.12963 | 0.775576 | 0.104643 | 0 | 0.028169 | 0 | 0 | 0.165979 | 0.009083 | 0 | 0 | 0 | 0 | 0 | 1 | 0.112676 | false | 0.084507 | 0.056338 | 0 | 0.197183 | 0.056338 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
d994afeaeff45c52b68d6ecd1d3cab66dc88c2ae | 6,414 | py | Python | Cpy simplified/wiznet_simplify.py | ronpang/WIZnet-HK_Ron | d9df912ee9afe70c3aad17e0d703428afe2f2b4c | [
"Apache-2.0"
] | null | null | null | Cpy simplified/wiznet_simplify.py | ronpang/WIZnet-HK_Ron | d9df912ee9afe70c3aad17e0d703428afe2f2b4c | [
"Apache-2.0"
] | null | null | null | Cpy simplified/wiznet_simplify.py | ronpang/WIZnet-HK_Ron | d9df912ee9afe70c3aad17e0d703428afe2f2b4c | [
"Apache-2.0"
] | null | null | null | # SPDX-FileCopyrightText: 2010 WIZnet
#
# SPDX-License-Identifier: MIT
import board
import digitalio
import time
import busio
from adafruit_wiznet5k.adafruit_wiznet5k import * #active WIZnet chip library
import adafruit_wiznet5k.adafruit_wiznet5k_socket as socket #open socket from WIZnet library
class network:
    """Interface for wiznet_simplified module.

    :param bool DHCP: Turn on / off the DHCP mode (IP assignment from Router)
    :param Tuple MY_MAC: The WIZnet's MAC address (Default: 0x00,0x01,0x02,0x03)
    :param Tuple IP_ADDRESS: The WIZnet's IP address (Default: 192, 168, 0, 111)
    :param Tuple SUBNET_MASK: The WIZnet's Subnet Mask address (Default: 255, 255, 0, 0)
    :param Tuple GATEWAY_ADDRESS: The WIZnet's Gateway address (Default: 192, 168, 0, 1)
    :param Tuple DNS_SERVER: The Wiznet's DNS address (Default: 8, 8, 8, 8)
    """
    # Established TCP socket: the accepted client socket in server mode, or
    # this device's own socket in client mode; None while unconnected.
    mo = None

    def __init__(self,
                 DHCP = True,
                 MY_MAC = (0x00, 0x01, 0x02, 0x03, 0x04, 0x05),
                 IP_ADDRESS = (192, 168, 0, 111),
                 SUBNET_MASK = (255, 255, 0, 0),
                 GATEWAY_ADDRESS = (192, 168, 0, 1),
                 DNS_SERVER = (8, 8, 8, 8)
                 ):
        self.mac = MY_MAC
        self.ip = IP_ADDRESS
        self.sub = SUBNET_MASK
        self.gate = GATEWAY_ADDRESS
        self.dns = DNS_SERVER
        self.SPI_setup()
        self.eth = WIZNET5K(self.spi_bus, self.cs, is_dhcp=DHCP, mac=self.mac, debug=False)
        if DHCP == False:
            # Static configuration only applies when DHCP is disabled.
            self.eth.ifconfig = self.ip, self.sub, self.gate, self.dns
        print("Chip Version:", self.eth.chip)
        print("MAC Address:", [hex(i) for i in self.eth.mac_address])
        print("My IP address is:", self.eth.pretty_ip(self.eth.ip_address))

    def SPI_setup(self):
        """GPIO pins setup and create a SPI communication with WIZnet's chip"""
        # Activate GPIO pins for SPI communication
        SPI0_SCK = board.GP18
        SPI0_TX = board.GP19
        SPI0_RX = board.GP16
        SPI0_CSn = board.GP17
        # Activate Reset pin for communication with W5500 chip
        W5x00_RSTn = board.GP20
        # Set reset function
        ethernetRst = digitalio.DigitalInOut(W5x00_RSTn)
        ethernetRst.direction = digitalio.Direction.OUTPUT
        # Set this SPI for selecting the correct chip
        self.cs = digitalio.DigitalInOut(SPI0_CSn)
        # Set the GPIO pins for SPI communication
        self.spi_bus = busio.SPI(SPI0_SCK, MOSI=SPI0_TX, MISO=SPI0_RX)
        # Pulse reset low for one second to restart the WIZnet chip first
        ethernetRst.value = False
        time.sleep(1)
        ethernetRst.value = True

    def connection(self,
                   Server_type = True,
                   r_ip = None,
                   r_port = 5000
                   ):
        """Create a socket for TCP connection

        :param bool Server_type: Select TCP mode ( On = Server / Off = Client) (Default: True)
        :param str r_ip: Set Remote device IP address (Default: None)
        :param int r_port: Set Remote device Port number (Default: 5000)
        """
        socket.set_interface(self.eth)
        self.communicate = socket.socket()  # Set and name the socket to be a TCP server
        self.remote_ip = r_ip
        self.remote_port = r_port
        if self.remote_port is None:
            # Crude validation kept from the original: the assert always
            # fires here (self.communicate is a live socket) so a missing
            # port aborts with the message below.
            assert self.communicate == None, "Port number is required for TCP connnection"
        if Server_type == True:
            self.communicate.bind((self.remote_ip, self.remote_port))  # Binding the IP address and Port number
            self.communicate.listen()
        else:
            if self.remote_ip is None:
                assert self.communicate == None, "IP address is required for TCP client"
            else:
                self.communicate.connect((self.remote_ip, self.remote_port), None)

    def check_mode(self):
        """Check the TCP mode and connect with the remote device"""
        self.eth.maintain_dhcp_lease()
        if self.communicate.status == SNSR_SOCK_CLOSED:
            time.sleep(0.5)
        if self.mo is None:
            if self.communicate.status == SNSR_SOCK_LISTEN:
                # TCP server status: WIZnet's chip is listening for a client
                # device's request — accept it.
                self.mo, addr = self.communicate.accept()
            else:
                if self.communicate.status == SNSR_SOCK_CLOSED:
                    # When the server has disconnected, try to reconnect.
                    self.communicate.connect((self.remote_ip, self.remote_port), None)
                    self.mo = self.communicate
                else:
                    self.mo = self.communicate

    def communication(self, send_data = None):
        """Communicate with the device and handle different kinds of communication status

        :param str send_data: The data required to send out through TCP communication
            (None = loopback mode, contain data = send out data) (Default: None)
        :return: received data bytes in send mode, otherwise None
        """
        # FIX: r_data was previously unbound when no status branch matched,
        # raising UnboundLocalError at the return below.
        r_data = None
        if self.mo.status == SNSR_SOCK_SYNRECV:
            # Request received from the remote device; allow time for the
            # connection to be established.
            time.sleep(0.5)
        if self.mo.status == SNSR_SOCK_ESTABLISHED:
            if send_data == None:
                data = self.mo.recv()  # Data size that you could receive
                self.mo.send(data)  # Echo message back to client
                r_data = None
            else:
                r_data = self.mo.recv()  # Data size that you could receive
                data = send_data.encode()
                self.mo.send(data)  # Echo message back to client
        elif self.mo.status == SNSR_SOCK_CLOSE_WAIT:
            # Received command to close the socket; confirm by closing the
            # connection and the socket.
            self.mo.disconnect()
            self.mo.close()
            r_data = None
        elif self.mo.status == SNSR_SOCK_FIN_WAIT:
            # Remote device disconnected; close the socket.
            # FIX: was ``communicate.close()`` — a NameError, since the
            # socket lives on ``self``.
            self.communicate.close()
            r_data = None
        elif self.mo.status == SNSR_SOCK_CLOSED:
            # Socket has closed; a new one can be recreated via check_mode.
            r_data = None
            self.mo = None
        return r_data
| 44.234483 | 155 | 0.613814 | 844 | 6,414 | 4.552133 | 0.246446 | 0.024987 | 0.029151 | 0.020822 | 0.18558 | 0.160593 | 0.114003 | 0.084852 | 0.084852 | 0.048412 | 0 | 0.031475 | 0.306517 | 6,414 | 144 | 156 | 44.541667 | 0.832284 | 0 | 0 | 0.21875 | 0 | 0 | 0.031362 | 0 | 0 | 0 | 0.00617 | 0 | 0.020833 | 0 | null | null | 0 | 0.0625 | null | null | 0.03125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
d9984a1eaa310ceffb4b0516dba9fa5c2cb26f67 | 3,116 | py | Python | books/booksdatasourcetests.py | KristinA64/cs257 | c6db0fbe7aeae4355b3f4ef77b722da0d21f233c | [
"MIT"
] | null | null | null | books/booksdatasourcetests.py | KristinA64/cs257 | c6db0fbe7aeae4355b3f4ef77b722da0d21f233c | [
"MIT"
] | null | null | null | books/booksdatasourcetests.py | KristinA64/cs257 | c6db0fbe7aeae4355b3f4ef77b722da0d21f233c | [
"MIT"
] | null | null | null | '''
booksdatasourcetest.py
Jeff Ondich, 24 September 2021
Jayti Arora, Kristin Albright, 11 October 2021
'''
import booksdatasource
import unittest
class BooksDataSourceTester(unittest.TestCase):
    """Unit tests for booksdatasource.BooksDataSource.

    Expectations below (counts, orderings) are tied to the contents of the
    books_medium.csv fixture loaded in setUp.
    """

    def setUp(self):
        # A fresh data source per test, loaded from the medium fixture.
        self.data_source = booksdatasource.BooksDataSource('books_medium.csv')

    def tearDown(self):
        pass

    def test_unique_author(self):
        # A surname matching exactly one author returns only that author.
        authors = self.data_source.authors('Pratchett')
        for author in authors:
            self.assertEqual(author, booksdatasource.Author('Pratchett', 'Terry'))
        self.assertTrue(authors[0] == booksdatasource.Author('Pratchett', 'Terry'))
        self.assertTrue(len(authors) == 1)

    def test_blank_author(self):
        # No search string: all 8 authors in the fixture are returned.
        authors = self.data_source.authors()
        self.assertTrue(len(authors) == 8)

    def test_authors(self):
        # First-name matches count too; 'Jane' matches only Jane Austen.
        authors = self.data_source.authors('Jane')
        self.assertTrue(booksdatasource.Author('Austen', 'Jane') in authors)
        self.assertTrue(len(authors) == 1)

    def test_sorted_authors(self):
        # Results are sorted; 'te' matches three authors, Austen first.
        authors = self.data_source.authors('te')
        self.assertTrue(authors[0] == booksdatasource.Author('Austen', 'Jane'))
        self.assertTrue(len(authors) == 3)

    '''
    Book Tests
    '''
    def test_blank_books(self):
        # No search string: all 10 books in the fixture are returned.
        books = self.data_source.books()
        self.assertTrue(books)
        self.assertTrue(len(books) == 10)

    def test_books(self):
        books = self.data_source.books('Sula', 'year')
        self.assertTrue(booksdatasource.Book('Sula', 1973, [booksdatasource.Author('Morrison', 'Toni')]) in books)

    def test_sorted_title(self):
        # 'title' sort: alphabetical by book title.
        books = self.data_source.books('There', 'title')
        self.assertTrue(books[0] == booksdatasource.Book('And Then There Were None', 1939, [booksdatasource.Author('Christie', 'Agatha')]))

    def test_sorted_year(self):
        # 'year' sort: oldest publication first.
        books = self.data_source.books('the', 'year')
        self.assertTrue(books[0] == booksdatasource.Book('The Life and Opinions of Tristram Shandy, Gentleman', 1759, [booksdatasource.Author('Sterne', 'Laurence')]))

    '''
    Between Years Tests
    '''
    def test_blank_years(self):
        # No bounds: every book is in range.
        books = self.data_source.books_between_years()
        self.assertTrue(len(books) == 10)

    def test_no_books(self):
        # A range before every fixture book yields an empty list.
        books = self.data_source.books_between_years(1500, 1550)
        self.assertTrue(len(books) == 0)
        self.assertFalse(books)

    def test_inclusive(self):
        # Both endpoints are inclusive (1759 book is included).
        books = self.data_source.books_between_years(1700, 1759)
        self.assertTrue(books)
        self.assertTrue(len(books) == 1)

    def test_sorted(self):
        # Results are ordered by year, ties broken by title.
        books = self.data_source.books_between_years(1813, 1815)
        self.assertTrue(books[0] == booksdatasource.Book('Pride and Prejudice', 1813, [booksdatasource.Author('Austen', 'Jane')]))
        self.assertTrue(books[1] == booksdatasource.Book('Sense and Sensibility', 1813, [booksdatasource.Author('Austen', 'Jane')]))
        self.assertTrue(books[2] == booksdatasource.Book('Emma', 1815, [booksdatasource.Author('Austen', 'Jane')]))
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| 31.795918 | 166 | 0.660783 | 357 | 3,116 | 5.627451 | 0.266106 | 0.132404 | 0.090592 | 0.067695 | 0.518666 | 0.503235 | 0.337979 | 0.1334 | 0 | 0 | 0 | 0.030961 | 0.201861 | 3,116 | 97 | 167 | 32.123711 | 0.77684 | 0.032413 | 0 | 0.109091 | 0 | 0 | 0.104202 | 0 | 0 | 0 | 0 | 0 | 0.381818 | 1 | 0.254545 | false | 0.018182 | 0.036364 | 0 | 0.309091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79429c3689c82a9d8ee2371c5c13aebcc653abca | 147 | py | Python | gitogether/__init__.py | nklapste/gitogether | 9190ea4a1a32e62f985b3694d3a1b949f1fb819c | [
"MIT"
] | null | null | null | gitogether/__init__.py | nklapste/gitogether | 9190ea4a1a32e62f985b3694d3a1b949f1fb819c | [
"MIT"
] | null | null | null | gitogether/__init__.py | nklapste/gitogether | 9190ea4a1a32e62f985b3694d3a1b949f1fb819c | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""gitogether
Scripts:
+ :mod:`.__main__` - argparse entry point
Module:
"""
# Package version as a (major, minor, patch) tuple.
__version__ = (0, 0, 0)
| 12.25 | 42 | 0.598639 | 18 | 147 | 4.444444 | 0.888889 | 0.05 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032787 | 0.170068 | 147 | 11 | 43 | 13.363636 | 0.622951 | 0.748299 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
794752ad248ac1e49d852f894df16c035754b838 | 294 | py | Python | towers/monkey_village.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | towers/monkey_village.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | towers/monkey_village.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | from tower import Tower
from config import keybinds
class MonkeyVillage(Tower):
    """Tower metadata for the Monkey Village tower."""
    name = 'monkey_village'
    range = 215   # radius — presumably pixels; TODO confirm units against Tower
    width = 119   # footprint width (same units as range)
    height = 103  # footprint height
    size = 'xl'
    keybind = keybinds[name]  # placement hotkey looked up from shared config
    aquatic = False  # land tower

    def __init__(self, **kwargs):
        # No extra state; delegates entirely to Tower.
        super().__init__(**kwargs)
| 18.375 | 34 | 0.636054 | 34 | 294 | 5.235294 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 0.265306 | 294 | 15 | 35 | 19.6 | 0.782407 | 0 | 0 | 0 | 0 | 0 | 0.054422 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.166667 | 0 | 0.916667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
795032bb0ecff99121df7274f4717f56ce7ae0ce | 546 | py | Python | leetcode/binary_tree_preorder_traversal.py | alexandru-dinu/competitive-programming | 4515d221a649b3ab8bc012d01f38b9e4659e2e76 | [
"MIT"
] | null | null | null | leetcode/binary_tree_preorder_traversal.py | alexandru-dinu/competitive-programming | 4515d221a649b3ab8bc012d01f38b9e4659e2e76 | [
"MIT"
] | 6 | 2021-10-12T09:14:30.000Z | 2021-10-16T19:29:08.000Z | leetcode/binary_tree_preorder_traversal.py | alexandru-dinu/competitive-programming | 4515d221a649b3ab8bc012d01f38b9e4659e2e76 | [
"MIT"
] | null | null | null | # https://leetcode.com/problems/binary-tree-preorder-traversal
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    """Preorder (root, left, right) traversal of a binary tree."""

    def preorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
        """Return the node values of *root* in preorder, iteratively."""
        values = []
        stack = [root]
        while stack:
            node = stack.pop()
            if node is None:
                continue
            values.append(node.val)
            # Push the right child first so the left subtree is visited first.
            stack.append(node.right)
            stack.append(node.left)
        return values
| 28.736842 | 71 | 0.626374 | 66 | 546 | 5.121212 | 0.5 | 0.059172 | 0.076923 | 0.177515 | 0.201183 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002451 | 0.252747 | 546 | 18 | 72 | 30.333333 | 0.82598 | 0.43956 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79624d1c639a3ad314464723a290c9f75da7a2e0 | 587 | py | Python | tasks/serializers.py | rohitdwivedula/ultimate-task-manager | 6ec2bc3ed0cadafb8ebe0015901d21d449d02920 | [
"MIT"
] | null | null | null | tasks/serializers.py | rohitdwivedula/ultimate-task-manager | 6ec2bc3ed0cadafb8ebe0015901d21d449d02920 | [
"MIT"
] | 8 | 2020-06-15T18:50:59.000Z | 2021-09-22T19:11:12.000Z | tasks/serializers.py | rohitdwivedula/ultimate-task-manager | 6ec2bc3ed0cadafb8ebe0015901d21d449d02920 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from tasks.models import Label, Task, SubTask
class LabelSerializer(serializers.ModelSerializer):
    """Serializes a Label's identifying and descriptive fields."""
    class Meta:
        model = Label
        fields = ('uuid', 'name', 'description', 'created_at')
class SubTaskSerializer(serializers.ModelSerializer):
    """Serializes a SubTask, including the parent task reference."""
    class Meta:
        model = SubTask
        fields = ('uuid', 'name', 'task', 'status')
class TaskSerializer(serializers.ModelSerializer):
    """Serializes a Task together with its nested labels and subtasks."""
    # Nested serializers; many=True because both are presumably to-many
    # relations on the Task model — confirm against tasks.models.
    labels = LabelSerializer(many=True)
    subtasks = SubTaskSerializer(many=True)
    class Meta:
        model = Task
        fields = "__all__"
796543711296bb20b3f8da4652f0c6abe79f2ea5 | 441 | py | Python | open_astral_engine/effects/buff.py | I-dan-mi-I/Open-Astral-Engine | 5f7c62521e852cde4ca0fb0668950ebb5b65dc2b | [
"MIT"
] | null | null | null | open_astral_engine/effects/buff.py | I-dan-mi-I/Open-Astral-Engine | 5f7c62521e852cde4ca0fb0668950ebb5b65dc2b | [
"MIT"
] | null | null | null | open_astral_engine/effects/buff.py | I-dan-mi-I/Open-Astral-Engine | 5f7c62521e852cde4ca0fb0668950ebb5b65dc2b | [
"MIT"
] | null | null | null | from .base_classes import EffectsDict
# Registry that effect classes below add themselves to via @effects.append.
effects = EffectsDict()
@effects.append
class BuffExample:
    # Example buff definition; the decorator registers it in the shared
    # EffectsDict registry. The Cyrillic strings are runtime display text.
    __ename__ = "Название Бафф"  # display name (Russian: "Buff Name")
    __description__ = """Описание"""  # description text ("Description")
    __fluttering__ = False
    __event__ = False
    __duration__ = 0  # presumably duration in turns/ticks — confirm in engine
    __eindex__ = -1
    __type__ = "buff"
    __synergy__ = """Синергии"""  # synergy notes ("Synergies")
    def __init__(self, game, player):
        # Keep references to the owning game and the affected player.
        self.game = game
        self.player = player
    def act(self):
        # No behaviour in this example template.
        pass
| 18.375 | 37 | 0.632653 | 43 | 441 | 5.627907 | 0.744186 | 0.14876 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006192 | 0.267574 | 441 | 23 | 38 | 19.173913 | 0.743034 | 0 | 0 | 0 | 0 | 0 | 0.07483 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0.058824 | 0.058824 | 0 | 0.705882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
79795b7289e3b3dff67cd5a4c9be0ff0f959ed6e | 846 | py | Python | jd/api/rest/SellerPromoSingleCreatePlummetedPromoRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | jd/api/rest/SellerPromoSingleCreatePlummetedPromoRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | jd/api/rest/SellerPromoSingleCreatePlummetedPromoRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | from jd.api.base import RestApi
class SellerPromoSingleCreatePlummetedPromoRequest(RestApi):
	"""Request wrapper for the JD API method
	jingdong.seller.promo.singleCreatePlummetedPromo."""

	def __init__(self,domain,port=80):
		RestApi.__init__(self,domain, port)
		# All promo parameters start unset; callers assign the ones they
		# need before the request is sent.
		for field in (
			'riskLevel', 'promoChannel', 'bindToken', 'promoNum',
			'limitBuyType', 'quota', 'promoAdword', 'beginTime',
			'areaId', 'pId', 'areaTag', 'skuId', 'childType',
			'userGrade', 'promoType', 'promoName', 'activityUrl',
			'limitBuyMaxNum', 'limitBuyMinNum', 'endTime',
			'mobileActivityUrl', 'promoReason', 'storeId',
		):
			setattr(self, field, None)

	def getapiname(self):
		"""Return the JD API method name this request maps to."""
		return 'jingdong.seller.promo.singleCreatePlummetedPromo'
| 22.263158 | 60 | 0.712766 | 97 | 846 | 6.134021 | 0.453608 | 0.295798 | 0.047059 | 0.060504 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002972 | 0.204492 | 846 | 37 | 61 | 22.864865 | 0.881129 | 0 | 0 | 0 | 0 | 0 | 0.057007 | 0.057007 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068966 | false | 0 | 0.034483 | 0.034483 | 0.172414 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
797f69dc0b08f68ab44fb6dc29d14dc9c853d88f | 275 | py | Python | gsm/truc.py | BobcatSMS/Bobcat | 044c390de949966810f2c74167b4d79b8431a886 | [
"WTFPL"
] | null | null | null | gsm/truc.py | BobcatSMS/Bobcat | 044c390de949966810f2c74167b4d79b8431a886 | [
"WTFPL"
] | null | null | null | gsm/truc.py | BobcatSMS/Bobcat | 044c390de949966810f2c74167b4d79b8431a886 | [
"WTFPL"
] | 1 | 2016-04-19T13:34:45.000Z | 2016-04-19T13:34:45.000Z | text = "zeub"
hexa = ""
for i in text:
hexa += str(hex(ord(i)))[2:].zfill(4)
print(hexa)
hexa = "002B00330033003600380039003000300034003000300030" #YOLOO
hexa = [hexa[i:i+4] for i in range(0, len(hexa), 4)]
text=""
for i in hexa:
text+=chr(int(i, 16))
print(text) | 19.642857 | 64 | 0.64 | 45 | 275 | 3.911111 | 0.466667 | 0.068182 | 0.102273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.234783 | 0.163636 | 275 | 14 | 65 | 19.642857 | 0.530435 | 0.018182 | 0 | 0 | 0 | 0 | 0.196296 | 0.177778 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.181818 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7981ae680c524298eeeaec2bc8c1bdc01188d014 | 2,328 | py | Python | otokon_archive/robotics/models.py | bilbeyt/otokon-archive | 2a7e81c59f4489e94cc568661db56eeedcf63bb9 | [
"MIT"
] | null | null | null | otokon_archive/robotics/models.py | bilbeyt/otokon-archive | 2a7e81c59f4489e94cc568661db56eeedcf63bb9 | [
"MIT"
] | null | null | null | otokon_archive/robotics/models.py | bilbeyt/otokon-archive | 2a7e81c59f4489e94cc568661db56eeedcf63bb9 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
from django.db import models
from ckeditor_uploader.fields import RichTextUploadingField
from django.db.models.signals import pre_save
from django.template.defaultfilters import slugify
from django.dispatch import receiver
class Season(models.Model):
    """A robotics season; the slug is auto-derived from the name by a
    pre_save handler registered below."""
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=100)
    def __str__(self):
        return self.name
class Competition(models.Model):
    """A competition held within a season; slug auto-derived on save."""
    name = models.CharField(max_length=100)
    season = models.ForeignKey(Season)
    slug = models.SlugField(max_length=100)
    def __str__(self):
        return self.name
class Robot(models.Model):
    """A robot entered in a competition, with rich-text build notes."""
    name = models.CharField(max_length=100)
    season = models.ForeignKey(Season)
    competition = models.ForeignKey(Competition)
    # Rich-text sections describing each subsystem of the build.
    software = RichTextUploadingField()
    electronic = RichTextUploadingField()
    mechanic = RichTextUploadingField()
    slug = models.SlugField(max_length=100)
    def __str__(self):
        return self.name
class RoboticsSponsors(models.Model):
    """A sponsor entry for a season; slug auto-derived on save."""
    name = models.CharField(max_length=100)
    season = models.ForeignKey(Season)
    content = RichTextUploadingField()
    is_success = models.BooleanField(default=False)  # presumably marks a confirmed sponsorship — confirm
    slug = models.SlugField(max_length=100)
    def __str__(self):
        return self.name
class RoboticsPress(models.Model):
    """A press/news item for a season; slug auto-derived on save."""
    name = models.CharField(max_length=100)
    season = models.ForeignKey(Season)
    content = RichTextUploadingField()
    created_at = models.DateTimeField(auto_now_add=True)  # set once at creation
    slug = models.SlugField()
    def __str__(self):
        return self.name
# pre_save signal handlers: each one re-derives the model's slug from its
# current name just before the row is written, so slugs track renames.
@receiver(pre_save,sender=Season)
def season_slug_handler(sender, instance, *args, **kwargs):
    instance.slug = slugify(instance.name)
@receiver(pre_save,sender=Robot)
def robot_slug_handler(sender,instance,*args,**kwargs):
    instance.slug = slugify(instance.name)
@receiver(pre_save,sender=Competition)
def competition_slug_handler(sender,instance,*args,**kwargs):
    instance.slug = slugify(instance.name)
@receiver(pre_save,sender=RoboticsSponsors)
def sponsors_slug_handler(sender,instance,*args,**kwargs):
    instance.slug = slugify(instance.name)
@receiver(pre_save,sender=RoboticsPress)
def press_slug_handler(sender,instance,*args,**kwargs):
    instance.slug = slugify(instance.name)
798925517d561ff819d7176bdec9136a6f6fcaf9 | 3,320 | py | Python | native/sta2dfft.py | julianmak/pydra | eee7dbd5fbb2c64ead9f732ed1475634606b035a | [
"MIT"
] | null | null | null | native/sta2dfft.py | julianmak/pydra | eee7dbd5fbb2c64ead9f732ed1475634606b035a | [
"MIT"
] | null | null | null | native/sta2dfft.py | julianmak/pydra | eee7dbd5fbb2c64ead9f732ed1475634606b035a | [
"MIT"
] | 1 | 2020-10-09T09:54:52.000Z | 2020-10-09T09:54:52.000Z | #/usr/bin/env python3
#
# JM: 12 Apr 2018
#
# the sta2dfft.f90 adapted for python
# contains 2d spectral commands which uses stafft
from stafft import *
# This module performs FFTs in two directions on two dimensional arrays using
# the stafft library module to actually compute the FFTs. If FFTs in one
# direction only are required use the stafft module directly. The module can
# compute any combination of sine, cosine and full FFTs in each direction.
# Along with the usual forwards (physical -> Fourier space) and reverse
# (Fourier space -> physical) routines there are also routines for computing
# the first derivatives in either direction.
#
# The convention is that for each direction the array is dimensioned 1:nx or
# 1:ny for either the sine or full transforms. While the cosine transforms
# require the additional endpoint so 0:nx or 0:ny.
#
# The routines contained in this module are:
#
# init2dfft(nx,ny,lx,ly,xfactors,yfactors,xtrig,ytrig,kx,ky)
# This routine initialises all the arrays needed for further
# transforms. The integers nx and ny are the array dimensions. Then
# lx and ly are the domain lengths - these are needed for the correct
# scaling when computing derivatives. The arrays xfactors, yfactors,
# xtrig and ytrig are needed to perform the various FFTs by the stafft
# module (see there for further details. kx and ky are arrays to hold
# the wavenumbers associated with each mode in the domain, and are
# used in computing derivatives.
#
# **If it is known at initialisation that no derivatives are required
# it is possible just to pass 1.d0 for each of lx and ly, along with
# dummy arrays for kx and ky since these are only needed for
#          computing the derivatives.**
from numpy import pi, arange
#=====================================================================
def init2dfft(nx, ny, lx, ly):
    """
    Perform the initialisation work for all subsequent 2D transform and
    derivative routines.

    Calls initfft() from the supporting 1d FFT module for transforms in
    both x and y directions, then defines the wavenumber array for each
    direction as k = (pi / l) * [1, 2, ..., n].

    Input:
        nx, ny = number of grid points in x and y
        lx, ly = domain lengths in x and y (pass 1.0 with dummy use of the
                 returned kx/ky if derivatives are never needed)
    Returns:
        (xfactors, yfactors, xtrig, ytrig, kx, ky)
    Raises:
        ValueError if lx or ly is zero, since the pi/l wavenumber scaling
        is then undefined.
    """
    xfactors, xtrig = initfft(nx)
    yfactors, ytrig = initfft(ny)

    # Previously a zero length only printed an error banner and then the
    # function crashed with an UnboundLocalError on kx/ky at the return;
    # fail loudly and explicitly instead.
    if lx == 0.0:
        raise ValueError('Wavenumber array definition not possible: '
                         'domain length in x equal to zero not allowed.')
    if ly == 0.0:
        raise ValueError('Wavenumber array definition not possible: '
                         'domain length in y equal to zero not allowed.')

    # Define x and y wavenumbers:
    kx = (pi / lx) * arange(1, nx + 1)
    ky = (pi / ly) * arange(1, ny + 1)

    return (xfactors, yfactors, xtrig, ytrig, kx, ky)
| 38.16092 | 80 | 0.632831 | 444 | 3,320 | 4.731982 | 0.38964 | 0.008567 | 0.029986 | 0.014279 | 0.193241 | 0.177059 | 0.148501 | 0.148501 | 0.111376 | 0.111376 | 0 | 0.010899 | 0.226205 | 3,320 | 86 | 81 | 38.604651 | 0.806929 | 0.696386 | 0 | 0.416667 | 0 | 0 | 0.411135 | 0.197002 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0.083333 | 0 | 0.166667 | 0.416667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
798e4ece70e0da7b973d0e06908a694c0669d736 | 869 | py | Python | auto_ripper_daemon/main.py | puujam/auto_ripper_gui | bc181d15c7f1ee285b160ac2a71722b313c514f1 | [
"MIT"
] | null | null | null | auto_ripper_daemon/main.py | puujam/auto_ripper_gui | bc181d15c7f1ee285b160ac2a71722b313c514f1 | [
"MIT"
] | null | null | null | auto_ripper_daemon/main.py | puujam/auto_ripper_gui | bc181d15c7f1ee285b160ac2a71722b313c514f1 | [
"MIT"
] | null | null | null | # Base modules
import time
import os
import multiprocessing
# Non-standard Installed modules
from daemoniker import Daemonizer
# Local modules
import processing
import communication
# Path of the pid file used to guard against double-starting the daemon.
pid_file_path = "pid_file"
def main():
    """Start the processing worker and the control server side by side."""
    global processor
    processor = processing.Processor()
    server = communication.ARServer()
    processor.start()
    server.start()
if __name__ == "__main__":
    # Check if our pid file already exists and if so, delete it, so the
    # daemonizer can create a fresh one after a crashed previous run.
    if os.path.exists( pid_file_path ):
        os.remove( pid_file_path )
    with Daemonizer() as (is_setup, daemonizer):
        if is_setup:
            # Code before daemonization
            pass
        is_parent = daemonizer( pid_file_path ) # pid_file isn't used
        if is_parent:
            # Code run only in the parent after daemonization
            pass
    # NOTE(review): presumably only the daemonized child reaches this call
    # and starts the workers — confirm daemoniker's parent/child semantics.
    main()
798fadd02eeba69b476646bf36f8a9892607866d | 189 | py | Python | send_data.py | BiaChacon/weather-api | 743e9552d1411be74594faf2b3c7ff7150b7b21c | [
"MIT"
] | null | null | null | send_data.py | BiaChacon/weather-api | 743e9552d1411be74594faf2b3c7ff7150b7b21c | [
"MIT"
] | null | null | null | send_data.py | BiaChacon/weather-api | 743e9552d1411be74594faf2b3c7ff7150b7b21c | [
"MIT"
] | null | null | null | import requests
import time
# Poll the local collector endpoint once a minute, forever, printing the
# attempt number and the HTTP response object each time.
attempt = 0
while True:
    response = requests.get(
        "http://localhost:5000/send?idNode=ESP")
    print(attempt)
    attempt = attempt + 1
    print(response)
    time.sleep(60)
| 15.75 | 48 | 0.619048 | 27 | 189 | 4.333333 | 0.703704 | 0.034188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056738 | 0.253968 | 189 | 11 | 49 | 17.181818 | 0.77305 | 0 | 0 | 0 | 0 | 0 | 0.195767 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79900f3db15534594b2074c86653a8ae7059a989 | 1,011 | py | Python | setup.py | AdvancedThreatAnalytics/django-otp-sns | 6fd8a5eb553cbd865ad0ac2155eee9bc7b792b7a | [
"BSD-2-Clause"
] | 1 | 2020-03-23T09:49:45.000Z | 2020-03-23T09:49:45.000Z | setup.py | vnagendra/django-otp-sns | 6fd8a5eb553cbd865ad0ac2155eee9bc7b792b7a | [
"BSD-2-Clause"
] | 1 | 2019-11-11T12:31:10.000Z | 2019-11-12T12:10:39.000Z | setup.py | AdvancedThreatAnalytics/django-otp-sns | 6fd8a5eb553cbd865ad0ac2155eee9bc7b792b7a | [
"BSD-2-Clause"
] | 2 | 2019-11-11T11:47:18.000Z | 2020-03-23T10:49:14.000Z | #!/usr/bin/env python
from setuptools import setup, find_packages
# Distribution metadata for django-otp-sns: a django-otp plugin that
# delivers one-time-password tokens via Amazon SNS.
setup(
    name='django-otp-sns',
    version='0.1.1',
    description="A django-otp plugin that delivers tokens via Amazon SNS.",
    # Long description is the project README, read verbatim at build time.
    long_description=open('README.rst').read(),
    author='Critical Start',
    author_email='pavel.yershov@criticalstart.com, vasu@criticalstart.com',
    url='https://github.com/vnagendra/django-otp-sns',
    license='BSD',
    classifiers=[
        # Fixed: "Development Status :: 1 - Prototype" is not a valid trove
        # classifier; the official early-stage value is "1 - Planning"
        # (PyPI rejects uploads that use unknown classifiers).
        "Development Status :: 1 - Planning",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Topic :: Security",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Framework :: Django",
    ],
    # Package code lives under src/ (the "src layout").
    package_dir={'': 'src'},
    packages=find_packages(where='src'),
    install_requires=[
        'django-otp >= 0.5.0',
        'boto3 >= 1.9.223',
        'botocore >= 1.12.223',
    ],
)
| 30.636364 | 75 | 0.607319 | 112 | 1,011 | 5.428571 | 0.678571 | 0.059211 | 0.039474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027027 | 0.231454 | 1,011 | 32 | 76 | 31.59375 | 0.75547 | 0.019782 | 0 | 0.071429 | 0 | 0 | 0.536364 | 0.054545 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.035714 | 0 | 0.035714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7996cc20bf78c96b1f8fe9b7d0d947a419d26d26 | 585 | py | Python | calamari_ocr/test/test_data_pagexml.py | jacektl/calamari | 980477aefe4e56f7fc373119c1b38649798d8686 | [
"Apache-2.0"
] | 922 | 2018-07-06T05:18:22.000Z | 2022-03-22T12:38:32.000Z | calamari_ocr/test/test_data_pagexml.py | jacektl/calamari | 980477aefe4e56f7fc373119c1b38649798d8686 | [
"Apache-2.0"
] | 267 | 2018-07-14T22:10:41.000Z | 2022-03-28T18:38:43.000Z | calamari_ocr/test/test_data_pagexml.py | jacektl/calamari | 980477aefe4e56f7fc373119c1b38649798d8686 | [
"Apache-2.0"
] | 227 | 2018-07-06T07:42:16.000Z | 2022-02-27T05:29:59.000Z | import os
import unittest
# Absolute directory of this test file; used to locate bundled sample data.
this_dir = os.path.dirname(os.path.realpath(__file__))
class TestPageXML(unittest.TestCase):
    """Smoke-tests the dataset viewer against the PageXML sample data."""

    def run_dataset_viewer(self, add_args):
        # Imported lazily so this module can load without calamari on path.
        from calamari_ocr.scripts.dataset_viewer import main

        main(add_args + ["--no_plot"])

    def test_cut_modes(self):
        glob_pattern = os.path.join(this_dir, "data", "avicanon_pagexml", "*.nrm.png")
        for mode in ("BOX", "MBR"):
            self.run_dataset_viewer(
                ["--gen", "PageXML", "--gen.images", glob_pattern, "--gen.cut_mode", mode]
            )
| 34.411765 | 102 | 0.664957 | 79 | 585 | 4.64557 | 0.506329 | 0.141689 | 0.13079 | 0.108992 | 0.299728 | 0.299728 | 0.299728 | 0.299728 | 0.299728 | 0.299728 | 0 | 0 | 0.157265 | 585 | 16 | 103 | 36.5625 | 0.744422 | 0 | 0 | 0 | 0 | 0 | 0.205128 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.272727 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
7999485ec51d2f7bb8175588fb675f238d46f4ae | 1,452 | py | Python | tests/test_cdn.py | datalogics-cgreen/server_core | 4459314cd2cdb92b7cabeed8fd1125d8c5cb7941 | [
"Apache-2.0"
] | null | null | null | tests/test_cdn.py | datalogics-cgreen/server_core | 4459314cd2cdb92b7cabeed8fd1125d8c5cb7941 | [
"Apache-2.0"
] | 1 | 2017-05-12T22:14:16.000Z | 2017-05-12T22:14:16.000Z | tests/test_cdn.py | datalogics-cgreen/server_core | 4459314cd2cdb92b7cabeed8fd1125d8c5cb7941 | [
"Apache-2.0"
] | 2 | 2017-05-12T21:27:53.000Z | 2021-08-04T12:27:25.000Z | # encoding: utf-8
from nose.tools import (
eq_,
set_trace,
)
from util.cdn import cdnify
class TestCDN(object):
    """Unit tests for cdnify()."""

    def unchanged(self, url, cdns):
        # The URL must come back exactly as it went in.
        self.ceq(url, url, cdns)

    def ceq(self, expect, url, cdns):
        eq_(expect, cdnify(url, cdns))

    def test_no_cdns(self):
        self.unchanged("http://foo/", None)

    def test_non_matching_cdn(self):
        self.unchanged("http://foo.com/bar", {"bar.com" : "cdn.com"})

    def test_matching_cdn(self):
        cdns = {"foo.com" : "https://cdn.org",
                "bar.com" : "http://cdn2.net/"}
        self.ceq("https://cdn.org/bar#baz", "http://foo.com/bar#baz", cdns)

    def test_s3_bucket(self):
        # The full S3 address for the bucket that hosts foo.com is handled
        # the same way as the bare foo.com hostname.
        url = "http://s3.amazonaws.com/foo.com/bar#baz"
        self.ceq("https://cdn.org/bar#baz", url,
                 {"foo.com" : "https://cdn.org/"})

    def test_relative_url(self):
        url = "/groups/"
        # By default, relative URLs are untouched...
        self.unchanged(url, {"bar.com" : "cdn.com"})
        # ...unless the CDN map has an entry for the empty string, which is
        # then used for relative URLs.
        self.ceq("https://cdn.org/groups/", url,
                 {"" : "https://cdn.org/"})
| 29.632653 | 69 | 0.552342 | 199 | 1,452 | 3.959799 | 0.356784 | 0.045685 | 0.083756 | 0.053299 | 0.303299 | 0.280457 | 0.280457 | 0.209391 | 0.142132 | 0.142132 | 0 | 0.005854 | 0.294077 | 1,452 | 48 | 70 | 30.25 | 0.762927 | 0.195592 | 0 | 0.129032 | 0 | 0 | 0.240517 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.225806 | false | 0 | 0.064516 | 0 | 0.322581 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
799f02b13d26f742b0afea123e7c0c48233aa0a0 | 724 | py | Python | scripts/injection_studies/create_population.py | MoritzThomasHuebner/memestr | acac0af27272304000007ead581333bf91ea4dbc | [
"MIT"
] | null | null | null | scripts/injection_studies/create_population.py | MoritzThomasHuebner/memestr | acac0af27272304000007ead581333bf91ea4dbc | [
"MIT"
] | null | null | null | scripts/injection_studies/create_population.py | MoritzThomasHuebner/memestr | acac0af27272304000007ead581333bf91ea4dbc | [
"MIT"
] | null | null | null | import json
import warnings
from pathlib import Path
import sys
from bilby.core.utils import logger
from bilby.core.result import BilbyJsonEncoder
from memestr.injection import create_injection
warnings.filterwarnings("ignore")
# Injection ID range: taken from the command line when given, otherwise
# IDs 0-99 are generated.
if len(sys.argv) > 1:
    minimum_id = int(sys.argv[1])
    maximum_id = int(sys.argv[2])
else:
    minimum_id = 0
    maximum_id = 100
# Draw one injection parameter set per ID and write it to disk as a
# zero-padded JSON file under injection_parameter_sets/.
for i in range(minimum_id, maximum_id):
    logger.info(f'Injection ID: {i}')
    params = create_injection()
    Path('injection_parameter_sets').mkdir(parents=True, exist_ok=True)
    with open(f'injection_parameter_sets/{str(i).zfill(3)}.json', 'w') as f:
        out = dict(injections=params)
        # BilbyJsonEncoder handles bilby's non-JSON-native types.
        json.dump(out, f, indent=2, cls=BilbyJsonEncoder)
| 26.814815 | 76 | 0.720994 | 108 | 724 | 4.712963 | 0.527778 | 0.041257 | 0.051081 | 0.047151 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014851 | 0.162983 | 724 | 26 | 77 | 27.846154 | 0.825083 | 0 | 0 | 0 | 0 | 0 | 0.131215 | 0.098066 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
79a02b0fca8125acb8727a4f6b22e909b538ce44 | 4,751 | py | Python | gallery/models.py | hzdg/feincms_gallery | 60f7df8c3fbb03c5019ff95df07dbc39c25a1601 | [
"BSD-3-Clause"
] | null | null | null | gallery/models.py | hzdg/feincms_gallery | 60f7df8c3fbb03c5019ff95df07dbc39c25a1601 | [
"BSD-3-Clause"
] | null | null | null | gallery/models.py | hzdg/feincms_gallery | 60f7df8c3fbb03c5019ff95df07dbc39c25a1601 | [
"BSD-3-Clause"
] | null | null | null | #coding=utf-8
from django import forms
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.db import models
from django.http import HttpResponse
from django.template.context import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ungettext_lazy, ugettext_lazy as _
from feincms.module.medialibrary.models import MediaFile
from .specs.legacy import DEFAULT_SPECS
__all__ = ['Gallery', 'GalleryMediaFile', 'GalleryContent', 'DEFAULT_SPECS']
class Gallery(models.Model):
    """A titled, ordered collection of media-library images."""
    title = models.CharField(max_length=30)
    images = models.ManyToManyField(MediaFile, through='GalleryMediaFile')

    def ordered_images(self):
        """Return all images sorted by the through-table ordering field."""
        queryset = self.images.select_related().all()
        return queryset.order_by('gallerymediafile__ordering')

    def count_images(self):
        """Return the number of images, caching the count on the instance."""
        if not getattr(self, '_image_count', None):
            self._image_count = self.images.count()
        return self._image_count

    def verbose_images(self):
        """Human-readable, pluralised image count for the admin."""
        count = self.count_images()
        return ungettext_lazy('%(count)d Image',
                              '%(count)d Images', count) % {'count': count}
    verbose_images.short_description = _('Image Count')

    class Meta:
        verbose_name = _('Gallery')
        verbose_name_plural = _('Galleries')

    def __unicode__(self):
        return self.title
class GalleryMediaFile(models.Model):
    # Through-model linking a Gallery to a MediaFile with an explicit order.
    gallery = models.ForeignKey(Gallery)
    mediafile = models.ForeignKey(MediaFile)
    ordering = models.IntegerField(default=9999)  # high default so new images sort last
    class Meta:
        verbose_name = 'Image for Gallery'
        verbose_name_plural = 'Images for Gallery'
        ordering = ['ordering']
    def __unicode__(self):
        return u'%s' %self.mediafile
class GalleryContent(models.Model):
    # Abstract FeinCMS content type that renders a Gallery using one of
    # several display "specs" (template + pagination configuration).
    @classmethod
    def initialize_type(cls, types=DEFAULT_SPECS, **kwargs):
        # Called once by FeinCMS when the content type is registered.
        # Builds the spec lookup dict, the admin choices, and dynamically
        # adds the `type` field to the concrete model.
        # NOTE(review): Python 2 `raise Class, msg` syntax — this module is
        # not Python 3 compatible as written.
        if 'feincms.module.medialibrary' not in settings.INSTALLED_APPS:
            raise ImproperlyConfigured, 'You have to add \'feincms.module.'\
                'medialibrary\' to your INSTALLED_APPS before creating a %s' \
                % cls.__name__
        # Key each spec as "<name>_<index>" so duplicate names stay unique.
        cls.specs = dict([ ('%s_%s' % (spec.name, types.index(spec)), spec)
                for spec in types ])
        cls.spec_choices = [ (spec, cls.specs[spec].verbose_name )
                for spec in cls.specs ]
        cls.add_to_class('type', models.CharField(max_length=20,
                choices=cls.spec_choices,
                default=cls.spec_choices[0][0]))
    gallery = models.ForeignKey(Gallery,
        help_text=_('Choose a gallery to render here'),
        related_name='%(app_label)s_%(class)s_gallery')
    @property
    def spec(self):
        # Resolve this content's spec; fall back to the first default spec
        # if the stored `type` no longer matches a registered spec.
        try:
            return self.specs[self.type]
        except KeyError:
            return DEFAULT_SPECS[0]
    @property
    def media(self):
        # CSS/JS assets declared by the active spec.
        return forms.Media(**self.spec.media)
    def has_pagination(self):
        return self.spec.paginated
    class Meta:
        abstract = True
        verbose_name = _('Image Gallery')
        verbose_name_plural = _('Image Galleries')
    def process(self, request, **kwargs):
        # AJAX pagination hook: when this gallery's id is requested via
        # ?gallery=<id>, return just the rendered gallery fragment.
        if int(request.GET.get('gallery', 0)) == self.id and request.is_ajax():
            return HttpResponse(self.render(request, **kwargs))
    def render(self, **kwargs):
        request = kwargs.get('request')
        objects = self.gallery.ordered_images()
        remaining = []
        # check if the type is paginated
        if request and self.has_pagination():
            paginator = Paginator(objects, self.spec.paginate_by,
                    orphans=self.spec.orphans)
            try:
                page = int(request.GET.get('page', 1))
            except ValueError:
                page = 1
            try:
                current_page = paginator.page(page)
            except (EmptyPage, InvalidPage):
                # Out-of-range page numbers fall back to the last page.
                current_page = paginator.page(paginator.num_pages)
            images = current_page.object_list
            # `remaining` collects the images NOT on the current page.
            for object in objects:
                if object not in images:
                    remaining.append(object)
        else:
            current_page, paginator = None, None
            images = objects
        return render_to_string(self.spec.templates,
                {'content': self, 'block':current_page,
                'images': images, 'paginator': paginator,
                'remaining': remaining, 'request': request },
                context_instance = RequestContext(request))
| 35.192593 | 79 | 0.609556 | 506 | 4,751 | 5.547431 | 0.296443 | 0.032063 | 0.026719 | 0.02565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00447 | 0.293622 | 4,751 | 134 | 80 | 35.455224 | 0.831943 | 0.009051 | 0 | 0.096154 | 0 | 0 | 0.09116 | 0.01785 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.105769 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79ae9a40818aa2d1c5227bb932c10ebe1099d2e8 | 2,214 | py | Python | 3 Facebook scraping.py | SajawalChopra/Facebook-Scraping | 089dfd84a7caa255de75b3733deed5a1e3f10543 | [
"MIT"
] | 1 | 2021-07-02T20:52:31.000Z | 2021-07-02T20:52:31.000Z | 3 Facebook scraping.py | SajawalChopra/Facebook-Scraping | 089dfd84a7caa255de75b3733deed5a1e3f10543 | [
"MIT"
] | null | null | null | 3 Facebook scraping.py | SajawalChopra/Facebook-Scraping | 089dfd84a7caa255de75b3733deed5a1e3f10543 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup as Bs4
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import pandas as pd
def scroll(driver, timeout):
    """Scroll the page to the bottom repeatedly, pausing `timeout` seconds
    between scrolls, until the document height stops growing."""
    previous_height = driver.execute_script("return document.body.scrollHeight")
    while True:
        # Jump to the current bottom and give lazy content time to load.
        driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        sleep(timeout)
        current_height = driver.execute_script("return document.body.scrollHeight")
        if current_height == previous_height:
            # Nothing new was loaded: we reached the real bottom.
            break
        previous_height = current_height
driver = webdriver.Chrome(executable_path='C:/WebDrivers/chromedriver.exe')

# --- Log in to Facebook ---------------------------------------------------
url = 'https://www.facebook.com/login'
driver.get(url)
driver.implicitly_wait(10)
email = 'Your Email'  # placeholder: fill in real credentials before running
email_xpath = """//*[@id="email"]"""
find_email_element = driver.find_element_by_xpath(email_xpath)
find_email_element.send_keys(email)
driver.implicitly_wait(10)
password = 'Your Password'  # placeholder
password_xpath = """//*[@id="pass"]"""
find_password_element = driver.find_element_by_xpath(password_xpath)
find_password_element.send_keys(password)
find_password_element.send_keys(Keys.ENTER)
sleep(6)  # wait for the post-login redirect to finish

# --- Open the group's member list and load every entry --------------------
group_url = "https://www.facebook.com/groups/group-name/members"
driver.get(group_url)
driver.implicitly_wait(10)
scroll(driver, 2)

# --- Parse member names out of the rendered page --------------------------
names = []
src = driver.page_source
html_soup = Bs4(src, 'lxml')
# The long class string is Facebook's generated CSS class for member links;
# it changes over time and may need updating.
for name in html_soup.find_all('a', {'class': "oajrlxb2 g5ia77u1 qu0x051f esr5mh6w e9989ue4 r7d6kgcz rq0escxv nhd2j8a9 nc684nl6 p7hjln8o kvgmc6g5 cxmmr5t8 oygrvhab hcukyx3x jb3vyjys rz4wbd8a qt6c0cv9 a8nywdso i1ao9s8h esuyzwwr f1sip0of lzcic4wl oo9gr5id gpro0wi8 lrazzd5p"}):
    # Fix: was `list_0 = names.append(...)` — list.append returns None,
    # so that assignment was a useless dead variable.
    names.append(name.get_text())

# Drop the first entry (presumably a heading link, as in the original
# `names[1:]` loop) and save the rest to CSV.
final_names = names[1:]
df = pd.DataFrame(final_names)
df.to_csv('Group_Members.csv', index=True)
driver.quit()
| 28.384615 | 276 | 0.719512 | 293 | 2,214 | 5.245734 | 0.450512 | 0.023422 | 0.037085 | 0.042941 | 0.208198 | 0.111906 | 0.071568 | 0.071568 | 0 | 0 | 0 | 0.035912 | 0.182475 | 2,214 | 77 | 277 | 28.753247 | 0.81326 | 0.077236 | 0 | 0.06383 | 0 | 0.021277 | 0.269388 | 0.056122 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021277 | false | 0.106383 | 0.106383 | 0 | 0.12766 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
79b1ad5c60eb1d0ecc8ff6daaddcc644f4d0ba0f | 2,183 | py | Python | meiduo_mall/meiduo_mall/apps/users/views.py | yy12950906/meiduo_project | e49f9a784cfb2f35ff0cae6b947ce2eb178d7c29 | [
"MIT"
] | null | null | null | meiduo_mall/meiduo_mall/apps/users/views.py | yy12950906/meiduo_project | e49f9a784cfb2f35ff0cae6b947ce2eb178d7c29 | [
"MIT"
] | null | null | null | meiduo_mall/meiduo_mall/apps/users/views.py | yy12950906/meiduo_project | e49f9a784cfb2f35ff0cae6b947ce2eb178d7c29 | [
"MIT"
] | null | null | null | from django.shortcuts import render, redirect
from django.views import View
from django import http
import re
from .models import User
from django.contrib.auth import login
from meiduo_mall.utils.response_code import RETCODE
class RegisterView(View):
"""用户注册"""
def get(self, request):
return render(request, 'register.html')
def post(self, request):
"""注册业务逻辑"""
# 接收请求体中的表单数据
query_dict = request.POST
username = query_dict.get('username')
password = query_dict.get('password')
password2 = query_dict.get('password2')
mobile = query_dict.get('mobile')
sms_code = query_dict.get('sms_code')
allow = query_dict.get('allow')
# 校验数据
if all([username, password, mobile, sms_code, allow])is False:
return http.HttpResponseForbidden('缺少必传参数')
if not re.match(r'^[a-zA-Z0-9_-]{5,20}$', username):
return http.HttpResponseForbidden('请输入5-20个字符的用户')
if not re.match(r'^[0-9A-Za-z]{8,20}$', password):
return http.HttpResponseForbidden('请输入8-20位的密码')
if password != password2:
return http.HttpResponseForbidden('两次输入的密码不一致')
if not re.match(r'^1[345789]\d{9}$', mobile):
return http.HttpResponseForbidden('请输入正确的手机号码')
# 业务逻辑处理
user = User.objects.create_user(username=username, password=password, mobile=mobile)
# 状态保持
login(request, user)
# 响应
return redirect('/') # 重定向到首页
class UsernameCountView(View):
"""判断用户名是否重复注册"""
def get(self, request, username):
# 使用username查询user表,得到username的数量
count = User.objects.filter(username=username).count()
# 响应
content = {'count': count, 'code': RETCODE.OK, 'errmsg': 'OK'} # 响应体数据
return http.JsonResponse(content)
class MobileCountView(View):
"""判断手机号是否重复注册"""
def get(self, request, mobile):
# 使用mobile查询user表,得到mobile的数量
count = User.objects.filter(mobile=mobile).count()
# 响应
content = {'count': count, 'code': RETCODE.OK, 'errmsg': 'OK'} # 响应体数据
return http.JsonResponse(content) | 29.106667 | 92 | 0.628035 | 244 | 2,183 | 5.561475 | 0.377049 | 0.046426 | 0.053058 | 0.037583 | 0.145173 | 0.116433 | 0.116433 | 0.116433 | 0.116433 | 0.116433 | 0 | 0.016344 | 0.243243 | 2,183 | 75 | 93 | 29.106667 | 0.805085 | 0.070087 | 0 | 0.097561 | 0 | 0 | 0.09905 | 0.010505 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097561 | false | 0.146341 | 0.170732 | 0.02439 | 0.560976 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
79b206ba2bda4a51ecdd46bef66e1c848691ce3b | 1,377 | py | Python | lists/longestMatchingParentheses.py | santoshmano/pybricks | bcb3ab80417e8e896280062494ce6c046329b7e8 | [
"MIT"
] | null | null | null | lists/longestMatchingParentheses.py | santoshmano/pybricks | bcb3ab80417e8e896280062494ce6c046329b7e8 | [
"MIT"
] | null | null | null | lists/longestMatchingParentheses.py | santoshmano/pybricks | bcb3ab80417e8e896280062494ce6c046329b7e8 | [
"MIT"
] | null | null | null | class ArrayStack:
def __init__(self):
self.data = []
def isEmpty(self):
return len(self.data) == 0
def push(self, val):
return self.data.append(val)
def pop(self):
if self.isEmpty():
raise Empty("Stack underflow!")
return self.data.pop()
def peek(self):
if self.isEmpty():
raise Empty("Stack is empty!")
return self.data[-1]
def longestSubstring(expr):
stk = ArrayStack()
subLen = 0
prevLen = 0
for c in expr:
if c == '(':
stk.push(c)
if subLen:
prevLen = subLen
subLen = 0
print(subLen, prevLen)
elif c == ')':
if stk.isEmpty():
if prevLen < subLen:
prevLen = subLen
subLen = 0
print(subLen, prevLen)
else:
stk.pop()
subLen += 2
print(subLen)
print("end", subLen, prevLen)
if stk.isEmpty():
return subLen+prevLen
elif subLen > prevLen:
return subLen
else:
return prevLen
print("length of - ()(())", longestSubstring("()(())"))
#print("length of - ((((", longestSubstring("(((("))
#print("length of - ()()()", longestSubstring("()()()"))
#print("length of -", longestSubstring(""))
| 21.857143 | 56 | 0.480755 | 136 | 1,377 | 4.838235 | 0.272059 | 0.138298 | 0.079027 | 0.176292 | 0.407295 | 0.407295 | 0.407295 | 0.31003 | 0.176292 | 0.176292 | 0 | 0.008178 | 0.378359 | 1,377 | 62 | 57 | 22.209677 | 0.760514 | 0.10748 | 0 | 0.295455 | 0 | 0 | 0.04906 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.136364 | false | 0 | 0 | 0.045455 | 0.318182 | 0.113636 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79c030f9209dc922a8506680c3d71338df012fb2 | 436 | py | Python | tagger.py | medric49/NLTK_POS_tagging | 401971dec4316985f81a2cb2191379d7f0104116 | [
"MIT"
] | null | null | null | tagger.py | medric49/NLTK_POS_tagging | 401971dec4316985f81a2cb2191379d7f0104116 | [
"MIT"
] | null | null | null | tagger.py | medric49/NLTK_POS_tagging | 401971dec4316985f81a2cb2191379d7f0104116 | [
"MIT"
] | null | null | null | from nltk.tag import TaggerI
import spacy.tokens
class SpacyTagger(TaggerI):
def __init__(self):
super(SpacyTagger, self).__init__()
self.nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner'])
def tag(self, tokens):
doc = spacy.tokens.doc.Doc(self.nlp.vocab, words=tokens)
for _, proc in self.nlp.pipeline:
doc = proc(doc)
return [(t.text, t.tag_) for t in doc]
| 22.947368 | 74 | 0.623853 | 61 | 436 | 4.245902 | 0.52459 | 0.081081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.243119 | 436 | 18 | 75 | 24.222222 | 0.784848 | 0 | 0 | 0 | 0 | 0 | 0.052752 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.181818 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
79c0a3e2f36669a55f29c5710d3a1429b5bf727b | 1,885 | py | Python | exercises/exercise4_test_driven_development/tests/test_dna/test_dna.py | stijn-arends/programming2 | b5ae59afdee80843ee8f893367beb1b794c45437 | [
"MIT"
] | null | null | null | exercises/exercise4_test_driven_development/tests/test_dna/test_dna.py | stijn-arends/programming2 | b5ae59afdee80843ee8f893367beb1b794c45437 | [
"MIT"
] | null | null | null | exercises/exercise4_test_driven_development/tests/test_dna/test_dna.py | stijn-arends/programming2 | b5ae59afdee80843ee8f893367beb1b794c45437 | [
"MIT"
] | null | null | null | # from pytest import capfd
import pytest
from bin.dna import DNA
from bin.dna import NotDNAError
def test_init():
dna = DNA('ACTGACTGACTA')
assert all(c in "ACGT" for c in dna.seq), 'DNA sequence does not exists only of ACTG'
assert len(dna.seq) % 3 ==0, 'DNA sequence is not a multiple of three'
def test_init_wrong():
with pytest.raises(TypeError):
dna = DNA(2)
def test_init_wrong_mutiple_three():
with pytest.raises(ValueError):
dna = DNA('ACTGACTGACTAA')
def test_init_wrong_sequence():
with pytest.raises(NotDNAError):
dna = DNA('AHAKJADWKLH1')
def test_print(capfd):
dna = DNA('ACTGACTGACTA')
print(dna)
out, _ = capfd.readouterr()
assert out.rstrip('\n') == dna.seq, "Print DNA object does not print the DNA sequence."
def test_add_nucs():
dna = DNA('ACTGACTGACTA')
# old = dna.seq
# dna.seq = dna.seq + "ACG"
assert dna.seq + "ACG" == "ACTGACTGACTAACG"
def test_add_wrong_type():
with pytest.raises(TypeError):
dna = DNA('ACTGACTGACTA')
dna + 2
def test_add_wrong_length():
with pytest.raises(ValueError, match = r"Can only add 3 nucleotides not: \d+"):
dna = DNA('ACTGACTGACTA')
dna + "ATCG"
def test_iterator():
dna = DNA('ACTGACTGACTA')
correct_codons = [dna.seq[i:i+3] for i in range(0, len(dna.seq), 3)]
for i, codon in enumerate(dna):
assert codon == correct_codons[i], f"Result is not expected. Expected = {correct_codons[i]}"
def test_immutability():
dna = DNA('ACTGACTGACTA')
id_before = id(dna.seq)
dna.__seq = "ACTGTC"
id_after = id(dna.seq)
assert id_after == id_before, "seq object is not immutable"
def test_docstrings():
for name, method in DNA.__dict__.items():
assert method.__doc__ != None, f"Method: {name} doesn't contain a docstring"
| 22.710843 | 100 | 0.645093 | 267 | 1,885 | 4.404494 | 0.329588 | 0.061224 | 0.107143 | 0.040816 | 0.052721 | 0.052721 | 0 | 0 | 0 | 0 | 0 | 0.006203 | 0.230239 | 1,885 | 82 | 101 | 22.987805 | 0.804273 | 0.033952 | 0 | 0.195652 | 0 | 0 | 0.236784 | 0 | 0 | 0 | 0 | 0 | 0.152174 | 1 | 0.23913 | false | 0 | 0.065217 | 0 | 0.304348 | 0.065217 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79ca0b37b9a48d6ddae9d9700364c147f1488354 | 2,717 | py | Python | plugins/fakeaction.py | FastmoreCrak/Fantasmas | 1ce7a55b956ccf84660ceb91fdc39fedd0384c2a | [
"CC0-1.0"
] | 1 | 2021-10-04T08:02:29.000Z | 2021-10-04T08:02:29.000Z | plugins/fakeaction.py | FastmoreCrak/Fantasmas | 1ce7a55b956ccf84660ceb91fdc39fedd0384c2a | [
"CC0-1.0"
] | null | null | null | plugins/fakeaction.py | FastmoreCrak/Fantasmas | 1ce7a55b956ccf84660ceb91fdc39fedd0384c2a | [
"CC0-1.0"
] | null | null | null | # Ultroid - UserBot
# Copyright (C) 2020 TeamUltroid
#
# This file is a part of < https://github.com/TeamUltroid/Ultroid/ >
# PLease read the GNU Affero General Public License in
# <https://www.github.com/TeamUltroid/Ultroid/blob/main/LICENSE/>.
"""
✘ Commands Available -
• `{i}ftyping <time/in secs>`
`Show Fake Typing in current chat. `
• `{i}faudio <time/in secs>`
`Show Fake Recording Action in current chat. `
• `{i}fvideo <time/in secs>`
`Show Fake video action in current chat. `
• `{i}fgame <time/in secs>`
`Show Fake Game Playing Action in current chat. `
"""
from . import *
@ultroid_cmd(pattern="ftyping ?(.*)")
async def _(e):
t = e.pattern_match.group(1)
if not (t or t.isdigit()):
t = 100
else:
try:
t = int(t)
except BaseException:
try:
t = await ban_time(e, t)
except BaseException:
return await eod(e, "`Incorrect Format`")
await eod(e, f"Starting Fake Typing For {t} sec.")
async with e.client.action(e.chat_id, "typing"):
await asyncio.sleep(t)
@ultroid_cmd(pattern="faudio ?(.*)")
async def _(e):
t = e.pattern_match.group(1)
if not (t or t.isdigit()):
t = 100
else:
try:
t = int(t)
except BaseException:
try:
t = await ban_time(e, t)
except BaseException:
return await eod(e, "`Incorrect Format`")
await eod(e, f"Starting Fake audio recording For {t} sec.")
async with e.client.action(e.chat_id, "record-audio"):
await asyncio.sleep(t)
@ultroid_cmd(pattern="fvideo ?(.*)")
async def _(e):
t = e.pattern_match.group(1)
if not (t or t.isdigit()):
t = 100
else:
try:
t = int(t)
except BaseException:
try:
t = await ban_time(e, t)
except BaseException:
return await eod(e, "`Incorrect Format`")
await eod(e, f"Starting Fake video recording For {t} sec.")
async with e.client.action(e.chat_id, "record-video"):
await asyncio.sleep(t)
@ultroid_cmd(pattern="fgame ?(.*)")
async def _(e):
t = e.pattern_match.group(1)
if not (t or t.isdigit()):
t = 100
else:
try:
t = int(t)
except BaseException:
try:
t = await ban_time(e, t)
except BaseException:
return await eod(e, "`Incorrect Format`")
await eod(e, f"Starting Fake Game Playing For {t} sec.")
async with e.client.action(e.chat_id, "game"):
await asyncio.sleep(t)
HELP.update({f"{__name__.split('.')[1]}": f"{__doc__.format(i=HNDLR)}"})
| 27.17 | 72 | 0.565329 | 372 | 2,717 | 4.067204 | 0.247312 | 0.010575 | 0.10575 | 0.037013 | 0.716457 | 0.658956 | 0.631196 | 0.561798 | 0.561798 | 0.561798 | 0 | 0.010995 | 0.297019 | 2,717 | 99 | 73 | 27.444444 | 0.778534 | 0.20979 | 0 | 0.787879 | 0 | 0 | 0.168229 | 0.022962 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.015152 | 0 | 0.075758 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79cc1c3df6706b1c798c332d2a91bcea3ae91c3c | 565 | py | Python | whatthefood/train/sgd.py | lychanl/WhatTheFood | 94b6eec2c306e7e55b19395cde207d6e6beec7fe | [
"MIT"
] | null | null | null | whatthefood/train/sgd.py | lychanl/WhatTheFood | 94b6eec2c306e7e55b19395cde207d6e6beec7fe | [
"MIT"
] | null | null | null | whatthefood/train/sgd.py | lychanl/WhatTheFood | 94b6eec2c306e7e55b19395cde207d6e6beec7fe | [
"MIT"
] | null | null | null | from whatthefood.train import Minimizer
class SGD(Minimizer):
def __init__(self, model, loss, lr=0.1, regularization=None):
super(SGD, self).__init__(model, loss, regularization)
self.lr = lr
def _run(self, grads, lr_decay=1., *args, **kwargs):
for v, g in zip(self.vars, grads):
v.value -= g * self.lr * lr_decay
def _build_tf_opt(self, grads, tf, sess, *args, **kwargs):
ops = []
for v, g in zip(self.vars, grads):
ops.append(v.assign_sub(g * self.lr))
return tf.group(ops)
| 29.736842 | 65 | 0.59823 | 83 | 565 | 3.891566 | 0.481928 | 0.055728 | 0.049536 | 0.043344 | 0.142415 | 0.142415 | 0.142415 | 0.142415 | 0 | 0 | 0 | 0.007194 | 0.261947 | 565 | 18 | 66 | 31.388889 | 0.767386 | 0 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.076923 | 0 | 0.461538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79cf99d1fc1e21ceaecfd1eaade790f686337633 | 177 | py | Python | static/test.py | phongchara/thesis | 250e06fde3085d2aad50b32123cceb606a38e726 | [
"MIT"
] | null | null | null | static/test.py | phongchara/thesis | 250e06fde3085d2aad50b32123cceb606a38e726 | [
"MIT"
] | null | null | null | static/test.py | phongchara/thesis | 250e06fde3085d2aad50b32123cceb606a38e726 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
cap = cv2.VideoCapture(0)
while True:
ret, frame = cap.read()
cv2.imshow('frame', frame)
cap.release()
cv2.destroyAllWindows() | 16.090909 | 31 | 0.655367 | 24 | 177 | 4.833333 | 0.666667 | 0.137931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036232 | 0.220339 | 177 | 11 | 32 | 16.090909 | 0.804348 | 0 | 0 | 0 | 0 | 0 | 0.029762 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79d4894aebfcbab878602eae1f62eeb71a5eb0fe | 613 | py | Python | robustbench/model_zoo/models.py | flymin/robustbench | c51d44e5c9f9425d0a2146dbfd5c54d86ea11609 | [
"Apache-2.0"
] | 2 | 2021-06-21T18:57:53.000Z | 2021-06-21T19:14:08.000Z | robustbench/model_zoo/models.py | flymin/robustbench | c51d44e5c9f9425d0a2146dbfd5c54d86ea11609 | [
"Apache-2.0"
] | null | null | null | robustbench/model_zoo/models.py | flymin/robustbench | c51d44e5c9f9425d0a2146dbfd5c54d86ea11609 | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
from typing import Any, Dict, OrderedDict as OrderedDictType
from robustbench.model_zoo.cifar10 import cifar_10_models
from robustbench.model_zoo.cifar100 import cifar_100_models
from robustbench.model_zoo.enums import BenchmarkDataset, ThreatModel
ModelsDict = OrderedDictType[str, Dict[str, Any]]
ThreatModelsDict = OrderedDictType[ThreatModel, ModelsDict]
BenchmarkDict = OrderedDictType[BenchmarkDataset, ThreatModelsDict]
model_dicts: BenchmarkDict = OrderedDict([
(BenchmarkDataset.cifar_10, cifar_10_models),
(BenchmarkDataset.cifar_100, cifar_100_models)
])
| 38.3125 | 69 | 0.841762 | 68 | 613 | 7.382353 | 0.382353 | 0.089641 | 0.119522 | 0.13745 | 0.115538 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036036 | 0.094617 | 613 | 15 | 70 | 40.866667 | 0.868468 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.416667 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
79d6e83ac128c66234e3ec305f364613fe7a0ee6 | 763 | py | Python | 701-800/771.JewelsAndStones.py | Arrackisarookie/leetcode | 2e98c81d50a6b4ae0373908fbf9fe2e2c89f43a3 | [
"MIT"
] | null | null | null | 701-800/771.JewelsAndStones.py | Arrackisarookie/leetcode | 2e98c81d50a6b4ae0373908fbf9fe2e2c89f43a3 | [
"MIT"
] | null | null | null | 701-800/771.JewelsAndStones.py | Arrackisarookie/leetcode | 2e98c81d50a6b4ae0373908fbf9fe2e2c89f43a3 | [
"MIT"
] | null | null | null | #
# 771. Jewels and Stone
#
# You're given strings J representing the types of stones that are jewels, and
# S representing the stones you have.
#
# Each character in S is a type of stone you have. You want to know how many of
# the stones you have are also jewels.
#
# The letters in J are guaranteed distinct, and all characters in J and S are
# letters. Letters are case sensitive, so "a" is considered a different type of
# stone from "A".
#
class Solution:
def numJewelsInStones(self, J: str, S: str) -> int:
# sum((True, False, True)) = 2
return sum(map(J.count, S))
return sum(S.count(j) for j in J)
return sum(i in J for i in S) # i in J 返回 True 或 False
s = Solution()
print(s.numJewelsInStones("aA", "aAAbbbb"))
| 29.346154 | 79 | 0.669725 | 133 | 763 | 3.842105 | 0.466165 | 0.029354 | 0.046967 | 0.062622 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006861 | 0.235911 | 763 | 25 | 80 | 30.52 | 0.86964 | 0.6173 | 0 | 0 | 0 | 0 | 0.032609 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.714286 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
79dcc934efbfd242b006bddf2a0ee81a25fcdfe9 | 1,713 | py | Python | mdssdk/parsers/vsan/show_vsan.py | akshatha-s13/mdssdk | 615a5528d0af1201e8fe8f305c62b258e5433990 | [
"Apache-2.0"
] | 4 | 2020-12-13T20:02:43.000Z | 2022-02-27T23:36:58.000Z | mdssdk/parsers/vsan/show_vsan.py | akshatha-s13/mdssdk | 615a5528d0af1201e8fe8f305c62b258e5433990 | [
"Apache-2.0"
] | 13 | 2020-09-23T07:30:15.000Z | 2022-03-30T01:12:25.000Z | mdssdk/parsers/vsan/show_vsan.py | akshatha-s13/mdssdk | 615a5528d0af1201e8fe8f305c62b258e5433990 | [
"Apache-2.0"
] | 12 | 2020-05-11T09:33:21.000Z | 2022-03-18T11:11:28.000Z | import logging
import re
log = logging.getLogger(__name__)
class ShowVsan(object):
def __init__(self, outlines, vsan_id=None):
self._all_vsans = []
self._group_dict = {}
self.vsan_id = vsan_id
self.process_all(outlines)
def process_all(self, outlines):
outlines = "".join([eachline.strip("\n") for eachline in outlines])
PAT_VSAN_INFO = "vsan\s(?P<vsan>\d*)(\sinformation\s+name:(?P<name>\S*)\s+state:(?P<state>\S*)\s+interoperability mode:(?P<interop_mode>\S*)\s+loadbalancing:(?P<load_balancing>\S*)\s+operational state:(?P<operational_state>\S*))?"
regex = re.compile(PAT_VSAN_INFO)
match = regex.finditer(outlines)
if match:
self._all_vsans = [m.groupdict() for m in match]
self._group_dict = next(
(v for v in self._all_vsans if v["vsan"] == str(self.vsan_id)), {}
)
log.debug(self._all_vsans)
log.debug(self._group_dict)
@property
def id(self):
vsan_id = self._group_dict.get("vsan", None)
if vsan_id is not None:
return int(vsan_id)
return None
@property
def name(self):
return self._group_dict.get("name", None)
@property
def state(self):
return self._group_dict.get("state", None)
@property
def interop_mode(self):
return self._group_dict.get("interop_mode", None)
@property
def load_balancing(self):
return self._group_dict.get("load_balancing", None)
@property
def operational_state(self):
return self._group_dict.get("operational_state", None)
@property
def vsans(self):
return self._all_vsans
| 30.052632 | 238 | 0.618214 | 228 | 1,713 | 4.390351 | 0.254386 | 0.080919 | 0.116883 | 0.095904 | 0.13986 | 0.13986 | 0.061938 | 0 | 0 | 0 | 0 | 0 | 0.24927 | 1,713 | 56 | 239 | 30.589286 | 0.778383 | 0 | 0 | 0.155556 | 0 | 0.022222 | 0.159953 | 0.122592 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.044444 | 0.133333 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
79e0555ac90dbd19cbb8156f73d726f47b43fd38 | 1,364 | py | Python | sdk/python/pulumi_aws/elasticbeanstalk/get_solution_stack.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/elasticbeanstalk/get_solution_stack.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/elasticbeanstalk/get_solution_stack.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-03-08T15:05:29.000Z | 2021-03-08T15:05:29.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from .. import utilities, tables
class GetSolutionStackResult:
"""
A collection of values returned by getSolutionStack.
"""
def __init__(__self__, name=None, id=None):
if name and not isinstance(name, str):
raise TypeError('Expected argument name to be a str')
__self__.name = name
"""
The name of the solution stack.
"""
if id and not isinstance(id, str):
raise TypeError('Expected argument id to be a str')
__self__.id = id
"""
id is the provider-assigned unique ID for this managed resource.
"""
async def get_solution_stack(most_recent=None,name_regex=None,opts=None):
"""
Use this data source to get the name of a elastic beanstalk solution stack.
"""
__args__ = dict()
__args__['mostRecent'] = most_recent
__args__['nameRegex'] = name_regex
__ret__ = await pulumi.runtime.invoke('aws:elasticbeanstalk/getSolutionStack:getSolutionStack', __args__, opts=opts)
return GetSolutionStackResult(
name=__ret__.get('name'),
id=__ret__.get('id'))
| 32.47619 | 120 | 0.662757 | 173 | 1,364 | 4.936416 | 0.508671 | 0.045667 | 0.037471 | 0.058548 | 0.105386 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000964 | 0.239736 | 1,364 | 41 | 121 | 33.268293 | 0.822565 | 0.169355 | 0 | 0 | 1 | 0 | 0.164586 | 0.061294 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.238095 | 0 | 0.380952 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8daeeb998fcc047c3c8326c959dd97aac46bcc95 | 3,291 | py | Python | bsddb3/bsddb3-6.2.6/make3.py | mpwillson/spambayes3 | b51d7bb9016066234ce88dad65faabed85f63d78 | [
"PSF-2.0"
] | 1 | 2020-03-21T15:17:22.000Z | 2020-03-21T15:17:22.000Z | bsddb3/bsddb3-6.2.6/make3.py | mpwillson/spambayes3 | b51d7bb9016066234ce88dad65faabed85f63d78 | [
"PSF-2.0"
] | 1 | 2022-02-22T22:23:55.000Z | 2022-02-22T22:23:55.000Z | bsddb3/bsddb3-6.2.6/make3.py | mpwillson/spambayes3 | b51d7bb9016066234ce88dad65faabed85f63d78 | [
"PSF-2.0"
] | null | null | null | #!/usr/bin/env python
"""
Copyright (c) 2008-2018, Jesus Cea Avion <jcea@jcea.es>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Jesus Cea Avion nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
"""
import sys, os
refactor_path="/usr/local/lib/python3.5/"
def copy2to3(path_from, path_to) :
files_to_convert = {}
if os.path.isdir(path_from) :
if path_from.endswith(".hg") : return {}
try :
os.mkdir(path_to)
except :
pass
for i in os.listdir(path_from) :
files_to_convert.update(copy2to3(path_from+"/"+i,path_to+"/"+i))
return files_to_convert
cwd = os.getcwd()
if (not path_from.endswith(".py")) or (os.path.exists(path_to) and \
(os.stat(path_from).st_mtime < os.stat(path_to).st_mtime)) :
return {}
if path_from[0] != "/" :
path_from = cwd+"/"+path_from
if path_to[0] != "/" :
path_to = cwd+"/"+path_to
files_to_convert[path_from] = path_to
try :
open(path_to, "w").write(open(path_from, "r").read())
except :
os.remove(path_to)
raise
return files_to_convert
def make2to3(path_from, path_to) :
files_to_convert = copy2to3(path_from, path_to)
retcode = 0
for path_from, path_to in files_to_convert.iteritems() :
print "*** Converting", path_to
try :
import subprocess
process = subprocess.Popen(["2to3", "-w", path_to], cwd=refactor_path)
retcode = process.wait()
except :
os.remove(path_to)
raise
try :
os.remove(path_to+".bak")
except :
pass
if retcode :
os.remove(path_to)
print "ERROR!"
return bool(retcode)
return bool(retcode)
print "Using '%s' for 2to3 conversion tool" %refactor_path
make2to3("setup2.py", "setup3.py")
make2to3("test2.py", "test3.py")
make2to3("Lib", "Lib3")
| 32.264706 | 82 | 0.681252 | 465 | 3,291 | 4.707527 | 0.419355 | 0.052079 | 0.044769 | 0.031978 | 0.155322 | 0.110553 | 0.087711 | 0.062129 | 0.062129 | 0.062129 | 0 | 0.015379 | 0.229414 | 3,291 | 101 | 83 | 32.584158 | 0.847792 | 0.006077 | 0 | 0.358491 | 0 | 0 | 0.08234 | 0.014196 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.037736 | 0.037736 | null | null | 0.056604 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dafeef9b1315dcf1a385861d1a2b35927ccbd53 | 514 | py | Python | tests/test_tree.py | Schoyen/arbitrary-woodland | 927fe791bcbde733ea283b15de7921a3115b8578 | [
"MIT"
] | null | null | null | tests/test_tree.py | Schoyen/arbitrary-woodland | 927fe791bcbde733ea283b15de7921a3115b8578 | [
"MIT"
] | null | null | null | tests/test_tree.py | Schoyen/arbitrary-woodland | 927fe791bcbde733ea283b15de7921a3115b8578 | [
"MIT"
] | null | null | null | import numpy as np
import sklearn.datasets as skd
import sklearn.model_selection as skms
import sklearn.metrics as skm
import sklearn.tree as skt
from arbitrary_woodland.tree import DecisionTree
def test_decision_tree():
X, y = skd.load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = skms.train_test_split(
X, y, test_size=0.2
)
tree = DecisionTree()
tree.fit(X_train, y_train)
pred = tree.predict(X_test)
assert skm.accuracy_score(y_test, pred) > 0.5
| 22.347826 | 61 | 0.731518 | 85 | 514 | 4.188235 | 0.470588 | 0.146067 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009569 | 0.18677 | 514 | 22 | 62 | 23.363636 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 1 | 0.066667 | false | 0 | 0.4 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
8db181ed61c9a3ae541faef9c9148c3595083f7c | 2,600 | py | Python | TestModel/migrations/0001_initial.py | WenGeYJ/Mail-Master-in-School | 040585569e5fe0bea9b6f93d6e4f5ed14e78da62 | [
"MIT"
] | null | null | null | TestModel/migrations/0001_initial.py | WenGeYJ/Mail-Master-in-School | 040585569e5fe0bea9b6f93d6e4f5ed14e78da62 | [
"MIT"
] | null | null | null | TestModel/migrations/0001_initial.py | WenGeYJ/Mail-Master-in-School | 040585569e5fe0bea9b6f93d6e4f5ed14e78da62 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11a1 on 2017-05-11 08:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='allBook',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('book_id', models.BigIntegerField()),
('ISBN', models.CharField(max_length=20)),
('name', models.CharField(max_length=20)),
('price', models.IntegerField()),
],
),
migrations.CreateModel(
name='favor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user_id', models.BigIntegerField()),
('book_id', models.BigIntegerField()),
],
),
migrations.CreateModel(
name='message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.TimeField()),
('date', models.DateField(auto_now_add=True)),
('book_id', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='student_users',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user_id', models.BigIntegerField(unique=True)),
('user_name', models.CharField(max_length=20)),
('name', models.CharField(max_length=20)),
('phone_number', models.CharField(max_length=15)),
('mail', models.EmailField(max_length=254)),
],
),
migrations.CreateModel(
name='subscribeBooks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user_id', models.BigIntegerField()),
('book_id', models.BigIntegerField()),
],
),
migrations.CreateModel(
name='Test',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
],
),
]
| 37.142857 | 114 | 0.536538 | 238 | 2,600 | 5.680672 | 0.294118 | 0.076923 | 0.093195 | 0.12426 | 0.614645 | 0.595414 | 0.570266 | 0.570266 | 0.570266 | 0.570266 | 0 | 0.019263 | 0.321154 | 2,600 | 69 | 115 | 37.681159 | 0.746742 | 0.026154 | 0 | 0.622951 | 1 | 0 | 0.069988 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.032787 | 0 | 0.098361 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8db22bddd1a7d85992cce67a92f1c7d93bd40095 | 6,564 | py | Python | world_creator/objects.py | lilSpeedwagon/zaWRka-project | e40c6520183c802e9c61faabeaa079bced4e8b00 | [
"MIT"
] | 1 | 2019-12-11T17:18:42.000Z | 2019-12-11T17:18:42.000Z | world_creator/objects.py | lilSpeedwagon/zaWRka-project | e40c6520183c802e9c61faabeaa079bced4e8b00 | [
"MIT"
] | null | null | null | world_creator/objects.py | lilSpeedwagon/zaWRka-project | e40c6520183c802e9c61faabeaa079bced4e8b00 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from data_structures import *
import copy
import logging as log
import converter
import math as m
from enum import Enum
import os
APP_VERSION = 1.0
class SignsTypes(Enum):
STOP = "stop sign"
ONLY_FORWARD = "only forward sign"
ONLY_RIGHT = "only right sign"
ONLY_LEFT = "only left sign"
FORWARD_OR_RIGHT = "forward or right sign"
FORWARD_OR_LEFT = "forward or left sign"
class ImagesPaths():
PATH_TO_IMAGE = 'models'
STOP = os.path.join(PATH_TO_IMAGE, 'brick-sign/brick.png')
ONLY_FORWARD = os.path.join(PATH_TO_IMAGE, 'forward-sign/forward.png')
ONLY_LEFT = os.path.join(PATH_TO_IMAGE, 'left-sign/left.png')
ONLY_RIGHT = os.path.join(PATH_TO_IMAGE, 'right-sign/right.png')
FORWARD_OR_LEFT = os.path.join(PATH_TO_IMAGE, 'forward-left-sign/frwd_left.png')
FORWARD_OR_RIGHT = os.path.join(PATH_TO_IMAGE, 'forward-right-sign/frwd_right.png')
def sign_path_to_sign_type(img_path):
    """Map a sign image path to the matching SignsTypes string value.

    Returns " " for unknown paths (legacy sentinel, kept for
    compatibility with existing callers).

    Bug fix: the original compared with ``is`` (object identity), which
    only matched when the exact ImagesPaths attribute object was passed
    in; an equal path string from any other source (e.g. loaded from a
    file) would silently fall through to the default.  Equality lookup
    handles both cases.
    """
    mapping = {
        ImagesPaths.STOP: SignsTypes.STOP.value,
        ImagesPaths.ONLY_FORWARD: SignsTypes.ONLY_FORWARD.value,
        ImagesPaths.ONLY_LEFT: SignsTypes.ONLY_LEFT.value,
        ImagesPaths.ONLY_RIGHT: SignsTypes.ONLY_RIGHT.value,
        ImagesPaths.FORWARD_OR_LEFT: SignsTypes.FORWARD_OR_LEFT.value,
        ImagesPaths.FORWARD_OR_RIGHT: SignsTypes.FORWARD_OR_RIGHT.value,
    }
    return mapping.get(img_path, " ")
def sign_type_to_sign_path(sign_type):
    """Inverse of sign_path_to_sign_type: sign type string -> image path.

    Returns " " when the type string is not recognized.
    """
    lookup = {
        SignsTypes.STOP.value: ImagesPaths.STOP,
        SignsTypes.ONLY_FORWARD.value: ImagesPaths.ONLY_FORWARD,
        SignsTypes.ONLY_LEFT.value: ImagesPaths.ONLY_LEFT,
        SignsTypes.ONLY_RIGHT.value: ImagesPaths.ONLY_RIGHT,
        SignsTypes.FORWARD_OR_LEFT.value: ImagesPaths.FORWARD_OR_LEFT,
        SignsTypes.FORWARD_OR_RIGHT.value: ImagesPaths.FORWARD_OR_RIGHT,
    }
    return lookup.get(sign_type, " ")
class ObjectType(Enum):
    """Discriminator tags carried by world object classes (TYPE attr).

    Bug fix: the original had a trailing comma after every value except
    TRAFFIC_LIGHT, turning those values into 1-tuples like ``(10,)``
    while TRAFFIC_LIGHT stayed a bare int -- clearly accidental and
    inconsistent.  All values are now plain ints.
    """
    START = 10
    WALL = 11
    BOX = 12
    SQUARE = 13
    SIGN = 14
    TRAFFIC_LIGHT = 15
class CellQuarter(Enum):
    """Which quarter of a map cell an object (e.g. a sign) occupies.

    The int value is what gets stored in serialized Sign objects
    ('orient' field), so the numbering must stay stable.
    """
    RIGHT_TOP = 0
    RIGHT_BOT = 1
    LEFT_TOP = 2
    LEFT_BOT = 3
class MapParams:
    """Geometry of the world grid: cell count, per-cell size, physical size."""
    def __init__(self, n_cells: Size2D, cell_sz: Size2D):
        self.n_cells = n_cells
        self.cell_sz = cell_sz
        # Physical world extent = number of cells * size of each cell.
        self.phys_size = Size2D(self.n_cells.x * self.cell_sz.x,
                                self.n_cells.y * self.cell_sz.y)
        print("World cells: count={0},size={1}".format(self.n_cells, self.cell_sz))
    def serialize(self):
        """Return a JSON-ready dict with the cell count and cell size."""
        data = {
            'cell_cnt': self.n_cells.as_list(),
            'cell_sz': self.cell_sz.as_list()
        }
        return data
    def __str__(self):
        return 'Map params: count({}) / size({})'.format(self.n_cells, self.cell_sz)
    @staticmethod
    def deserialize(data: dict):
        # Inverse of serialize().
        # NOTE(review): constructs Point2D although __init__ is annotated
        # with Size2D -- presumably the two types are interchangeable in
        # data_structures; verify before tightening the annotation.
        return MapParams(Point2D.from_list(data['cell_cnt']), Point2D.from_list(data['cell_sz']))
class Object:
    """Base class for serializable world objects (Sign, Box, ...)."""
    def render(self):
        # Overridden by subclasses (which additionally take a painter arg).
        pass
    def serialized(self):
        # Overridden by subclasses; returns a JSON-ready dict.
        pass
    @staticmethod
    def deserialize(data: dict):
        """Dispatch to the concrete class registered under data['name'].

        Returns None (after logging) for unknown object names.
        """
        if data['name'] not in SERIALIZATION_SUPPORT:
            log.error('Object type \'{}\' not found'.format(data['name']))
            return None
        return SERIALIZATION_SUPPORT[data['name']].deserialize(data)
class Wall():
    """A wall segment between two grid points.

    NOTE(review): unlike Sign/Box this class does not inherit Object --
    presumably historical; verify before unifying the hierarchy.
    """
    TYPE = ObjectType.WALL

    def __init__(self, point1, point2):
        self.p1 = point1
        self.p2 = point2

    def __str__(self):
        return "[({}) p1 = {}, p2 = {}]".format(type(self), self.p1, self.p2)

    def distance_2_point(self, pnt):
        """Shortest distance from *pnt* to the segment p1-p2.

        Falls back to the nearest endpoint when the perpendicular
        projection of *pnt* lies outside the segment.
        """
        # Local import keeps numpy optional for callers that never need
        # distance queries.  Unused names the original imported
        # (array, pi, det) have been dropped.
        import numpy
        from numpy import arccos, dot, cross
        from numpy.linalg import norm
        A = numpy.array(self.p1.as_list())
        B = numpy.array(self.p2.as_list())
        P = numpy.array(pnt.as_list())
        # Angle at A > 90 deg: projection falls before A.
        if arccos(dot((P - A) / norm(P - A), (B - A) / norm(B - A))) > m.pi / 2:
            return norm(P - A)
        # Angle at B > 90 deg: projection falls past B.
        if arccos(dot((P - B) / norm(P - B), (A - B) / norm(A - B))) > m.pi / 2:
            return norm(P - B)
        # Otherwise: perpendicular distance to the supporting line.
        return norm(cross(A - B, A - P)) / norm(B - A)

    def render(self, qp):
        qp.drawWallLine(self.p1, self.p2, color=(0, 0, 0))

    def serialized(self):
        """Serialize to the dict format used by Object.deserialize()."""
        # Look up our registered name in the serialization registry.
        for name, _class in SERIALIZATION_SUPPORT.items():
            if type(self) == _class:
                break
        data = {
            'name': name,
            'pnts': self.p1.as_list() + self.p2.as_list()
        }
        return data

    @staticmethod
    def deserialize(data: dict):
        """Rebuild a Wall from serialized()'s output."""
        return Wall(Point2D.from_list(data['pnts'][0:2]),
                    Point2D.from_list(data['pnts'][2:4]))
class Sign(Object):
    """A traffic sign placed in one quarter of a map cell."""
    TYPE = ObjectType.SIGN

    def __init__(self, pos, orient, signType):
        self.pos = pos
        self.type = signType
        self.orient = orient

    def __str__(self):
        return "[({}) pose = {}, orient = {}, type = {}]".format(type(self), self.pos, self.orient, self.type)

    def render(self, qp):
        # The painter resolves the sign type back to its texture path.
        qp.drawQuarterImg(self.pos, self.orient, sign_type_to_sign_path(self.type))

    def serialized(self):
        """Serialize to the dict format used by Object.deserialize()."""
        # Look up our registered name in the serialization registry.
        for key, cls in SERIALIZATION_SUPPORT.items():
            if type(self) == cls:
                break
        return {
            'name': key,
            'pos': self.pos.as_list(),
            'orient': self.orient.value,
            'type': self.type
        }

    @staticmethod
    def deserialize(data: dict):
        """Rebuild a Sign from serialized()'s output."""
        position = Point2D.from_list(data['pos'])
        return Sign(position, CellQuarter(data['orient']), data['type'])
class Box(Object):
    """A solid obstacle occupying one full map cell."""
    TYPE = ObjectType.BOX

    def __init__(self, pos: Point2D):
        self.pos = pos

    def render(self, qp):
        # Boxes are drawn as a grey filled cell.
        qp.fillCell(self.pos, color=(150, 150, 150))

    def serialized(self):
        """Serialize to the dict format used by Object.deserialize()."""
        # Look up our registered name in the serialization registry.
        for key, cls in SERIALIZATION_SUPPORT.items():
            if type(self) == cls:
                break
        return {
            'name': key,
            'pos': self.pos.as_list()
        }

    @staticmethod
    def deserialize(data: dict):
        """Rebuild a Box from serialized()'s output."""
        return Box(Point2D.from_list(data['pos']))
# Registry mapping serialized 'name' tags to their classes; consumed by
# Object.deserialize() and by the serialized() methods above.
SERIALIZATION_SUPPORT = {
    'wall': Wall,
    'sign': Sign,
    'box': Box
}
| 29.044248 | 110 | 0.593693 | 849 | 6,564 | 4.391048 | 0.162544 | 0.033798 | 0.026288 | 0.022532 | 0.392167 | 0.263949 | 0.222908 | 0.141631 | 0.107833 | 0.107833 | 0 | 0.014139 | 0.288848 | 6,564 | 226 | 111 | 29.044248 | 0.78449 | 0.003199 | 0 | 0.271186 | 0 | 0 | 0.07764 | 0.013449 | 0 | 0 | 0 | 0 | 0 | 1 | 0.135593 | false | 0.011299 | 0.056497 | 0.039548 | 0.559322 | 0.00565 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8db2a42c81a6be04ff526dac7fe7fcca39a3412e | 1,968 | py | Python | theano/tensor/io.py | jsalvatier/Theano-1 | 457bae18a0d3284841b9bcae5ce4ceee22f33132 | [
"BSD-3-Clause"
] | null | null | null | theano/tensor/io.py | jsalvatier/Theano-1 | 457bae18a0d3284841b9bcae5ce4ceee22f33132 | [
"BSD-3-Clause"
] | null | null | null | theano/tensor/io.py | jsalvatier/Theano-1 | 457bae18a0d3284841b9bcae5ce4ceee22f33132 | [
"BSD-3-Clause"
] | null | null | null | import numpy
import theano
from theano import gof
from theano.gof import Apply, Constant, Generic, Op, Type, Value, Variable
from basic import tensor
##########################
# Disk Access
##########################
class LoadFromDisk(Op):
    """
    An operation to load an array from disk.

    Loads a .npy file with numpy.load, optionally memory-mapped.

    See Also
        load

    @note: Non-differentiable.
    """
    def __init__(self, dtype, broadcastable, mmap_mode=None):
        self.dtype = numpy.dtype(dtype)  # turn "float64" into numpy.float64
        self.broadcastable = broadcastable
        self.mmap_mode = mmap_mode
        # Single tuple reused for equality, hashing and printing.
        self._info = (dtype, broadcastable, mmap_mode)

    def __eq__(self, other):
        return (type(self) == type(other) and self._info == other._info)

    def __hash__(self):
        return hash(self._info)

    def make_node(self, path):
        # Promote a plain string path to a symbolic constant.
        if isinstance(path, str):
            path = Constant(Generic(), path)
        return gof.Apply(self, [path], [tensor(self.dtype,
                                        broadcastable=self.broadcastable)])

    def perform(self, node, inp, out):
        path = inp[0]
        # Only .npy is supported; .npz archives would load as a lazy
        # NpzFile, not an ndarray.
        if path.endswith('.npz'):
            raise ValueError("Expected a .npy file, got %s instead" % path)
        result = numpy.load(path, mmap_mode=self.mmap_mode)
        if result.dtype != self.dtype:
            raise TypeError("Expected an array of type %s, got %s instead" %
                            (self.dtype, result.dtype))
        out[0][0] = result

    def __str__(self):
        # Bug fix: the original printed "mmep" instead of "mmap".
        return "Load{dtype:%s, broadcastable:%s, mmap:%s}" % self._info
def load(path, dtype, broadcastable, mmap_mode=None):
    """
    Load an array from an .npy file

    :param path: symbolic (or string) path to the .npy file
    :param dtype: expected dtype of the stored array (checked at runtime)
    :param broadcastable: broadcastable pattern of the resulting tensor
    :param mmap_mode: forwarded to numpy.load (e.g. 'c' to memory-map)

    >>> from theano import *
    >>> path = Variable(Generic())
    >>> x = tensor.load(path, 'int64', (False,))
    >>> y = x*2
    >>> fn = function([path], y)
    >>> fn("stored-array.npy")
    array([0, 2, 4, 6, 8], dtype=int64)
    """
    return LoadFromDisk(dtype, broadcastable, mmap_mode)(path)
| 30.276923 | 75 | 0.582317 | 242 | 1,968 | 4.61157 | 0.338843 | 0.057348 | 0.078853 | 0.09319 | 0.053763 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012371 | 0.260671 | 1,968 | 64 | 76 | 30.75 | 0.754639 | 0.188516 | 0 | 0 | 0 | 0 | 0.084918 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.212121 | false | 0 | 0.151515 | 0.090909 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8db47e9494f2e9c57b0c17311001fa23717393a2 | 1,982 | py | Python | file_path/proc_dup_files.py | daineseh/python_code | 1c2c45a62329fbc87ed9a486b3382b112948ba73 | [
"MIT"
] | null | null | null | file_path/proc_dup_files.py | daineseh/python_code | 1c2c45a62329fbc87ed9a486b3382b112948ba73 | [
"MIT"
] | null | null | null | file_path/proc_dup_files.py | daineseh/python_code | 1c2c45a62329fbc87ed9a486b3382b112948ba73 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
import os
import sys
def convert_bytes(bytes):
    """Format a byte count as a human-readable string (b/K/M/G/T)."""
    bytes = float(bytes)
    # Thresholds checked largest-first; first match wins.
    units = (
        (1099511627776, 'T'),
        (1073741824, 'G'),
        (1048576, 'M'),
        (1024, 'K'),
    )
    for limit, suffix in units:
        if bytes >= limit:
            return '%.2f%s' % (bytes / limit, suffix)
    return '%sb' % bytes
def main():
if len(sys.argv) < 2:
print 'Input a directory.'
return False
if not os.path.isdir(sys.argv[1]):
print 'Invalid directory - %s' % sys.argv[1]
name_list = []
duplicate_list = []
size_count = 0
for dir_path, dir_list, file_list in os.walk(sys.argv[1]):
for file_name in file_list:
lowercase = file_name.lower()
if lowercase not in name_list:
name_list.append(lowercase)
continue
path = os.path.join(dir_path, file_name)
duplicate_list.append(path)
statinfo = os.stat(path)
size_count += statinfo.st_size
for dup_path in duplicate_list:
statinfo = os.stat(dup_path)
print '%s - %s' % (dup_path, convert_bytes(statinfo.st_size))
if duplicate_list:
print '-' * 12
print 'Total %s files is duplicated. (%s)' % (len(duplicate_list), convert_bytes(size_count))
input_str = raw_input('Do you want to remove this files: [Y/N]')
if input_str.upper() == 'Y':
for idx, dup in enumerate(duplicate_list, start=1):
try:
os.remove(dup)
print '[%s]%s removed.' % (idx, dup)
except OSError, e:
print e
else:
print 'No duplicate file.'
return True
if __name__ == '__main__':
main()
| 27.915493 | 101 | 0.553481 | 240 | 1,982 | 4.408333 | 0.370833 | 0.073724 | 0.022684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.061644 | 0.337033 | 1,982 | 70 | 102 | 28.314286 | 0.743531 | 0.010595 | 0 | 0.035088 | 0 | 0 | 0.094946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.035088 | null | null | 0.140351 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8db775c0cc59f4c1a94f8619d4ddee6b13af06fa | 2,459 | py | Python | nomad/api/allocations.py | shinespb/python-nomad | d5dd6f89dc82a1a2ccc33858ace1257a1df0d29c | [
"MIT"
] | null | null | null | nomad/api/allocations.py | shinespb/python-nomad | d5dd6f89dc82a1a2ccc33858ace1257a1df0d29c | [
"MIT"
] | null | null | null | nomad/api/allocations.py | shinespb/python-nomad | d5dd6f89dc82a1a2ccc33858ace1257a1df0d29c | [
"MIT"
] | null | null | null | from nomad.api.base import Requester
class Allocations(Requester):

    """
    The allocations endpoint is used to query the status of allocations.
    By default, the agent's local region is used; another region can be
    specified using the ?region= query parameter.

    https://www.nomadproject.io/docs/http/allocs.html
    """
    ENDPOINT = "allocations"

    def __init__(self, **kwargs):
        super(Allocations, self).__init__(**kwargs)

    def __str__(self):
        return "{0}".format(self.__dict__)

    def __repr__(self):
        return "{0}".format(self.__dict__)

    def __getattr__(self, item):
        raise AttributeError

    def __len__(self):
        # Number of allocations currently known to the agent.
        return len(self.get_allocations())

    def __iter__(self):
        return iter(self.get_allocations())

    def get_allocations(self, prefix=None):
        """ Lists all the allocations.

           https://www.nomadproject.io/docs/http/allocs.html

            arguments:
              - prefix :(str) optional, filters allocations on an id prefix
                        (sent as a querystring parameter)
            returns: list of dicts
            raises:
              - nomad.api.exceptions.BaseNomadException
              - nomad.api.exceptions.URLNotFoundNomadException
        """
        return self.request(method="get", params={"prefix": prefix}).json()

    def get_allocations_by_status(self, status='running', prefix=None):
        """ Lists the IDs of allocations with a particular client status.
            By default, returns only running allocations.

           https://www.nomadproject.io/docs/http/allocs.html

            arguments:
              - status :(str) optional, client status to filter on
              - prefix :(str) optional, filters allocations on an id prefix
                        (sent as a querystring parameter)
            returns: list
            raises:
              - nomad.api.exceptions.BaseNomadException
              - nomad.api.exceptions.URLNotFoundNomadException
        """
        # Filtering by status happens client-side; the API call itself
        # only honors the prefix.
        allocations = self.request(method="get", params={"prefix": prefix}).json()
        return [alloc['ID'] for alloc in allocations
                if alloc['ClientStatus'] == status]
| 34.633803 | 103 | 0.620171 | 270 | 2,459 | 5.481481 | 0.348148 | 0.027027 | 0.048649 | 0.044595 | 0.577703 | 0.577703 | 0.529054 | 0.442568 | 0.415541 | 0.415541 | 0 | 0.001141 | 0.287109 | 2,459 | 70 | 104 | 35.128571 | 0.843126 | 0.460756 | 0 | 0.222222 | 0 | 0 | 0.052336 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.296296 | false | 0 | 0.037037 | 0.074074 | 0.62963 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dba6df40271f493e222a50eeabc7aca8d53155d | 752 | py | Python | rvpvp/isa/rvv/vwmxxxx_vv.py | ultrafive/riscv-pvp | 843e38422c3d545352b955764927d5e7847e5453 | [
"Unlicense"
] | 5 | 2021-05-10T09:57:00.000Z | 2021-10-05T14:39:20.000Z | rvpvp/isa/rvv/vwmxxxx_vv.py | ultrafive/riscv-pvp | 843e38422c3d545352b955764927d5e7847e5453 | [
"Unlicense"
] | null | null | null | rvpvp/isa/rvv/vwmxxxx_vv.py | ultrafive/riscv-pvp | 843e38422c3d545352b955764927d5e7847e5453 | [
"Unlicense"
] | 1 | 2021-05-14T20:24:11.000Z | 2021-05-14T20:24:11.000Z | from ...isa.inst import *
import numpy as np
class Vwmacc_vv(Inst):
    """Golden model for vwmacc.vv (vector widening multiply-accumulate)."""
    name = 'vwmacc.vv'

    # vwmacc.vv vd, vs1, vs2, vm
    def golden(self):
        if self['vl'] == 0:
            return self['ori']
        res = self['ori'].copy()
        first = self['vstart'] if 'vstart' in self else 0
        # Unpack the mask bits once up front (LSB-first); None means
        # the instruction is unmasked and every element is active.
        bits = np.unpackbits(self['mask'], bitorder='little') if 'mask' in self else None
        for idx in range(first, self['vl']):
            if bits is None or bits[idx]:
                # Widen via Python ints (astype(object)) so the product
                # cannot overflow the element dtype.
                res[idx] = self['vs2'][idx].astype(object) * self['vs1'][idx] + self['ori'][idx].astype(object)
        return res
# Unsigned variant: only the mnemonic differs; golden() is inherited.
# NOTE(review): signedness handling is presumably done via operand
# preparation elsewhere -- verify, since golden() is reused unchanged.
class Vwmaccu_vv(Vwmacc_vv):
    name = 'vwmaccu.vv'
# Signed*unsigned variant: only the mnemonic differs; golden() is inherited.
class Vwmaccsu_vv(Vwmacc_vv):
    name = 'vwmaccsu.vv'
| 32.695652 | 109 | 0.56516 | 106 | 752 | 3.962264 | 0.40566 | 0.095238 | 0.071429 | 0.052381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018282 | 0.272606 | 752 | 22 | 110 | 34.181818 | 0.749543 | 0.034574 | 0 | 0 | 0 | 0 | 0.103878 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.111111 | 0 | 0.611111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
8dbd46d1ac00ab6d41e5be8b24f85f3d76541844 | 423 | py | Python | support/cross/aio/atexit.py | pmp-p/python-wasm-plus | ccd34cf46fc00924e256a0ad1d8cf5061b9520ac | [
"MIT"
] | 3 | 2022-03-11T22:02:39.000Z | 2022-03-16T08:04:33.000Z | support/cross/aio/atexit.py | pmp-p/python-wasm-plus | ccd34cf46fc00924e256a0ad1d8cf5061b9520ac | [
"MIT"
] | null | null | null | support/cross/aio/atexit.py | pmp-p/python-wasm-plus | ccd34cf46fc00924e256a0ad1d8cf5061b9520ac | [
"MIT"
] | null | null | null | plan = []
def register(func, *args, **kwargs):
global plan
plan.append( (func,arg,kwargs,) )
def unregister(func):
global plan
todel = []
for i,elem in enumerate(plan)
if elem[0] is func:
todel.append(i)
while len(todel):
plan.pop( todel.pop() )
def exiting():
    """Run all registered callbacks, LIFO like the stock atexit module.

    NOTE(review): the original body was syntactically broken (a bare
    ``while len(plan):`` with no statements under it); draining the plan
    and invoking each callback appears to be the intent -- verify.
    """
    while len(plan):
        func, args, kwargs = plan.pop()
        func(*args, **kwargs)

# replace stock one
import sys
sys.modules['atexit'] = sys.modules['aio.atexit']
| 16.92 | 50 | 0.593381 | 57 | 423 | 4.403509 | 0.54386 | 0.079681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003195 | 0.260047 | 423 | 24 | 51 | 17.625 | 0.798722 | 0.040189 | 0 | 0.125 | 0 | 0 | 0.039604 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.0625 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dc0455393c09deb7fec8d707856281119f0d40f | 8,631 | py | Python | amla/common/schedule.py | tremblerz/amla | 6ab000d6e7703cc5591451e324c9882408943816 | [
"Apache-2.0"
] | 118 | 2018-06-25T05:01:22.000Z | 2022-03-07T05:46:08.000Z | amla/common/schedule.py | tremblerz/amla | 6ab000d6e7703cc5591451e324c9882408943816 | [
"Apache-2.0"
] | 22 | 2018-06-12T21:22:37.000Z | 2020-04-08T07:59:49.000Z | amla/common/schedule.py | tremblerz/amla | 6ab000d6e7703cc5591451e324c9882408943816 | [
"Apache-2.0"
] | 21 | 2018-07-20T22:01:30.000Z | 2021-05-30T08:57:50.000Z | #Copyright 2018 Cisco Systems All Rights Reserved
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""Task Schedule
"""
import collections
import operator
import json
class Schedule():
    """
    Base class for schedule
    TODO: Make ABC
    A task is in one of 4 states: init, running, waiting, complete
    """
    def __init__(self):
        return
    def get_next_task(self, tasks):
        """
        Scheduling algorithm:
        Find oldest task where state is "init"
        or "waiting" and all "waiting_for" dependencies are complete.
        Returns the chosen task (already marked 'running'), or None.
        TODO: Topological sort of task graph
        """
        if len(tasks) == 0:
            return None
        else:
            schedulable =[]
            for task in tasks:
                if task["state"] == "init":
                    schedulable.append(task)
                elif task["state"] == "waiting":
                    schedule = True
                    for task_id in task["waiting_for"]:
                        #Search for task in schedule
                        #TODO: Create an index
                        for t in tasks:
                            if task_id == t["task_id"]:
                                if t["state"] != "complete":
                                    schedule = False
                                    break
                    if schedule == True:
                        schedulable.append(task)
                        #After the tasks complete, then the iteration needs to be
                        #increased before this task is resumed
                        #TODO: Specific to generate tasks
                        #Move elsewhere?
                        #NOTE(review): every schedulable *waiting* task is
                        #marked 'running' here, not only the one returned
                        #below -- presumably intentional (resumed tasks are
                        #not re-picked), but verify.
                        task['iteration']+=1
                        task['state'] = 'running'
                        task['waiting_for'] = [];
            task = None
            #TODO: Sort schedulable based on task id
            if len(schedulable) > 0:
                # Oldest task (lowest task_id) wins.
                schedulable.sort(key=operator.itemgetter('task_id'))
                task = schedulable[0]
                task['state'] = 'running'
                task['waiting_for'] = [];
            return task
class ScheduleMem(Schedule):
    """
    Implements a schedule of tasks stored in memory.
    Can be used only when AMLA is used in single host mode.
    Currently a FIFO queue kept in a collections.deque.
    Note: Not safe for concurrent access.
    """
    def __init__(self):
        #TODO: Assert in single host mode
        self.tasks = collections.deque()
        self.nexttask_id = 0

    def add(self, t):
        """Add a task to the schedule; assigns a fresh task_id and
        returns the stored task dict."""
        task = {
            'task_id': self.nexttask_id,
            'config': t['config'],
            'state': 'init',
            'iteration': t['iteration'] if 'iteration' in t else 0,
        }
        self.tasks.append(task)
        self.nexttask_id += 1
        return task

    def update(self, task):
        """Copy every field of *task* onto the stored task with the same id."""
        for elem in self.tasks:
            if elem['task_id'] == task['task_id']:
                for key in task:
                    elem[key] = task[key]
        return

    def delete(self, task):
        """Remove the task from the schedule; returns its id, or -1 when
        *task* is falsy.

        Bug fix: the original located the matching element but never
        actually removed it from the deque.
        """
        if not task:
            return -1
        for elem in self.tasks:
            if elem['task_id'] == task['task_id']:
                self.tasks.remove(elem)
                break
        return task['task_id']

    def get(self, task):
        """Return the stored task with the same id (or, preserving legacy
        behavior, the last element scanned when there is no match)."""
        elem = None
        for elem in self.tasks:
            if elem['task_id'] == task['task_id']:
                break
        return elem

    def get_next(self):
        """Get the next task to be scheduled (see Schedule.get_next_task)."""
        if len(self.tasks) == 0:
            return None
        return self.get_next_task(self.tasks)

    def get_all(self):
        """Return all tasks as a list (shallow copy of the deque)."""
        return list(self.tasks)
class ScheduleDB(Schedule):
    """
    Implements a schedule of tasks stored in a DB.
    Currently uses mysql, with transactions to support
    concurrent schedulers.
    """
    def __init__(self, sys_config):
        # Imported lazily so the module loads without MySQLdb installed.
        import MySQLdb
        host = sys_config["database"]["host"]
        user = sys_config["database"]["user"]
        passwd = sys_config["database"]["password"]
        db = sys_config["database"]["db"]
        self.db = MySQLdb.connect(host=host,
                                  user=user,
                                  passwd=passwd,
                                  db=db)
        self.cur = self.db.cursor()
        # Idempotent: creates the schedule table on first use.
        query = "CREATE TABLE IF NOT EXISTS schedule ( \
            task_id INT(11) NOT NULL AUTO_INCREMENT, \
            config VARCHAR(1024) DEFAULT NULL, \
            state VARCHAR(32) DEFAULT 'init', \
            steps INT(11) DEFAULT 0, \
            iteration INT(11) DEFAULT 0, \
            waiting_for VARCHAR(1024) DEFAULT NULL, \
            PRIMARY KEY(task_id)) ENGINE=InnoDB;"
        self.cur.execute(query)
        self.db.commit()
        return
    def __del__(self):
        self.db.close()
    def add(self, task):
        """ Add a task to the schedule
        """
        #Task_id is Find task_id, increment and add new task
        #Must be atomic
        iteration = 0
        if 'iteration' in task:
            iteration = task['iteration']
        # SECURITY(review): task['config'] is concatenated straight into
        # the SQL string -- SQL injection risk; should use a
        # parameterized query (cur.execute(sql, params)).
        query = "INSERT INTO schedule (config, iteration, state, waiting_for) VALUES \
            ('"+task['config']+"', "+str(iteration)+", 'init', '[]');"
        self.cur.execute(query)
        self.db.commit()
        # AUTO_INCREMENT id assigned by the DB.
        task_id = self.cur.lastrowid
        task['task_id'] = task_id
        return task
    def update(self, task):
        #TODO
        if 'waiting_for' not in task:
            task['waiting_for'] = []
        # waiting_for is stored as a JSON-encoded list in a VARCHAR column.
        # SECURITY(review): same string-concatenation injection risk as add().
        query = "UPDATE schedule set state= '"+task['state']+"', waiting_for='"\
            +json.dumps(task['waiting_for'])+"' WHERE task_id = "+str(task['task_id'])+";"
        self.cur.execute(query)
        self.db.commit()
        return
    def delete(self, task):
        # Remove the row; returns the deleted task's id.
        query = "DELETE FROM schedule WHERE task_id = "+str(task['task_id'])+";"
        self.cur.execute(query)
        self.db.commit()
        return task['task_id']
    def get(self, task):
        # Fetch a single task by id; returns a fresh dict (iteration and
        # waiting_for columns are not included here).
        query = "SELECT task_id, config, state FROM schedule WHERE task_id = "+str(task['task_id'])+";"
        self.cur.execute(query)
        row = self.cur.fetchone()
        task = {"task_id": row[0], "config": row[1], "state": row[2]}
        return task
    def get_next(self):
        """Get the next task to be scheduled
        Gets the task with the least task_id (oldest task) whose state is 'init'
        """
        # Transaction guards against two schedulers picking the same task.
        self.db.autocommit(False)
        self.cur.execute("START TRANSACTION;")
        task = None
        try:
            query = "SELECT task_id, config, state, iteration, waiting_for FROM schedule \
                WHERE state='init' OR state='waiting';"
            self.cur.execute(query)
            rows = self.cur.fetchall()
            if len(rows) == 0:
                #No tasks to schedule
                #NOTE(review): early return leaves the started transaction
                #open (no rollback/commit here) -- verify this is safe.
                return None
            tasks = []
            for row in rows:
                task = {"task_id": row[0], "config": row[1], "state": row[2],\
                    "iteration": int(row[3]), "waiting_for": json.loads(row[4])}
                tasks.append(task)
            # Selection logic shared with the in-memory schedule.
            task = self.get_next_task(tasks)
            if task == None:
                self.db.rollback()
                return None
            query = "UPDATE schedule set state = 'running', waiting_for='[]', \
                iteration='"+str(task['iteration'])+"' WHERE task_id = "+str(task['task_id'])+";"
            self.cur.execute(query)
            self.db.commit()
        except:
            # NOTE(review): bare except hides the real error; should catch
            # the DB exception type and log it.
            print("Error: Could not commit transaction. Rolling back")
            self.db.rollback()
        return task
    def get_all(self):
        # Return every task row as a list of dicts.
        query = "SELECT task_id, config, state FROM schedule;"
        self.cur.execute(query)
        rows = self.cur.fetchall()
        tasks = []
        for row in rows:
            task = {"task_id": row[0], "config": row[1], "state": row[2]}
            tasks.append(task)
        return tasks
| 34.25 | 103 | 0.526011 | 998 | 8,631 | 4.46493 | 0.233467 | 0.051167 | 0.031418 | 0.034111 | 0.319345 | 0.267729 | 0.225539 | 0.218582 | 0.144524 | 0.134874 | 0 | 0.008956 | 0.366122 | 8,631 | 251 | 104 | 34.386454 | 0.80552 | 0.202062 | 0 | 0.440476 | 0 | 0 | 0.121249 | 0.003301 | 0 | 0 | 0 | 0.023904 | 0 | 1 | 0.10119 | false | 0.011905 | 0.02381 | 0.011905 | 0.267857 | 0.005952 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dc3441aa92017412470a1748b4a4a47bc237d9a | 1,783 | py | Python | Exercise 7: More Printing.py | EarthBeLost/Learning.Python | a3d5e662082d5cb17502ca87a748e1940a4b6a6c | [
"CC-BY-4.0"
] | null | null | null | Exercise 7: More Printing.py | EarthBeLost/Learning.Python | a3d5e662082d5cb17502ca87a748e1940a4b6a6c | [
"CC-BY-4.0"
] | null | null | null | Exercise 7: More Printing.py | EarthBeLost/Learning.Python | a3d5e662082d5cb17502ca87a748e1940a4b6a6c | [
"CC-BY-4.0"
] | null | null | null | print "Mary had a little lamb." # Prints "Mary had a little lamb."
print "It's fleece was white as %s." % 'snow' # Prints "It's fleece was white as snow."
print "And everywhere that Mary went." # Prints "And everywhere that Mary went."
print "." * 10 # What'd that do? I'm sure it prints full stop 10 times.
end1 = "C" # Makes the variable end1 "C"
end2 = "h" # Makes the variable end2 "h"
end3 = "e" # Makes the variable end3 "e"
end4 = "e" # Makes the variable end4 "e"
end5 = "s" # Makes the variable end5 "s"
end6 = "e" # Makes the variable end6 "e"
end7 = "B" # Makes the variable end7 "B"
end8 = "u" # Makes the variable end8 "u"
end9 = "r" # Makes the variable end9 "r"
end10 = "g" # Makes the variable end10 "g"
end11 = "e" # Makes the variable end11 "e"
end12 = "r" # Makes the variable end12 "r"
# Watch that comma at the end, try removing it and seeing what it does!
# Prediction = Moves it to a new line...
# Edit - I was wrong, turns out it seperated it, removing the comma moves it to a new line.
print end1 + end2 + end3 + end4 + end5 + end6, # Prints the variables 1 - 6, which will be "Cheese"
print end7 + end8 + end9 + end10 + end11 + end12 # Prints the variables 7 - 12, which will be "Burger"
# === STUDY DRILLS ===
# 1. Go back through and write a comment on what each line does.
# 2. Read each one backward or out loud to find your errors.
# 3. From now on, when you make mistakes, write down on a piece of paper what kind of mistake you made.
# 4. When you go to the next exercise, look at the mistakes you have made and try not to make them in this new one.
# 5. Remember that everyone makes mistakes. Programmers are like magicians who fool everyone into thinking they are perfect and never wrong, but it's all an act. They make mistakes all the time.
| 55.71875 | 194 | 0.693775 | 317 | 1,783 | 3.902208 | 0.435331 | 0.077607 | 0.155214 | 0.054972 | 0.127728 | 0.058205 | 0 | 0 | 0 | 0 | 0 | 0.041903 | 0.21032 | 1,783 | 31 | 195 | 57.516129 | 0.836648 | 0.76332 | 0 | 0 | 0 | 0 | 0.248731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dc368a1afac94cd8daf10a5a2d880bfab5d58ef | 1,173 | py | Python | engine/manual_flappy_engine.py | OscarGarciaPeinado/flappy_bird | ceb254392e37ccc5ab24d888712f3dde8add1315 | [
"Apache-2.0"
] | 5 | 2019-01-27T00:05:03.000Z | 2022-01-24T11:55:51.000Z | engine/manual_flappy_engine.py | JoanMartin/flappy_bird | ceb254392e37ccc5ab24d888712f3dde8add1315 | [
"Apache-2.0"
] | null | null | null | engine/manual_flappy_engine.py | JoanMartin/flappy_bird | ceb254392e37ccc5ab24d888712f3dde8add1315 | [
"Apache-2.0"
] | 1 | 2019-01-27T19:39:31.000Z | 2019-01-27T19:39:31.000Z | # coding: utf-8
import pygame
from engine.flappy_engine import FlappyEngine
from entities.bird import Bird
class ManualFlappyEngine(FlappyEngine):
    """Flappy engine driven by keyboard input (space/up = jump)."""

    def __init__(self):
        self.birds = [Bird(name="Manual")]

    def get_birds(self):
        return self.birds

    def on_update(self, next_pipe_x, next_pipe_y):
        # Pipe coordinates are unused here: a human player steers.
        for b in self.birds:
            b.refresh()

    def draw(self, screen):
        for b in self.birds:
            if b.dead:
                continue
            screen.blit(b.image, b.rect)

    def on_event(self, event):
        if event.type != pygame.KEYDOWN:
            return
        if event.key in (pygame.K_SPACE, pygame.K_UP):
            for b in self.birds:
                b.jump()

    def check_pipes_collision(self, pipes):
        for b in self.birds:
            if pipes.is_collision(b.rect):
                b.dead = True

    def check_floor_collision(self, floor):
        for b in self.birds:
            if floor.rect.y < b.rect.centery:
                b.dead = True

    def on_finish(self, game, score_panel):
        # Imported here to avoid a circular import with the scenes package.
        from scenes.home_scene import HomeScene
        game.change_scene(HomeScene(game))
| 27.27907 | 102 | 0.617221 | 158 | 1,173 | 4.436709 | 0.405063 | 0.089872 | 0.064194 | 0.092725 | 0.148359 | 0.148359 | 0 | 0 | 0 | 0 | 0 | 0.001199 | 0.289003 | 1,173 | 42 | 103 | 27.928571 | 0.839329 | 0.011083 | 0 | 0.233333 | 0 | 0 | 0.005181 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.266667 | false | 0 | 0.133333 | 0.033333 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dc66bddfa66fadc0b4d769c980f9102881c7113 | 1,436 | py | Python | pikos/monitors/api.py | enthought/pikos | 21255028103a85a0b2761e6b54c74f9d066c2c4a | [
"PSF-2.0",
"BSD-3-Clause"
] | 3 | 2015-01-31T18:08:45.000Z | 2017-05-20T18:19:59.000Z | pikos/monitors/api.py | enthought/pikos | 21255028103a85a0b2761e6b54c74f9d066c2c4a | [
"PSF-2.0",
"BSD-3-Clause"
] | 7 | 2015-01-21T11:20:08.000Z | 2015-03-20T14:20:46.000Z | pikos/monitors/api.py | enthought/pikos | 21255028103a85a0b2761e6b54c74f9d066c2c4a | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Package: Pikos toolkit
# File: monitors/api.py
# License: LICENSE.TXT
#
# Copyright (c) 2014, Enthought, Inc.
# All rights reserved.
#----------------------------------------------------------------------------
# Public API of the monitors subpackage.  The *Memory* monitors listed
# here are only actually importable when psutil is installed (see the
# guarded imports below in this module).
__all__ = [
    'FunctionMonitor',
    'FunctionMemoryMonitor',
    'LineMemoryMonitor',
    'LineMonitor',
    'FocusedFunctionMemoryMonitor',
    'FocusedLineMonitor',
    'FocusedLineMemoryMonitor',
    'FocusedFunctionMonitor',
    'MonitorAttach',
    'Monitor'
]
from pikos.monitors.function_monitor import FunctionMonitor
from pikos.monitors.line_monitor import LineMonitor
from pikos.monitors.focused_function_monitor import FocusedFunctionMonitor
from pikos.monitors.focused_line_monitor import FocusedLineMonitor
from pikos._internal.monitor_attach import MonitorAttach
from pikos.monitors.monitor import Monitor
try:
import psutil
except ImportError:
import warnings
warnings.warn('Could not import psutil. Memory monitors are not available')
else:
from pikos.monitors.function_memory_monitor import FunctionMemoryMonitor
from pikos.monitors.line_memory_monitor import LineMemoryMonitor
from pikos.monitors.focused_function_memory_monitor import \
FocusedFunctionMemoryMonitor
from pikos.monitors.focused_line_memory_monitor import \
FocusedLineMemoryMonitor
| 34.190476 | 79 | 0.696379 | 128 | 1,436 | 7.640625 | 0.382813 | 0.092025 | 0.156442 | 0.09816 | 0.122699 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004019 | 0.133705 | 1,436 | 41 | 80 | 35.02439 | 0.782154 | 0.20961 | 0 | 0 | 0 | 0 | 0.208 | 0.084444 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.466667 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
8dc9d4a24fcd5779bfcc0dce2b7599c6899d8ef2 | 399 | py | Python | day01/part2.py | BaderSZ/adventofcode2020 | dae705fd093bbd176021118f0898947cb4b02f84 | [
"MIT"
] | null | null | null | day01/part2.py | BaderSZ/adventofcode2020 | dae705fd093bbd176021118f0898947cb4b02f84 | [
"MIT"
] | null | null | null | day01/part2.py | BaderSZ/adventofcode2020 | dae705fd093bbd176021118f0898947cb4b02f84 | [
"MIT"
] | null | null | null | arr = []
b = False
with open("input","r") as f:
for i in f.readlines():
arr = arr + [int(i.rstrip("\n"))]
length = len(arr)
for i in range(0,length):
for j in range(0,length):
for k in range(0,length):
if (arr[i]+arr[j]+arr[k] == 2020):
print("Result = ", arr[i]*arr[j]*arr[k])
b = True
if (b):
break
| 19 | 56 | 0.448622 | 62 | 399 | 2.887097 | 0.451613 | 0.117318 | 0.134078 | 0.234637 | 0.324022 | 0.134078 | 0 | 0 | 0 | 0 | 0 | 0.027668 | 0.365915 | 399 | 20 | 57 | 19.95 | 0.679842 | 0 | 0 | 0 | 0 | 0 | 0.042714 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.071429 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dcdc79d68031ef6f71e208e23bc970ea8a6a950 | 1,102 | py | Python | example_contests/fk_2014_beta/problems/rod/data/secret/gen.py | ForritunarkeppniFramhaldsskolanna/epsilon | a31260ad33aba3d1846cda585840d7e7d2f2349c | [
"MIT"
] | 6 | 2016-03-28T13:57:54.000Z | 2017-07-25T06:04:05.000Z | example_contests/fk_2014_beta/problems/rod/data/secret/gen.py | ForritunarkeppniFramhaldsskolanna/epsilon | a31260ad33aba3d1846cda585840d7e7d2f2349c | [
"MIT"
] | 25 | 2015-01-23T18:02:35.000Z | 2015-03-17T01:40:27.000Z | example_contests/fk_2014_beta/problems/rod/data/secret/gen.py | ForritunarkeppniFramhaldsskolanna/epsilon | a31260ad33aba3d1846cda585840d7e7d2f2349c | [
"MIT"
] | 3 | 2016-06-28T00:48:38.000Z | 2017-05-25T05:29:25.000Z |
import random
ts = [
(-1,-1),
(-1,-1),
(-1,-1),
(3,3),
(3,3),
(3,3),
(4,1),
(4,5),
(4,20),
(10,100),
(10,100),
(10,100),
(20,100),
(26,100),
(26,100),
(26,100),
(10, 'rev'),
(26, 'rev'),
]
# Emit one numbered test case per entry of ts; (n, k) are the problem
# dimensions, 'rev' selects a deterministic pattern instead of a random one.
for i, (n,k) in enumerate(ts):
    # (-1, -1) entries are placeholders so the case numbering stays stable.
    if n == -1:
        continue
    rev = k == 'rev'
    if rev:
        # 'rev' cases derive the row count from n instead of taking it literally.
        k = 2 * n - 1 - 2
    # k rows of width 2n-1: even columns are '|' rods, odd columns are gaps.
    arr = [ [ '|' if col % 2 == 0 else ' ' for col in range(2*n-1) ] for row in range(k) ]
    if rev:
        # Deterministic triangular pattern of '-' connectors.
        for a in range(k):
            for b in range(0, min(a+1, k + 2 - a - 1)):
                if (a + b) % 2 == 0:
                    arr[a][2*b+1] = '-'
    else:
        # Random connectors: place a '-' only when both neighbouring gaps in
        # the same row are still empty, so connectors never touch.
        cnt = random.randint(0, k * (n - 1))
        for _ in range(cnt):
            x, y = random.randint(0, k - 1), random.randint(0, n - 2)
            if (y == 0 or arr[x][2*(y-1)+1] == ' ') and (y == n - 2 or arr[x][2*(y+1)+1] == ' '):
                arr[x][2*y + 1] = '-'
    # Write case i as "<n> <k>" followed by the k grid rows.
    with open('%02d.in' % i, 'w') as f:
        f.write('%d %d\n' % (n, k))
        for row in range(k):
            f.write('%s\n' % ''.join(arr[row]))
| 19.678571 | 97 | 0.354809 | 184 | 1,102 | 2.119565 | 0.25 | 0.035897 | 0.030769 | 0.030769 | 0.217949 | 0.051282 | 0.051282 | 0 | 0 | 0 | 0 | 0.139183 | 0.400181 | 1,102 | 55 | 98 | 20.036364 | 0.450832 | 0 | 0 | 0.325581 | 0 | 0 | 0.030909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.023256 | 0 | 0.023256 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dd05a0d491e4b246ec3227dbeb967e20dd2dced | 832 | py | Python | sponge-integration-tests/examples/core/unordered_rules_instances.py | mnpas/sponge | 7190f23ae888bbef49d0fbb85157444d6ea48bcd | [
"Apache-2.0"
] | 9 | 2017-12-16T21:48:57.000Z | 2022-01-06T12:22:24.000Z | sponge-integration-tests/examples/core/unordered_rules_instances.py | mnpas/sponge | 7190f23ae888bbef49d0fbb85157444d6ea48bcd | [
"Apache-2.0"
] | 3 | 2020-12-18T11:56:46.000Z | 2022-03-31T18:37:10.000Z | sponge-integration-tests/examples/core/unordered_rules_instances.py | mnpas/sponge | 7190f23ae888bbef49d0fbb85157444d6ea48bcd | [
"Apache-2.0"
] | 2 | 2019-12-29T16:08:32.000Z | 2020-06-15T14:05:34.000Z | """
Sponge Knowledge Base
Unordered rules - instances
"""
from java.util.concurrent.atomic import AtomicInteger
def onInit():
    """Sponge engine hook: register the counters read by the test assertions."""
    # Variables for assertions only
    # NOTE(review): ``sponge`` is injected by the Sponge engine at load time.
    sponge.setVariable("countAB", AtomicInteger(0))
    sponge.setVariable("countA", AtomicInteger(0))
    sponge.setVariable("max", 100)

class AB(Rule):
    # Unordered rule: fires on "a" and "b" events in either order.
    def onConfigure(self):
        self.withEvents(["a", "b"]).withOrdered(False)
    def onRun(self, event):
        sponge.getVariable("countAB").incrementAndGet()

class A(Rule):
    # Unordered rule over two aliases ("a1", "a2") of the "a" event.
    def onConfigure(self):
        self.withEvents(["a a1", "a a2"]).withOrdered(False)
    def onRun(self, event):
        sponge.getVariable("countA").incrementAndGet()

def onStartup():
    """Sponge engine hook: send ``max`` pairs of "a" and "b" events."""
    for i in range(sponge.getVariable("max")):
        sponge.event("a").send()
        sponge.event("b").send()
| 27.733333 | 61 | 0.640625 | 92 | 832 | 5.793478 | 0.5 | 0.095685 | 0.075047 | 0.116323 | 0.326454 | 0.326454 | 0.326454 | 0.187617 | 0 | 0 | 0 | 0.010638 | 0.209135 | 832 | 29 | 62 | 28.689655 | 0.799392 | 0.096154 | 0 | 0.210526 | 0 | 0 | 0.061538 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.315789 | false | 0 | 0.052632 | 0 | 0.473684 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8dd1df6bb043d055d6d1497f985228a20b9ff564 | 385 | py | Python | callback/leehyunseob.py | SanghunOh/share_5GUAV_2021 | bdcc272da4bce5e3e56ff7842bff7fb0c802f083 | [
"Apache-2.0"
] | null | null | null | callback/leehyunseob.py | SanghunOh/share_5GUAV_2021 | bdcc272da4bce5e3e56ff7842bff7fb0c802f083 | [
"Apache-2.0"
] | null | null | null | callback/leehyunseob.py | SanghunOh/share_5GUAV_2021 | bdcc272da4bce5e3e56ff7842bff7fb0c802f083 | [
"Apache-2.0"
] | 3 | 2021-11-30T07:49:08.000Z | 2021-12-01T08:57:48.000Z | def func():
pass
return
#file length check
if __name__ == '__main__':
try:
f = open('./testfile.txt', 'r') #1.file read -> open('')
length = len(f.read()) #2.length 설정
f.close() #3.자원클로즈
print(length)
except Exception as e:
pass | 29.615385 | 67 | 0.392208 | 38 | 385 | 3.763158 | 0.763158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015306 | 0.490909 | 385 | 13 | 68 | 29.615385 | 0.714286 | 0.150649 | 0 | 0.181818 | 0 | 0 | 0.071207 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.181818 | 0 | 0 | 0.181818 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8dd3201b418fde039b2259ab6bd540e0133bf398 | 16,162 | py | Python | tkp.py | Robin-mlh/TKPass | 69608ca0b4c2f7204b3ef86a34f76f57e4737d53 | [
"MIT"
] | 1 | 2022-01-30T17:55:45.000Z | 2022-01-30T17:55:45.000Z | tkp.py | Robin-mlh/TKPass | 69608ca0b4c2f7204b3ef86a34f76f57e4737d53 | [
"MIT"
] | null | null | null | tkp.py | Robin-mlh/TKPass | 69608ca0b4c2f7204b3ef86a34f76f57e4737d53 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
""" Password toolkit. """
import sys
import os
import getpass
import configparser
from secrets import randbelow # To use random cryptography.
import argparse # Module for the command line system.
import pyperclip # To copy and get the clipboard.
from modules import functions # Functions.
from modules.functions import config # Configuration file.
from modules.data import * # Data and text file.
def required_length(nmin, nmax):
    """Build an argparse ``Action`` enforcing nmin <= len(values) <= nmax.

    The returned action class stores the values as usual when the count is
    within the inclusive range, and raises ``argparse.ArgumentTypeError``
    otherwise.
    """
    class RequiredLength(argparse.Action):
        def __call__(self, parser, args, values, option_string=None):
            count = len(values)
            if nmin <= count <= nmax:
                setattr(args, self.dest, values)
            else:
                raise argparse.ArgumentTypeError(
                    f"argument {self.dest} requires between {nmin} and {nmax} arguments")
    return RequiredLength
def definition_parent_parser():
    """Create the shared parser holding the common generation options.

    Defines ``--copy``, ``--hide`` and ``--output`` once so every generation
    sub-command can inherit them via ``parents=[...]``.  The parser is still
    stored in the module-level ``parent_parser_generation`` for backward
    compatibility, and is now also returned for direct use/testing.
    Also fixes the "Copiy" typo in the ``--copy`` help text.
    """
    global parent_parser_generation
    parent_parser_generation = argparse.ArgumentParser(add_help=False)
    parent_parser_generation.add_argument("--copy", "-c", action="store_true",
                                          help="Copy the result to the clipboard")
    parent_parser_generation.add_argument("--hide", "-H", action="store_true",
                                          help="Does not display the result")
    parent_parser_generation.add_argument("--output", "-o", type=str, metavar="FILE",
                                          default=False, nargs="?",
                                          help="Export result to a file")
    return parent_parser_generation
# Modification of argparse.ArgumentParser to customize the global help message.
class ArgumentParserCustomGlobal(argparse.ArgumentParser):
    """ArgumentParser whose help output is the hand-written text below."""

    def format_help(self):
        # The string is emitted verbatim to the terminal, so its internal
        # layout is deliberate.
        return """Usage: tkp.py [COMMAND] [OPTION]...

A password toolkit.

Commands:
  check, c        Test the strength of a password
  password, w     Generate a password
  passphrase, p   Generate a passphrase
  sentence, s     Generate a sentence-based password
  doc             Shows safety recommendations and tkp sources

Options:
  -h, --help      Show this help message and exit
  -v, --version   Show program's version number and exit

Use "tkp.py [COMMAND] --help" for more information about a command.
"""
class TkpCli(object):
""" Command line system. """
    def __init__(self):
        """Dispatch the first CLI token to the method of the same name."""
        parser = ArgumentParserCustomGlobal(description="A password toolkit.",
                                            usage="""tkp.py [COMMAND] [OPTION]...""")
        parser.add_argument("-v", "--version", action='version', version='TKPass '+VERSION_TKP)
        parser.add_argument('command', metavar="COMMAND", help='command')
        if len(sys.argv) == 1:
            # Displays the help when the program is started without arguments.
            parser.print_help(sys.stderr)
            sys.exit(1)
        # Only argv[1] is parsed here; each command re-parses argv[2:] itself.
        args = parser.parse_args(sys.argv[1:2])
        # A method = A command.
        if not hasattr(self, args.command):
            # If no method matches the command entered by the user.
            parser.error(message="Unrecognized command."
                         "\nUse the -h option to see the available commands.")
        definition_parent_parser()  # Set the parent parser.
        # Executes the corresponding method to a command.
        getattr(self, args.command)()
    def check(self):
        """Evaluate password strength; source is flag, clipboard or getpass prompt."""
        parser = argparse.ArgumentParser(description="Test the strength of a password.",
                usage="tkp.py {check|c} [-h] [-p PASSWORD | --getpass | --clipboard]"
                "\n [-i INFO [INFO ...]] [--wordlist FILE [FILE ...]]")
        group_check_password = parser.add_mutually_exclusive_group(required=False)
        group_check_password.add_argument("--password", "-p", metavar="PASSWORD", type=str,
                help="The password to check")
        group_check_password.add_argument("--getpass", "-g", action="store_true",
                help="Use the getpass function to securely ask for the password"
                "\nDefault method used to get the password.")
        group_check_password.add_argument("--clipboard", "-c", action="store_true",
                help="Use the clipboard as password")
        parser.add_argument("--info", "-i", type=str, nargs="+",
                help="Additional information. For example, a name or a date of birth")
        parser.add_argument("--wordlist", "-w", type=argparse.FileType('r'),
                nargs="+", metavar="FILE", default=[],
                help="Additional word list files to load")
        args = parser.parse_args(sys.argv[2:])
        # Password source priority: explicit flag > clipboard > getpass prompt.
        if args.password not in [False, None]:
            password = args.password
        elif args.clipboard not in [False, None]:
            try:
                password = pyperclip.paste()
            except pyperclip.PyperclipException as e:
                raise SystemExit(f"An error has occurred: the clipboard does not exist or cannot be reached. {e}")
        else:
            password = getpass.getpass()
            # Blank line separating the hidden prompt from the report.
            print()
        functions.check_password(password, infos_sup=args.info, files_wordlists=args.wordlist)
    def password(self):
        """Generate one or more random passwords per the CLI/config options."""
        parser = argparse.ArgumentParser(parents=[parent_parser_generation],
                formatter_class=argparse.RawTextHelpFormatter,
                usage="""tkp.py {password|w} [-cfh] [-o [OUTFILE]] [-l {LENGTH | MIN_LENGTH MAX_LENGTH}]
[-a NUM_LOWERCASE_LETTERS] [-u NUM_UPPER_CASE_LETTERS] [-d NUM_DIGITS] [-s NUM_SPECIAL_SYMBOLS]
[-n GENERATION_NUMBER] [-b BANNED_CHARACTERS [BANNED_CHARACTERS ...]] [--passphrase [WORDLIST_FILE]]""",
                description="Generates a cryptographically random password."
                "\nUse the command without argument to use the default configuration.",
                epilog="""Example:
tkp w -n 7 -l 20 -a 10 -u 0
Generates 7 passwords of 20 characters composed of no capital letters,
10 lowercase letters, numbers and special characters.""")
        parser.add_argument("-l", type=int, nargs="+", action=required_length(1, 2), metavar="LENGTH",
                help="Number of characters in each password. With two numbers,\n"
                "the length will be random between the first and the second")
        parser.add_argument("-a", metavar="NUM_LOWERCASE_LETTERS",
                default=config["PASSWORD"]["DEFAULT_PASSWORD_LOWER_LETTERS"],
                help="Number of lowercase letters in each password")
        parser.add_argument("-u", metavar="NUM_UPPER_CASE_LETTERS",
                default=config["PASSWORD"]["DEFAULT_PASSWORD_UPPER_LETTERS"],
                help="Number of capital letters in each password")
        parser.add_argument("-d", metavar="NUM_DIGITS",
                default=config["PASSWORD"]["DEFAULT_PASSWORD_DIGITS"],
                help="Number of digits in each password")
        parser.add_argument("-s", metavar="NUM_SPECIAL_SYMBOLS",
                default=config["PASSWORD"]["DEFAULT_PASSWORD_SPECIALS_SYMBOLS"],
                help="Number of special symbols in each password")
        parser.add_argument("-n", type=int, default=config["GLOBAL"]["DEFAULT_NB_GENERATION"],
                metavar="GENERATION_NUMBER", help="Number of password to generate")
        parser.add_argument("-b", nargs="+", default=config["PASSWORD"]["BANNED_CHARACTERS_PASSWORD"],
                metavar="BANNED_CHARACTER",
                help="Characters that cannot be included in the password")
        parser.add_argument("--passphrase", "-p", type=str, metavar="WORDLIST_FILE", nargs="?", default=None,
                const=config["PASSWORD"]["DEFAULT_WORDLIST_FILE_SENTENCE_PASSWORD"].replace("DICTIONNARY_DIRECTORY",
                config["GLOBAL"]["DICTIONNARY_DIRECTORY"]),
                help="Generate a phrase based on the password to remember it")
        args = parser.parse_args(sys.argv[2:])
        # Length selection: config default, fixed value, or random in [min, max].
        if args.l is None:
            if not isinstance((config.getint("PASSWORD", "DEFAULT_PASSWORD_LENGTH")), int):
                raise SystemExit("ValueError: The password length value to be generated is invalid.\n"
                "Check the value of DEFAULT_PASSWORD_LENGTH in the configuration file.")
            else:
                nb_characters = config.getint("PASSWORD", "DEFAULT_PASSWORD_LENGTH")
        elif len(args.l) == 1:
            nb_characters = args.l[0]
        elif len(args.l) == 2:
            # Rejection sampling until the draw reaches the requested minimum.
            nb_characters = randbelow(args.l[1])
            while nb_characters < args.l[0]:
                nb_characters = randbelow(args.l[1])
        # Generation of the password(s).
        result = functions.password_generation(args.a, args.u, args.d, args.s,
                nb_characters, args.n, args.b)
        # Initialization of the arguments used after the generation of the result.
        if not args.hide:
            print(result)
        if args.passphrase is not None:
            with open(args.passphrase, "r") as f:
                wordlist = list(f.read().split("\n"))
            sentence_password = functions.password_generation_sentence(result, wordlist)
            print("\n" + sentence_password)
        if args.copy or config.getboolean('GLOBAL', 'AUTO_COPY_GENERATED'):
            try:
                pyperclip.copy(result)
            except pyperclip.PyperclipException as e:
                raise SystemExit(f"An error has occurred: the clipboard does not exist or cannot be reached. {e}")
        # --output with no value uses the configured default path.
        if args.output is None:
            functions.export_file(result, path=config["GLOBAL"]["DEFAULT_OUTFILE"])
        elif args.output is not False:
            functions.export_file(result, path=args.output)
    def passphrase(self):
        """Generate one or more random passphrases from a word list."""
        parser = argparse.ArgumentParser(usage="tkp.py {passphrase|p} [-cdfhuw]"
                " [-l {WORDS_NUMBER | MIN_NUM_WORDS MAX_NUM_WORDS}]"
                "\n [-i WORDLIST_FILE] [-s SEPARATOR] [-n GENERATION_NUMBER] [-o [OUTFILE]]",
                parents=[parent_parser_generation],
                formatter_class=argparse.RawTextHelpFormatter,
                description="Generats a cryptographically random passphrase.",
                epilog="""Example:
tkp p -n 2 -l 6 -s '-' -o 'passphrase.txt'
Generate and export to a file 2 passphrases of 6 words separated by a dash.""")
        parser.add_argument("--separator", "-s", type=str, default=config["PASSPHRASE"]["DEFAUT_SEPARATOR_PASSPHRASE"],
                help="Character between words")
        parser.add_argument("--generation-number", "-n", type=int, default=config["GLOBAL"]["DEFAULT_NB_GENERATION"],
                help="Number of passphrase to generate")
        parser.add_argument("--words-number", "-l", type=int, nargs="+", action=required_length(1, 2),
                help="Number of words in each passphrase")
        parser.add_argument("-u", action="store_true", help="Capitalize the first letter of each word")
        parser.add_argument("-w", type=int, default=0, help="Number of symbols (default: 0)", metavar="NUM_SYMBOLS")
        parser.add_argument("-d", type=int, default=0, help="Number of digits (default: 0)", metavar="NUM_DIGITS")
        parser.add_argument("--wordlist", "-i", type=str, metavar="FILE",
                default=config["PASSPHRASE"]["DEFAULT_WORDLIST_FILE_PASSPHRASE"].replace("DICTIONNARY_DIRECTORY",
                config["GLOBAL"]["DICTIONNARY_DIRECTORY"]),
                help="List of words to be used for the generation of the passphrase")
        args = parser.parse_args(sys.argv[2:])
        with open(args.wordlist, "r") as f:
            wordlist = list(f.read().split("\n"))
        # Definition of the number of words
        # to put in the passphrase according to the user's arguments.
        if args.words_number is None:
            nb_words = config.getint("PASSPHRASE", "DEFAULT_NB_WORDS_PASSPHRASE")
        elif len(args.words_number) == 1:
            nb_words = args.words_number[0]
        elif len(args.words_number) == 2:
            # Rejection sampling until the draw reaches the requested minimum.
            nb_words = randbelow(args.words_number[1])
            while nb_words < args.words_number[0]:
                nb_words = randbelow(args.words_number[1])
        # Generation of the passphrase(s).
        result = functions.passphrase_generation(wordlist, nb_words,
                args.separator, args.generation_number,
                args.w, args.d, args.u)
        # Initialization of the arguments used after the generation of the result.
        if not args.hide:
            print(result)
        if args.copy or config.getboolean('GLOBAL', 'AUTO_COPY_GENERATED'):
            try:
                pyperclip.copy(result)
            except pyperclip.PyperclipException as e:
                raise SystemExit(f"An error has occurred: the clipboard does not exist or cannot be reached. {e}")
        # --output with no value uses the configured default path.
        if args.output is None:
            functions.export_file(result, path=config["GLOBAL"]["DEFAULT_OUTFILE"])
        elif args.output is not False:
            functions.export_file(result, path=args.output)
    def sentence(self):
        """Derive a password from a user-supplied sentence (mnemonic method)."""
        parser = argparse.ArgumentParser(usage="""tkp.py {sentence|s} [-cfh] [--output [FILE]] SENTENCE""",
                description="Generate a phrase-based password."
                "\nRemember the sentence to remember the password.",
                parents=[parent_parser_generation],
                formatter_class=argparse.RawTextHelpFormatter,
                epilog="""Exemple:
tkp s 'Lorem ipsum dolor 66 sit amet!' -c
Generate and copy the password created using the sentence given as an argument.""")
        parser.add_argument("sentence", metavar="SENTENCE", type=str,
                help="Sentence")
        args = parser.parse_args(sys.argv[2:])
        result = functions.password_from_sentence(args.sentence)
        if not args.hide:
            print(result)
        if args.copy or config.getboolean('GLOBAL', 'AUTO_COPY_GENERATED'):
            try:
                pyperclip.copy(result)
            except pyperclip.PyperclipException as e:
                raise SystemExit(f"An error has occurred: the clipboard does not exist or cannot be reached. {e}")
        # --output with no value uses the configured default path.
        if args.output is None:
            functions.export_file(result, path=config["GLOBAL"]["DEFAULT_OUTFILE"])
        elif args.output is not False:
            functions.export_file(result, path=args.output)
def doc(self):
parser = argparse.ArgumentParser(usage="tkp.py doc [-h]",
description="Show safety recommandations and TKPass sources.")
args = parser.parse_args(sys.argv[2:])
print(PASSWORD_DOCUMENTATION)
# Aliases
def c(self):
TkpCli.check(self)
def w(self):
TkpCli.password(self)
def p(self):
TkpCli.passphrase(self)
def s(self):
TkpCli.sentence(self)
if __name__ == '__main__':
TkpCli()
| 53.694352 | 144 | 0.579879 | 1,769 | 16,162 | 5.176371 | 0.174675 | 0.031233 | 0.03713 | 0.012449 | 0.366168 | 0.310582 | 0.260566 | 0.208038 | 0.171017 | 0.140221 | 0 | 0.004255 | 0.316483 | 16,162 | 300 | 145 | 53.873333 | 0.824658 | 0.060141 | 0 | 0.241667 | 0 | 0.008333 | 0.310279 | 0.036332 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058333 | false | 0.3 | 0.041667 | 0.004167 | 0.120833 | 0.029167 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8dd9c8d87549231b3a8663f8575c7fcb907b1f2e | 1,328 | py | Python | ControlSystem.py | towhidabsar/architecture-tactics | d1d22454558cb763c93c5cfa83ede60967d1fadd | [
"MIT"
] | null | null | null | ControlSystem.py | towhidabsar/architecture-tactics | d1d22454558cb763c93c5cfa83ede60967d1fadd | [
"MIT"
] | null | null | null | ControlSystem.py | towhidabsar/architecture-tactics | d1d22454558cb763c93c5cfa83ede60967d1fadd | [
"MIT"
] | null | null | null | import Reactor
import multiprocessing
import sys
from Queue import Empty
import random
import os
import time
'''
Class representing the control system for the nuclear reactor.
'''
class ControlSystem:
    """Polls reactor telemetry and replies with raise/drop heartbeat messages.

    Python 2 code (note the ``print`` statement in ``runNuclearReactor``).
    """
    def __init__(self, reactor, receiver):
        # reactor: queue yielding (temperature, safety, timestamp) tuples.
        # receiver: queue that heartbeat messages are pushed onto.
        # NOTE(review): presumably multiprocessing queues -- confirm in the launcher.
        self.reactor = reactor
        self.receiver = receiver

    def runNuclearReactor(self):
        """Main control loop: read telemetry, decide, heartbeat, until random 'death'."""
        time.sleep(0.01)
        kill_process = 0
        instruction = "Drop"
        timestamp = 0
        count = 1
        while True:
            try:
                time.sleep(0.01)
                info = self.reactor.get_nowait()
                temp = info[0]
                safety = info[1]
                timestamp = info[2]
                # Drop the rods on high temperature or low safety margin.
                if temp > 9 or safety < 2:
                    instruction = "Drop"
                else:
                    instruction = "Raise"
            except Empty:
                # No fresh telemetry; the previous instruction is re-sent below.
                pass
            finally:
                # A heartbeat goes out every iteration, telemetry or not.
                self.sendHeartbeat(instruction, timestamp)
                # Simulate random process failure (~10% chance per tick).
                kill_process = random.randint(1,100)
                if kill_process > 90:
                    print "Process dead", os.getpid()
                    break
                else:
                    count +=1

    def sendHeartbeat(self, instruction,timestamp):
        """Publish an 'Alive' heartbeat carrying the current instruction."""
        #os.getpid(), "Alive", instruction
        self.receiver.put([os.getpid(), "Alive", instruction, timestamp])
| 26.039216 | 73 | 0.527861 | 130 | 1,328 | 5.330769 | 0.461538 | 0.047619 | 0.02886 | 0.034632 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025926 | 0.39006 | 1,328 | 50 | 74 | 26.56 | 0.82963 | 0.024849 | 0 | 0.15 | 0 | 0 | 0.02457 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.025 | 0.175 | null | null | 0.025 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8de55b924a4d6799ef0e5f21ddd9b96c893d36b5 | 711 | py | Python | area/migrations/0004_alter_area_admin_alter_area_hoodimage.py | Lenus254/NeighbourHood-App | 78e9ab01b67a410098b3173d8b392cfeede9731f | [
"MIT"
] | null | null | null | area/migrations/0004_alter_area_admin_alter_area_hoodimage.py | Lenus254/NeighbourHood-App | 78e9ab01b67a410098b3173d8b392cfeede9731f | [
"MIT"
] | null | null | null | area/migrations/0004_alter_area_admin_alter_area_hoodimage.py | Lenus254/NeighbourHood-App | 78e9ab01b67a410098b3173d8b392cfeede9731f | [
"MIT"
] | null | null | null | # Generated by Django 4.0.3 on 2022-03-22 07:01
import cloudinary.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django (see file header); avoid hand-editing once applied.

    dependencies = [
        ('area', '0003_alter_profile_profile_pic'),
    ]

    operations = [
        migrations.AlterField(
            model_name='area',
            name='admin',
            # Nullable FK: an area may exist before an admin profile is assigned.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='admin', to='area.profile'),
        ),
        migrations.AlterField(
            model_name='area',
            name='hoodimage',
            field=cloudinary.models.CloudinaryField(max_length=255, null=True),
        ),
    ]
| 27.346154 | 133 | 0.634318 | 79 | 711 | 5.594937 | 0.56962 | 0.054299 | 0.063348 | 0.099548 | 0.167421 | 0.167421 | 0 | 0 | 0 | 0 | 0 | 0.041122 | 0.247539 | 711 | 25 | 134 | 28.44 | 0.785047 | 0.063291 | 0 | 0.315789 | 1 | 0 | 0.10994 | 0.045181 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.157895 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8df154a304faf7dff813111bb6142c7bc3b7ec80 | 3,134 | py | Python | cs15211/MirrorReflection.py | JulyKikuAkita/PythonPrac | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | [
"Apache-2.0"
] | 1 | 2021-07-05T01:53:30.000Z | 2021-07-05T01:53:30.000Z | cs15211/MirrorReflection.py | JulyKikuAkita/PythonPrac | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | [
"Apache-2.0"
] | null | null | null | cs15211/MirrorReflection.py | JulyKikuAkita/PythonPrac | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | [
"Apache-2.0"
] | 1 | 2018-01-08T07:14:08.000Z | 2018-01-08T07:14:08.000Z | __source__ = 'https://leetcode.com/problems/mirror-reflection/'
# Time: O(logP)
# Space: O(1)
#
# Description: Leetcode # 858. Mirror Reflection
#
# There is a special square room with mirrors on each of the four walls.
# Except for the southwest corner, there are receptors on each of the remaining corners, numbered 0, 1, and 2.
#
# The square room has walls of length p,
# and a laser ray from the southwest corner first meets the east wall at a distance q from the 0th receptor.
#
# Return the number of the receptor that the ray meets first.
# (It is guaranteed that the ray will meet a receptor eventually.)
#
# Example 1:
#
# Input: p = 2, q = 1
# Output: 2
# Explanation: The ray meets receptor 2 the first time it gets reflected back to the left wall.
#
# Note:
# 1 <= p <= 1000
# 0 <= q <= p
#
import unittest
# 24ms 50%
class Solution(object):
    def mirrorReflection(self, p, q):
        """
        :type p: int
        :type q: int
        :rtype: int

        Only the parities of p and q reduced by gcd(p, q) decide which
        corner the ray reaches first: both odd -> 1, p odd -> 0, else 2.

        Fixes: ``fractions.gcd`` was removed in Python 3.9 (use ``math.gcd``),
        and ``/`` produced floats under Python 3 (use floor division so the
        parity test stays on integers).
        """
        from math import gcd
        g = gcd(p, q)
        p = (p // g) % 2
        q = (q // g) % 2
        return 1 if p and q else 0 if p else 2
class TestMethods(unittest.TestCase):
    # Placeholder suite; keeps the ``unittest.main()`` entry point runnable.
    def test_Local(self):
        self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/mirror-reflection/solution/
#
Approach 1: Simulation
Complexity Analysis
Time Complexity: O(p). We can prove (using Approach #2) that the number of bounces is bounded by this.
Space Complexity: O(1)
# 6ms 11.06%
class Solution {
double EPS = 1e-6;
public int mirrorReflection(int p, int q) {
double x = 0, y = 0;
double rx = p, ry = q;
// While it hasn't reached a receptor,...
while ( !(close(x, p) && (close(y, 0) || close(y, p)) || close(x, 0) && close(y, p))) {
// Want smallest t so that some x + rx, y + ry is 0 or p
// x + rxt = 0, then t = -x/rx etc.
double t = 1e9;
if ((-x / rx) > EPS) t = Math.min(t, -x / rx);
if ((-y / ry) > EPS) t = Math.min(t, -y / ry);
if (((p-x) / rx) > EPS) t = Math.min(t, (p-x) / rx);
if (((p-y) / ry) > EPS) t = Math.min(t, (p-y) / ry);
x += rx * t;
y += ry * t;
if (close(x, p) || close(x, 0)) rx *= -1;
if (close(y, p) || close(y, 0)) ry *= -1;
}
if (close(x, p) && close(y, p)) return 1;
return close(x, p) ? 0 : 2;
}
private boolean close(double x, double y) {
return Math.abs(x - y) < EPS;
}
}
Approach 2: Mathematical
Complexity Analysis
Time Complexity: O(logP), the complexity of the gcd operation.
Space Complexity: O(1)
The mathematical answer is k = p / gcd(p, q).
# 2ms 100%
class Solution {
public int mirrorReflection(int p, int q) {
int g = gcd(p, q);
p /= g; p %= 2;
q /= g; q %= 2;
if (p == 1 && q == 1) return 1;
return p == 1 ? 0 : 2;
}
private int gcd(int a, int b) {
return a == 0 ? b : gcd(b % a, a);
}
}
'''
| 27.734513 | 110 | 0.544352 | 486 | 3,134 | 3.483539 | 0.3107 | 0.012404 | 0.016539 | 0.025989 | 0.19492 | 0.122859 | 0.074424 | 0 | 0 | 0 | 0 | 0.03268 | 0.316528 | 3,134 | 112 | 111 | 27.982143 | 0.757703 | 0.238354 | 0 | 0.119403 | 0 | 0.089552 | 0.814384 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 1 | 0.029851 | false | 0 | 0.029851 | 0 | 0.164179 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8df39b9e1c345522795d8812fdbf9cbdb7a3b688 | 3,188 | py | Python | symjax/data/dclde.py | RandallBalestriero/TheanoXLA | d8778c2eb3254b478cef4f45d934bf921e695619 | [
"Apache-2.0"
] | 67 | 2020-02-21T21:26:46.000Z | 2020-06-14T14:25:42.000Z | symjax/data/dclde.py | RandallBalestriero/TheanoXLA | d8778c2eb3254b478cef4f45d934bf921e695619 | [
"Apache-2.0"
] | 8 | 2020-02-22T14:45:56.000Z | 2020-06-07T16:56:47.000Z | symjax/data/dclde.py | RandallBalestriero/TheanoXLA | d8778c2eb3254b478cef4f45d934bf921e695619 | [
"Apache-2.0"
] | 4 | 2020-02-21T17:34:46.000Z | 2020-05-30T08:30:14.000Z | import io
import os
import time
import urllib.request
import zipfile
import numpy as np
from scipy.io.wavfile import read as wav_read
from tqdm import tqdm
class dclde:
"""
The high-frequency dataset consists of marked encounters with echolocation
clicks of species commonly found along the US Atlantic Coast, and in the
Gulf of Mexico:
Mesoplodon europaeus - Gervais' beaked whale
Ziphius cavirostris - Cuvier's beaked whale
Mesoplodon bidens - Sowerby's beaked whale
Lagenorhynchus acutus - Atlantic white-sided dolphin
Grampus griseus - Risso's dolphin
Globicephala macrorhynchus - Short-finned pilot whale
Stenella sp. - Stenellid dolphins
Delphinid type A
Delphinid type B
Unidentified delphinid - delphinid other than those described above
The goal for these datasets is to identify acoustic encounters by species
during times when animals were echolocating. Analysts examined data for
echolocation clicks and approximated the start and end times of acoustic
encounters. Any period that was separated from another one by five minutes
or more was marked as a separate encounter. Whistle activity was not
considered. Consequently, while the use of whistle information during
echolocation activity is appropriate, reporting a species based on whistles
in the absence of echolocation activity will be considered a false positive
for this classification task.
"""
    def download(path):
        """Fetch the DCLDE LF dev archive into ``path + "DCLDE"`` if missing.

        NOTE(review): defined without ``self``/``@staticmethod``; it is
        invoked as ``dclde.download(path)`` from ``load`` below, so ``path``
        receives the dataset root directory string.
        """
        # Load the dataset (download if necessary) and set
        # the class attributes.
        print("Loading DCLDE")
        t = time.time()  # NOTE(review): local and unused here; ``load`` formats a ``t`` that is out of scope -- confirm.
        if not os.path.isdir(path + "DCLDE"):
            print("\tCreating Directory")
            os.mkdir(path + "DCLDE")
        if not os.path.exists(path + "DCLDE/DCLDE_LF_Dev.zip"):
            url = "http://sabiod.univ-tln.fr/workspace/DCLDE2018/DCLDE_LF_Dev.zip"
            # NOTE(review): ``DownloadProgressBar`` is not defined/imported in
            # this file -- confirm it exists in the package.
            with DownloadProgressBar(
                unit="B", unit_scale=True, miniters=1, desc="Wav files"
            ) as t:
                urllib.request.urlretrieve(url, path + "DCLDE/DCLDE_LF_Dev.zip")
    def load(window_size=441000, path=None):
        """Slice the DCLDE wav files into windows of ``window_size`` samples.

        NOTE(review): returns ``dataset`` and formats ``t`` into the timing
        message, but neither name is defined in this file -- confirm against
        the package's dataset utilities.
        """
        if path is None:
            path = os.environ["DATASET_path"]
        dclde.download(path)
        # Loading the files
        f = zipfile.ZipFile(path + "DCLDE/DCLDE_LF_Dev.zip")
        wavs = list()
        # labels = list()
        for zipf in tqdm(f.filelist, ascii=True):
            # Only entries whose name contains ".d100." are used
            # (presumably the decimated recordings -- confirm).
            if ".wav" in zipf.filename and ".d100." in zipf.filename:
                wavfile = f.read(zipf)
                byt = io.BytesIO(wavfile)
                wav = wav_read(byt)[1].astype("float32")
                # Split into fixed-size, non-overlapping windows; the tail
                # shorter than window_size is dropped.
                for s in range(len(wav) // window_size):
                    wavs.append(wav[s * window_size : (s + 1) * window_size])
                    # labels.append(zipf.filename.split('/')[2])
        # return wavs,labels
        wavs = np.expand_dims(np.asarray(wavs), 1)
        dataset.add_variable({"signals": {"train_set": wavs}})
        print(
            "Dataset freefield1010 loaded in", "{0:.2f}".format(time.time() - t), "s."
        )
        return dataset
| 37.505882 | 86 | 0.643664 | 407 | 3,188 | 4.995086 | 0.506143 | 0.026562 | 0.019675 | 0.025578 | 0.032464 | 0.032464 | 0 | 0 | 0 | 0 | 0 | 0.011188 | 0.271016 | 3,188 | 84 | 87 | 37.952381 | 0.863597 | 0.434442 | 0 | 0 | 0 | 0 | 0.157583 | 0.0391 | 0 | 0 | 0 | 0.011905 | 0 | 1 | 0.05 | false | 0 | 0.2 | 0 | 0.3 | 0.075 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c01a7f85b64aae6b1ef0f541d5bbcd7aadbba53 | 515 | py | Python | okr/migrations/0045_auto_20210114_1812.py | wdr-data/wdr-okr | 71c9e6e8d3521b1bb67d30310a93584389de2127 | [
"MIT"
] | 2 | 2021-07-28T08:46:13.000Z | 2022-01-19T17:05:48.000Z | okr/migrations/0045_auto_20210114_1812.py | wdr-data/wdr-okr | 71c9e6e8d3521b1bb67d30310a93584389de2127 | [
"MIT"
] | 3 | 2020-11-10T23:34:17.000Z | 2021-03-31T16:19:21.000Z | okr/migrations/0045_auto_20210114_1812.py | wdr-data/wdr-okr | 71c9e6e8d3521b1bb67d30310a93584389de2127 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.5 on 2021-01-14 17:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (see file header); avoid hand-editing once applied.

    dependencies = [
        ("okr", "0044_auto_20210113_2354"),
    ]

    operations = [
        migrations.AlterField(
            model_name="sophoraid",
            name="sophora_id",
            field=models.TextField(
                help_text="Sophora ID des Dokuments",
                unique=True,
                verbose_name="Sophora ID",
            ),
        ),
    ]
| 22.391304 | 53 | 0.549515 | 52 | 515 | 5.307692 | 0.788462 | 0.097826 | 0.094203 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.092262 | 0.347573 | 515 | 22 | 54 | 23.409091 | 0.729167 | 0.087379 | 0 | 0.125 | 1 | 0 | 0.168803 | 0.049145 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c0322e9fcbd97a7c07e00e86b4ec9ca80702fb7 | 5,006 | py | Python | kivy-greeter.py | jegger/kivy-lightdm-greeter | f2c673bf690583262c6b9339f453236ec45de3b8 | [
"MIT"
] | 4 | 2015-06-30T10:13:46.000Z | 2021-05-12T04:25:45.000Z | kivy-greeter.py | jegger/kivy-lightdm-greeter | f2c673bf690583262c6b9339f453236ec45de3b8 | [
"MIT"
] | null | null | null | kivy-greeter.py | jegger/kivy-lightdm-greeter | f2c673bf690583262c6b9339f453236ec45de3b8 | [
"MIT"
] | null | null | null | import sys
from kivy.app import App
from kivy.support import install_gobject_iteration
from kivy.lang import Builder
from kivy.core.window import Window
from kivy.config import Config
from gi.repository import LightDM
kv = '''
FloatLayout:
username_spinner: username_spinner
session_spinner: session_spinner
info_label: info_label
AnchorLayout:
BoxLayout:
size_hint: None, None
size: 800, 280
info_label: info_label
orientation: 'vertical'
GridLayout:
cols: 2
spacing: 5
Label:
text: "Session"
haling: 'middle'
valing: 'left'
text_size: self.size
font_size: 40
size_hint_x: 0.4
Spinner:
id: session_spinner
font_size: 40
text: self.values[0] if self.values else ""
Label:
text: "Username"
haling: 'middle'
valing: 'left'
text_size: self.size
font_size: 40
size_hint_x: 0.4
Spinner:
id: username_spinner
font_size: 40
text: self.values[0] if self.values else ""
Label:
text: "Password"
haling: 'middle'
valing: 'left'
text_size: self.size
font_size: 40
size_hint_x: 0.4
TextInput:
id: password_input
text: ""
password: True
font_size: 40
multiline: False
background_normal: 'images/textinput.png'
background_active: 'images/textinput-active.png'
on_text_validate:
login_button.trigger_action()
Label:
id: info_label
size_hint_y: None
height: 30
color: 1,0,0,1
Button:
id: login_button
text: "Login"
size_hint_y: 0.3
on_press: app.login(username_spinner.text, password_input.text, session_spinner.text)
Image:
source: 'images/kivy_logo.png'
size: 183,120
pos: (self.parent.width-self.width)/2, 50
size_hint: None, None
'''
class GreeterApp(App):
    """Kivy front-end for a LightDM greeter.

    Presents session/username spinners and a password field; on login it
    drives LightDM's asynchronous authentication via signal callbacks.
    NOTE: written for Python 2 (uses the `print >>` statement).
    """
    def __init__(self, **kwargs):
        super(GreeterApp, self).__init__(**kwargs)
        # Credentials captured from the UI; consumed later by the
        # LightDM callbacks, which have no UI arguments of their own.
        self.password = ""
        self.session = ""
        # Connect to lightDM
        install_gobject_iteration()
        self.greeter = LightDM.Greeter()
        # Register signal handlers before connect_sync() so no signal is missed.
        self.greeter.connect("authentication-complete", self.authentication_complete_cb)
        self.greeter.connect("show-prompt", self.show_prompt_cb)
        self.greeter.connect_sync()
        # Get all available sessions
        available_sessions = []
        for sess in LightDM.get_sessions():
            available_sessions.append(LightDM.Session.get_key(sess))
        # Get all available users
        available_users = []
        inst = LightDM.UserList.get_instance()
        for user in LightDM.UserList.get_users(inst):
            user_name = LightDM.User.get_name(user)
            available_users.append(user_name)
        # Build the widget tree now so the spinners can be pre-populated.
        self.root_widget = Builder.load_string(kv)
        self.root_widget.username_spinner.values = available_users
        self.root_widget.session_spinner.values = available_sessions
    def build(self):
        # Root widget was already constructed in __init__.
        return self.root_widget
    def login(self, username, password, session):
        """Kick off authentication; LightDM will request the password
        asynchronously via show_prompt_cb."""
        self.password = password
        self.session = session
        print >> sys.stderr, "Initial entry of username, send it to LightDM"
        self.greeter.authenticate(username)
    def show_prompt_cb(self, greeter, text, promptType):
        # LightDM is asking for input (presumably the password prompt);
        # answer with the password captured in login().
        print >> sys.stderr, "prompt type: " + str(promptType) + str(text)
        if greeter.get_in_authentication():
            greeter.respond(self.password)
    def authentication_complete_cb(self, greeter):
        # Fired for both success and failure; check which before acting.
        if greeter.get_is_authenticated():
            if not greeter.start_session_sync(self.session):
                self.root_widget.info_label.text = "Error while starting session %s" % self.session
            else:
                print >> sys.stderr, "AUTH COMPLETED"
                self.root_widget.info_label.text = ":-)"
                self.stop()
        else:
            print >> sys.stderr, "Login failed"
            self.root_widget.info_label.text = "Wrong credentials :-("
if __name__ == '__main__':
    # set keyboard to onscreen (the greeter runs before any physical
    # session exists, so a virtual keyboard must be available)
    Config.set('kivy', 'keyboard_mode', 'systemandmulti')
    Config.write()  # persist the setting for subsequent runs
    # Neutral grey background behind the login box.
    Window.clearcolor = (0.4274509804, 0.4274509804, 0.4274509804, 1)
    GreeterApp().run()
| 34.287671 | 101 | 0.543548 | 514 | 5,006 | 5.085603 | 0.307393 | 0.027544 | 0.03749 | 0.025249 | 0.193573 | 0.149197 | 0.11821 | 0.11821 | 0.11821 | 0.11821 | 0 | 0.025224 | 0.374351 | 5,006 | 145 | 102 | 34.524138 | 0.809387 | 0.018777 | 0 | 0.258065 | 0 | 0 | 0.52313 | 0.043 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040323 | false | 0.064516 | 0.056452 | 0.008065 | 0.112903 | 0.032258 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
5c035e049d462ac6fb13d8a19589b38e798b40b0 | 1,660 | py | Python | alipay/aop/api/response/AlipayPayAppSmartwearStatusQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/response/AlipayPayAppSmartwearStatusQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/response/AlipayPayAppSmartwearStatusQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayPayAppSmartwearStatusQueryResponse(AlipayResponse):
    """Response model for the alipay.pay.app.smartwear.status.query API.

    Exposes device_model, product_type, security_solution and status as
    plain read/write properties populated from the decoded response body.
    """

    # Names of the response fields this model carries; used both for
    # initialisation and for parsing the API payload.
    _FIELDS = ('device_model', 'product_type', 'security_solution', 'status')

    def __init__(self):
        super(AlipayPayAppSmartwearStatusQueryResponse, self).__init__()
        self._device_model = None
        self._product_type = None
        self._security_solution = None
        self._status = None

    @property
    def device_model(self):
        return self._device_model

    @device_model.setter
    def device_model(self, value):
        self._device_model = value

    @property
    def product_type(self):
        return self._product_type

    @product_type.setter
    def product_type(self, value):
        self._product_type = value

    @property
    def security_solution(self):
        return self._security_solution

    @security_solution.setter
    def security_solution(self, value):
        self._security_solution = value

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        self._status = value

    def parse_response_content(self, response_content):
        """Copy every known field present in the decoded response onto self."""
        response = super(AlipayPayAppSmartwearStatusQueryResponse, self).parse_response_content(response_content)
        for field in self._FIELDS:
            if field in response:
                setattr(self, field, response[field])
| 29.642857 | 113 | 0.683735 | 178 | 1,660 | 6.078652 | 0.207865 | 0.091497 | 0.055453 | 0.033272 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000786 | 0.233735 | 1,660 | 55 | 114 | 30.181818 | 0.849843 | 0.025301 | 0 | 0.093023 | 0 | 0 | 0.058168 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.232558 | false | 0 | 0.046512 | 0.093023 | 0.395349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c03af92ab300c1e67a6c327adc3b89e2c185616 | 36,983 | py | Python | SAM.py | Tks4Fish/SAM | 2a3a3177fd8eedad5f5728b83330ea3b86ee36dd | [
"MIT"
] | null | null | null | SAM.py | Tks4Fish/SAM | 2a3a3177fd8eedad5f5728b83330ea3b86ee36dd | [
"MIT"
] | null | null | null | SAM.py | Tks4Fish/SAM | 2a3a3177fd8eedad5f5728b83330ea3b86ee36dd | [
"MIT"
] | null | null | null | import requests
import sqlite3
import json
import re
from requests_oauthlib import OAuth1
from sopel import module
SAM_DB = "/home/ubuntu/.sopel/modules/SAM.db"
CONTACT_OP = "You are not configured. Please contact Operator873."
def addtomemory(user, payload):
    """Store *payload* in *user*'s memory list.

    Returns {'status': 'Success'|'Failure', 'data': human-readable text}.
    Duplicate payloads are reported as already present (still Success).
    """
    result = {}
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    # SECURITY FIX: parameterized queries; the original interpolated
    # user-supplied text straight into the SQL string.
    check = c.execute('SELECT * FROM memory WHERE user=? AND payload=?;',
                      (user, payload)).fetchall()
    if len(check) == 0:
        try:
            c.execute('INSERT INTO memory VALUES(?, ?);', (user, payload))
            db.commit()
            result['status'] = "Success"
            result['data'] = "'" + payload + "' saved"
        except Exception as e:
            result['status'] = "Failure"
            result['data'] = str(e)
    else:
        result['status'] = "Success"
        result['data'] = "'" + payload + "' is already in memory."
    db.close()
    return result
def getfrommemory(user):
    """Fetch all memory rows for *user*.

    Returns {'status': 'Success', 'data': list of (payload,) tuples} or
    {'status': 'Failure', 'data': error text}.
    """
    result = {}
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    try:
        # SECURITY FIX: parameterized query instead of string interpolation.
        result['data'] = c.execute('SELECT payload FROM memory WHERE user=?;',
                                   (user,)).fetchall()
        result['status'] = "Success"
    except Exception as e:
        result['status'] = "Failure"
        result['data'] = str(e)
    db.close()
    return result
def delfrommemory(user, payload):
    """Remove *payload* from *user*'s memory list.

    Returns {'status': 'Success'|'Failure', 'data': human-readable text};
    Failure means the payload was not stored for this user.
    """
    result = {}
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    # SECURITY FIX: parameterized queries instead of string interpolation.
    check = c.execute('SELECT * FROM memory WHERE user=? AND payload=?;',
                      (user, payload)).fetchall()
    if len(check) > 0:
        c.execute('DELETE FROM memory WHERE user=? AND payload=?;', (user, payload))
        db.commit()
        result['status'] = "Success"
        result['data'] = "'" + payload + "' removed from memory."
    else:
        result['status'] = "Failure"
        result['data'] = "'" + payload + "' is not currently in memory for " + user + "."
    db.close()
    return result
def clearmemory(user):
    """Delete every memory row belonging to *user*.

    Returns {'status': 'Success'|'Failure', 'data': human-readable text}.
    """
    result = {}
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    try:
        # SECURITY FIX: parameterized query instead of string interpolation.
        c.execute('DELETE FROM memory WHERE user=?;', (user,))
        db.commit()
        result['status'] = "Success"
        result['data'] = "Memory Cleared."
    except Exception as e:
        result['status'] = "Failure"
        result['data'] = str(e)
    db.close()
    return result
def xmit(site, creds, payload, method):
    """Send *payload* to the MediaWiki API at *site* and return parsed JSON.

    *creds* is an auth table row; columns 1-4 hold the OAuth1 key material.
    *method* selects "post" (form data) or "get" (query params).
    """
    auth = OAuth1(creds[1], creds[2], creds[3], creds[4])
    if method == "post":
        return requests.post(site, data=payload, auth=auth).json()
    if method == "get":
        return requests.get(site, params=payload, auth=auth).json()
def getWiki(project):
    """Return the stored API URL for *project*, or None if it is unknown.

    BUG FIX: the original subscripted fetchone()[0] before checking for a
    missing row, so an unknown wiki raised TypeError instead of returning
    None.  Also switches to a parameterized query (SQL-injection fix).
    """
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    row = c.execute('SELECT apiurl FROM wikis WHERE wiki=?;', (project,)).fetchone()
    db.close()
    if row is None:
        return None
    return row[0]
def getCSRF(bot, site, creds, type):
    """Request a MediaWiki token of kind *type* (e.g. "csrf",
    "setglobalaccountstatus") from *site*.

    Announces the API error via *bot* and returns False on failure;
    otherwise returns the token string.
    """
    reply = xmit(site, creds, {
        'action': "query",
        'meta': "tokens",
        'format': "json",
        'type': type,
    }, "get")
    if 'error' in reply:
        bot.say(reply['error']['info'])
        return False
    return reply['query']['tokens']['%stoken' % type]
def getCreds(name):
    """Return the full auth table row for IRC account *name*, or None
    when the account is not configured.

    BUG FIX: the original indexed fetchall()[0] before the None check, so
    an unknown account raised IndexError instead of returning None (which
    callers explicitly test for).  Also uses a parameterized query.
    """
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    rows = c.execute('SELECT * from auth where account=?;', (name,)).fetchall()
    db.close()
    if rows:
        return rows[0]
    return None
def doBlock(bot, name, project, target, until, reason):
    """Block *target* on *project* via the MediaWiki API, acting as the
    configured IRC account *name*; report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    # MediaWiki spells indefinite expiry as "never".
    if until == "indef" or until == "forever":
        until = "never"
    reqBlock = {
        "action": "block",
        "user": target,
        "expiry": until,
        "reason": reason,
        "token": csrfToken,
        "allowusertalk": "",
        "nocreate": "",
        "autoblock": "",
        "format": "json",
    }
    # Send block request
    block = xmit(site, creds, reqBlock, "post")
    if 'error' in block:
        code = block['error']['code']
        if code == "badtoken":
            bot.say("Received CSRF token error. Try again...")
        elif code == "alreadyblocked":
            bot.say(target + " is already blocked. Use !reblock to change the current block.")
        elif code == "permissiondenied":
            bot.say("Received permission denied error. Are you a sysop on " + project + "?")
        elif code == "invalidexpiry":
            bot.say("The expiration time isn't valid. I understand things like 31hours, 1week, 6months, infinite, indefinite.")
        else:
            bot.say("Unhandled error: " + code + " " + block['error']['info'])
    elif 'block' in block:
        user = block['block']['user']
        expiry = block['block']['expiry']
        reason = block['block']['reason']
        bot.say(user + " was blocked until " + expiry + " with reason: " + reason)
    else:
        # BUG FIX: the original concatenated the response dict onto a str,
        # which raised TypeError instead of reporting the error.
        bot.say("Unknown error: " + str(block))
def doReblock(bot, name, project, target, until, reason):
    """Overwrite an existing block on *target* at *project* (MediaWiki
    "reblock"); report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    # MediaWiki spells indefinite expiry as "never".
    if until == "indef" or until == "forever":
        until = "never"
    # NOTE: the original dict listed "autoblock" twice; duplicates collapse
    # silently, so listing it once is identical at runtime.
    reqBlock = {
        "action": "block",
        "user": target,
        "expiry": until,
        "reason": reason,
        "token": csrfToken,
        "allowusertalk": "",
        "nocreate": "",
        "reblock": "",
        "autoblock": "",
        "format": "json",
    }
    # Send block request
    block = xmit(site, creds, reqBlock, "post")
    if 'error' in block:
        code = block['error']['code']
        if code == "badtoken":
            bot.say("Received CSRF token error. Try again...")
        elif code == "alreadyblocked":
            bot.say(target + " is already blocked. Use !reblock to change the current block.")
        elif code == "permissiondenied":
            bot.say("Received permission denied error. Are you a sysop on " + project + "?")
        else:
            bot.say("Unhandled error: " + block['error']['info'])
    elif 'block' in block:
        user = block['block']['user']
        expiry = block['block']['expiry']
        reason = block['block']['reason']
        bot.say(user + " was blocked until " + expiry + " with reason: " + reason)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unknown error: " + str(block))
def doGlobalblock(bot, name, target, until, reason):
    """Globally block *target* (usually an IP) via metawiki's globalblock
    API, also applying a local block; report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki("metawiki")
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    # MediaWiki spells indefinite expiry as "never".
    if until == "indef" or until == "forever":
        until = "never"
    block = {
        "action": "globalblock",
        "format": "json",
        "target": target,
        "expiry": until,
        "reason": reason,
        "alsolocal": True,
        "token": csrfToken,
    }
    # Send block request
    gblock = xmit(site, creds, block, "post")
    if 'error' in gblock:
        failure = gblock['error']['info']
        bot.say("Block failed! " + failure)
    elif 'block' in gblock or 'globalblock' in gblock:
        # NOTE(review): assumes a 'globalblock' payload is present whenever
        # either key is — confirm against the API response shape.
        user = gblock['globalblock']['user']
        expiry = gblock['globalblock']['expiry']
        bot.say("Block succeeded. " + user + " was blocked until " + expiry)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unknown failure... " + str(gblock))
def doLock(bot, name, target, reason):
    """Globally lock the account *target* via metawiki's
    setglobalaccountstatus API; report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki("metawiki")
    if site is None:
        bot.say("I don't know that wiki.")
        return
    token = getCSRF(bot, site, creds, "setglobalaccountstatus")
    if token is False:
        return
    payload = {
        "action": "setglobalaccountstatus",
        "format": "json",
        "user": target,
        "locked": "lock",
        "reason": reason,
        "token": token,
    }
    # Fire the lock request and relay the result.
    reply = xmit(site, creds, payload, "post")
    if 'error' in reply:
        bot.say("lock failed! " + reply['error']['info'])
    else:
        bot.say(target + " locked.")
def doUnlock(bot, name, target, reason):
    """Remove a global lock from *target* via metawiki's
    setglobalaccountstatus API; report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki("metawiki")
    if site is None:
        bot.say("I don't know that wiki.")
        return
    token = getCSRF(bot, site, creds, "setglobalaccountstatus")
    if token is False:
        return
    payload = {
        "action": "setglobalaccountstatus",
        "format": "json",
        "user": target,
        "locked": "unlock",
        "reason": reason,
        "token": token,
    }
    # Fire the unlock request and relay the result.
    reply = xmit(site, creds, payload, "post")
    if 'error' in reply:
        bot.say("Unlock failed! " + reply['error']['info'])
    else:
        bot.say("Unlock succeeded. ")
def dorevokeTPA(bot, name, project, target, until, reason):
    """Reblock *target* on *project* with talk-page access and email
    revoked (omits allowusertalk, adds noemail); report via *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    # MediaWiki spells indefinite expiry as "never".
    if until == "indef" or until == "forever":
        until = "never"
    reqBlock = {
        "action": "block",
        "user": target,
        "expiry": until,
        "reason": reason,
        "token": csrfToken,
        "noemail": "",
        "nocreate": "",
        "reblock": "",
        "autoblock": "",
        "format": "json",
    }
    # Send block request
    block = xmit(site, creds, reqBlock, "post")
    if 'error' in block:
        code = block['error']['code']
        if code == "badtoken":
            bot.say("Received CSRF token error. Try again...")
        elif code == "alreadyblocked":
            bot.say(target + " is already blocked. Use !reblock to change the current block.")
        elif code == "permissiondenied":
            bot.say("Received permission denied error. Are you a sysop on " + project + "?")
        else:
            bot.say("Unhandled error: " + block['error']['info'])
    elif 'block' in block:
        user = block['block']['user']
        expiry = block['block']['expiry']
        reason = block['block']['reason']
        bot.say(user + " was blocked until " + expiry + " with reason: " + reason)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unknown error: " + str(block))
def doltaBlock(bot, name, project, target):
    """Apply the canned LTA block (1 week, "LTA / Block evasion", email and
    talk page revoked) to *target* on *project*; report via *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    reqBlock = {
        "action": "block",
        "user": target,
        "expiry": "1week",
        "reason": "LTA / Block evasion",
        "token": csrfToken,
        "noemail": "",
        "nocreate": "",
        "reblock": "",
        "autoblock": "",
        "format": "json",
    }
    # Send block request
    block = xmit(site, creds, reqBlock, "post")
    if 'error' in block:
        code = block['error']['code']
        if code == "badtoken":
            bot.say("Received CSRF token error. Try again...")
        elif code == "alreadyblocked":
            bot.say(target + " is already blocked. Use !reblock to change the current block.")
        elif code == "permissiondenied":
            bot.say("Received permission denied error. Are you a sysop on " + project + "?")
        else:
            bot.say("Unhandled error: " + block['error']['info'])
    elif 'block' in block:
        user = block['block']['user']
        expiry = block['block']['expiry']
        reason = block['block']['reason']
        bot.say(user + " was blocked until " + expiry + " with reason: " + reason)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unknown error: " + str(block))
def doSoftblock(bot, name, project, target, until, reason):
    """Soft-block *target* on *project*: talk access kept, account creation
    and autoblock NOT suppressed; report the outcome through *bot*.
    """
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    # MediaWiki spells indefinite expiry as "never".
    if until == "indef" or until == "forever":
        until = "never"
    reqBlock = {
        "action": "block",
        "user": target,
        "expiry": until,
        "reason": reason,
        "token": csrfToken,
        "allowusertalk": "",
        "format": "json",
    }
    # Send block request
    block = xmit(site, creds, reqBlock, "post")
    if 'error' in block:
        code = block['error']['code']
        if code == "badtoken":
            bot.say("Received CSRF token error. Try again...")
        elif code == "alreadyblocked":
            bot.say(target + " is already blocked. Use !reblock to change the current block.")
        elif code == "permissiondenied":
            bot.say("Received permission denied error. Are you a sysop on " + project + "?")
        else:
            bot.say("Unhandled error: " + block['error']['info'])
    elif 'block' in block:
        user = block['block']['user']
        expiry = block['block']['expiry']
        reason = block['block']['reason']
        bot.say(user + " was blocked until " + expiry + " with reason: " + reason)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unknown error: " + str(block))
def doUnblock(bot, name, project, target, reason):
    """Unblock *target* on *project* with *reason*; report via *bot*."""
    creds = getCreds(name)
    if creds is None:
        bot.say(CONTACT_OP)
        return
    site = getWiki(project)
    if site is None:
        bot.say("I don't know that wiki.")
        return
    csrfToken = getCSRF(bot, site, creds, "csrf")
    if csrfToken is False:
        return
    reqBlock = {
        "action": "unblock",
        "user": target,
        "reason": reason,
        "token": csrfToken,
        "format": "json",
    }
    # Send unblock request
    unblock = xmit(site, creds, reqBlock, "post")
    if 'error' in unblock:
        bot.say(unblock['error']['info'])
    elif 'unblock' in unblock:
        user = unblock['unblock']['user']
        reason = unblock['unblock']['reason']
        bot.say(user + " was unblocked with reason: " + reason)
    else:
        # BUG FIX: str() the dict; str + dict raised TypeError.
        bot.say("Unhandled error: " + str(unblock))
def addUser(bot, name):
    """Register IRC account *name* in the auth table with empty OAuth key
    slots (owner-only; keys are filled in later via !tokens).
    """
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    # SECURITY FIX: parameterized queries instead of string interpolation.
    check = c.execute('SELECT * FROM auth WHERE account=?;', (name,)).fetchall()
    if len(check) != 0:
        bot.say("User already exists!")
        db.close()
        return
    c.execute('INSERT INTO auth VALUES(?, NULL, NULL, NULL, NULL);', (name,))
    db.commit()
    db.close()
    bot.say("User added.")
def remUser(bot, name):
    """Remove IRC account *name* from the auth table (owner-only)."""
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    # SECURITY FIX: parameterized queries instead of string interpolation.
    check = c.execute('SELECT * FROM auth WHERE account=?;', (name,)).fetchall()
    if len(check) == 0:
        bot.say("User does not exist!")
        db.close()
    else:
        c.execute('DELETE FROM auth WHERE account=?;', (name,))
        db.commit()
        db.close()
        bot.say("User deleted.")
def addKeys(bot, name, info):
    """Store the four OAuth credentials for *name*.

    *info* is "consumer_token consumer_secret access_token access_secret".
    The account must already exist in the auth table (see addUser).

    BUG FIX: the original reported a bad split but kept going, then used
    the undefined token names; now it returns early.  Also switches to
    parameterized SQL (injection fix).
    """
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    try:
        c_token, c_secret, a_token, a_secret = info.split(" ")
    except Exception as e:
        bot.say(str(e))
        db.close()
        return
    check = c.execute('SELECT * FROM auth WHERE account=?;', (name,)).fetchall()
    if len(check) == 0:
        bot.say("You are not approved to add tokens. Contact Operator873.")
        db.close()
        return
    try:
        c.execute('UPDATE auth SET consumer_token=?, consumer_secret=?, '
                  'access_token=?, access_secret=? WHERE account=?;',
                  (c_token, c_secret, a_token, a_secret, name))
        bot.say("Keys added.")
    except Exception as e:
        bot.say(str(e))
    finally:
        db.commit()
        db.close()
def processinfo(info):
    """Parse IRC command text of the form "target k=value k=value ...".

    The leading free text (the target) is exposed under key 'a'; each
    single-letter "x=" introduces a new value.  Values are stripped, and a
    'd' (duration) value gets a space inserted between the number and the
    unit, e.g. "31hours" -> "31 hours".
    """
    tokens = re.split(r"(\w)=", "a=" + info)[1:]
    data = {}
    # tokens alternates key, value, key, value, ...
    for idx in range(0, len(tokens), 2):
        data[tokens[idx]] = tokens[idx + 1].strip()
    if 'd' in data:
        spaced = re.sub(r"([0-9]+([0-9]+)?)", r" \1 ", data['d'])
        data['d'] = re.sub(' +', ' ', spaced).strip()
    return data
@module.commands('testblock')
@module.nickname_commands('testblock')
def commandtestBlock(bot, trigger):
    """Dry run of !block: reports what would be done without acting.

    Syntax: !testblock <target account> p=project d=duration r=reason
    """
    args = processinfo(trigger.group(2))
    if len(args) < 4:
        bot.say("Command missing arguements: !block <target account> p=project d=duration r=reason for block")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !block <target account> p=project d=duration r=reason for block")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    bot.say(target + " would be blocked on " + project + " for " + until + " with reason: " + reason)
@module.commands('block')
@module.nickname_commands('block')
def commandBlock(bot, trigger):
    """IRC front-end for doBlock.

    Syntax: !block <target account> p=project d=duration r=reason
    """
    args = processinfo(trigger.group(2))
    if len(args) < 4:
        bot.say("Command missing arguements: !block <target account> p=project d=duration r=reason for block")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !block <target account> p=project d=duration r=reason for block")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    doBlock(bot, trigger.account, project, target, until, reason)
@module.commands('lta')
@module.nickname_commands('lta')
def commandltablock(bot, trigger):
    """IRC front-end for doltaBlock (canned LTA block).

    Syntax: !lta Some Nick Here p=project
    """
    args = processinfo(trigger.group(2))
    if len(args) < 2:
        bot.say("Command missing arguements: !lta Some Nick Here p=project")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !lta Some Nick Here p=project")
        return
    try:
        project = args['p']
        target = args['a']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    doltaBlock(bot, trigger.account, project, target)
@module.commands('tpa')
@module.nickname_commands('tpa')
def commandRevoketpa(bot, trigger):
    """IRC front-end for dorevokeTPA (reblock with talk/email revoked).

    Syntax: !tpa <target account> p=project d=duration r=reason
    """
    args = processinfo(trigger.group(2))
    if len(args) < 4:
        bot.say("Command missing arguements: !tpa <target account> p=project d=duration r=reason for block")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !tpa <target account> p=project d=duration r=reason for block")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    dorevokeTPA(bot, trigger.account, project, target, until, reason)
@module.commands('reblock')
@module.nickname_commands('reblock')
def commandreBlock(bot, trigger):
    """IRC front-end for doReblock (overwrite an existing block).

    Syntax: !reblock <target account> p=project d=duration r=reason
    """
    args = processinfo(trigger.group(2))
    if len(args) < 4:
        bot.say("Command missing arguements: !reblock <target account> p=project d=duration r=reason for block")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !reblock <target account> p=project d=duration r=reason for block")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    doReblock(bot, trigger.account, project, target, until, reason)
@module.commands('proxyblock')
@module.nickname_commands('proxyblock')
def commandproxyBlock(bot, trigger):
    """Reblock a target as an open proxy with the canonical NOP reason.

    Syntax: !proxyblock Some Nick Here p=project d=duration
    """
    args = processinfo(trigger.group(2))
    if len(args) < 3:
        bot.say("Command missing arguements: !proxyblock Some Nick Here p=project d=duration")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !proxyblock Some Nick Here p=project d=duration")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    doReblock(bot, trigger.account, project, target, until, "[[m:NOP|Open proxy]]")
@module.commands('gblock')
@module.nickname_commands('gblock')
def commandglobalBlock(bot, trigger):
    """IRC front-end for doGlobalblock.

    Syntax: !gblock Some IP Here d=duration r=reason

    BUG FIX: the original also read data['p'], which the documented syntax
    never supplies, so every well-formed command raised KeyError before
    any block was attempted.
    """
    data = processinfo(trigger.group(2))
    if len(data) < 3:
        bot.say("Command missing arguements: !gblock Some IP Here d=duration r=reason")
        return
    elif data['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !gblock Some IP Here d=duration r=reason")
        return
    else:
        try:
            target = data['a']
            until = data['d']
            reason = data['r']
        except Exception as e:
            bot.say("Error! " + str(e))
            return
    # Expand shorthand reasons to canonical wording.
    if reason == "proxy":
        reason = "[[m:NOP|Open proxy]]"
    elif reason == "LTA" or reason == "lta":
        reason = "Long term abuse"
    elif reason == "spam":
        reason = "Cross wiki spam"
    elif reason == "abuse":
        reason = "Cross wiki abuse"
    doGlobalblock(bot, trigger.account, target, until, reason)
@module.commands('lock')
@module.nickname_commands('lock')
def commandLock(bot, trigger):
    """IRC front-end for doLock (global account lock).

    Syntax: !lock Some Account r=reason
    """
    args = processinfo(trigger.group(2))
    if len(args) < 2:
        bot.say("Command missing arguements: !lock Some Account r=reason")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !lock Some Account r=reason")
        return
    try:
        target = args['a']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    # Expand shorthand reasons to canonical wording; unknown text passes
    # through unchanged.
    shorthands = {
        "proxy": "[[m:NOP|Open proxy]]",
        "LTA": "Long term abuse",
        "lta": "Long term abuse",
        "spam": "Cross wiki spam",
        "abuse": "Cross wiki abuse",
        "banned": "Globally banned user",
        "banned user": "Globally banned user",
    }
    reason = shorthands.get(reason, reason)
    doLock(bot, trigger.account, target, reason)
@module.commands('unlock')
@module.nickname_commands('unlock')
def commandUnlock(bot, trigger):
    """Unlock a global account with the generic reason "Unlock".

    Syntax: !unlock Some Account
    """
    doUnlock(bot, trigger.account, trigger.group(2), "Unlock")
@module.commands('softblock')
@module.nickname_commands('softblock')
def commandSoftblock(bot, trigger):
    """IRC front-end for doSoftblock.

    Syntax: !softblock Some Nick Here p=project d=duration r=Some reason here.
    """
    args = processinfo(trigger.group(2))
    if len(args) < 4:
        bot.say("Command missing arguements: !softblock Some Nick Here p=project d=duration r=Some reason here.")
        return
    if args['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !softblock Some Nick Here p=project d=duration r=Some reason here.")
        return
    try:
        project = args['p']
        target = args['a']
        until = args['d']
        reason = args['r']
    except Exception as e:
        bot.say("Error! " + str(e))
        return
    doSoftblock(bot, trigger.account, project, target, until, reason)
@module.commands('unblock')
@module.nickname_commands('unblock')
def commandUnblock(bot, trigger):
    """IRC front-end for doUnblock.

    Syntax: !unblock Some Account Here p=project r=reason

    BUG FIX: the original required len(data) >= 4, but the documented
    syntax yields only three fields (a, p, r), so every valid command was
    rejected as incomplete.
    """
    data = processinfo(trigger.group(2))
    if len(data) < 3:
        bot.say("Command missing arguements: !unblock Some Account Here p=project r=reason")
        return
    elif data['a'] == '':
        bot.say("Target of block must go first or be indicated with 'a=target account'. !unblock Some Account Here p=project r=reason")
        return
    else:
        try:
            project = data['p']
            target = data['a']
            reason = data['r']
        except Exception as e:
            bot.say("Error! " + str(e))
            return
    doUnblock(bot, trigger.account, project, target, reason)
@module.require_owner(message="This function is only available to Operator873.")
@module.commands('addUser')
@module.nickname_commands('addUser')
def commandAdd(bot, trigger):
    """Owner-only wrapper: register a new bot operator account."""
    addUser(bot, trigger.group(2))
@module.require_owner(message="This function is only available to Operator873.")
@module.commands('remUser')
@module.nickname_commands('remUser')
def commandRem(bot, trigger):
    """Owner-only wrapper: remove a bot operator account."""
    remUser(bot, trigger.group(2))
@module.require_privmsg(message="This function must be used in PM.")
@module.commands('tokens')
@module.nickname_commands('tokens')
def commandTokens(bot, trigger):
    """PM-only wrapper: store the caller's four OAuth keys."""
    addKeys(bot, trigger.account, trigger.group(2))
@module.commands('getapi')
def getAPI(bot, trigger):
    """Report the stored API URL for a project.

    Syntax: !getapi <project>
    SECURITY FIX: parameterized query instead of interpolating user input.
    """
    wiki = str(trigger.group(3))
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    check = c.execute('SELECT apiurl FROM wikis WHERE wiki=?;', (wiki,)).fetchone()
    db.close()
    if check is not None:
        bot.say(check[0])
    else:
        bot.say("I don't know " + wiki + ". You can add it with !addapi <project> <api url>")
@module.commands('addapi')
def addapi(bot, trigger):
    """Register a wiki's API endpoint.

    Syntax: !addapi <project> <api url>
    SECURITY FIX: parameterized queries; also narrows the bare except.
    """
    try:
        wiki, apiurl = trigger.group(2).split(' ', 1)
    except Exception:
        # Covers both a missing argument (None) and a single-word argument.
        bot.say("Malformed command. Syntax is '!addapi <project> <api url>")
        return
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    check = c.execute('SELECT * FROM wikis WHERE wiki=?;', (wiki,)).fetchone()
    if check is not None:
        bot.say("I already know " + wiki + ". The api url is " + check[1])
    else:
        c.execute('INSERT INTO wikis VALUES(?, ?);', (wiki, apiurl))
        db.commit()
        bot.say(wiki + " was added with url: " + apiurl)
    db.close()
@module.require_owner(message="This function is only available to Operator873.")
@module.commands('delapi')
def delapi(bot, trigger):
    """Remove a wiki from the database (owner-only).

    Syntax: !delapi <project>
    SECURITY FIX: parameterized queries instead of string interpolation.
    """
    wiki = trigger.group(3)
    db = sqlite3.connect(SAM_DB)
    c = db.cursor()
    check = c.execute('SELECT * FROM wikis WHERE wiki=?;', (wiki,)).fetchone()
    if check is None:
        bot.say(wiki + " doesn't exist in the database.")
    else:
        c.execute('DELETE FROM wikis WHERE wiki=?;', (wiki,))
        db.commit()
        bot.say(wiki + " was removed from the database.")
    db.close()
@module.commands('whoami')
def whoami(bot, trigger):
    """Echo the caller's nick and services account back to them."""
    bot.say("You are %s using Freenode account: %s." % (trigger.nick, trigger.account))
@module.commands('memadd')
def memadd(bot, trigger):
    """Add an item to the caller's memory store."""
    outcome = addtomemory(trigger.account, trigger.group(2))
    if outcome['status'] != "Success":
        bot.say("Operator873 something blew up! " + outcome['data'])
    else:
        bot.say(outcome['data'])
@module.commands('memclear')
def memclear(bot, trigger):
    """Wipe the caller's memory store."""
    outcome = clearmemory(trigger.account)
    if outcome['status'] != "Success":
        bot.say("Operator873 something blew up! " + outcome['data'])
    else:
        bot.say(outcome['data'])
@module.commands('memdel')
def memdel(bot, trigger):
    """Remove one item from the caller's memory store."""
    outcome = delfrommemory(trigger.account, trigger.group(2))
    if outcome['status'] != "Success":
        bot.say("Operator873 something blew up! " + outcome['data'])
    else:
        bot.say(outcome['data'])
@module.commands('memshow')
def memshow(bot, trigger):
    """List everything in the caller's memory store."""
    payload = getfrommemory(trigger.account)
    if payload['status'] != "Success":
        bot.say("An error occured fetching memory items. Ping Operator873")
        bot.say(payload['data'])
        return
    # Rows come back as one-element tuples; join their first columns.
    items = [entry[0] for entry in payload['data']]
    if items:
        bot.say("Items currently in memory: " + ", ".join(items))
    else:
        bot.say("It doesn't appear you have anything stored in memory.")
# Shorthand reasons expanded to their full canonical wiki text.  Used by the
# 'lock' and 'gblock' actions; an unknown reason is passed through unchanged.
_REASON_SHORTCUTS = {
    "proxy": "[[m:NOP|Open proxy]]",
    "lta": "Long term abuse",
    "spam": "Cross wiki spam",
    "abuse": "Cross wiki abuse",
    "banned": "Globally banned user",
    "banned user": "Globally banned user",
}


def _expand_reason(reason):
    """Expand a shorthand reason ('proxy', 'lta', ...) to its full text."""
    return _REASON_SHORTCUTS.get(reason.lower(), reason)


@module.commands('memory')
def domemory(bot, trigger):
    """Apply an action to every target stored in the caller's memory.

    Syntax: !memory <action> a=account p=project d=duration r=reason
    Known actions: lock, block, lta, gblock, test.
    """
    try:
        action, info = trigger.group(2).split(" ", 1)
    except (AttributeError, ValueError):
        # group(2) missing entirely, or no space to split on.
        bot.say("Missing data. Syntax is !memory <action> <optional args>")
        return
    dump = getfrommemory(trigger.account)
    data = processinfo(info)
    if len(dump['data']) == 0:
        bot.say("It doesn't appear I have anything in memory to act on for you.")
        return
    action = action.lower()
    if action == "lock":
        # !memory lock r=reason
        try:
            reason = _expand_reason(data['r'])
        except (KeyError, TypeError):
            bot.say("Malformed command. Syntax is !memory lock r=reason")
            return
        for item in dump['data']:
            doLock(bot, trigger.account, item[0], reason.strip())
        clearmemory(trigger.account)
    elif action == "block":
        # !memory block p=project d=duration r=reason
        try:
            reason = data['r']
            until = data['d']
            project = data['p']
        except (KeyError, TypeError):
            bot.say("Malformed command. Syntax is !memory block p=project d=duration r=reason")
            return
        for item in dump['data']:
            doBlock(bot, trigger.account, project.lower(), item[0], until, reason)
        clearmemory(trigger.account)
    elif action == "lta":
        # !memory lta p=project
        try:
            project = data['p']
        except (KeyError, TypeError):
            bot.say("Malformed command. Syntax is !memory lta p=project")
            return
        for item in dump['data']:
            doltaBlock(bot, trigger.account, project, item[0])
        clearmemory(trigger.account)
    elif action == "gblock":
        # !memory gblock d=duration r=reason
        try:
            # BUG FIX: the original read data['p'] here and then passed an
            # undefined name 'until' to doGlobalblock, raising NameError.
            # A global block needs a duration (d=), matching the syntax hint.
            until = data['d']
            reason = _expand_reason(data['r'])
        except (KeyError, TypeError):
            bot.say("Malformed command. Syntax is !memory gblock d=duration r=reason")
            return
        for item in dump['data']:
            doGlobalblock(bot, trigger.account, item[0], until, reason)
        clearmemory(trigger.account)
    elif action == "test":
        # !memory test p=project d=duration r=reason  (dry run; keeps memory)
        try:
            reason = data['r']
            until = data['d']
            project = data['p']
        except (KeyError, TypeError):
            bot.say("Malformed command. Syntax is !memory test p=project d=duration r=reason")
            return
        for item in dump['data']:
            bot.say(item[0] + " would be blocked on " + project + ". Length: " + until + ". Reason: " + reason)
        bot.say("I would clear memory now, but I haven't for testing.")
    else:
        bot.say("Error! I currently know lock, block, lta, and gblock. Ping Operator873 if additional command is needed.")
        bot.say("Your stored information has not been altered. Please try again.")
@module.commands('!samhelp')
def samhelp(bot, trigger):
    """Reply with a link to SAM's command documentation on GitHub."""
    bot.say("Commands are listed at https://github.com/Operator873/SAM")
| 31.394737 | 193 | 0.531785 | 4,135 | 36,983 | 4.743168 | 0.085127 | 0.039464 | 0.012747 | 0.019936 | 0.725947 | 0.695355 | 0.675266 | 0.654056 | 0.628053 | 0.604242 | 0 | 0.004883 | 0.335451 | 36,983 | 1,177 | 194 | 31.42141 | 0.79314 | 0.036774 | 0 | 0.697137 | 0 | 0.019824 | 0.259461 | 0.004156 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049559 | false | 0.004405 | 0.006608 | 0 | 0.143172 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c0492449332d3db99f66e60d23603daa7ad18fa | 1,717 | py | Python | tests/cli_snippets/test_cli_versioning.py | shalevy1/DataFS | 0d32c2b4e18d300a11b748a552f6adbc3dd8f59d | [
"MIT"
] | 7 | 2017-01-27T16:41:26.000Z | 2021-11-05T00:42:31.000Z | tests/cli_snippets/test_cli_versioning.py | ClimateImpactLab/DataFS | e37811d51e51c23f688909e59e74b67a8325ce27 | [
"MIT"
] | 522 | 2016-11-21T20:30:39.000Z | 2021-11-15T17:47:11.000Z | tests/cli_snippets/test_cli_versioning.py | shalevy1/DataFS | 0d32c2b4e18d300a11b748a552f6adbc3dd8f59d | [
"MIT"
] | 3 | 2017-01-17T03:09:28.000Z | 2019-06-28T21:08:40.000Z |
import pytest
import os
@pytest.mark.examples
@pytest.mark.cli_snippets
def test_cli_versioning_snippets(cli_validator):
    """Run the documented ``datafs`` versioning CLI snippets end to end.

    The raw string below is parsed/executed by the ``cli_validator``
    fixture; its contents must match the published docs, so only this
    docstring and comments were added.  NOTE(review): original in-string
    indentation could not be recovered from this copy — verify against the
    docs source before relying on doctest whitespace.
    """
    cli_validator(r'''
.. EXAMPLE-BLOCK-1-START
.. code-block:: bash
$ datafs create my_archive \
> --my_metadata_field 'useful metadata'
created versioned archive <DataArchive local://my_archive>
.. EXAMPLE-BLOCK-1-END
Snippet 2
.. EXAMPLE-BLOCK-2-START
.. code-block:: bash
$ datafs update my_archive --string \
> 'barba crescit caput nescit' # doctest: +NORMALIZE_WHITESPACE
uploaded data to <DataArchive local://my_archive>. new version 0.0.1
created.
.. EXAMPLE-BLOCK-2-END
Snippet 3
.. EXAMPLE-BLOCK-3-START
.. code-block:: bash
$ datafs update my_archive --bumpversion patch --string \
> 'Aliquando et insanire iucundum est' # doctest: +NORMALIZE_WHITESPACE
uploaded data to <DataArchive local://my_archive>. version bumped 0.0.1 -->
0.0.2.
$ datafs update my_archive --bumpversion minor --string \
> 'animum debes mutare non caelum' # doctest: +NORMALIZE_WHITESPACE
uploaded data to <DataArchive local://my_archive>. version bumped 0.0.2 -->
0.1.
.. EXAMPLE-BLOCK-3-END
Snippet 4
.. EXAMPLE-BLOCK-4-START
.. code-block:: bash
$ datafs versions my_archive
['0.0.1', '0.0.2', '0.1']
.. EXAMPLE-BLOCK-4-END
Snippet 5
.. EXAMPLE-BLOCK-5-START
.. code-block:: bash
$ datafs download my_archive my_archive_versioned.txt --version 0.0.2
downloaded v0.0.2 to my_archive_versioned.txt
.. EXAMPLE-BLOCK-5-END
cleanup:
.. code-block:: bash
$ datafs delete my_archive
deleted archive <DataArchive local://my_archive>
''')
    # Remove the file the snippet downloaded so the test leaves no residue.
    os.remove('my_archive_versioned.txt')
| 19.292135 | 79 | 0.687828 | 237 | 1,717 | 4.860759 | 0.308017 | 0.117188 | 0.067708 | 0.098958 | 0.452257 | 0.296875 | 0.290799 | 0.263021 | 0.195313 | 0.195313 | 0 | 0.030043 | 0.185789 | 1,717 | 88 | 80 | 19.511364 | 0.793991 | 0 | 0 | 0.12 | 0 | 0 | 0.900932 | 0.13986 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02 | false | 0 | 0.04 | 0 | 0.06 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c067aec8b07feba720d2a779edaf34ba9724469 | 1,320 | py | Python | generateFunctions.py | yudasong/Reinforcement-Learning-Branch-and-Bound | 052a64425ca969f421a079dc62049fb68b4957c5 | [
"MIT"
] | 14 | 2020-01-03T13:11:36.000Z | 2022-01-14T04:13:34.000Z | generateFunctions.py | yudasong/Reinforcement-Learning-Branch-and-Bound | 052a64425ca969f421a079dc62049fb68b4957c5 | [
"MIT"
] | null | null | null | generateFunctions.py | yudasong/Reinforcement-Learning-Branch-and-Bound | 052a64425ca969f421a079dc62049fb68b4957c5 | [
"MIT"
] | 5 | 2020-02-24T09:23:50.000Z | 2021-10-10T10:33:50.000Z | #This file will generate functions in polynomials
import numpy as np
import random
import matplotlib.pyplot as plt
class generateFunctions():
    """Builds a random multivariate polynomial, rendered as a string.

    One polynomial factor is sampled per variable in ``x_vector``;
    coefficients are drawn uniformly from ``[rangeLow, rangeHigh)`` and the
    leading coefficient is forced to be non-zero.
    """

    def __init__(self, x_vector, high_degree_vector, rangeLow, rangeHigh):
        # Variable names, per-variable maximum degrees, coefficient range.
        self.x_vector = x_vector
        self.high_degree_vector = high_degree_vector
        self.rangeLow = rangeLow
        self.rangeHigh = rangeHigh
        # Accumulates output across calls to generate().
        self.functionString = ""

    def generate(self):
        """Sample a polynomial, append its text to functionString, return it."""
        # Non-zero candidates for a replacement leading coefficient.
        # (remove(0) intentionally raises if 0 is outside the range, matching
        # the original contract.)
        nonzero_choices = list(range(self.rangeLow, self.rangeHigh))
        nonzero_choices.remove(0)
        for var_idx in range(len(self.x_vector)):
            top_degree = self.high_degree_vector[var_idx]
            coeffs = np.random.randint(low=self.rangeLow, high=self.rangeHigh, size=(top_degree + 1))
            # The highest-order term must not vanish.
            if coeffs[0] == 0:
                coeffs[0] = random.choice(nonzero_choices)
            for pos in range(len(coeffs)):
                if coeffs[pos] == 0:
                    continue  # zero terms contribute nothing to the string
                degree = top_degree - pos
                term = str(coeffs[pos])
                if degree != 0:
                    term = term + "*" + self.x_vector[var_idx]
                    if degree != 1:
                        term = term + "^" + str(degree)
                if coeffs[pos] > 0:
                    term = "+" + term  # explicit plus sign between terms
                self.functionString = self.functionString + term
        return self.functionString
#p = generateFunctions()
#function = p.generate()
#print(function)
| 29.333333 | 86 | 0.692424 | 185 | 1,320 | 4.832432 | 0.378378 | 0.03915 | 0.049217 | 0.049217 | 0.045861 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009381 | 0.192424 | 1,320 | 44 | 87 | 30 | 0.829268 | 0.207576 | 0 | 0 | 1 | 0 | 0.002899 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.1 | 0 | 0.233333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c0db54dcf8f6e2ff5203af6d9e96256c8381459 | 1,833 | py | Python | portfolio_project/guestbook/models.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | [
"MIT"
] | null | null | null | portfolio_project/guestbook/models.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | [
"MIT"
] | 7 | 2021-03-19T03:41:33.000Z | 2022-03-12T00:30:35.000Z | portfolio_project/guestbook/models.py | KimEunYeol/web-portfolio | cd56191bdf12c712b252015d3ecb31b037f922ed | [
"MIT"
] | null | null | null | from django.db import models
from django.utils import timezone
from user.models import User
class GuestBook(models.Model):
    """A guestbook post written by a user, optionally carrying an image."""

    # Post author; DO_NOTHING leaves rows in place when the user is deleted.
    username = models.ForeignKey(User, models.DO_NOTHING, verbose_name='username')
    title = models.CharField(verbose_name='Title', max_length=64, blank=False)
    content = models.TextField(verbose_name='Content')
    # Optional attachment.
    image = models.ImageField(blank=True, null=True)
    # Creation/modification timestamps maintained automatically by Django.
    create_dt = models.DateTimeField(verbose_name='Create date', auto_now_add=True)
    modify_dt = models.DateTimeField(verbose_name='Modify date', auto_now=True)

    def __str__(self):
        # "[<id>] <title truncated to 40 chars>"
        return '[%d] %.40s' % (self.id, self.title)

    class Meta:
        verbose_name = 'guestbook'
        verbose_name_plural = 'guestbook'
        db_table = 'guestbook'
class Comment(models.Model):
    """A (possibly nested) comment attached to a GuestBook post."""

    guestbook_post = models.ForeignKey(GuestBook, models.DO_NOTHING)
    username = models.ForeignKey(User, models.DO_NOTHING, verbose_name='username')
    # Nesting level; presumably reference_comment_id points at the parent
    # comment — TODO confirm against the view code.
    level = models.IntegerField(blank=True, null=True)
    # Typo fix: verbose_name was 'Contnet' (admin-facing label only; a
    # migration may be generated for the verbose_name change).
    content = models.TextField(verbose_name='Content')
    reference_comment_id = models.IntegerField(blank=True, null=True)
    create_dt = models.DateTimeField(verbose_name='Create date', auto_now_add=True)
    modify_dt = models.DateTimeField(verbose_name='Modify date', auto_now=True)

    def __str__(self):
        # "[<post id>] <post title> - [<comment id>] <comment text>"
        return '[%d] %.40s - [%d] %.40s' % (self.guestbook_post.id, self.guestbook_post.title, self.id, self.content)

    class Meta:
        verbose_name = 'comment'
        verbose_name_plural = 'comment'
        db_table = 'comment'
class Like(models.Model):
    """A user's 'like' on a GuestBook post."""

    guestbook_post = models.ForeignKey(GuestBook, models.DO_NOTHING)
    username = models.ForeignKey(User, models.DO_NOTHING, verbose_name='username')

    def __str__(self):
        # BUG FIX: the format string was '[%d] %.40 - %s' — '%.40' has no
        # conversion type, so str() raised ValueError at runtime.  The title
        # should be formatted with '%.40s' like the sibling models.
        return '[%d] %.40s - %s' % (self.guestbook_post.id, self.guestbook_post.title, self.username.name)

    class Meta:
        verbose_name = 'like'
        verbose_name_plural = 'like'
        db_table = 'like'
5c1564960a1c44fb64e9e0e02b3642c32a4c08b0 | 6,348 | py | Python | library/gcloud_accessor/rest_library/shared/gcloud_rest_lib_base.py | anchitarnav/gcloud-resource-cleanup | a3b220f406529df43ffd5afa8adb929c718caba5 | [
"MIT"
] | null | null | null | library/gcloud_accessor/rest_library/shared/gcloud_rest_lib_base.py | anchitarnav/gcloud-resource-cleanup | a3b220f406529df43ffd5afa8adb929c718caba5 | [
"MIT"
] | 6 | 2020-04-29T09:09:48.000Z | 2021-04-30T21:13:57.000Z | library/gcloud_accessor/rest_library/shared/gcloud_rest_lib_base.py | anchitarnav/gcloud-resource-cleanup | a3b220f406529df43ffd5afa8adb929c718caba5 | [
"MIT"
] | null | null | null | from library.utilities.misc import parse_link, get_resource_type
from library.utilities.logger import get_logger
from google.auth.transport.requests import AuthorizedSession
from google.auth import default
from requests import codes, exceptions
import time
import re
from json import JSONDecodeError
class GcloudRestLibBase:
    """
    Expected to house all common functionality for Rest Client
    """

    # Seconds to sleep between operation-status polls and delete retries.
    operation_polling_time_sleep_secs = 5

    def __init__(self, project_id, **kwargs):
        # Add authentication check here
        # Add common object instantiation
        # TODO: fetch the default project from the APPLICATION CREDENTIALS JSON
        self.project_id = project_id
        # Application-default credentials with full cloud-platform scope.
        self.credentials, self.default_project_id = default(scopes=['https://www.googleapis.com/auth/cloud-platform'])
        self.session = AuthorizedSession(self.credentials)
        self.logger = get_logger(__name__)

    def wait_for_operation(self, operation, max_timeout_mins=15):
        """Poll a GCP operation until it reports DONE or the timeout expires.

        :param operation: operation resource dict; must contain ``status``
            and ``selfLink``
        :param max_timeout_mins: maximum total polling time in minutes
        :return: True when the operation finished (a 404 while polling is
            treated as success); False on error or timeout
        """
        operation_status = operation['status']
        self.logger.debug('Beginning to poll for operation')
        operation_self_link = operation['selfLink']
        start_time = time.time()
        # Re-poll via the operation's /wait endpoint until DONE or timeout.
        while operation_status != 'DONE' and time.time() - start_time < max_timeout_mins * 60:
            self.logger.debug(f'Sleeping for {self.operation_polling_time_sleep_secs} secs before polling')
            time.sleep(self.operation_polling_time_sleep_secs)
            self.logger.debug("Making post call for operation status on wait endpoint ..")
            operation_response = self.session.post(operation_self_link + "/wait")
            self.logger.error(f'Recieved operation response: {operation_response.text}')
            if not operation_response.status_code == codes.ok:
                # 404 is apprehended as "target already gone" => success.
                if operation_response.status_code == codes.not_found:
                    self.logger.debug('Apprehending 404 not found as ')
                    return True
                self.logger.error(f'Error while polling for operation')
                return False
            operation = operation_response.json()
            operation_status = operation['status']
            self.logger.debug(operation)
            # An 'error' field in the operation body means it failed.
            error = operation.get('error')
            if error:
                self.logger.exception('Error while polling for operation: {}'.format(error))
                return False
        self.logger.debug(f"Final operation status: {operation}")
        return operation_status == 'DONE'

    def delete_self_link(self, self_link, delete_dependencies=True):
        """Delete the resource at ``self_link``, retrying up to 5 times.

        On a 400 resourceInUseByAnotherResource error the blocking
        dependency is parsed out of the error message and deleted first
        (recursively), then the delete is retried.
        NOTE(review): ``delete_dependencies`` is currently unused — the
        dependency cleanup always runs.
        """
        max_retries = 5
        count = 0
        self.logger.debug(f'Received request to delete: {self_link}')
        while count < max_retries:
            count += 1
            self.logger.debug(f"Attempt #{count}")
            if count > 1:
                self.logger.debug(f"Sleeping for {self.operation_polling_time_sleep_secs} secs before re-attempting")
                time.sleep(self.operation_polling_time_sleep_secs)
            del_response = self.session.delete(self_link)
            self.logger.info(del_response.status_code)
            # Apprehending 404 not_found as resource already deleted
            if del_response.status_code == codes.not_found:
                self.logger.info("Apprehending 404 as resource already deleted")
                return True
            # If response == 400, trying to check if it a resourceInUseByAnotherResource and resolve it
            if del_response.status_code == codes.bad_request:
                self.logger.debug("Bad Request on delete request. Trying to debug it .. ")
                self.logger.debug(f"Response text : {del_response.text}")
                try:
                    self.logger.debug("Decoding Error JSON")
                    response_json = del_response.json()
                    for error in response_json['error'].get('errors', []):
                        if error.get('reason', "") == "resourceInUseByAnotherResource" \
                                and ("used" in error.get("message", "") or "depend" in error.get("message", "")):
                            error_message = error['message']
                            # The blocking resource path is quoted at the end
                            # of the error message, e.g. '.../networks/foo'.
                            possible_dependency_search = re.search(pattern=r"'[0-9a-zA-Z_/-]+'$", string=error_message)
                            self.logger.debug('Using regex to figure out dependency')
                            if possible_dependency_search:
                                dependent_resource = possible_dependency_search.group()
                                # Rebuild a full self-link by prepending the
                                # shared URL prefix from the original link.
                                old = self_link.split('/')
                                new = dependent_resource.strip("'").split('/')
                                for i in range(old.index(new[0])):
                                    new.insert(i, old[i])
                                dependent_resource_self_link = '/'.join(new)
                                self.logger.info(f"Dependent resource self_link : {dependent_resource_self_link}")
                                self.logger.info("Attempting to delete it .. ")
                                self.delete_self_link(dependent_resource_self_link)
                            else:
                                self.logger.debug('Dependency could not be identified using regex')
                        else:
                            self.logger.debug('The error message is not known .. cant debug that')
                except ValueError:
                    self.logger.exception('ValueError while reading JSON from response body')
                    pass
                except KeyError:
                    pass
            # Checking if an operation object was returned
            try:
                response_json = del_response.json()
                if "operation" in response_json.get("kind", ""):
                    return self.wait_for_operation(operation=response_json)
            except ValueError:
                pass
            # Anything in 400 and 500 series
            try:
                del_response.raise_for_status()
            except exceptions.HTTPError as ex:
                self.logger.exception(del_response.text)
                raise ex
        return True
| 48.090909 | 119 | 0.594833 | 685 | 6,348 | 5.334307 | 0.277372 | 0.068418 | 0.061576 | 0.026273 | 0.216475 | 0.143131 | 0.108374 | 0.083744 | 0.038314 | 0.038314 | 0 | 0.006962 | 0.321204 | 6,348 | 131 | 120 | 48.458015 | 0.84103 | 0.088689 | 0 | 0.212121 | 0 | 0 | 0.192382 | 0.028831 | 0 | 0 | 0 | 0.015267 | 0 | 1 | 0.030303 | false | 0.030303 | 0.080808 | 0 | 0.20202 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c16ce96d31342333f8d62c67e0396edc0906f37 | 907 | py | Python | ObjectWrapper/GlyphsApp/UI/HTMLView.py | Mark2Mark/GlyphsSDK | 810d98d09b4fea083dfcfdd743f56f8b8810d881 | [
"Apache-2.0"
] | 58 | 2015-02-03T05:15:30.000Z | 2022-03-24T14:19:54.000Z | ObjectWrapper/GlyphsApp/UI/HTMLView.py | Mark2Mark/GlyphsSDK | 810d98d09b4fea083dfcfdd743f56f8b8810d881 | [
"Apache-2.0"
] | 54 | 2015-07-11T00:43:07.000Z | 2022-03-01T11:48:27.000Z | ObjectWrapper/GlyphsApp/UI/HTMLView.py | Mark2Mark/GlyphsSDK | 810d98d09b4fea083dfcfdd743f56f8b8810d881 | [
"Apache-2.0"
] | 35 | 2015-02-13T19:59:03.000Z | 2022-03-11T15:56:17.000Z | # -*- coding: utf-8 -*-
from __future__ import print_function
__all__ = ["HTMLView"]
from WebKit import WebView
from vanilla.vanillaBase import VanillaBaseObject
class HTMLView(VanillaBaseObject):
    """
    A vanilla-style view object that embeds a WebKit ``WebView`` for
    displaying HTML content.

    from vanilla import *
    from objectsGS import HTMLView

    class HTMLViewDemo(object):
        def __init__(self):
            self.title = "HTML View"
            self.w = FloatingWindow((600, 350), self.title)
            self.w.Preview = HTMLView((0, 0, 0, 0))
            self.w.Preview.setHTMLPath("https://www.glyphsapp.com")
            self.w.open()

    HTMLViewDemo()

    **posSize** Tuple of form *(left, top, width, height)* representing the position and size of the color well.
    """

    # Underlying Cocoa view class wrapped by this vanilla object.
    nsHTMLViewClass = WebView

    def __init__(self, posSize):
        self._setupView(self.nsHTMLViewClass, posSize)

    def setHTMLPath(self, path):
        """Load the given URL into the embedded web view (no-op if falsy)."""
        if path:
            self._nsObject.setMainFrameURL_(path)
5c282e74ecce40368a460659e8e3a9d347182392 | 1,313 | py | Python | Web/Member/FunctionForPI/RoundAdd.py | tratitude/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 1 | 2021-01-05T14:40:08.000Z | 2021-01-05T14:40:08.000Z | Web/Member/FunctionForPI/RoundAdd.py | fdmdkw/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 1 | 2021-10-19T08:05:06.000Z | 2021-10-19T08:05:06.000Z | Web/Member/FunctionForPI/RoundAdd.py | fdmdkw/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 2 | 2019-10-21T15:25:37.000Z | 2021-03-17T06:59:09.000Z | import time
import json
import requests
def AddRound(T_id, bid, leader, contract, N, E, W, S, vulnerable, result, declarer, Rnum, score):
    """Post one bridge round record to the web service.

    N/E/W/S are the four hands as dot-separated card strings; ``result``
    is the number of tricks taken by the declarer, ``Rnum`` the round
    number within the table, ``score`` the round score.
    """
    round_record = {
        'T_id': T_id,
        'bid': bid,
        'leader': leader,
        'contract': contract,
        'N': N,
        'E': E,
        'W': W,
        'S': S,
        'vulnerable': vulnerable,
        'result': result,     # declarer's tricks
        'declarer': declarer,
        'Rnum': Rnum,         # round number
        'score': score,
    }
    # BUG FIX: the original posted a hard-coded debug dictionary and silently
    # ignored every argument; send the record built from the parameters.
    data_out = json.dumps(round_record)  # encode to JSON
    r = requests.post('http://192.168.0.139:8000/Member/Json/', data=data_out)
    print(r.content)
# Data = json.loads(Data_out)
#print(Data['leader'])
AddRound(8,"1,d2a2s213s",'N',"3NT","s12f32sd12sad","d13refqewff","f4f23f32fdfs","231fef2f23d32d","None",9,"W",3,750) | 38.617647 | 374 | 0.609292 | 190 | 1,313 | 4.173684 | 0.594737 | 0.015132 | 0.015132 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083022 | 0.183549 | 1,313 | 34 | 375 | 38.617647 | 0.656716 | 0.159939 | 0 | 0 | 0 | 0.208333 | 0.388889 | 0.176685 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0.041667 | 0.125 | 0 | 0.166667 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c2fea83116630bc76f3571d183d0e91536cf935 | 420 | py | Python | sabueso/_private_tools/molecular_system/guess_form.py | dprada/sabueso | 14843cf3522b5b89db5b61c1541a7015f114dd53 | [
"MIT"
] | null | null | null | sabueso/_private_tools/molecular_system/guess_form.py | dprada/sabueso | 14843cf3522b5b89db5b61c1541a7015f114dd53 | [
"MIT"
] | 2 | 2022-01-31T21:22:17.000Z | 2022-02-04T20:20:12.000Z | sabueso/_private_tools/molecular_system/guess_form.py | dprada/sabueso | 14843cf3522b5b89db5b61c1541a7015f114dd53 | [
"MIT"
] | 1 | 2021-07-20T15:01:14.000Z | 2021-07-20T15:01:14.000Z | from sabueso.tools.string_pdb_text import is_pdb_text
from sabueso.tools.string_pdb_id import is_pdb_id
from sabueso.tools.string_uniprot_id import is_uniprot_id
def guess_form(string):
output = None
if is_pdb_text(string):
output = 'string:pdb_text'
elif is_pdb_id(string):
output = 'string:pdb_id'
elif is_uniprot_id(string):
output = 'string:uniprot_id'
return output
| 23.333333 | 57 | 0.728571 | 65 | 420 | 4.369231 | 0.276923 | 0.126761 | 0.169014 | 0.232394 | 0.176056 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.197619 | 420 | 17 | 58 | 24.705882 | 0.84273 | 0 | 0 | 0 | 0 | 0 | 0.107399 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.25 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c3acb7683f7efe48a90d90414bbddf6a6c1c411 | 13,182 | py | Python | Pipeline/Fitting.py | riccardomarin/FARM-ZOSR | 7d29469d7e1c08b4a1e5d13084435001f509bec3 | [
"Python-2.0",
"OLDAP-2.7"
] | 2 | 2019-09-16T19:46:33.000Z | 2021-04-11T14:51:37.000Z | Pipeline/Fitting.py | riccardomarin/FARM-ZOSR | 7d29469d7e1c08b4a1e5d13084435001f509bec3 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | Pipeline/Fitting.py | riccardomarin/FARM-ZOSR | 7d29469d7e1c08b4a1e5d13084435001f509bec3 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 26 13:47:15 2018
%%%%%
% Code for article:
% Marin, R. and Melzi, S. and Rodolà, E. and Castellani, U., High-Resolution Augmentation for Automatic Template-Based Matching of Human Models, 3DV 2019
% Github: https://github.com/riccardomarin/FARM-ZOSR
%%%%%
"""
import sys, copy
import pickle
import numpy as np
import chumpy as ch
from chumpy.ch import MatVecMult
from chumpy import Ch
import cv2
from scipy import spatial
import scipy.io as sio
sys.path.append("./py3d")
from py3d import *
import scipy
import pandas as pd
import glob, os
def loadObj(path):
    """Parse a Wavefront OBJ file into a py3d TriangleMesh.

    Reads v/vn/vt/f records; face indices are converted to 0-based.
    NOTE(review): Python 2 semantics — ``map`` returns a list here; the
    final ``reshape(-1, 3)`` assumes plain triangular 'f a b c' faces
    (single index per vertex) — verify for v/vt/vn-style OBJ files.
    """
    vertices = []
    normals = []
    texcoords = []
    faces = []
    for line in open(path, "r"):
        if line.startswith('#'): continue  # skip comments
        values = line.split()
        if not values: continue            # skip blank lines
        if values[0] == 'v':
            vertices.append(tuple(map(float, values[1:4])))
        elif values[0] == 'vn':
            normals.append(tuple(map(float, values[1:4])))
        elif values[0] == 'vt':
            texcoords.append(tuple(map(float, values[1:3])))
        elif values[0] == 'f':
            face = []
            for v in values[1:]:
                # Each vertex ref may be 'v', 'v/vt' or 'v/vt/vn'; empty
                # components become None, positive indices shift to 0-based.
                w = map(lambda x: int(x) if x else None, v.split('/'))
                w = map(lambda x: x-1 if x != None and x > 0 else x, w)
                face.append(tuple(w))
            faces.append(tuple(face))
    # Build the mesh object and precompute per-vertex normals.
    test = TriangleMesh();
    a=np.array(faces)
    a=a.reshape(-1,3)
    test.vertices = Vector3dVector(vertices)
    test.triangles = Vector3iVector(np.asarray(a))
    test.compute_vertex_normals()
    return test
def write_mesh_as_obj(fname, verts, faces):
    """Dump a triangle mesh to a Wavefront OBJ file.

    ``faces`` is a 0-based integer array; OBJ uses 1-based indices,
    hence the +1 shift below.
    """
    with open(fname, 'w') as out:
        for x, y, z in verts:
            out.write('v %f %f %f\n' % (x, y, z))
        for tri in faces + 1:  # Faces are 1-based, not 0-based in obj files
            out.write('f %d %d %d\n' % (tri[0], tri[1], tri[2]))
# Compact numeric printing for debugging output.
np.set_printoptions(precision=4,suppress=True)

"""
SMPL
"""

class Rodrigues(ch.Ch):
    """Differentiable chumpy wrapper around cv2.Rodrigues.

    Maps an axis-angle vector ``rt`` to its 3x3 rotation matrix; the
    derivative comes from cv2.Rodrigues' jacobian (transposed for chumpy).
    """
    dterms = 'rt'

    def compute_r(self):
        # Rotation matrix for the current axis-angle value.
        return cv2.Rodrigues(self.rt.r)[0]

    def compute_dr_wrt(self, wrt):
        # Jacobian of the rotation matrix w.r.t. the axis-angle parameters.
        if wrt is self.rt:
            return cv2.Rodrigues(self.rt.r)[1].T
# --- Load the SMPL body model and build the differentiable skinning graph ---
dd=pd.read_pickle('./basicModel_neutral_lbs_10_207_0_v1.0.0.pkl')
nposeparms = dd['kintree_table'].shape[1]*3  # 3 axis-angle params per joint
dd['trans'] = np.zeros(3)
dd['pose'] = np.zeros(nposeparms)
dd['betas'] = np.zeros(dd['shapedirs'].shape[-1])
# Wrap the model arrays as chumpy nodes so they are differentiable.
for s in ['v_template', 'weights', 'posedirs', 'pose', 'trans', 'betas', 'J']: #'shapedirs',
    if (s in dd) and not hasattr(dd[s], 'dterms'):
        dd[s] = ch.array(dd[s])
    else:
        print type(dd[s])
# Shape blend: template vertices deformed by the beta shape coefficients.
dd['v_shaped'] = dd['shapedirs'].dot(dd['betas'])+dd['v_template']
v_shaped = dd['v_shaped']
# Joint locations regressed from the shaped vertices.
J_tmpx = MatVecMult(dd['J_regressor'], v_shaped[:,0])
J_tmpy = MatVecMult(dd['J_regressor'], v_shaped[:,1])
J_tmpz = MatVecMult(dd['J_regressor'], v_shaped[:,2])
dd['J'] = ch.vstack((J_tmpx, J_tmpy, J_tmpz)).T
# Pose is flat (72,), so the first clause is always true and p is defined
# before use; the 'or' never evaluates p on the first pass.
if dd['pose'].ndim != 2 or p.shape[1] != 3:
    p = dd['pose'].reshape((-1,3))
p = p[1:]  # drop the global root rotation for the pose blendshapes
# Pose blendshapes driven by the rotation matrices minus identity.
c= ch.concatenate([(Rodrigues(pp)-ch.eye(3)).ravel() for pp in p]).ravel()
dd['v_posed'] = v_shaped + dd['posedirs'].dot(c)
args = {
    'pose': dd['pose'],
    'v': dd['v_posed'],
    'J': dd['J'],
    'weights': dd['weights'],
    'kintree_table': dd['kintree_table'],
    'xp': ch,
    'want_Jtr': True,
    'bs_style': dd['bs_style']
}
pose=args['pose']
J=args['J']
kintree_table=args['kintree_table']
xp=ch
# --- Forward kinematics: per-joint 4x4 world transforms along the tree ---
results = {}
pose2 = pose.reshape((-1,3))
id_to_col = {kintree_table[1,i] : i for i in range(kintree_table.shape[1])}
parent = {i : id_to_col[kintree_table[0,i]] for i in range(1, kintree_table.shape[1])}
if xp == ch:
    rodrigues = lambda x : Rodrigues(x)
else:
    import cv2
    rodrigues = lambda x : cv2.Rodrigues(x)[0]
# Append the homogeneous row [0 0 0 1] to a 3x4 matrix.
with_zeros = lambda x : xp.vstack((x, xp.array([[0.0, 0.0, 0.0, 1.0]])))
results[0] = with_zeros(xp.hstack((rodrigues(pose2[0,:]), J[0,:].reshape((3,1)))))
for i in range(1, kintree_table.shape[1]):
    results[i] = results[parent[i]].dot(with_zeros(xp.hstack((
        rodrigues(pose2[i,:]),
        ((J[i,:] - J[parent[i],:]).reshape((3,1)))
    ))))
pack = lambda x : xp.hstack([np.zeros((4, 3)), x.reshape((4,1))])
results = [results[i] for i in sorted(results.keys())]
results_global = results
# Subtract each joint's rest position so transforms act on rest-pose verts.
if True:
    results2 = [results[i] - (pack(
        results[i].dot(xp.concatenate( ( (J[i,:]), 0 ) )))
        ) for i in range(len(results))]
    results = results2
result = xp.dstack(results)
A=result               # relative joint transforms (4x4x24)
A_global=results_global
# --- Linear blend skinning of the posed vertices ---
T = A.dot(dd['weights'].T)
v=args['v']
rest_shape_h = xp.vstack((v.T, np.ones((1, v.shape[0]))))  # homogeneous verts
v =(T[:,0,:] * rest_shape_h[0, :].reshape((1, -1)) +
    T[:,1,:] * rest_shape_h[1, :].reshape((1, -1)) +
    T[:,2,:] * rest_shape_h[2, :].reshape((1, -1)) +
    T[:,3,:] * rest_shape_h[3, :].reshape((1, -1))).T
v = v[:,:3]
Jtr = xp.vstack([g[:3,3] for g in A_global])  # posed joint positions
result=v  # 'result' is the differentiable posed SMPL surface from here on
# Joints regressed from the posed surface.
J_tmpx2 = MatVecMult(dd['J_regressor'], result[:,0])
J_tmpy2 = MatVecMult(dd['J_regressor'], result[:,1])
J_tmpz2 = MatVecMult(dd['J_regressor'], result[:,2])
J_new = ch.vstack((J_tmpx2, J_tmpy2, J_tmpz2)).T
# Expose the model parameters (pose, betas, f, ...) on the chumpy outputs.
for k, m in dd.items():
    setattr(result, k, m)
    setattr(J_new, k, m)
# Reposed joint locations: rest joints pushed through the transforms.
rest_joints = xp.vstack((J.T, np.ones((1, J.shape[0]))))
T2=A.dot(np.eye(24).T)
JN =(T2[:,0,:] * rest_joints[0, :].reshape((1, -1)) +
    T2[:,1,:] * rest_joints[1, :].reshape((1, -1)) +
    T2[:,2,:] * rest_joints[2, :].reshape((1, -1)) +
    T2[:,3,:] * rest_joints[3, :].reshape((1, -1))).T
J_reposed=JN[:,:3]
setattr(J_reposed, 'pose', result.pose)
setattr(J_reposed, 'betas',result.betas)
flag=1;
os.chdir("../Results/")
while flag:
flag=0;
lista = glob.glob("*.obj");
for iters in range(0,len(lista)):
if os.path.exists('./'+directory+'/optimized2_'+lista[iters][5:-4]+'.obj'):
print(lista[iters][5:-4] + ' Already Done')
continue
else:
flag = 1;
if not(os.path.exists('./Res1/result_' + lista[iters][5:-4] +'.mat')):
print(lista[iters][5:-4] + ': CRITICAL ERROR, MISSIN RES2')
continue
a=sio.loadmat('./Res1/result_' + lista[iters][5:-4] +'.mat');
if not('C' in a):
print(lista[iters][5:-4] + ' Not elaborated yet')
continue
W_Joints=Ch(10);
W_FMP2P=Ch(0.1);
W_Landmarks=Ch(1); #6.5; #5
W_Norm_B=Ch(0.5); #0.1
W_Norm_T=Ch(1);
W_NN=Ch(1);
W_Head = Ch(1);
W_Hands= Ch(0);
result.betas[:]=np.zeros(10);
result.pose[:]= np.zeros(72);
print lista[iters]
Target = loadObj(lista[iters])
scale=Ch(1);
trans=ch.array([0,0,0]);
Tar_shift = Target.vertices+trans;
indexes=a['pF_lb2'].reshape(6890);
distances=Tar_shift[indexes-1]-result*scale;
(t)=ch.minimize(distances, x0 = [trans,result.pose[[0,1,2]]],
method = 'dogleg', callback = None,
options = {'maxiter': 50, 'e_3': .0001, 'disp': 1})
c_pre={};
if (W_Joints):
k=a['Joints_Target'];
j_to_consider = range(0,24)
J_distances = J_reposed[j_to_consider,:] - (k[j_to_consider,:]+trans);
c_pre['Joints']= J_distances*W_Joints;
if(W_FMP2P):
c_pre['FMP2P']= distances*W_FMP2P;
if(W_Norm_B):
c_pre['Norm_B']= ((result.betas)**2)*W_Norm_B;
if(W_Norm_T):
pose_res=result.pose.reshape(-1,3);
angles=ch.sum(ch.abs(pose_res)**2,axis=-1)**(1./2)
pesi = np.ones(24)*8/18
pesi[[0]] = np.ones(1)*[2]
pesi[[10, 11, 22, 23, 15]] = np.ones(5)*[2./18]
pesi[[6,3, 7,8]] = np.ones(4)*[5./18]
costo_T= (angles/(ch.pi*pesi))**12
c_pre['Norm_T']=costo_T*W_Norm_T;
if(W_Landmarks):
Tar_Land = Tar_shift[a['landmarks1'].reshape(5)-1];
SMPL_Land=result[a['landmarks2'].reshape(5)-1];
c_pre['Landmarks']= (SMPL_Land-Tar_Land)*W_Landmarks;
if(W_Head):
Tar_idx = Tar_shift[a['dato_idx'].reshape(len(a['dato_idx']))-1];
SMPL_idx =result[a['smpl_idx'].reshape(len(a['smpl_idx']))-1];
SMPL_W=a['w_head_s'].reshape(len(a['w_head_s']))[a['smpl_idx'].reshape(len(a['smpl_idx']))-1]
Tar_W = a['w_head_t'].reshape(len(a['w_head_t']))[a['dato_idx'].reshape(len(a['dato_idx']))-1]
weights_head=((SMPL_W+Tar_W)/2).reshape(len(Tar_W),1)
c_pre['Head']= (Tar_idx-SMPL_idx)*W_Head;
if(W_Hands):
Tar_idx_l = Tar_shift[a['dato_idx_l'].reshape(len(a['dato_idx_l']))-1];
Tar_idx_r = Tar_shift[a['dato_idx_r'].reshape(len(a['dato_idx_r']))-1];
SMPL_idx_l = result[a['smpl_idx_l'].reshape(len(a['smpl_idx_l']))-1];
SMPL_idx_r = result[a['smpl_idx_r'].reshape(len(a['smpl_idx_r']))-1];
c_pre['Hands_l']= (Tar_idx_l-SMPL_idx_l)*W_Hands;
c_pre['Hands_r']= (Tar_idx_r-SMPL_idx_r)*W_Hands;
(r,b,t)=ch.minimize(c_pre, x0 = [result.pose, result.betas,trans],
method = 'dogleg', callback = None,
options = {'maxiter': 90, 'e_3': .0001, 'disp': 1})
W_Hands[:]= 2.5;
W_Head[:]= 3;
W_Norm_B[:]=0.25;
(r,b,t)=ch.minimize(c_pre, x0 = [result.pose, result.betas,trans],
method = 'dogleg', callback = None,
options = {'maxiter': 50, 'e_3': .0001, 'disp': 1})
c=1;
if(W_NN):
SMPL = TriangleMesh();
for i in range(0,20):
SMPL.vertices = Vector3dVector(result.r)
SMPL.triangles = Vector3iVector(np.asarray(np.array(result.f).reshape(-1,3)))
SMPL.compute_vertex_normals()
Tar_shift = Target.vertices+trans
pt = result.r
distance,index = spatial.KDTree(Tar_shift).query(pt)
new_n=np.asarray(Target.vertex_normals)[index]
nn=np.hstack((np.asarray(SMPL.vertex_normals),new_n))
nnn=np.array([np.dot(x[0:3].T,x[3:6]) for x in nn])
angle = np.arccos(nnn)
mask = angle < np.pi*3/2
indexes=np.array(range(0,6890))
indexes=indexes[mask]
new_v=(np.asarray(Target.vertices)[index]+trans)
cost1=ch.array(new_v[indexes,:])-result[indexes,:]
distance,index = spatial.KDTree(pt).query(Tar_shift)
cost2 = result[index,:]-Tar_shift
en={'e1':cost1, 'e2':cost2}#}
(r,b,t)=ch.minimize(en, x0 = [result.pose, result.betas,trans],
method = 'dogleg', callback = None,
options = {'maxiter': 4, 'e_3': .0001, 'disp': 1})
c=c+1;
if not os.path.exists(directory):
os.makedirs(directory)
write_mesh_as_obj('./'+directory+'/optimized2_'+lista[iters][5:-4]+'.obj', result.r-trans, result.f)
dic={};
dic['betas']=result.betas.r
dic['pose']=result.pose.r
dic['trans']=trans.r
f = open( './'+directory+'/Visualize/datas_'+lista[iters][5:-4]+'.txt', 'w' )
f.write( 'dict = ' + repr(dic) + '\n' )
f.close()
result.pose[0:3]=result.pose[0:3].r*-1
verx=np.asarray(Target.vertices+trans)
r1=np.ones(verx.size/3)
r2=np.zeros([verx.size/3,23])
w8=np.vstack((r1,r2.T));
rest_s = xp.vstack((verx.T, np.ones((1, verx.shape[0]))))
T3 = A.dot(w8)
rot={}
rot['rotation']=T3.r;
rot['trans']=trans.r;
scipy.io.savemat('./'+directory+'/Visualize'+'rot_' + lista[iters][5:-4], rot);
dato_r =(T3[:,0,:] * rest_s[0, :].reshape((1, -1)) +
T3[:,1,:] * rest_s[1, :].reshape((1, -1)) +
T3[:,2,:] * rest_s[2, :].reshape((1, -1)) +
T3[:,3,:] * rest_s[3, :].reshape((1, -1))).T
dato_r=dato_r[:,:3];
write_mesh_as_obj('./'+directory+'/Visualize/datorot_'+lista[iters][5:-4]+'.obj', dato_r,np.asarray(Target.triangles))
result.pose[0:3]=result.pose[0:3].r*0
verx=np.asarray(result.r)
r1=np.ones(verx.size/3)
r2=np.zeros([verx.size/3,23])
w8=np.vstack((r1,r2.T));
rest_s = xp.vstack((verx.T, np.ones((1, verx.shape[0]))))
T3 = A.dot(w8)
dato_r =(T3[:,0,:] * rest_s[0, :].reshape((1, -1)) +
T3[:,1,:] * rest_s[1, :].reshape((1, -1)) +
T3[:,2,:] * rest_s[2, :].reshape((1, -1)) +
T3[:,3,:] * rest_s[3, :].reshape((1, -1))).T
dato_r=dato_r[:,:3];
write_mesh_as_obj('./'+directory+'/Visualize/SMPLrot_'+lista[iters][5:-4]+'.obj', result.r,result.f)
| 34.781003 | 153 | 0.531937 | 1,971 | 13,182 | 3.420091 | 0.16692 | 0.024922 | 0.021362 | 0.019582 | 0.284676 | 0.214212 | 0.173713 | 0.151906 | 0.151906 | 0.119567 | 0 | 0.045713 | 0.2665 | 13,182 | 378 | 154 | 34.873016 | 0.651463 | 0.008345 | 0 | 0.128378 | 0 | 0 | 0.081917 | 0.003446 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.047297 | null | null | 0.02027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5c448a48693d658ba1ada5b6251627f69c54ad8f | 1,250 | py | Python | pmedapp/common/utilities.py | ibadkureshi/tnk-locationallocation | b06abcb7bf8675b13e4c2e4fe419afb5ee11018f | [
"MIT"
] | 1 | 2021-02-07T10:37:52.000Z | 2021-02-07T10:37:52.000Z | pmedapp/common/utilities.py | panosprotopapas/tnk-locationallocation | b06abcb7bf8675b13e4c2e4fe419afb5ee11018f | [
"MIT"
] | null | null | null | pmedapp/common/utilities.py | panosprotopapas/tnk-locationallocation | b06abcb7bf8675b13e4c2e4fe419afb5ee11018f | [
"MIT"
] | 2 | 2020-10-23T13:14:53.000Z | 2020-11-13T12:01:44.000Z | from pandas.api.types import is_numeric_dtype
from celery.result import AsyncResult
import json
#import redis
from django.http import HttpResponseBadRequest, HttpResponse
import mimetypes
def column_numeric(column):
    """
    Return True if *column* (a pandas Series/array-like) has a numeric dtype.

    Thin wrapper around pandas' is_numeric_dtype so callers always get a
    plain bool.
    """
    # is_numeric_dtype already answers the question; the old
    # `if not x: return False else: return True` was redundant.
    return bool(is_numeric_dtype(column))
def validate_upload(request, extension='.csv'):
    """
    Return True when the request carries an acceptable file upload.

    Acceptable means: at least one file was posted AND the 'myfile' field's
    name ends with *extension* (default '.csv').
    """
    files = request.FILES
    # The emptiness check short-circuits before the 'myfile' lookup, matching
    # the original len()==0 guard; the verbose if/else returning literal
    # booleans was collapsed into a single expression.
    return len(files) != 0 and files['myfile'].name.endswith(extension)
def download_output_file(request):
    """
    Return the requested output file for the user to download locally.

    Responds with 400 when no 'filename' query parameter is given.  The
    supplied name is reduced to its base name so a crafted '../../etc/passwd'
    value cannot escape the output/ directory (path-traversal fix).
    """
    import os  # local import: os is not used elsewhere in this module

    try:
        # SECURITY FIX: strip any directory components from the
        # user-controlled filename before building the path.
        filename = os.path.basename(request.GET['filename'])
        filepath = 'output/' + filename
        mime_type, _ = mimetypes.guess_type(filepath)
        # BUG FIX: open in binary mode (text mode 'r' corrupted non-text
        # files) and use a context manager so the handle is always closed
        # (the old code leaked it).
        with open(filepath, 'rb') as fl:
            response = HttpResponse(fl.read(), content_type=mime_type)
        response['Content-Disposition'] = "attachment; filename=%s" % filename
        return response
    except KeyError:
        return HttpResponseBadRequest("Please provide a filename")
| 27.173913 | 87 | 0.6768 | 149 | 1,250 | 5.590604 | 0.530201 | 0.021609 | 0.033613 | 0.05042 | 0.067227 | 0.067227 | 0 | 0 | 0 | 0 | 0 | 0.001052 | 0.2392 | 1,250 | 45 | 88 | 27.777778 | 0.874869 | 0.1816 | 0 | 0.230769 | 0 | 0 | 0.095385 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.192308 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
5c48ff59411a148dbbbee7b176f4ddde04d0287f | 1,435 | py | Python | json_conf.py | luhouxiang/UnitTestServer | 296485fd9d80a215c52a41d9b07a9ef12dfaef29 | [
"Apache-2.0"
] | null | null | null | json_conf.py | luhouxiang/UnitTestServer | 296485fd9d80a215c52a41d9b07a9ef12dfaef29 | [
"Apache-2.0"
] | null | null | null | json_conf.py | luhouxiang/UnitTestServer | 296485fd9d80a215c52a41d9b07a9ef12dfaef29 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
JSON configuration-file helper.

Usage:
    data_dict = {"a": "1", "b": "2"}
    JsonConf.set("config.json", data_dict)

This creates (or updates) the JSON file config.json in the current directory.
'''
import json
import os
class JsonConf:
    """
    Small helper for reading and writing a JSON configuration file.

    Usage:
        JsonConf.set("config.json", {"a": "1", "b": "2"})

    The most recently loaded document is cached on the class in ``json_data``
    (shared, class-level state by design).
    """
    # Class-level cache of the most recently loaded document.
    json_data = {}

    @staticmethod
    def store2(file_name, datas):
        """Serialize *datas* to *file_name* as pretty-printed UTF-8 JSON."""
        with open(file_name, 'w', encoding="utf-8") as json_file:
            tmp = json.dumps(datas, ensure_ascii=False, indent=4)
            json_file.write(tmp)

    @staticmethod
    def store(file_name):
        """Persist the cached ``json_data`` to *file_name*."""
        JsonConf.store2(file_name, JsonConf.json_data)

    @staticmethod
    def load(file_name):
        """
        Load *file_name* into the class cache and return the parsed dict.

        The file is created empty when it does not exist; an empty or
        invalid JSON document yields {} instead of raising.
        """
        if not os.path.exists(file_name):
            # Touch the file so the subsequent open-for-read succeeds.
            with open(file_name, 'w', encoding="utf-8") as json_file:
                pass
        with open(file_name, encoding="utf-8") as json_file:
            try:
                JsonConf.json_data = json.load(json_file)
            except ValueError:
                # BUG FIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit.  json.JSONDecodeError is a
                # ValueError subclass, so bad/empty JSON is still handled.
                JsonConf.json_data = {}
        return JsonConf.json_data

    @staticmethod
    def set(file_name, data_dict):
        """Merge *data_dict* into the stored config, rewrite and echo it."""
        json_obj = JsonConf.load(file_name)
        # dict.update replaces the manual key-by-key copy loop.
        json_obj.update(data_dict)
        JsonConf.store2(file_name, json_obj)
        print(json.dumps(json_obj, ensure_ascii=False, indent=4))
if __name__ == "__main__":
    # Example usage, kept commented out for reference:
    # data = {"a": " 1", "f": "100", "b": "3000"}
    # JsonConf.set("mytest.json", data)
    pass
| 26.574074 | 70 | 0.568641 | 179 | 1,435 | 4.335196 | 0.346369 | 0.113402 | 0.082474 | 0.088918 | 0.268041 | 0.128866 | 0.100515 | 0.100515 | 0.100515 | 0.100515 | 0 | 0.01992 | 0.300348 | 1,435 | 53 | 71 | 27.075472 | 0.752988 | 0.160279 | 0 | 0.25 | 0 | 0 | 0.022202 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.0625 | 0.0625 | 0 | 0.28125 | 0.03125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.