hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4d2d2acea9bb79c046b8abea693dc31ff18efd72 | 143 | py | Python | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 1 | 2021-05-10T01:16:28.000Z | 2021-05-10T01:16:28.000Z | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 3 | 2021-05-11T06:14:15.000Z | 2021-06-19T08:18:36.000Z | submissions/abc035/a.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | null | null | null | import sys
input = sys.stdin.readline
w, h = map(int, input().split())
if w / h == 4 / 3:
ans = '4:3'
else:
ans = '16:9'
print(ans)
| 11.916667 | 32 | 0.538462 | 26 | 143 | 2.961538 | 0.692308 | 0.051948 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066038 | 0.258741 | 143 | 11 | 33 | 13 | 0.660377 | 0 | 0 | 0 | 0 | 0 | 0.048951 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.125 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d2da9056c6d973976290183ad18c7e824e87fbe | 1,029 | py | Python | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | setup.py | JosiahBradley/mod2win | f3636faea8cce041be2d9933574aa1ccd4b818ac | [
"Apache-2.0"
] | null | null | null | import setuptools
long_description = ""
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
author='Josiah Bradley',
author_email='JosiahBradley@gmail.com',
name="mod2win",
url="https://github.com/JosiahBradley/mod2win",
version="0.0.1",
entry_points={
'console_scripts': [
'play = mod2win.levels.level_launcher:launch',
'compile = mod2win.levels.level_launcher:_compile',
'scrub = mod2win.levels.level_launcher:scrub',
'restore = mod2win.levels.level_launcher:restore',
'spiral = mod2win.levels.spiral_test:main',
]
},
package_dir={'': 'src'},
packages=setuptools.find_packages('src'),
include_package_data=True,
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache License",
"Operating System :: OS Independent",
],
)
| 31.181818 | 63 | 0.640428 | 107 | 1,029 | 5.971963 | 0.635514 | 0.117371 | 0.112676 | 0.162754 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01375 | 0.222546 | 1,029 | 32 | 64 | 32.15625 | 0.785 | 0 | 0 | 0 | 0 | 0 | 0.450923 | 0.194363 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.033333 | 0 | 0.033333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d391ca815462113e85dde20f4caa4e28b604358 | 300 | py | Python | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | HACKERRANK_Numpy/concatenated.py | StefaniaSferragatta/ADM2020-HW1 | 8f85ac1c8dd4bff52c5c17987c9e96b209a93830 | [
"MIT"
] | null | null | null | import numpy
N,M,P = map(int,input().split())
p_cols1 =numpy.array([input().split() for _ in range(N)],int)
p_cols1.shape = (N,P)
p_cols2 =numpy.array([input().split() for _ in range(M)],int)
p_cols2.shape = (M,P)
concatenated = numpy.concatenate((p_cols1, p_cols2), axis = 0)
print(concatenated)
| 25 | 62 | 0.686667 | 52 | 300 | 3.807692 | 0.403846 | 0.151515 | 0.151515 | 0.20202 | 0.30303 | 0.30303 | 0.30303 | 0 | 0 | 0 | 0 | 0.026316 | 0.113333 | 300 | 11 | 63 | 27.272727 | 0.718045 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d3ee1ccb8692f8cfb3b7d31686fa015b7d46982 | 5,470 | py | Python | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | bin/lineage_parser.py | brianlee99/UVP | 5b7ff26c09d84760d4220268f34fb4814848eb4a | [
"MIT"
] | null | null | null | #! /usr/bin/python
import sys
""" This script accepts the final annotation file and the lineage marker SNPs file """
""" and infers the lineage and possible sublineage classification of the isolate """
""" it requires a sample ID name (string) and an output file name(string) """
"""
Author: Matthew Ezewudo
CPTR ReSeqTB Project - Critical Path Institute
"""
input1 = sys.argv[1]
input2 = sys.argv[2]
input3 = sys.argv[3]
input4 = sys.argv[4]
fh1 = open(input1, 'r')
sublinn = ""
(lineage,position,ref,alt) = ([],[],[],[])
prevlin = []
prevsub = []
tribes = ["lineages","Indo-Oceanic","East-Asian","East-African-Indian","Euro-American","West-Africa 1","West-Africa 2","Ethiopian"]
(concord,discord,concord1,discord1,count) = (0,0,0,0,0)
discordance = False
sublinneage = False
linfour = ""
hrv37 = ""
BOV = ""
BOV_AFRI = ""
for lines in fh1:
if lines.startswith('#'):
continue
fields = lines.rstrip("\r\n").split("\t")
lineage.append(fields[0])
position.append(fields[1])
ref.append(fields[2])
alt.append(fields[3])
fh1.close()
fh2 = open(input2,'r')
for lines in fh2:
count += 1
fields = lines.rstrip("\r\n").split("\t")
if fields[2] == '931123':
linfour = fields[2]
if fields[2] == '1759252':
hrv37 = fields[2]
if fields[2] == '2831482':
BOV = fields[2]
if fields[2] == '1882180':
BOV_AFRI = '1882180'
if fields[2] in position:
ind = position.index(fields[2])
if alt[ind] == fields[4]:
if len(lineage[ind]) > 1:
sublin = lineage[ind]
prevsub.append(sublin)
sublinn = prevsub[0]
print "SNP" + " " + position[ind] + " " + "suggests sub-lineage: " + lineage[ind]
if prevsub[0] != sublin:
discord += 1
else:
concord +=1
for i in range(0,len(prevsub)):
if len(sublinn) < len(prevsub[i]) :
sublinn = prevsub[i]
else:
lin = lineage[ind]
prevlin.append(lin)
print "SNP" + " " + position[ind] + " " + "suggests lineage: " + lineage[ind]
if prevlin[0] != lin:
discord1 += 1
else:
concord1 += 1
fh2.close()
fh3 = open(input3,'w')
print >> fh3, "Sample ID" + "\t" + "Lineage" + "\t" + "Lineage Name" + "\t" + "Sublineage"
split_first = ['NA']
if len(prevsub) > 0:
split_first = sublinn.split(".")
sublinneage = True
if len(prevlin) == 0:
if len(BOV) > 0:
print "Lineage: " + "BOV"
print >> fh3, input4 + "\t" + "BOV" + "\t" + "Bovis" + "\t" + "NA"
if len(BOV) == 0 or len(BOV_AFRI) == 0:
for i in range(0,len(prevsub)):
split_lin = prevsub[i].split(".")
if split_lin[0] != split_first[0]:
discordance = True
if split_lin[1] != split_first[1]:
discordance = True
if discordance:
print "no precise lineage inferred"
print >> fh3, "no precise lineage inferred"
sys.exit(1)
else:
if len(split_first) > 1:
print "Lineage: " + split_first[0] + " : " + tribes[int(split_first[0])]
print "Sub-lineage: " + sublinn
print >> fh3, input4 + "\t" + split_first[0] + "\t" + tribes[int(split_first[0])] + "\t" + sublinn
elif len(linfour) < 2:
print "Absence of SNP 931123 suggests lineage 4"
print "Lineage: " + "4" + " : " + "Euro-American"
if len(hrv37) > 2:
print >> fh3, input4 + "\t" + "4" + "\t" + "Euro American" + "\t" + "NA"
elif len(hrv37) < 2:
print "Absence of SNP 1759252 suggests sublineage 4.9"
print >> fh3, input4 + "\t" + "4" + "\t" + "Euro American" + "\t" + "4.9"
else:
print "No Informative SNPs detected"
print >> fh3, "No Informative SNPs detected"
else:
if len(prevlin) > 1:
for j in range(0,len(prevlin)):
if prevlin[0] != prevlin[j]:
discordance = True
if discordance == True:
print "no concordance between predicted lineage and sublineage(s)"
print >> fh3, "no concordance between predicted lineage and sublineage(s)"
sys.exit(1)
else:
if len(sublinn) < 1:
print "Lineage: " + prevlin[0] + " " + tribes[int(prevlin[0])]
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + "NA"
elif len(sublinn) > 1:
for i in range(0,len(prevsub)):
split_lin = prevsub[i].split(".")
if split_lin[0] != prevlin[0] and split_lin[0] != 'BOV_AFRI':
discordance = True
if split_lin[0] != split_first[0]:
discordance = True
if discordance:
print "no precise lineage inferred"
print >> fh3, "no precise lineage inferred"
sys.exit(1)
else:
print "Lineage: " + prevlin[0] + " " + tribes[int(prevlin[0])]
if sublinn.startswith('BOV_A'):
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + "NA"
else:
print "Sub-lineage: " + sublinn
print >> fh3, input4 + "\t" + prevlin[0] + "\t" + tribes[int(prevlin[0])] + "\t" + sublinn
| 36.466667 | 131 | 0.518464 | 657 | 5,470 | 4.283105 | 0.207002 | 0.039801 | 0.034826 | 0.037313 | 0.383795 | 0.312367 | 0.304549 | 0.28678 | 0.195451 | 0.174129 | 0 | 0.047786 | 0.322852 | 5,470 | 149 | 132 | 36.711409 | 0.711933 | 0.003108 | 0 | 0.290076 | 0 | 0 | 0.15847 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.007634 | null | null | 0.206107 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d40d6894572ebb56bff51cbd51d17f087ba2234 | 2,454 | py | Python | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 16 | 2018-05-24T10:28:24.000Z | 2021-08-05T03:13:26.000Z | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 3 | 2021-05-09T12:37:16.000Z | 2022-03-02T10:13:24.000Z | ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/resourcemanager.py | panfeiyy/ambari | 24077510723ede93d3024784f0b04422adaf56d6 | [
"Apache-2.0"
] | 17 | 2018-07-06T08:57:00.000Z | 2021-11-04T11:00:36.000Z | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import sys
from resource_management import *
from yarn import yarn
from service import service
class Resourcemanager(Script):
def install(self, env):
self.install_packages(env)
self.configure(env)
def configure(self, env):
import params
env.set_params(params)
yarn(name='resourcemanager')
def start(self, env):
import params
env.set_params(params)
self.configure(env) # FOR SECURITY
service('resourcemanager',
action='start'
)
def stop(self, env):
import params
env.set_params(params)
service('resourcemanager',
action='stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
check_process_status(status_params.resourcemanager_pid_file)
pass
def refreshqueues(self, env):
import params
self.configure(env)
env.set_params(params)
service('resourcemanager',
action='refreshQueues'
)
def decommission(self, env):
import params
env.set_params(params)
rm_kinit_cmd = params.rm_kinit_cmd
yarn_user = params.yarn_user
conf_dir = params.hadoop_conf_dir
user_group = params.user_group
yarn_refresh_cmd = format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes")
File(params.exclude_file_path,
content=Template("exclude_hosts_list.j2"),
owner=yarn_user,
group=user_group
)
if params.update_exclude_file_only == False:
Execute(yarn_refresh_cmd,
environment= {'PATH' : params.execute_path },
user=yarn_user)
pass
pass
if __name__ == "__main__":
Resourcemanager().execute()
| 24.058824 | 94 | 0.711899 | 323 | 2,454 | 5.247678 | 0.408669 | 0.028909 | 0.046018 | 0.056047 | 0.130973 | 0.130973 | 0.130973 | 0.087316 | 0 | 0 | 0 | 0.002583 | 0.211084 | 2,454 | 101 | 95 | 24.29703 | 0.872934 | 0.319071 | 0 | 0.327586 | 0 | 0 | 0.105897 | 0.012635 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12069 | false | 0.051724 | 0.172414 | 0 | 0.310345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4d4d0ea614818e4dfdde9e585c36b4fdaeb09ea4 | 4,720 | py | Python | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | catalogue_flask/model.py | ScottWales/catalogue-flask | 4a9e659875fee6e831e6c31018c9f9d7285dc845 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2017 ARC Centre of Excellence for Climate Systems Science
# author: Scott Wales <scott.wales@unimelb.edu.au>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from flask_sqlalchemy import SQLAlchemy
import os
from datetime import datetime
db = SQLAlchemy()
class Path(db.Model):
"""
A path in the filesystem
"""
id = db.Column(db.Integer, primary_key=True)
path = db.Column(db.Text, unique=True, index=True)
basename = db.Column(db.Text, index=True)
extension = db.Column(db.Text, index=True)
uid = db.Column(db.Integer)
gid = db.Column(db.Integer, index=True)
size_bytes = db.Column(db.Integer)
modified = db.Column(db.Integer)
last_seen = db.Column(db.DateTime, index=True)
content_id = db.Column(db.Integer, db.ForeignKey('content.id'))
content = db.relationship("Content")
def add_from_filename(filename, session):
"""
Given a filename, add it to the database
"""
if not os.path.isfile(filename):
raise IOError("Not a file: %s"%filename)
abspath = os.path.abspath(filename)
path = Path.query.filter_by(path = abspath).one_or_none()
stat = os.stat(filename)
if path is not None:
path.last_seen = datetime.now()
if path.modified < stat.st_mtime:
path.update(stat)
session.add(path)
return path
path = Path()
path.path = abspath
path.update(stat)
path.last_seen = datetime.now()
session.add(path)
return path
def update(self, stat):
"""
Updates the file with new info
"""
self.basename = os.path.basename(self.path)
self.extension = os.path.splitext(self.path)[1]
self.uid = stat.st_uid
self.gid = stat.st_gid
self.size_bytes = stat.st_size
self.modified = stat.st_mtime
# Wipe the content link
self.content = None
class Content(db.Model):
"""
The contents of a file, identified via checksum
May be at multiple paths on the filesystem
sha256 is used for identification, md5 also provided for legacy
:var sha256: sha256 checksum
:var md5: md5 checksum
"""
id = db.Column(db.Integer, primary_key=True)
sha256 = db.Column(db.String, unique=True, index=True, nullable=False)
md5 = db.Column(db.String, index=True, nullable=False)
type = db.Column(db.String)
last_scanned = db.Column(db.DateTime)
paths = db.relationship("Path")
__mapper_args__ = {
'polymorphic_identity':'content',
'polymorphic_on':type
}
netcdf_variable_association = db.Table('netcdf_variable_association', db.Model.metadata,
db.Column('netcdf_id', db.Integer, db.ForeignKey('netcdf_content.id')),
db.Column('concretevar_id', db.Integer, db.ForeignKey('concrete_variable.id'))
)
class NetcdfContent(Content):
"""
Content of a NetCDF file
:var sha256: sha256 checksum
:var md5: md5 checksum
:var variables: list of :class:`~catalogue_flask.model.ConcreteVariable`
"""
id = db.Column(db.Integer, db.ForeignKey('content.id'), primary_key=True)
variables = db.relationship("ConcreteVariable",
secondary=netcdf_variable_association)
__mapper_args__ = {
'polymorphic_identity':'netcdfcontent',
}
class ConcreteVariable(db.Model):
"""
An abstract variable, may have many aliased names
:var cf_name: NetCDF-CF name
:var aliases: List of :class:`~catalogue_flask.model.Variable`
"""
id = db.Column(db.Integer, primary_key=True)
cf_name = db.Column(db.String)
aliases = db.relationship("Variable")
class Variable(db.Model):
"""
An alternate name for a variable
:var name: The name of this alias
:var concrete: The concrete variable this aliases
"""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String)
concretevariable_id = db.Column(db.Integer, db.ForeignKey('concrete_variable.id'), index=True)
concrete = db.relationship("ConcreteVariable")
| 31.052632 | 98 | 0.661441 | 623 | 4,720 | 4.919743 | 0.304976 | 0.060033 | 0.068516 | 0.061011 | 0.220881 | 0.169005 | 0.134421 | 0.115824 | 0.026101 | 0 | 0 | 0.009101 | 0.23178 | 4,720 | 151 | 99 | 31.258278 | 0.836183 | 0.297034 | 0 | 0.189189 | 0 | 0 | 0.084821 | 0.00861 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0 | 0.054054 | 0 | 0.554054 | 0.013514 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
4d52bec8cefe73d9a93266481e15f6cb3b5e3a2d | 416 | py | Python | preprocess/rename.py | pprp/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 15 | 2019-04-19T12:40:09.000Z | 2020-06-03T07:56:37.000Z | preprocess/rename.py | Zxl19990529/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 2 | 2019-04-19T13:21:44.000Z | 2020-06-03T07:49:31.000Z | preprocess/rename.py | Zxl19990529/faster-rcnn.Supernova | 583bc9f6efd80d5a7fa88189a1c817d92d6018a6 | [
"MIT"
] | 5 | 2019-04-19T13:06:22.000Z | 2021-01-19T03:31:58.000Z | import os
import shutil
path = './ALL/'
outpath = "./rename/"
outb = "./b/"
outc = "./c/"
for f in os.listdir(path):
print(f)
name,ext = os.path.splitext(f)
a,ext2 = name.split('_')
if ext2.endswith('b'):
print(outb+f)
shutil.copy(path+f,outb+f)
elif ext2.endswith('c'):
print(outc+f)
shutil.copy(path+f,outc+f)
print(a)
#shutil.copy(path+f,outpath+a+ext) | 21.894737 | 38 | 0.567308 | 65 | 416 | 3.615385 | 0.4 | 0.12766 | 0.178723 | 0.191489 | 0.13617 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009404 | 0.233173 | 416 | 19 | 38 | 21.894737 | 0.727273 | 0.079327 | 0 | 0 | 0 | 0 | 0.067885 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.117647 | 0 | 0.117647 | 0.235294 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d54e80804f63b1496a93c52549fb653034591c2 | 3,720 | py | Python | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | ui/main_window.py | Iorveth/min_surfaces_modelling | 279a091e12f98c0a7fc8054c6eadaafaec5a1258 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created by: PyQt5 UI code generator 5.12.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(513, 403)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setEnabled(False)
self.pushButton.setGeometry(QtCore.QRect(80, 70, 91, 31))
self.pushButton.setObjectName("pushButton")
self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_2.setEnabled(False)
self.pushButton_2.setGeometry(QtCore.QRect(10, 70, 71, 31))
self.pushButton_2.setObjectName("pushButton_2")
self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_3.setEnabled(False)
self.pushButton_3.setGeometry(QtCore.QRect(194, 20, 311, 31))
self.pushButton_3.setObjectName("pushButton_3")
self.pushButton_4 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_4.setEnabled(False)
self.pushButton_4.setGeometry(QtCore.QRect(194, 72, 311, 31))
self.pushButton_4.setObjectName("pushButton_4")
self.pushButton2 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton2.setGeometry(QtCore.QRect(10, 20, 161, 31))
self.pushButton2.setObjectName("pushButton2")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(200, 0, 311, 16))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(200, 50, 281, 16))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(30, 110, 461, 251))
self.label_3.setText("")
self.label_3.setPixmap(QtGui.QPixmap("C:/Users/arsen/Desktop/riemann.jpg"))
self.label_3.setObjectName("label_3")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 513, 21))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Головне Меню"))
self.pushButton.setText(_translate("MainWindow", "Відобразити все"))
self.pushButton_2.setText(_translate("MainWindow", "Відобразити"))
self.pushButton_3.setText(_translate("MainWindow", "Створити"))
self.pushButton_4.setText(_translate("MainWindow", "Створити"))
self.pushButton2.setText(_translate("MainWindow", "Задати аналітичну функцію"))
self.label.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#aa0000;\">Мінімальна поверхня з квазіконформною заміною параметра</span></p></body></html>"))
self.label_2.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#005500;\">Мінімальна поверхня з конформною заміною параметра</span></p></body></html>"))
| 50.958904 | 190 | 0.706452 | 411 | 3,720 | 6.29927 | 0.296837 | 0.10815 | 0.073001 | 0.071456 | 0.23484 | 0.189649 | 0.040942 | 0.040942 | 0.040942 | 0.040942 | 0 | 0.044853 | 0.166935 | 3,720 | 72 | 191 | 51.666667 | 0.790578 | 0.048925 | 0 | 0 | 1 | 0 | 0.151601 | 0.044205 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035088 | false | 0 | 0.017544 | 0 | 0.070175 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d77116fa77002bdedc5a81cb80ae1a9e3ac2069 | 19,585 | py | Python | lib/python3.8/site-packages/ansible_collections/community/azure/plugins/modules/azure_rm_cosmosdbaccount_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/community/azure/plugins/modules/azure_rm_cosmosdbaccount_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/community/azure/plugins/modules/azure_rm_cosmosdbaccount_info.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | #!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Metadata consumed by Ansible tooling: marks this module as a
# community-supported preview module (metadata format 1.1).
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community',
}
DOCUMENTATION = '''
---
module: azure_rm_cosmosdbaccount_info
short_description: Get Azure Cosmos DB Account facts
description:
- Get facts of Azure Cosmos DB Account.
options:
resource_group:
description:
- Name of an Azure resource group.
name:
description:
- Cosmos DB database account name.
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
retrieve_keys:
description:
- Retrieve keys and connection strings.
type: str
choices:
- all
- readonly
retrieve_connection_strings:
description:
- Retrieve connection strings.
type: bool
extends_documentation_fragment:
- azure.azcollection.azure
author:
- Zim Kalinowski (@zikalino)
'''
# Usage examples rendered by ansible-doc.  BUGFIX: the second example used the
# misspelled, unqualified name 'azure_rm_cosmosdbaccousnt_info', which does not
# resolve to any module; both tasks now use the fully qualified collection name.
EXAMPLES = '''
  - name: Get instance of Database Account
    community.azure.azure_rm_cosmosdbaccount_info:
      resource_group: myResourceGroup
      name: testaccount

  - name: List instances of Database Account
    community.azure.azure_rm_cosmosdbaccount_info:
      resource_group: myResourceGroup
'''
# Return-value documentation rendered by ansible-doc.
# BUGFIXES: the secondary/readonly key entries were all described as
# "Primary master key."; the connection_strings samples were swapped between
# the 'connection_string' and 'description' fields (the Azure API returns the
# actual string in connection_string and its label in description).
RETURN = '''
accounts:
    description: A list of dictionaries containing facts for Database Account.
    returned: always
    type: complex
    contains:
        id:
            description:
                - The unique resource identifier of the database account.
            returned: always
            type: str
            sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DocumentDB/databaseAccount
                     s/testaccount"
        resource_group:
            description:
                - Name of an Azure resource group.
            returned: always
            type: str
            sample: myResourceGroup
        name:
            description:
                - The name of the database account.
            returned: always
            type: str
            sample: testaccount
        location:
            description:
                - The location of the resource group to which the resource belongs.
            returned: always
            type: str
            sample: westus
        kind:
            description:
                - Indicates the type of database account.
            returned: always
            type: str
            sample: global_document_db
        consistency_policy:
            description:
                - Consistency policy.
            returned: always
            type: complex
            contains:
                default_consistency_level:
                    description:
                        - Default consistency level.
                    returned: always
                    type: str
                    sample: session
                max_interval_in_seconds:
                    description:
                        - Maximum interval in seconds.
                    returned: always
                    type: int
                    sample: 5
                max_staleness_prefix:
                    description:
                        - Maximum staleness prefix.
                    returned: always
                    type: int
                    sample: 100
        failover_policies:
            description:
                - The list of new failover policies for the failover priority change.
            returned: always
            type: complex
            contains:
                name:
                    description:
                        - Location name.
                    returned: always
                    type: str
                    sample: eastus
                failover_priority:
                    description:
                        - Failover priority.
                    returned: always
                    type: int
                    sample: 0
                id:
                    description:
                        - Read location ID.
                    returned: always
                    type: str
                    sample: testaccount-eastus
        read_locations:
            description:
                - Read locations.
            returned: always
            type: complex
            contains:
                name:
                    description:
                        - Location name.
                    returned: always
                    type: str
                    sample: eastus
                failover_priority:
                    description:
                        - Failover priority.
                    returned: always
                    type: int
                    sample: 0
                id:
                    description:
                        - Read location ID.
                    returned: always
                    type: str
                    sample: testaccount-eastus
                document_endpoint:
                    description:
                        - Document endpoint.
                    returned: always
                    type: str
                    sample: https://testaccount-eastus.documents.azure.com:443/
                provisioning_state:
                    description:
                        - Provisioning state.
                    returned: always
                    type: str
                    sample: Succeeded
        write_locations:
            description:
                - Write locations.
            returned: always
            type: complex
            contains:
                name:
                    description:
                        - Location name.
                    returned: always
                    type: str
                    sample: eastus
                failover_priority:
                    description:
                        - Failover priority.
                    returned: always
                    type: int
                    sample: 0
                id:
                    description:
                        - Read location ID.
                    returned: always
                    type: str
                    sample: testaccount-eastus
                document_endpoint:
                    description:
                        - Document endpoint.
                    returned: always
                    type: str
                    sample: https://testaccount-eastus.documents.azure.com:443/
                provisioning_state:
                    description:
                        - Provisioning state.
                    returned: always
                    type: str
                    sample: Succeeded
        database_account_offer_type:
            description:
                - Offer type.
            returned: always
            type: str
            sample: Standard
        ip_range_filter:
            description:
                - Enable IP range filter.
            returned: always
            type: str
            sample: 10.10.10.10
        is_virtual_network_filter_enabled:
            description:
                - Enable virtual network filter.
            returned: always
            type: bool
            sample: true
        enable_automatic_failover:
            description:
                - Enable automatic failover.
            returned: always
            type: bool
            sample: true
        enable_cassandra:
            description:
                - Enable Cassandra.
            returned: always
            type: bool
            sample: true
        enable_table:
            description:
                - Enable Table.
            returned: always
            type: bool
            sample: true
        enable_gremlin:
            description:
                - Enable Gremlin.
            returned: always
            type: bool
            sample: true
        virtual_network_rules:
            description:
                - List of Virtual Network ACL rules configured for the Cosmos DB account.
            type: list
            contains:
                subnet:
                    description:
                        - Resource id of a subnet.
                    type: str
                    sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNet
                             works/testvnet/subnets/testsubnet1"
                ignore_missing_vnet_service_endpoint:
                    description:
                        - Create Cosmos DB account without existing virtual network service endpoint.
                    type: bool
        enable_multiple_write_locations:
            description:
                - Enable multiple write locations.
            returned: always
            type: bool
            sample: true
        document_endpoint:
            description:
                - Document endpoint.
            returned: always
            type: str
            sample: https://testaccount.documents.azure.com:443/
        provisioning_state:
            description:
                - Provisioning state of Cosmos DB.
            returned: always
            type: str
            sample: Succeeded
        primary_master_key:
            description:
                - Primary master key.
            returned: when requested
            type: str
            sample: UIWoYD4YaD4LxW6k3Jy69qcHDMLX4aSttECQkEcwWF1RflLd6crWSGJs0R9kJwujehtfLGeQx4ISVSJfTpJkYw==
        secondary_master_key:
            description:
                - Secondary master key.
            returned: when requested
            type: str
            sample: UIWoYD4YaD4LxW6k3Jy69qcHDMLX4aSttECQkEcwWF1RflLd6crWSGJs0R9kJwujehtfLGeQx4ISVSJfTpJkYw==
        primary_readonly_master_key:
            description:
                - Primary readonly master key.
            returned: when requested
            type: str
            sample: UIWoYD4YaD4LxW6k3Jy69qcHDMLX4aSttECQkEcwWF1RflLd6crWSGJs0R9kJwujehtfLGeQx4ISVSJfTpJkYw==
        secondary_readonly_master_key:
            description:
                - Secondary readonly master key.
            returned: when requested
            type: str
            sample: UIWoYD4YaD4LxW6k3Jy69qcHDMLX4aSttECQkEcwWF1RflLd6crWSGJs0R9kJwujehtfLGeQx4ISVSJfTpJkYw==
        connection_strings:
            description:
                - List of connection strings.
            type: list
            returned: when requested
            contains:
                connection_string:
                    description:
                        - Connection string.
                    type: str
                    returned: always
                    sample: "AccountEndpoint=https://testaccount.documents.azure.com:443/;AccountKey=fSEjathnk6ZeBTrXkud9j5kfhtSEQ
                            q3dpJxJga76h9BZkK2BJJrDzSO6DDn6yKads017OZBZ1YZWyq1cW4iuvA=="
                description:
                    description:
                        - Description of connection string.
                    type: str
                    returned: always
                    sample: Primary SQL Connection String
        tags:
            description:
                - Tags assigned to the resource. Dictionary of "string":"string" pairs.
            returned: always
            type: dict
            sample: { "tag1":"abc" }
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _camel_to_snake
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.cosmosdb import CosmosDB
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMCosmosDBAccountInfo(AzureRMModuleBase):
    """Gather facts about Azure Cosmos DB database accounts.

    Depending on the supplied parameters this returns a single account,
    all accounts of a resource group, or every account visible in the
    subscription, as ``self.results['accounts']``.
    """

    def __init__(self):
        # Define user inputs as the module argument spec.
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str'
            ),
            name=dict(
                type='str'
            ),
            tags=dict(
                type='list'
            ),
            retrieve_keys=dict(
                type='str',
                choices=['all', 'readonly']
            ),
            retrieve_connection_strings=dict(
                type='bool'
            )
        )
        # Store the results of the module operation.
        self.results = dict(
            changed=False
        )
        self.mgmt_client = None
        self.resource_group = None
        self.name = None
        self.tags = None
        self.retrieve_keys = None
        self.retrieve_connection_strings = None
        super(AzureRMCosmosDBAccountInfo, self).__init__(self.module_arg_spec, supports_tags=False)

    def exec_module(self, **kwargs):
        """Main module execution: dispatch to the appropriate lookup.

        Returns the results dict with 'accounts' set to a list of fact dicts.
        """
        is_old_facts = self.module._name == 'azure_rm_cosmosdbaccount_facts'
        if is_old_facts:
            self.module.deprecate("The 'azure_rm_cosmosdbaccount_facts' module has been renamed to 'azure_rm_cosmosdbaccount_info'", version='2.13')

        for key in self.module_arg_spec:
            setattr(self, key, kwargs[key])

        self.mgmt_client = self.get_mgmt_svc_client(CosmosDB,
                                                    base_url=self._cloud_environment.endpoints.resource_manager)

        if self.name is not None:
            self.results['accounts'] = self.get()
        elif self.resource_group is not None:
            # BUGFIX: this branch previously called list_all(), which ignores
            # the requested resource group (and left list_by_resource_group()
            # as dead code); scope the listing to the given resource group.
            self.results['accounts'] = self.list_by_resource_group()
        else:
            # BUGFIX: previously 'accounts' was never populated when neither
            # name nor resource_group was supplied.
            self.results['accounts'] = self.list_all()
        return self.results

    def get(self):
        """Return a single-element list of facts for the named account, or []."""
        response = None
        results = []
        try:
            response = self.mgmt_client.database_accounts.get(resource_group_name=self.resource_group,
                                                              account_name=self.name)
            self.log("Response : {0}".format(response))
        except CloudError as e:
            # A missing account is not a failure for an info module; return [].
            self.log('Could not get facts for Database Account.')

        if response and self.has_tags(response.tags, self.tags):
            results.append(self.format_response(response))

        return results

    def list_by_resource_group(self):
        """Return facts for every account in self.resource_group (tag-filtered)."""
        response = None
        results = []
        try:
            response = self.mgmt_client.database_accounts.list_by_resource_group(resource_group_name=self.resource_group)
            self.log("Response : {0}".format(response))
        except CloudError as e:
            self.log('Could not get facts for Database Account.')

        if response is not None:
            for item in response:
                if self.has_tags(item.tags, self.tags):
                    results.append(self.format_response(item))

        return results

    def list_all(self):
        """Return facts for every account in the subscription (tag-filtered)."""
        response = None
        results = []
        try:
            response = self.mgmt_client.database_accounts.list()
            self.log("Response : {0}".format(response))
        except CloudError as e:
            self.log('Could not get facts for Database Account.')

        if response is not None:
            for item in response:
                if self.has_tags(item.tags, self.tags):
                    results.append(self.format_response(item))

        return results

    def format_response(self, item):
        """Flatten an SDK DatabaseAccount object into the documented fact
        dictionary, optionally attaching keys and connection strings."""
        d = item.as_dict()
        d = {
            'id': d.get('id'),
            'resource_group': self.parse_resource_to_dict(d.get('id')).get('resource_group'),
            'name': d.get('name', None),
            # Normalize Azure display locations ("East US") to short form.
            'location': d.get('location', '').replace(' ', '').lower(),
            'kind': _camel_to_snake(d.get('kind', None)),
            'consistency_policy': {'default_consistency_level': _camel_to_snake(d['consistency_policy']['default_consistency_level']),
                                   'max_interval_in_seconds': d['consistency_policy']['max_interval_in_seconds'],
                                   'max_staleness_prefix': d['consistency_policy']['max_staleness_prefix']},
            'failover_policies': [{'name': fp['location_name'].replace(' ', '').lower(),
                                   'failover_priority': fp['failover_priority'],
                                   'id': fp['id']} for fp in d['failover_policies']],
            'read_locations': [{'name': rl['location_name'].replace(' ', '').lower(),
                                'failover_priority': rl['failover_priority'],
                                'id': rl['id'],
                                'document_endpoint': rl['document_endpoint'],
                                'provisioning_state': rl['provisioning_state']} for rl in d['read_locations']],
            'write_locations': [{'name': wl['location_name'].replace(' ', '').lower(),
                                 'failover_priority': wl['failover_priority'],
                                 'id': wl['id'],
                                 'document_endpoint': wl['document_endpoint'],
                                 'provisioning_state': wl['provisioning_state']} for wl in d['write_locations']],
            'database_account_offer_type': d.get('database_account_offer_type'),
            'ip_range_filter': d['ip_range_filter'],
            'is_virtual_network_filter_enabled': d.get('is_virtual_network_filter_enabled'),
            'enable_automatic_failover': d.get('enable_automatic_failover'),
            # NOTE(review): as_dict() appears to render capabilities as a list
            # of dicts (e.g. [{'name': 'EnableCassandra'}]); a plain string
            # membership test may never match -- confirm against the SDK.
            'enable_cassandra': 'EnableCassandra' in d.get('capabilities', []),
            'enable_table': 'EnableTable' in d.get('capabilities', []),
            'enable_gremlin': 'EnableGremlin' in d.get('capabilities', []),
            'virtual_network_rules': d.get('virtual_network_rules'),
            'enable_multiple_write_locations': d.get('enable_multiple_write_locations'),
            'document_endpoint': d.get('document_endpoint'),
            'provisioning_state': d.get('provisioning_state'),
            'tags': d.get('tags', None)
        }
        # Keys and connection strings are secrets; only fetched on request.
        if self.retrieve_keys == 'all':
            keys = self.mgmt_client.database_accounts.list_keys(resource_group_name=self.resource_group,
                                                                account_name=self.name)
            d['primary_master_key'] = keys.primary_master_key
            d['secondary_master_key'] = keys.secondary_master_key
            d['primary_readonly_master_key'] = keys.primary_readonly_master_key
            d['secondary_readonly_master_key'] = keys.secondary_readonly_master_key
        elif self.retrieve_keys == 'readonly':
            keys = self.mgmt_client.database_accounts.get_read_only_keys(resource_group_name=self.resource_group,
                                                                         account_name=self.name)
            d['primary_readonly_master_key'] = keys.primary_readonly_master_key
            d['secondary_readonly_master_key'] = keys.secondary_readonly_master_key
        if self.retrieve_connection_strings:
            connection_strings = self.mgmt_client.database_accounts.list_connection_strings(resource_group_name=self.resource_group,
                                                                                            account_name=self.name)
            d['connection_strings'] = connection_strings.as_dict()
        return d
def main():
    """Module entry point -- instantiating the class runs the module."""
    AzureRMCosmosDBAccountInfo()


if __name__ == '__main__':
    main()
| 37.591171 | 150 | 0.531223 | 1,677 | 19,585 | 6.000596 | 0.172332 | 0.054258 | 0.066183 | 0.041737 | 0.488324 | 0.434264 | 0.393223 | 0.359038 | 0.344828 | 0.320382 | 0 | 0.00924 | 0.397702 | 19,585 | 520 | 151 | 37.663462 | 0.843845 | 0.01348 | 0 | 0.562105 | 0 | 0.006316 | 0.671724 | 0.092787 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014737 | false | 0.002105 | 0.014737 | 0 | 0.042105 | 0.002105 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d8114110ca0855b70f6d4767fc2d9b841ade8dd | 454 | py | Python | bin/run_server.py | syedwaseemjan/EXIFExtractor | 97da85c0552bb0a616f04bab1bc0785ae8b35fb6 | [
"MIT"
] | null | null | null | bin/run_server.py | syedwaseemjan/EXIFExtractor | 97da85c0552bb0a616f04bab1bc0785ae8b35fb6 | [
"MIT"
] | null | null | null | bin/run_server.py | syedwaseemjan/EXIFExtractor | 97da85c0552bb0a616f04bab1bc0785ae8b35fb6 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
from __future__ import absolute_import
import os
import sys
# Make the project root and its "app" package importable when this script
# is launched directly from the bin/ directory.
_BIN_DIR = os.path.dirname(__file__)
PROJECT_DIR = os.path.abspath(os.path.join(_BIN_DIR, os.pardir))
for _extra_path in (PROJECT_DIR, os.path.abspath(os.path.join(PROJECT_DIR, "app"))):
    sys.path.append(_extra_path)
if __name__ == "__main__":
    # Imported lazily so the sys.path setup above has already run.
    from app.main import Main

    # Optional first CLI argument selects the AWS bucket to load images from.
    aws_bucket_name = sys.argv[1] if len(sys.argv) > 1 else None
    Main().load_images(aws_bucket_name)
| 23.894737 | 81 | 0.722467 | 73 | 454 | 4.123288 | 0.424658 | 0.099668 | 0.129568 | 0.099668 | 0.152824 | 0.152824 | 0 | 0 | 0 | 0 | 0 | 0.005155 | 0.145374 | 454 | 18 | 82 | 25.222222 | 0.770619 | 0.046256 | 0 | 0 | 0 | 0 | 0.025463 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
4d8333c69b2cee30bebf3bc76fe51963641f9990 | 7,500 | py | Python | slicer/bin/Python/tpycl/tpycl.py | pabloduque0/WMH_AttGatedUnet_CustomLoss | 3503b40c031494ca866dced1421d95f7b2e311fe | [
"MIT"
] | null | null | null | slicer/bin/Python/tpycl/tpycl.py | pabloduque0/WMH_AttGatedUnet_CustomLoss | 3503b40c031494ca866dced1421d95f7b2e311fe | [
"MIT"
] | null | null | null | slicer/bin/Python/tpycl/tpycl.py | pabloduque0/WMH_AttGatedUnet_CustomLoss | 3503b40c031494ca866dced1421d95f7b2e311fe | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# tpycl.py is the python support code to allow calling of python-wrapped
# vtk code from tcl scripts
#
# the main class is tpycl, and scripts can
#
import sys
import os
import Tkinter
from __main__ import slicer
import qt
class tpycl(object):
  """Bridge that embeds a Tcl interpreter (via Tkinter) and registers
  py_* commands so tcl scripts can call into python-wrapped VTK code.
  Runs inside 3D Slicer (uses the ``slicer`` and ``qt`` modules).
  """

  def __init__(self):
    # Controls dprint() debug output; toggled by the -v/--verbose flag.
    self.verbose = False
    # when python is initialized inside slicer there is no argv
    # so create a fake one if needed
    try:
      argv0 = sys.argv[0]
    except AttributeError:
      sys.argv = []
      sys.argv.append("")
    # Embedded tcl interpreter plus the python bridge commands it can call.
    self.tcl = Tkinter.Tcl()
    self.tcl.createcommand("py_eval", self.py_eval)
    self.tcl.createcommand("py_package", self.py_package)
    self.tcl.createcommand("py_type", self.py_type)
    self.tcl.createcommand("py_del", self.py_del)
    self.tcl.createcommand("py_puts", self.py_puts)
    self.tcl.createcommand("py_after", self.py_after)
    self.tcl.createcommand("py_vtkInstanceName", self.py_vtkInstanceName)
    # Single-shot, zero-delay Qt timer used to emulate tcl's [after].
    self.timer = qt.QTimer()
    self.timer.setSingleShot(True)
    self.timer.setInterval(0)
    self.timer.connect('timeout()', self.after_callback)
    if sys.platform == 'win32':
      # Update environment variables set by application - unlike other platforms,
      # on windows this does not happen automatically so we do it here
      # Note that subsequent env changes will not be reflected
      for key in os.environ.keys():
        self.tcl_putenv(key, os.environ[key])
    # This path is Slicer-specific
    self.tcl.eval('source "%s/bin/Python/tpycl/tpycl.tcl"' % slicer.app.slicerHome)

  def usage(self):
    """Print command line help and exit the process."""
    print "tpycl [options] [file.tcl] [arg] [arg]"
    print "-v --verbose : debugging info while parsing"
    print "-h --help : extra help info"
    print ""
    print "tpycl is a tcl shell implemented in python that"
    print "allows you to import and execute python code from"
    print "inside tcl (hence the name - an homage to jcw's typcl which"
    print "allows you to call tcl from python)."
    print "Not all python constructs supported, but tpycl should be"
    print "adequate to call many packages."
    exit()

  def dprint(self, *args):
    """ debug print """
    if self.verbose:
      for arg in args:
        print arg,
      print ""

  def py_package(self, packageName):
    """ imports a vtk-wrapped python package
    and registers every name in it as a tcl class command
    """
    self.dprint ("importing %s as a package" % packageName)
    if packageName == 'vtk':
      # vtk gets a dedicated branch so a plain "import vtk" is used.
      import vtk
      globals()[packageName] = vtk
      for name in dir(vtk):
        self.tcl.eval("::tpycl::registerClass %s %s.%s" % (name, packageName, name) )
      return
    package = globals()[packageName] = __import__(packageName)
    for name in dir(package):
      self.tcl.eval("::tpycl::registerClass %s %s.%s" % (name, packageName, name) )

  def py_type(self,string):
    """ return true if the string represents a valid python type
    such as an int or an instanced variable
    """
    try:
      exec( "type(%s)"%string, globals() )
    except:
      return 0
    return 1

  def py_vtkInstanceName(self,instance):
    """ make a unique name for an instance using the classname and
    pointer in hex
    - assumes the string form of the instance will end with hex
    encoding of the pointer, for example: '(vtkImageData)0x2a9a750'
    """
    # used to work with vtk 5.6
    #return "%s%s" % (instance.GetClassName(), repr(instance).split()[-1][:-1])
    # now just strip off the parens
    return repr(instance).replace('(','').replace(')','')

  def py_del(self,instanceName):
    """ deletes a named instance
    """
    # only delete if the instanceName exists
    if globals().has_key(instanceName):
      exec( "del(%s)"%instanceName, globals() )
    return None

  def py_puts(self, noNewLine, message):
    """ print into the python shell
    """
    # noNewLine arrives from tcl as the string "0" or "1".
    print(message)
    if noNewLine == "0":
      print("\n")

  def py_after(self):
    """ sets the QTimer to call the callback
    """
    self.timer.start()

  def after_callback(self):
    """ what gets called when the after timeout happens
    """
    self.tcl.eval('::after_callback')
    self.timer.stop()

  def py_eval(self,cmd):
    """ evaluated the python command string and returns the result
    - if the result is a vtk object instance, it is registered in the tcl interp
    - if the result is a tuple, it is converted to a tcl-style list
    """
    # Capture the expression value in a well-known global so it can be
    # inspected after the exec below.
    cmd = "__tpycl_result = " + cmd
    try:
      exec( cmd, globals() )
    except:
      print( "Error executing %s" % cmd )
      print( sys.exc_info() )
      raise
    evalResult = globals()["__tpycl_result"]
    try:
      if evalResult.IsA("vtkObject"):
        instanceName = self.py_vtkInstanceName(evalResult)
        # Only register the instance as a tcl proc the first time it is seen.
        if self.tcl_eval("info command %s" % instanceName) == "":
          exec ("%s = globals()['__tpycl_result']" % instanceName, globals())
          self.tcl_eval( "proc ::%s {args} {tpycl::methodCaller %s %s $args}" % (instanceName, instanceName, instanceName) )
        return( instanceName )
    except AttributeError:
      # Not a vtk object (no IsA method); fall through to other conversions.
      pass
    try:
      if evalResult.__class__.__name__ == 'tuple':
        # Convert to a space-separated string, which tcl treats as a list.
        returnValue = evalResult[0]
        for element in evalResult[1:]:
          returnValue = "%s %s" % (returnValue, element)
        return( returnValue )
    except AttributeError:
      pass
    return( repr(evalResult) )

  def tcl_callback(self, cmd):
    """ evaluate tcl code string but don't return the result
    (only prints error messages)
    """
    self.dprint("callback command is <%s>" % cmd)
    try:
      result = self.tcl.eval(cmd)
    except Tkinter.TclError,error:
      print (error)
      errorInfo = self.tcl.eval("set ::errorInfo")
      print (errorInfo)

  def tcl_eval(self, cmd):
    """ evaluate tcl code string and return the result
    - py_package is a special string to import python code into tcl
    - py_eval goes back from tcl into python
    """
    self.dprint("command is <%s>" % cmd)
    if cmd == 'exit':
      exit()
    if cmd.startswith("py_package "):
      # NOTE(review): "py_package " is 11 characters, so cmd[10:] keeps the
      # leading space in the package name -- looks like it should be cmd[11:];
      # confirm against how tpycl.tcl invokes this path.
      self.py_package( cmd[10:] )
      return()
    if cmd.startswith("py_eval "):
      self.py_eval( cmd[len("py_eval "):] )
      return()
    try:
      result = self.tcl.eval(cmd)
    except Tkinter.TclError,error:
      print (error)
      errorInfo = self.tcl.eval("set ::errorInfo")
      print (errorInfo)
      return(None)
    return(result)

  def tcl_putenv(self, key, value):
    """ Set environment variable
    """
    # re.escape protects backslashes (e.g. windows paths) from tcl expansion.
    import re
    self.tcl.eval("global env; set env(%s) \"%s\""%(key, re.escape(value)))

  def main(self, argv):
    """ main loop for the interpreter shell """
    # parse command line options
    self.file = ""
    self.args = []
    while argv != []:
      arg = argv.pop(0)
      if arg == "-v" or arg == "--verbose":
        self.verbose = True
        continue
      if arg == "-h" or arg == "--help":
        self.usage()
      # First non-flag argument is the script; the rest are its arguments.
      if not self.file:
        self.file = arg
      else:
        self.args.append(arg)
    self.dprint("file", self.file)
    self.dprint("args", self.args)
    # if given a file, run it
    if self.file != "":
      fp = open(self.file)
      while 1:
        cmd = fp.readline()
        if cmd == "":
          break
        self.tcl_eval( cmd[:-1] )
    # evaluate stdin until eof
    while 1:
      sys.stdout.write( "% " )
      cmd = sys.stdin.readline()[:-1]
      if cmd != "":
        result = self.tcl_eval( cmd )
        if result != None:
          print result
if __name__ == "__main__":
  # Run the interactive tpycl shell, forwarding any command line arguments.
  shell = tpycl()
  shell.main(sys.argv[1:])
| 29.761905 | 124 | 0.6212 | 990 | 7,500 | 4.626263 | 0.275758 | 0.033624 | 0.031223 | 0.033624 | 0.107424 | 0.080349 | 0.068122 | 0.068122 | 0.068122 | 0.068122 | 0 | 0.005 | 0.253333 | 7,500 | 251 | 125 | 29.880478 | 0.812857 | 0.094933 | 0 | 0.203593 | 0 | 0 | 0.170053 | 0.018182 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.011976 | 0.05988 | null | null | 0.161677 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d895a735abdd884483a6e9345d2c27f5444c080 | 1,469 | py | Python | change_mac_ip.py | Anon123-tech/WiFi_Pentest_Guide | 969c5018645bc22fce663c42d709320eece1837a | [
"MIT"
] | null | null | null | change_mac_ip.py | Anon123-tech/WiFi_Pentest_Guide | 969c5018645bc22fce663c42d709320eece1837a | [
"MIT"
] | 1 | 2021-10-01T04:19:27.000Z | 2021-10-01T04:19:27.000Z | change_mac_ip.py | Anon123-tech/WiFi_Pentest_Guide | 969c5018645bc22fce663c42d709320eece1837a | [
"MIT"
] | null | null | null | import sys,os
import argparse as arg
import nmap
import urllib2
# Command line interface: both the network address to scan and the
# interface whose identity will be changed are mandatory.
parser = arg.ArgumentParser()
parser.add_argument("-a", "--address", help="IP address", required=True)
parser.add_argument("-i", "--interface", help="Interface", required=True)
argument = parser.parse_args()
def scan_(ip):
    """Ping-scan the /24 network around *ip* and collect host addresses.

    Returns a list of ``{'ip': ..., 'mac': ...}`` dicts, one per discovered
    host; 'mac' is None when nmap did not report a hardware address.
    """
    hosts = []
    # -sn: ping scan only (no port scan); sudo is required for ARP/MAC data.
    scan = nmap.PortScanner().scan(hosts=ip + '/24', arguments='-sn ', sudo=True).get('scan')
    for host_ip in scan.keys():
        # The scan dict maps each host IP to its result directly; the original
        # scan.keys()[scan.keys().index(i)] round-trip was a no-op for i.
        # .get('addresses', {}) also avoids a crash when no addresses exist.
        mac = scan[host_ip].get('addresses', {}).get('mac')
        hosts.append({'ip': host_ip, 'mac': mac})
    return hosts
def change(ip,mac,iface):
    """Spoof *iface* to use the given *ip* and *mac*, then probe connectivity.

    Returns True if the internet is reachable with the spoofed identity,
    False if not, and implicitly None when *mac* is None (nothing attempted).
    """
    if mac is not None:
        # Derive the default gateway by assuming it is the .1 host of the /24.
        gw_arr = ip.split(".")
        gw_arr[3] = "1"
        gw_address = ".".join(gw_arr)
        # The interface must be down to change its hardware (MAC) address.
        os.system("ip link set "+iface+" down")
        os.system("ip link set dev "+iface+" address "+mac)
        os.system("ip link set "+iface+" up")
        os.system("ip addr flush dev "+iface)
        os.system("ifconfig "+iface+" "+ip+" netmask 255.255.255.0 up")
        os.system("route add default gw "+gw_address)
        # NOTE(review): ip/mac come from scan results and are interpolated
        # into shell commands unescaped -- potential shell injection if a host
        # reports a crafted value; consider subprocess with argument lists.
        print ("Testing IP %s and MAC %s"%(ip,mac))
        os.system("ping -c 2 8.8.8.8")
        try:
            # 216.58.192.142 is a hard-coded web host; 1s timeout keeps it quick.
            urllib2.urlopen('http://216.58.192.142', timeout=1)
            return True
        except:
            return False
def main():
    """Scan the network, print the discovered hosts, then try each
    (ip, mac) pair in IP order until one can reach the internet."""
    res = scan_(argument.address)
    print "IP and Mac list"
    for j in sorted(res, key = lambda i: i['ip']):
        print ("IP :%s / MAC: %s"%(j['ip'],j['mac']))
    for j in sorted(res, key = lambda i: i['ip']):
        internet_connection = change(j['ip'],j['mac'],argument.interface)
        if internet_connection:
            # Keep the first identity that works.
            break

main()
| 27.716981 | 89 | 0.645337 | 239 | 1,469 | 3.903766 | 0.393305 | 0.060021 | 0.042872 | 0.045016 | 0.125402 | 0.107181 | 0.060021 | 0.060021 | 0.060021 | 0.060021 | 0 | 0.026527 | 0.153165 | 1,469 | 52 | 90 | 28.25 | 0.723473 | 0 | 0 | 0.046512 | 0 | 0 | 0.212389 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.093023 | null | null | 0.069767 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d8d2dfaa3afe8aa6965d0899dc098740dc88c7a | 3,330 | py | Python | test/test_timematcher.py | magus0219/clockwork | 78c08afdd14f226d7f5c13af633d41a2185ebb7f | [
"MIT"
] | null | null | null | test/test_timematcher.py | magus0219/clockwork | 78c08afdd14f226d7f5c13af633d41a2185ebb7f | [
"MIT"
] | null | null | null | test/test_timematcher.py | magus0219/clockwork | 78c08afdd14f226d7f5c13af633d41a2185ebb7f | [
"MIT"
] | null | null | null | '''
Created on Feb 17, 2014
@author: magus0219
'''
import unittest,datetime
from util.dateutil import DateUtil
from core.timematcher import TimeMatcher
from core.timepattern import TimePattern
class TimeMatcherTest(unittest.TestCase):
    """Unit tests for TimeMatcher single-unit and full-pattern matching."""

    @unittest.expectedFailure
    def testUnvaidValueNotInt(self):
        """A non-integer value must be rejected."""
        TimeMatcher.matchOneUnit("*", "dsf")

    @unittest.expectedFailure
    def testUnvaidValueNegitiveInt(self):
        """A negative value must be rejected."""
        TimeMatcher.matchOneUnit("*", -2)

    @unittest.expectedFailure
    def testUnvaidValuePattern1(self):
        """A malformed step pattern must be rejected."""
        TimeMatcher.matchOneUnit("fjf/2", 1)

    @unittest.expectedFailure
    def testUnvaidValuePattern2(self):
        """A non-numeric literal pattern must be rejected."""
        TimeMatcher.matchOneUnit("sdf", 1)

    @unittest.expectedFailure
    def testUnvaidValuePattern3(self):
        """A non-numeric step divisor must be rejected."""
        TimeMatcher.matchOneUnit("*/sd", 1)

    def testMatchOnePattern(self):
        """Table-driven check of wildcard, step and literal unit patterns."""
        cases = [
            ("*", 1, True),
            ("*", 24, True),
            ("*/2", 22, True),
            ("*/2", 13, False),
            ("*/5", 15, True),
            ("*/5", 13, False),
            ("23", 23, True),
            ("23", 13, False),
        ]
        for pattern, value, expected in cases:
            self.assertEqual(expected, TimeMatcher.matchOneUnit(pattern, value))

    def testMatchTimePattern(self):
        """Each cron-style field of the reference moment must match alone
        and combined; a non-matching step must fail."""
        moment = DateUtil.datetime("2014-02-17 20:28:35")
        cases = [
            ("* * * * *", True),
            ("28 * * * *", True),
            ("* 20 * * *", True),
            ("* * 17 * *", True),
            ("* * * 2 *", True),
            ("* * * * 1", True),
            ("28 20 17 2 1", True),
            ("*/2 * * * *", True),
            ("*/3 * * * *", False),
        ]
        for pattern, expected in cases:
            self.assertEqual(expected,
                             TimeMatcher.matchTimePattern(TimePattern(pattern), moment))
if __name__ == "__main__":
    # To run a single test instead: import sys;sys.argv = ['', 'Test.testUnvaidValue']
    unittest.main()
| 47.571429 | 102 | 0.562162 | 282 | 3,330 | 6.609929 | 0.205674 | 0.136803 | 0.132511 | 0.209227 | 0.590129 | 0.449034 | 0.371245 | 0.371245 | 0.35515 | 0.35515 | 0 | 0.080645 | 0.311111 | 3,330 | 69 | 103 | 48.26087 | 0.731909 | 0.028529 | 0 | 0.28 | 0 | 0 | 0.094515 | 0 | 0 | 0 | 0 | 0 | 0.34 | 1 | 0.14 | false | 0 | 0.08 | 0 | 0.24 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d8dccb89c879711e7ed922439a880fb09054040 | 276 | py | Python | pets/api/urls.py | tekodan/DaleLaPata | 7a998f617d88c3f71fe5da896f2197fc0043a731 | [
"MIT"
] | 1 | 2019-05-06T18:44:43.000Z | 2019-05-06T18:44:43.000Z | pets/api/urls.py | tekodan/DaleLaPata | 7a998f617d88c3f71fe5da896f2197fc0043a731 | [
"MIT"
] | 10 | 2021-03-18T21:20:55.000Z | 2022-03-11T23:33:03.000Z | pets/api/urls.py | koyoo-maxwel/findyourpet | ae5978f9ddd9e116d332734d2a45c76d7c6ac1f6 | [
"MIT"
] | null | null | null | from django.conf.urls import url
from api import views
# URL routes for the read-only pets API, each backed by a class-based view.
# NOTE(review): route names mix snake_case ('list_pets') and kebab-case
# ('city-list', 'state-list'); renaming would break existing reverse()
# callers, so they are left as-is.
urlpatterns = [
    url(r'^pets/$', views.ListPets.as_view(), name='list_pets'),
    url(r'^cities/$', views.CityList.as_view(), name='city-list'),
    url(r'^states/$', views.StateList.as_view(), name='state-list'),
]
| 27.6 | 68 | 0.663043 | 41 | 276 | 4.365854 | 0.536585 | 0.067039 | 0.167598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126812 | 276 | 9 | 69 | 30.666667 | 0.742739 | 0 | 0 | 0 | 0 | 0 | 0.192029 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4d93299efc0720f63f6f0dc30928e10e3a67f707 | 830 | py | Python | examples/reactjs-nodejs/deploy.py | py-mina-deploy/py-mina | 029bbe6f183afece1ccc2e2d3d11873c5560e8f4 | [
"MIT"
] | 7 | 2017-09-21T17:00:59.000Z | 2021-06-18T06:03:19.000Z | examples/reactjs-nodejs/deploy.py | py-mina-deploy/py-mina | 029bbe6f183afece1ccc2e2d3d11873c5560e8f4 | [
"MIT"
] | null | null | null | examples/reactjs-nodejs/deploy.py | py-mina-deploy/py-mina | 029bbe6f183afece1ccc2e2d3d11873c5560e8f4 | [
"MIT"
] | 2 | 2018-03-20T07:51:37.000Z | 2020-05-03T14:30:55.000Z | """
Deploy NodeJs application
https://github.com/react-boilerplate/react-boilerplate
"""
from py_mina import *
from py_mina.subtasks import git_clone, create_shared_paths, link_shared_paths, rollback_release
# Settings - shared
# NOTE: set() here is py_mina's configuration DSL (from the star import
# above), not the python builtin.
set('verbose', True)
# Paths persisted between deploys and symlinked into every release.
set('shared_dirs', ['node_modules', 'tmp'])
set('shared_files', [])
# Tasks
@task
def restart():
    """
    Restarts application on remote server
    (the monit process group is described in README.md)
    """
    run('sudo monit restart -g nodejs_app_prod')
@deploy_task(on_success=restart)
def deploy():
    """
    Runs deploy process on remote server: clone the repo, link the shared
    paths into the new release, install dependencies and build.
    restart() runs automatically on success.
    """
    git_clone()
    link_shared_paths()
    run('npm install')
    run('npm run build')
@setup_task
def setup():
    """
    Runs setup process on remote server (creates the shared dirs/files
    declared in the settings above)
    """
    create_shared_paths()
@task
def rollback():
    """
    Rollbacks to the previous release on the remote server
    """
    rollback_release()
| 13.606557 | 96 | 0.708434 | 110 | 830 | 5.145455 | 0.518182 | 0.077739 | 0.074205 | 0.074205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.156627 | 830 | 60 | 97 | 13.833333 | 0.808571 | 0.315663 | 0 | 0.1 | 0 | 0 | 0.203065 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.1 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4da3d5ac7b735f55566862fbadcd345f662d67b0 | 1,162 | py | Python | settings_template.py | Pierre-Thibault/memberCardGenerator | e05b421d3f50453d3603fd5513383e77378e9ccb | [
"MIT"
] | 2 | 2017-09-29T17:06:19.000Z | 2021-05-10T22:30:50.000Z | settings_template.py | Pierre-Thibault/memberCardGenerator | e05b421d3f50453d3603fd5513383e77378e9ccb | [
"MIT"
] | 1 | 2021-09-07T23:43:40.000Z | 2021-09-07T23:43:40.000Z | settings_template.py | Pierre-Thibault/memberCardGenerator | e05b421d3f50453d3603fd5513383e77378e9ccb | [
"MIT"
] | 1 | 2022-01-10T13:42:54.000Z | 2022-01-10T13:42:54.000Z | # -*- coding: utf-8 -*-
# Copy this file, rename it settings.py, and change the values for your own project.

# The csv file containing the information about the member.
# There is three columns: The name, the email and the member type: 0 regular, 1 life time
CSV_FILE = "path to csv file"

# The svg file for regular member. {name} and {email} are going to be replaced with the
# corresponding values from the csv file
SVG_FILE_REGULAR = "path to svg regular member file"

# Same as SVG_FILE_REGULAR but for life time member
SVG_FILE_LIFE_TIME = "path to svg life time member file"

# Destination folder where the member cards will be generated. If the folder does not
# exist yet it will be created.
DEST_GENERATED_FOLDER = "path to folder that will contain the generated files"

# The message file used as the text body for the email message. UTF-8.
# (Previously a developer-specific absolute path; replaced with a placeholder
# like every other template value.)
MSG_FILE = "path to email message body file"

# SMTP configuration
# NOTE(review): the SMPT_* names below are misspelled (should be SMTP_*) but are
# kept as-is because code importing settings.py refers to them under these names.
SMPT_HOST = "myserver.com"
SMPT_PORT = 587
SMTP_USER = "user_name"
SMTP_PASSWORD = "password"

# Email configuration
EMAIL_FROM = "some_email@something.com"
EMAIL_SUBJECT = "subject"
EMAIL_PDF = "name of attachment file.pdf"
| 36.3125 | 117 | 0.766781 | 193 | 1,162 | 4.502591 | 0.476684 | 0.032221 | 0.023015 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007216 | 0.165232 | 1,162 | 31 | 118 | 37.483871 | 0.88866 | 0.55852 | 0 | 0 | 0 | 0 | 0.530938 | 0.141717 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4da9f7a9d46841bf7af0a4af66fd041f70367d1f | 575 | py | Python | accounting_tech/migrations/0018_auto_20190403_1456.py | Tim-Ilin/asup_corp_site | 02a9573f2490ef8f31b3ba95bc351c2458d049e5 | [
"MIT"
] | null | null | null | accounting_tech/migrations/0018_auto_20190403_1456.py | Tim-Ilin/asup_corp_site | 02a9573f2490ef8f31b3ba95bc351c2458d049e5 | [
"MIT"
] | 8 | 2021-03-19T11:12:07.000Z | 2022-03-12T00:32:27.000Z | accounting_tech/migrations/0018_auto_20190403_1456.py | Tim-Ilin/asup_corp_site | 02a9573f2490ef8f31b3ba95bc351c2458d049e5 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.7 on 2019-04-03 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: re-points request_to_repair.inventory_number at
    # accounting_tech.Equipment, nullable, with SET_NULL on delete of the Equipment.

    dependencies = [
        ('accounting_tech', '0017_auto_20190403_1434'),
    ]

    operations = [
        migrations.AlterField(
            model_name='request_to_repair',
            name='inventory_number',
            field=models.ForeignKey(db_column='inventory_number', null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting_tech.Equipment', verbose_name='ИНВ №'),
        ),
    ]
| 28.75 | 177 | 0.678261 | 70 | 575 | 5.4 | 0.671429 | 0.063492 | 0.074074 | 0.116402 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067834 | 0.205217 | 575 | 19 | 178 | 30.263158 | 0.754923 | 0.078261 | 0 | 0 | 1 | 0 | 0.221591 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dac488dd2d30d64a8f251641aa08b45ff93ef4d | 500 | py | Python | tests/bgzip_test.py | Swiffers/puretabix | 5f0895c9b17560e76dd962a20844fffb565a4aed | [
"MIT"
] | 1 | 2021-07-07T00:18:47.000Z | 2021-07-07T00:18:47.000Z | tests/bgzip_test.py | Swiffers/puretabix | 5f0895c9b17560e76dd962a20844fffb565a4aed | [
"MIT"
] | null | null | null | tests/bgzip_test.py | Swiffers/puretabix | 5f0895c9b17560e76dd962a20844fffb565a4aed | [
"MIT"
] | null | null | null | from puretabix import get_bgzip_lines_parallel
class TestBlockGZip:
    def test_get_lines(self, vcf_filename, vcf_gz):
        """Parallel bgzip line reader yields the same lines as plain gzip.

        Compares the sorted decoded lines from the gzip fixture against the
        sorted output of get_bgzip_lines_parallel on the same file.
        """
        lines = tuple(sorted(map(bytes.decode, vcf_gz.readlines())))
        lines_parsed = tuple(sorted(get_bgzip_lines_parallel(vcf_filename)))
        # zip() silently truncates to the shorter sequence, which would hide a
        # parser that drops lines -- require equal counts first.
        assert len(lines) == len(lines_parsed), (len(lines), len(lines_parsed))
        for line_in, line_out in zip(lines, lines_parsed):
            assert line_in.strip() == str(line_out), (line_in, line_out)
| 35.714286 | 76 | 0.676 | 71 | 500 | 4.394366 | 0.43662 | 0.115385 | 0.160256 | 0.166667 | 0.147436 | 0.147436 | 0.147436 | 0 | 0 | 0 | 0 | 0 | 0.232 | 500 | 13 | 77 | 38.461538 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.1 | false | 0 | 0.1 | 0 | 0.3 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dae657468dbf8a1f6bf472d8e316e6a356158ca | 887 | py | Python | setup.py | Eawag-SWW/datapool_client | a43c38f0f858a687d6354ef4d857beee59882c8a | [
"MIT"
] | null | null | null | setup.py | Eawag-SWW/datapool_client | a43c38f0f858a687d6354ef4d857beee59882c8a | [
"MIT"
] | null | null | null | setup.py | Eawag-SWW/datapool_client | a43c38f0f858a687d6354ef4d857beee59882c8a | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
# Distribution metadata for the datapool_client package.
setup(
    name="datapool_client",
    version="1.0",
    description="Designed to access the datapool software developed by ETH Zurich - SIS and Eawag. "
    "Find out more under https://datapool.readthedocs.io/en/latest/.",
    author="Christian Foerster",
    author_email="christian.foerster@eawag.ch",
    license="MIT Licence",
    classifiers=[
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.9",
    ],
    install_requires=[
        "pandas",
        "numpy",
        "psycopg2-binary",
        "matplotlib",
        "cufflinks",
        "plotly",
        # exact pin -- presumably a compatibility constraint; confirm before bumping
        "pyparsing==2.4.7",
        "sqlalchemy",
        "tqdm"
    ],
    keywords="datapool_client, eawag, postgres",
    packages=find_packages(),
    include_package_data=True,
)
| 28.612903 | 100 | 0.611048 | 89 | 887 | 6 | 0.820225 | 0.044944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012195 | 0.260428 | 887 | 30 | 101 | 29.566667 | 0.801829 | 0 | 0 | 0.068966 | 0 | 0 | 0.492672 | 0.03044 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.034483 | 0 | 0.034483 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4db107eb3d9119fdeaf236399451aa583978436f | 5,219 | py | Python | home/pi/blissflixx/lib/chanutils/chanutils.py | erick-guerra/Royalbox | 967dbbdddc94b9968e6eba873f0d20328fd86f66 | [
"MIT"
] | 1 | 2022-01-29T11:17:58.000Z | 2022-01-29T11:17:58.000Z | home/pi/blissflixx/lib/chanutils/chanutils.py | erick-guerra/Royalbox | 967dbbdddc94b9968e6eba873f0d20328fd86f66 | [
"MIT"
] | null | null | null | home/pi/blissflixx/lib/chanutils/chanutils.py | erick-guerra/Royalbox | 967dbbdddc94b9968e6eba873f0d20328fd86f66 | [
"MIT"
] | null | null | null | import requests, lxml.html, re
import htmlentitydefs, urllib, random
from lxml.cssselect import CSSSelector
from StringIO import StringIO
import cherrypy
import requests
from cachecontrol import CacheControl
# Proxy endpoints, fetched lazily on first use (see _get_proxy_url).
_PROXY_LIST = None
# Default headers sent with every request; the user-agent mimics mobile Safari.
_HEADERS = {
    'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'accept-language':'en-GB,en-US;q=0.8,en;q=0.6',
    'cache-control':'max-age=0',
    #'user-agent':'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36',
    'user-agent':'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A356 Safari/604.1',
    'Client-ID':'tq6hq1srip0i37ipzuscegt7viex9fh' # Just for Twitch API
}
def _get_proxy_url():
    """Return a randomly chosen proxy URL, fetching the list once and caching it."""
    global _PROXY_LIST
    if _PROXY_LIST is None:
        _PROXY_LIST = get_json("http://blissflixx.rocks/feeds/proxies.json")
    p = random.randint(0, len(_PROXY_LIST) - 1)
    return _PROXY_LIST[p]['url']


def _get_proxy_headers(headers):
    """Return a copy of *headers* with 'origin' set to 'blissflixx'.

    Presumably the proxy endpoints require this origin tag -- confirm.
    """
    headers = headers.copy()
    headers['origin'] = 'blissflixx'
    return headers
def get(url, params=None, proxy=False, session=None):
    """HTTP GET, optionally routed through a proxy endpoint (Python 2).

    With proxy=True the original query string is utf-8 encoded into the
    target URL, which is then passed as the 'url' parameter to a random
    proxy from _get_proxy_url().  Responses go through CacheControl.
    Raises Exception for any status code >= 300.
    """
    headers = _HEADERS
    if proxy:
        if params is not None:
            utfparams = {}
            for k, v in params.iteritems():
                utfparams[k] = unicode(v).encode('utf-8')
            url = url + "?" + urllib.urlencode(utfparams)
        params = {'url': url}
        url = _get_proxy_url()
        headers = _get_proxy_headers(headers)
    if session is None:
        session = new_session()
    cached_sess = CacheControl(session)
    #r = session.get(url, params=params, headers=headers, verify=False)
    r = cached_sess.get(url, params=params, headers=headers, verify=False)
    if r.status_code >= 300:
        raise Exception("Request : '" + url + "' returned: " + str(r.status_code))
    return r
def post(url, payload, proxy=False, session=None):
    """HTTP POST, optionally routed through a proxy endpoint.

    With proxy=True the real target URL is carried in the '__url__' form
    field and the request is sent to a random proxy.
    NOTE: mutates the caller's *payload* dict in proxy mode.
    Raises Exception for any status code >= 300.
    """
    headers = _HEADERS
    if proxy:
        payload['__url__'] = url
        url = _get_proxy_url()
        headers = _get_proxy_headers(headers)
    if session is None:
        session = new_session()
    r = session.post(url, data=payload, headers=headers, verify=False)
    if r.status_code >= 300:
        raise Exception("Request : '" + url + "' returned: " + str(r.status_code))
    return r
def post_doc(url, payload, **kwargs):
    """POST *payload* to *url* and parse the HTML response into an lxml tree."""
    r = post(url, payload, **kwargs)
    return lxml.html.fromstring(r.text)


def post_json(url, payload, **kwargs):
    """POST *payload* to *url* and decode the JSON response."""
    r = post(url, payload, **kwargs)
    return r.json()


def get_doc(url, params=None, **kwargs):
    """GET *url* and parse the HTML response into an lxml tree."""
    r = get(url, params=params, **kwargs)
    return lxml.html.fromstring(r.text)
def get_xml(url, params=None, **kwargs):
    """GET *url* and parse the response body as XML.

    Routed through get() like the sibling helpers so that *params*, proxy
    and session options are honored (previously this called requests.get
    directly, silently ignoring them, and debug-logged the whole payload
    via cherrypy).  recover=True makes the parser tolerate malformed XML.
    """
    r = get(url, params=params, **kwargs)
    parser = lxml.etree.XMLParser(encoding="utf-8", recover=True)
    return lxml.etree.parse(StringIO(r.content), parser)
def get_json(url, params=None, **kwargs):
    """GET *url* and decode the JSON response."""
    r = get(url, params=params, **kwargs)
    return r.json()


def new_session():
    """Return a fresh requests session (own cookie jar / connection pool)."""
    return requests.session()
def select_one(tree, expr):
    """First element of *tree* matching CSS selector *expr*, or None."""
    matches = CSSSelector(expr)(tree)
    if isinstance(matches, list) and len(matches) > 0:
        return matches[0]
    return None


def select_all(tree, expr):
    """All elements of *tree* matching CSS selector *expr*."""
    return CSSSelector(expr)(tree)
def get_attr(el, name):
    """Value of attribute *name* on element *el*; None when *el* is None."""
    return None if el is None else el.get(name)


def get_text(el):
    """Stripped .text of *el*; None when *el* or its text is missing."""
    if el is None or el.text is None:
        return None
    return el.text.strip()


def get_text_content(el):
    """Stripped full text content of *el*; None when *el* is None."""
    return None if el is None else el.text_content().strip()
def byte_size(num, suffix='B'):
    """Human-readable size for *num* bytes, e.g. 1536 -> '1.5 KB'."""
    value = float(num)
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, prefix, suffix)
        value /= 1024.0
    return "%.1f %s%s" % (value, 'Y', suffix)
def replace_entity(text):
    """Replace HTML character references in *text* with unicode characters.

    Handles numeric references (&#nnn; and hex &#xhh;) and named entities
    (&amp; etc. via htmlentitydefs); unknown references are left unchanged.
    Python 2 only (unichr / htmlentitydefs).
    """
    def fixup(m):
        text = m.group(0)
        if text[:2] == "&#":
            # character reference
            try:
                if text[:3] == "&#x":
                    return unichr(int(text[3:-1], 16))
                else:
                    return unichr(int(text[2:-1]))
            except ValueError:
                pass
        else:
            # named entity
            try:
                text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
            except KeyError:
                pass
        return text # leave as is
    return re.sub("&#?\w+;", fixup, text)
def number_commas(x):
    """Format an integer with comma thousands separators.

    Returns '0' for any value that is not an int/long (Python 2 only:
    the 0L long literal below is a syntax error on Python 3).
    """
    if type(x) not in [type(0), type(0L)]:
        return '0'
    if x < 0:
        return '-' + number_commas(-x)
    result = ''
    while x >= 1000:
        x, r = divmod(x, 1000)
        result = ",%03d%s" % (r, result)
    return "%d%s" % (x, result)
# A movie release name: <title> [( or [] <year 19xx/20xx/10xx...> <non-p> ...
MOVIE_RE = re.compile(r'(.*)[\(\[]?([12][90]\d\d)[^pP][\(\[]?.*$')
# A series release name: <series> S<nn>E<nn> ...
SERIES_RE = re.compile(r'(.*)S(\d\d)E(\d\d).*$')


def movie_title_year(name):
    """Split a release name into title and year.

    Dots are treated as spaces.  When no year is found, returns only
    {'title': cleaned_name}.
    """
    cleaned = name.replace('.', ' ')
    match = MOVIE_RE.match(cleaned)
    if match is None:
        return {'title': cleaned}
    title = match.group(1)
    # drop a trailing opening bracket left in front of the year
    if title and title[-1] in '([':
        title = title[:-1]
    return {'title': title.strip(), 'year': int(match.group(2))}


def series_season_episode(name):
    """Split a release name into series / season / episode.

    Dots are treated as spaces.  When no SxxEyy tag is found, returns only
    {'series': cleaned_name}.
    """
    cleaned = name.replace('.', ' ')
    match = SERIES_RE.match(cleaned)
    if match is None:
        return {'series': cleaned}
    return {
        'series': match.group(1).strip(),
        'season': int(match.group(2)),
        'episode': int(match.group(3)),
    }
| 27.613757 | 153 | 0.637095 | 779 | 5,219 | 4.175867 | 0.267009 | 0.043037 | 0.018445 | 0.022133 | 0.344912 | 0.286197 | 0.258531 | 0.245619 | 0.169075 | 0.142638 | 0 | 0.030274 | 0.196206 | 5,219 | 188 | 154 | 27.760638 | 0.745173 | 0.054225 | 0 | 0.309677 | 0 | 0.019355 | 0.124772 | 0.038953 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.012903 | 0.045161 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4db11a5afb53d4558aa8f33a19f180a1ecbc8f9d | 5,672 | py | Python | test/07-text-custom-field-list-test.py | hklarner/couchdb-mango | e519f224423ca4696a61d0065530103dd8c6651b | [
"Apache-2.0"
] | 39 | 2015-02-04T09:48:20.000Z | 2021-11-09T22:07:45.000Z | test/07-text-custom-field-list-test.py | hklarner/couchdb-mango | e519f224423ca4696a61d0065530103dd8c6651b | [
"Apache-2.0"
] | 37 | 2015-02-24T17:59:26.000Z | 2021-05-25T12:20:54.000Z | test/07-text-custom-field-list-test.py | hklarner/couchdb-mango | e519f224423ca4696a61d0065530103dd8c6651b | [
"Apache-2.0"
] | 21 | 2015-04-26T05:53:44.000Z | 2021-11-09T22:06:58.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import mango
import unittest
@unittest.skipUnless(mango.has_text_service(), "requires text service")
class CustomFieldsTest(mango.UserDocsTextTests):
    """Mango text-index tests with an explicit FIELDS list (Python 2 syntax).

    Only the fields below are indexed, so queries on unindexed fields or
    wrong types are expected to fail with HTTP 400.
    """

    FIELDS = [
        {"name": "favorites.[]", "type": "string"},
        {"name": "manager", "type": "boolean"},
        {"name": "age", "type": "number"},
        # These two are to test the default analyzer for
        # each field.
        {"name": "location.state", "type": "string"},
        {
            "name": "location.address.street",
            "type": "string"
        },
        {"name": "name\\.first", "type": "string"}
    ]

    def test_basic(self):
        docs = self.db.find({"age": 22})
        assert len(docs) == 1
        assert docs[0]["user_id"] == 9

    def test_multi_field(self):
        docs = self.db.find({"age": 22, "manager": True})
        assert len(docs) == 1
        assert docs[0]["user_id"] == 9
        docs = self.db.find({"age": 22, "manager": False})
        assert len(docs) == 0

    def test_element_acess(self):
        docs = self.db.find({"favorites.0": "Ruby"})
        assert len(docs) == 3
        for d in docs:
            assert "Ruby" in d["favorites"]

    # This should throw an exception because we only index the array
    # favorites.[], and not the string field favorites
    def test_index_selection(self):
        try:
            self.db.find({"selector": {"$or": [{"favorites": "Ruby"},
                                               {"favorites.0": "Ruby"}]}})
        except Exception, e:
            assert e.response.status_code == 400

    def test_in_with_array(self):
        vals = ["Lisp", "Python"]
        docs = self.db.find({"favorites": {"$in": vals}})
        assert len(docs) == 10

    # This should also throw an error because we only indexed
    # favorites.[] of type string. For the following query to work, the
    # user has to index favorites.[] of type number, and also
    # favorites.[].Versions.Alpha of type string.
    def test_in_different_types(self):
        vals = ["Random Garbage", 52, {"Versions": {"Alpha": "Beta"}}]
        try:
            self.db.find({"favorites": {"$in": vals}})
        except Exception, e:
            assert e.response.status_code == 400

    # This test differs from the situation where we index everything.
    # When we index everything the actual number of docs that gets
    # returned is 5. That's because of the special situation where we
    # have an array of an array, i.e: [["Lisp"]], because we're indexing
    # specifically favorites.[] of type string. So it does not count
    # the example and we only get 4 back.
    def test_nin_with_array(self):
        vals = ["Lisp", "Python"]
        docs = self.db.find({"favorites": {"$nin": vals}})
        assert len(docs) == 4

    def test_missing(self):
        self.db.find({"location.state": "Nevada"})

    def test_missing_type(self):
        # Raises an exception
        try:
            self.db.find({"age": "foo"})
            raise Exception("Should have thrown an HTTPError")
        except:
            return

    def test_field_analyzer_is_keyword(self):
        docs = self.db.find({"location.state": "New"})
        assert len(docs) == 0
        docs = self.db.find({"location.state": "New Hampshire"})
        assert len(docs) == 1
        assert docs[0]["user_id"] == 10

    # Since our FIELDS list only includes "name\\.first", we should
    # get an error when we try to search for "name.first", since the index
    # for that field does not exist.
    def test_escaped_field(self):
        docs = self.db.find({"name\\.first": "name dot first"})
        assert len(docs) == 1
        assert docs[0]["name.first"] == "name dot first"
        try:
            self.db.find({"name.first": "name dot first"})
            raise Exception("Should have thrown an HTTPError")
        except:
            return

    def test_filtered_search_fields(self):
        docs = self.db.find({"age": 22}, fields = ["age", "location.state"])
        assert len(docs) == 1
        assert docs == [{"age": 22, "location": {"state": "Missouri"}}]
        docs = self.db.find({"age": 22}, fields = ["age", "Random Garbage"])
        assert len(docs) == 1
        assert docs == [{"age": 22}]
        docs = self.db.find({"age": 22}, fields = ["favorites"])
        assert len(docs) == 1
        assert docs == [{"favorites": ["Lisp", "Erlang", "Python"]}]
        docs = self.db.find({"age": 22}, fields = ["favorites.[]"])
        assert len(docs) == 1
        assert docs == [{}]
        docs = self.db.find({"age": 22}, fields = ["all_fields"])
        assert len(docs) == 1
        assert docs == [{}]

    def test_two_or(self):
        docs = self.db.find({"$or": [{"location.state": "New Hampshire"},
                                     {"location.state": "Don't Exist"}]})
        assert len(docs) == 1
        assert docs[0]["user_id"] == 10

    def test_all_match(self):
        docs = self.db.find({
            "favorites": {
                "$allMatch": {
                    "$eq": "Erlang"
                }
            }
        })
        assert len(docs) == 1
        assert docs[0]["user_id"] == 10
| 35.672956 | 79 | 0.571403 | 723 | 5,672 | 4.42462 | 0.279391 | 0.039387 | 0.065646 | 0.070022 | 0.36105 | 0.338543 | 0.298843 | 0.238512 | 0.183495 | 0.155986 | 0 | 0.016381 | 0.278914 | 5,672 | 158 | 80 | 35.898734 | 0.76577 | 0.255465 | 0 | 0.346154 | 0 | 0 | 0.193179 | 0.005485 | 0 | 0 | 0 | 0 | 0.288462 | 0 | null | null | 0 | 0.019231 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4db8e935817372c07e59b82af45086b871e6303e | 579 | py | Python | broadcast.py | InukaRanmira/Image-to-pdf | 44f7e33b13aba44c03c3ec5c7e4efe4efe0b1911 | [
"MIT"
] | 1 | 2021-12-24T18:11:49.000Z | 2021-12-24T18:11:49.000Z | broadcast.py | InukaRanmira/Image-to-pdf | 44f7e33b13aba44c03c3ec5c7e4efe4efe0b1911 | [
"MIT"
] | null | null | null | broadcast.py | InukaRanmira/Image-to-pdf | 44f7e33b13aba44c03c3ec5c7e4efe4efe0b1911 | [
"MIT"
] | null | null | null | from pyrogram import Client ,filters
import os
from helper.database import getid
# Admin user id allowed to run /broadcast (override with the ADMIN env variable).
ADMIN = int(os.environ.get("ADMIN", 1696230986))


@Client.on_message(filters.private & filters.user(ADMIN) & filters.command(["broadcast"]))
async def broadcast(bot, message):
    """Copy the replied-to message to every user id stored in the database.

    The admin replies to the message that should be broadcast and sends
    /broadcast; ids are fetched via getid().
    """
    if not message.reply_to_message:
        return
    ms = await message.reply_text("Getting All ids from database ...........")
    ids = getid()
    tot = len(ids)
    await ms.edit(f"Starting Broadcast .... \n Sending Message To {tot} Users")
    for id in ids:
        try:
            await message.reply_to_message.copy(id)
        except Exception:
            # a user may have blocked/deleted the bot -- skip and continue;
            # was a bare `except:`, which also swallowed cancellation/exit
            pass
| 30.473684 | 90 | 0.690846 | 81 | 579 | 4.864198 | 0.580247 | 0.091371 | 0.071066 | 0.106599 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020877 | 0.172712 | 579 | 18 | 91 | 32.166667 | 0.80167 | 0 | 0 | 0 | 0 | 0 | 0.19171 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.0625 | 0.1875 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4dc54e3f4ce59c3a9f8980ef33d1443e375f1870 | 905 | py | Python | cayennelpp/tests/test_lpp_type_humidity.py | smlng/pycayennelpp | 28f2ba4fba602527d3369c9cfbce16b783916933 | [
"MIT"
] | 16 | 2019-02-18T10:57:51.000Z | 2022-03-29T01:54:51.000Z | cayennelpp/tests/test_lpp_type_humidity.py | smlng/pycayennelpp | 28f2ba4fba602527d3369c9cfbce16b783916933 | [
"MIT"
] | 40 | 2018-11-04T17:28:49.000Z | 2021-11-26T16:05:16.000Z | cayennelpp/tests/test_lpp_type_humidity.py | smlng/pycayennelpp | 28f2ba4fba602527d3369c9cfbce16b783916933 | [
"MIT"
] | 12 | 2018-11-09T19:06:36.000Z | 2021-05-21T17:44:28.000Z | import pytest
from cayennelpp.lpp_type import LppType
@pytest.fixture
def hum():
    # LPP type id 104: the humidity channel type under test in this module
    return LppType.get_lpp_type(104)
def test_humidity(hum):
    """Humidity round-trips through encode/decode, quantized down to 0.5 steps."""
    cases = (
        ((50.00,), (50.00,)),
        (50.25, (50.00,)),
        ((50.50,), (50.50,)),
        (50.75, (50.50,)),
    )
    for raw, expected in cases:
        assert hum.decode(hum.encode(raw)) == expected
def test_humidity_negative_val(hum):
    """Negative humidity values are rejected by encode."""
    with pytest.raises(Exception):
        hum.encode((-50.50,))


def test_humidity_invalid_buf(hum):
    """A 2-byte buffer is not a valid humidity payload."""
    with pytest.raises(Exception):
        hum.decode(bytearray([0x00, 0x00]))


def test_humidity_invalid_val_type(hum):
    """A list value is rejected by encode."""
    with pytest.raises(Exception):
        hum.encode([0x00])


def test_humidity_invalid_val(hum):
    """A 2-tuple is rejected by encode."""
    with pytest.raises(Exception):
        hum.encode((0, 0))
| 21.046512 | 43 | 0.667403 | 133 | 905 | 4.353383 | 0.240602 | 0.082902 | 0.129534 | 0.103627 | 0.621762 | 0.621762 | 0.331606 | 0.2038 | 0.2038 | 0.2038 | 0 | 0.047222 | 0.20442 | 905 | 42 | 44 | 21.547619 | 0.756944 | 0 | 0 | 0.344828 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01326 | 0 | 0.137931 | 1 | 0.206897 | false | 0 | 0.068966 | 0.034483 | 0.310345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dc612995d0e9e6a026d052503209dc02bc22a03 | 5,218 | py | Python | scripts/studs_dist.py | inesc-tec-robotics/carlos_controller | ffcc45f24dd534bb953d5bd4a47badd3d3d5223d | [
"BSD-3-Clause"
] | null | null | null | scripts/studs_dist.py | inesc-tec-robotics/carlos_controller | ffcc45f24dd534bb953d5bd4a47badd3d3d5223d | [
"BSD-3-Clause"
] | null | null | null | scripts/studs_dist.py | inesc-tec-robotics/carlos_controller | ffcc45f24dd534bb953d5bd4a47badd3d3d5223d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
from mission_ctrl_msgs.srv import *
from studs_defines import *
import rospy
import time
import carlos_vision as crlv
from geometry_msgs.msg import PointStamped
from geometry_msgs.msg import PoseArray
from geometry_msgs.msg import Pose
from carlos_controller.msg import StudsPoses
#import geometry_msgs.msg
#import std_msgs.msg
# incoming service handlers
def set_mode_hnd(req):
    """SetMode service handler: switch the node's working mode (WMODE_*)."""
    print 'Set mode requested: '+str(req.mode)
    global working_mode
    working_mode = req.mode
    # if working_mode != WMODE_STOPPED:
    #     for i in range(12):
    #         puntos, stiff1, stiff2, tipo= crlv.calcula_dist(working_mode)
    #         time.sleep(0.3)
    return SetModeResponse(req.mode)
def get_mode_hnd(req):
    """GetMode service handler: report the current working mode."""
    print 'Mode requested'
    global working_mode
    return GetModeResponse(working_mode)
# publisher
def publica(dato, stiff1, stiff2, tipo):
    """Publish detection results as ROS messages.

    dato: list of detections, each [position, orientation-quaternion];
    positions appear to be in millimetres (divided by 1000 below) --
    TODO confirm units.  Depending on *tipo*:
      WMODE_TRACK   -> publish the wall orientation point on wall_pub;
      WMODE_STOPPED -> build a single all-zero "idle" pose;
      otherwise     -> build one pose per stud relative to /base_link.
    The StudsPoses message is published only when the first detection is
    non-zero.
    """
    global working_mode
    global studs_pub
    global wall_pub
    #pub = rospy.Publisher(CRL_STUDS_POS_MSG, StudsPoses, queue_size=1)
    studs = PoseArray()
    mes = StudsPoses()
    studs.header.stamp = rospy.Time.now()
    if tipo==WMODE_TRACK:
        #studs.header.frame_id = "orientation"
        #studi = Pose()
        if dato[0][0][0]!=0 or dato[0][0][1]!=0 or dato[0][0][2]!=0:
            wall_or = PointStamped()
            # fixed offsets on y/z -- presumably sensor mounting calibration; confirm
            wall_or.point.x = dato[0][0][0]+0.0
            wall_or.point.y = dato[0][0][1]-0.035
            wall_or.point.z = dato[0][0][2]-0.015
            wall_pub.publish(wall_or)
    elif tipo==WMODE_STOPPED:
        studs.header.frame_id = "idle"
        studi = Pose()
        studi.position.x = 0
        studi.position.y = 0
        studi.position.z = 0
        studi.orientation.x = 0
        studi.orientation.y = 0
        studi.orientation.z = 0
        studi.orientation.w = 0
        studs.poses.append(studi)
    else:
        studs.header.frame_id = "/base_link"
        for i in range(len(dato)):
            studi = Pose()
            # mm -> m, with axis remap and fixed offsets (110, 175) --
            # NOTE(review): presumably camera-to-base calibration; confirm
            studi.position.y = (dato[i][0][0])/1000.0
            studi.position.x = -(dato[i][0][1]-110)/1000.0
            studi.position.z = (dato[i][0][2]-175)/1000.0
            studi.orientation.x = dato[i][1][0]
            studi.orientation.y = dato[i][1][1]
            studi.orientation.z = dato[i][1][2]
            studi.orientation.w = dato[i][1][3]
            studs.poses.append(studi)
    mes.pose_array=studs
    mes.stiff1=stiff1/1000.0
    mes.stiff2=stiff2/1000.0
    if dato[0][0][0]!=0 or dato[0][0][1]!=0 or dato[0][0][2]!=0:
        studs_pub.publish(mes)
# timer callback for state machine update
def timer_callback(event):
    """Periodic state-machine step (0.25 s timer, see init_server).

    Re-reads the stud pattern and laser threshold from the parameter
    server, runs the vision pipeline, and publishes results via publica().
    If the detection is flagged invalid (puntos[0][1][0] == 2) or the
    returned mode does not match the requested one, WMODE_STOPPED is
    published instead.
    """
    global working_mode
    global pattern
    global init_time
    global laser_threshold
    if working_mode==WMODE_DETECT:
        pattern = rospy.get_param(STUDS_PATTERN)
        laser_threshold=rospy.get_param(STUDS_PATTERN_LASER_THRESHOLD)
        crlv.cambia_patron(pattern, stud_margin, stud_prox)
        puntos, stiff1, stiff2, tipo= crlv.calcula_dist(working_mode, laser_threshold)
        print 'Detecting...'
        print 'Studs detected: '+ str(len(puntos))
        for i in range(len(puntos)):
            print 'Stud '+str(i)+': '+ str(puntos[i][0])
        print 'Stiff. 1: ' +str(stiff1) + ' Stiff. 2: ' +str(stiff2) + ' SD:' + str(stiff2-stiff1)
        if puntos[0][1][0]!=2 and tipo==working_mode:
            publica(puntos, stiff1, stiff2, tipo)
        else:
            publica(puntos, stiff1, stiff2, WMODE_STOPPED)
    elif working_mode==WMODE_TRACK:
        pattern = rospy.get_param(STUDS_PATTERN)
        laser_threshold=rospy.get_param(STUDS_PATTERN_LASER_THRESHOLD)
        crlv.cambia_patron(pattern, stud_margin, stud_prox)
        puntos, stiff1, stiff2, tipo= crlv.calcula_dist(working_mode, laser_threshold)
        print 'Tracking...'
        print 'Orientation = ' + str(puntos[0][0])
        print 'Stiff. 1: ' +str(stiff1) + ' Stiff. 2: ' +str(stiff2) + ' SD:' + str(stiff2-stiff1)
        if puntos[0][1][0]!=2 and tipo==working_mode:
            publica(puntos, stiff1, stiff2, tipo)
        else:
            publica(puntos, stiff1, stiff2, WMODE_STOPPED)
    else:
        # idle: just report elapsed time since node start
        print 'Idle ' + str(rospy.Time.to_sec(rospy.Time.now())-rospy.Time.to_sec(init_time))
def install_params():
    """Load stud-detection parameters from the ROS parameter server into module globals."""
    global pattern, stud_margin, stud_prox, laser_threshold
    pattern = rospy.get_param(STUDS_PATTERN)
    stud_margin = rospy.get_param(STUDS_PATTERN_DIST)
    stud_prox = rospy.get_param(STUDS_PATTERN_PROX)
    laser_threshold = rospy.get_param(STUDS_PATTERN_LASER_THRESHOLD)
def init_server():
    """Initialize the ROS node: vision pipeline, params, publishers, service, timer.

    Blocks in rospy.spin() until shutdown; the 0.25 s timer drives
    timer_callback.
    """
    global init_time
    global thres_laser
    thres_laser=220
    rospy.init_node(MODULE_NAME)
    crlv.arranca()
    install_params()
    global working_mode
    working_mode = WMODE_STOPPED
    #working_mode = WMODE_DETECT
    #working_mode = WMODE_TRACK
    global studs_pub
    global wall_pub
    wall_pub = rospy.Publisher(CRL_WALL_ORIENT_MSG, PointStamped, queue_size=1)
    studs_pub = rospy.Publisher(CRL_STUDS_POS_MSG, StudsPoses, queue_size=1)
    s = rospy.Service(SET_MODE_SRV, SetMode, set_mode_hnd)
    init_time=rospy.Time.now()
    rospy.Timer(rospy.Duration(0.25), timer_callback)
    rospy.spin()
if __name__ == "__main__":
init_server()
| 30.87574 | 99 | 0.652166 | 734 | 5,218 | 4.440055 | 0.194823 | 0.060755 | 0.01657 | 0.044185 | 0.390917 | 0.319423 | 0.293035 | 0.293035 | 0.293035 | 0.262044 | 0 | 0.037488 | 0.228057 | 5,218 | 168 | 100 | 31.059524 | 0.771599 | 0.08739 | 0 | 0.355372 | 0 | 0 | 0.036023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.07438 | null | null | 0.082645 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dc8b44f56e787d0b3156d5c7fc12d0fb557c818 | 1,770 | py | Python | example.py | luisfciencias/intro-cv | 2908d21dd8058acf13b5479a2cb409a6e00859c1 | [
"MIT"
] | null | null | null | example.py | luisfciencias/intro-cv | 2908d21dd8058acf13b5479a2cb409a6e00859c1 | [
"MIT"
] | 5 | 2020-01-28T22:54:12.000Z | 2022-02-10T00:26:51.000Z | example.py | luisfciencias/intro-cv | 2908d21dd8058acf13b5479a2cb409a6e00859c1 | [
"MIT"
] | null | null | null | # example of mask inference with a pre-trained model (COCO)
import sys
from keras.preprocessing.image import img_to_array
from keras.preprocessing.image import load_img
from mrcnn.config import Config
from mrcnn.model import MaskRCNN
from mrcnn.visualize import display_instances
from tools import load_config
# load config params - labels: class_names is read from config.yaml and passed
# to display_instances below
cfg_dict = load_config('config.yaml')
class_names = cfg_dict['class_names']
# config settings for model inference
class ConfigParams(Config):
    """Mask R-CNN config for pure inference: one image per batch on one GPU."""
    NAME = "test"
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    # 1 background class + 80 object classes (COCO)
    NUM_CLASSES = 1 + 80
# replicate the model for pure inference
rcnn_model = MaskRCNN(mode='inference', model_dir='models/', config=ConfigParams())

# model weights input (pre-trained COCO weights).
# NOTE: an unresolved git merge conflict lived here (<<<<<<< / ======= / >>>>>>>
# markers), which made the script a SyntaxError; both branches loaded the same
# weights file, so the variable-based form is kept.
path_weights_file = 'models/mask_rcnn_coco.h5'
rcnn_model.load_weights(path_weights_file, by_name=True)

# single image input: path given as the first command-line argument
path_to_image = sys.argv[1]
img = load_img(path_to_image)
# transition to array
img = img_to_array(img)
print('Image shape:', img.shape)

# make inference
results = rcnn_model.detect([img], verbose=0)
# the output is a list of dictionaries, where each dict has a single object detection
# {'rois': array([[ 30, 54, 360, 586]], dtype=int32),
#  'class_ids': array([21], dtype=int32),
#  'scores': array([0.9999379], dtype=float32),
#  'masks': huge_boolean_array_here ...
result_params = results[0]

# show photo with bounding boxes, masks, class labels and scores
display_instances(img,
                  result_params['rois'],
                  result_params['masks'],
                  result_params['class_ids'],
                  class_names,
                  result_params['scores'])
| 32.181818 | 85 | 0.719774 | 245 | 1,770 | 4.995918 | 0.42449 | 0.04902 | 0.035948 | 0.044118 | 0.086601 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042799 | 0.168362 | 1,770 | 54 | 86 | 32.777778 | 0.788723 | 0.315254 | 0 | 0 | 0 | 0 | 0.105263 | 0.0401 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.212121 | null | null | 0.030303 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dcb9047f54eac204a9bac1c46c12bc3341a699a | 11,237 | py | Python | Leetcode.py | SakuraSa/Leetcode_CodeDownloader | cba23e3ec85b24e14fdf856e0e7eefb2c95644eb | [
"Apache-2.0"
] | 3 | 2015-10-20T13:05:18.000Z | 2020-07-27T19:45:58.000Z | Leetcode.py | SakuraSa/Leetcode_CodeDownloader | cba23e3ec85b24e14fdf856e0e7eefb2c95644eb | [
"Apache-2.0"
] | null | null | null | Leetcode.py | SakuraSa/Leetcode_CodeDownloader | cba23e3ec85b24e14fdf856e0e7eefb2c95644eb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#coding=utf-8
import os
import re
import requests
import datetime
import BeautifulSoup
# url requests setting
host_url = 'https://oj.leetcode.com'
login_url = 'https://oj.leetcode.com/accounts/login/'
question_list_url = 'https://oj.leetcode.com/problems/'
# %s / %d placeholders -- presumably submission id and submission-list page
# number respectively; confirm against the callers
code_base_url = 'https://oj.leetcode.com/submissions/detail/%s/'
code_list_base_url = 'https://oj.leetcode.com/submissions/%d/'
github_login_url = 'https://oj.leetcode.com/accounts/github/login/'
# extracts (language, escaped source) from the storage.put(...) JS call on a
# submission detail page
code_regex = re.compile("storage\.put\('(python|cpp|java)', '([^']+)'\);")
leetcode_request_header = {
    'Host': 'oj.leetcode.com',
    'Origin': 'https://oj.leetcode.com',
    'Referer': 'https://oj.leetcode.com/accounts/login/'
}
github_request_header = {
    'Host': 'github.com',
    'Origin': 'https://github.com',
    'Referer': 'https://github.com/'
}
# code setting: output file extension and line-comment prefix per language
ext_dic = {'python': '.py', 'cpp': '.cpp', 'java': '.java'}
comment_char_dic = {'python': '#', 'cpp': '//', 'java': '//'}
class LeetcodeDownloader(object):
    """Scrapes a user's submissions from oj.leetcode.com.

    Supports login either with a native LeetCode account or through GitHub
    OAuth, walks the paginated submission list, and writes each submission's
    source code to ``code_path`` with a commented header describing the
    question.

    NOTE(review): written for the historical oj.leetcode.com HTML layout and
    the old BeautifulSoup (v3) API; Python 2 only.
    """

    def __init__(self, proxies=None, code_path='codes/', output_encoding='utf-8', session=None):
        # proxies: optional dict handed straight to requests.Session.proxies.
        self.proxies = proxies or {}
        self.code_path = code_path  # root directory where code files are saved
        self.output_encoding = output_encoding  # encoding used when writing files
        self.session = session or requests.Session()
        self.session.proxies = self.proxies
        self.username = self.password = ''

    def login(self, username, password):
        """Log in with a native LeetCode account; return True on success."""
        self.username = username
        self.password = password
        # Fetch the login form first to pick up the hidden (CSRF) field.
        login_page = self.session.get(login_url)
        soup = BeautifulSoup.BeautifulSoup(login_page.text)
        secret_input = soup.find('form').find('input', type='hidden')
        payload = dict(
            login=self.username,
            password=self.password,
        )
        # Echo the hidden field back in the POST so the server accepts it.
        payload[secret_input['name']] = secret_input['value']
        self.session.post(login_url, data=payload, headers=leetcode_request_header)
        return self.is_logged_in

    @property
    def is_logged_in(self):
        """True when the session carries the judge's PHPSESSID cookie."""
        return bool(self.session.cookies.get("PHPSESSID", None))

    def login_from_github(self, username, password):
        """Log in via GitHub OAuth; return True on success.

        Posts the GitHub login form (including its hidden fields) and then
        follows LeetCode's GitHub login endpoint.
        """
        self.username = username
        self.password = password
        leetcode_github_login_page = self.session.get('https://github.com/login')
        soup = BeautifulSoup.BeautifulSoup(leetcode_github_login_page.text)
        post_div = soup.find('div', id='login')
        github_post_url = 'https://github.com/session'
        payload = dict()
        # Copy every pre-filled input (auth token, etc.) into the payload.
        for ip in post_div.findAll('input'):
            value = ip.get('value', None)
            if value:
                payload[ip['name']] = value
        payload['login'], payload['password'] = username, password
        self.session.post(github_post_url, data=payload, headers=github_request_header)
        # GitHub sets logged_in=yes on successful authentication.
        if self.session.cookies['logged_in'] != 'yes':
            return False
        rsp = self.session.get(github_login_url)
        return rsp.status_code == 200

    def get_questions(self):
        """Yield one dict per problem from the judge's problem list.

        Each dict has: status, name, url, date (datetime), per (acceptance %).
        """
        rsp = self.session.get(question_list_url)
        soup = BeautifulSoup.BeautifulSoup(rsp.text)
        question_table = soup.find('table', id='problemList')
        question_table_body = question_table.find('tbody')
        for table_row in question_table_body.findAll('tr'):
            table_data = table_row.findAll('td')
            status = table_data[0].find('span')['class']
            name = table_data[1].find('a').text
            url = table_data[1].find('a')['href']
            date = datetime.datetime.strptime(table_data[2].text, '%Y-%m-%d')
            per = float(table_data[3].text.strip('%'))
            yield dict(
                status=status,
                name=name,
                url=url,
                date=date,
                per=per
            )

    def get_question_description(self, url):
        """Fetch a question page and return its metadata and description.

        Returns a dict with name, accepted_count, submission_count and a
        Markdown-ish plain-text rendering of the question description.
        """
        rsp = self.session.get(url)
        soup = BeautifulSoup.BeautifulSoup(rsp.text)
        name = soup.find("h3").text
        accepted_count = int(soup.find("span", attrs={"class": "total-ac text-info"}).find("strong").text)
        submission_count = int(soup.find("span", attrs={"class": "total-submit text-info"}).find("strong").text)

        def transform(div):
            # Recursively flatten an HTML subtree into Markdown-like text.
            lst = []
            for item in div:
                if isinstance(item, BeautifulSoup.NavigableString):
                    lst.append(item)
                elif isinstance(item, BeautifulSoup.Tag):
                    if item.name == "p":
                        lst.append("%s\n" % transform(item))
                    elif item.name == "b":
                        lst.append("###%s###" % transform(item))
                    elif item.name == "a":
                        lst.append("[%s](%s)" % (transform(item), item["href"]))
                    elif item.name == "code":
                        lst.append("`%s`" % transform(item))
                    elif item.name == "pre":
                        lst.append("```%s```" % transform(item))
                    elif item.name == "ul":
                        lst.append(transform(item))
                    elif item.name == "div":
                        lst.append(transform(item))
                    elif item.name == "li":
                        lst.append("* %s" % transform(item))
                    elif item.name == "br":
                        lst.append("\n")
                    else:
                        # Unknown tag: keep its visible text only.
                        lst.append(item.text)
            return "".join(lst)

        description = transform(soup.find("div", attrs={"class": "question-content"}))
        return {
            'name': name,
            'accepted_count': accepted_count,
            'submission_count': submission_count,
            'description': description.replace("\r", "")
        }

    def code(self, code_id):
        """Return the source code of one submission as a unicode string."""
        code_url = code_base_url % code_id
        rsp = self.session.get(code_url)
        # The source is embedded in a JS storage.put(...) call, with
        # characters escaped; undo the escaping.
        match = code_regex.search(rsp.text)
        return match.group(2).decode('raw_unicode_escape')

    def page_code(self, page_index=0):
        """Return one page of the submission list as a list of dicts.

        Returns [] when the page has no result table (past the last page).
        The 'exists' flag tells whether the submission was already saved.
        """
        code_url = code_list_base_url % page_index
        rsp = self.session.get(code_url)
        soup = BeautifulSoup.BeautifulSoup(rsp.text)
        table = soup.find('table', id='result_testcases')
        if table is None:
            return []
        table_body = table.find('tbody')
        number_reg = re.compile('\d+')
        lst = list()
        for table_row in table_body.findAll('tr'):
            table_data = table_row.findAll('td')
            name = table_data[1].find('a').text
            questions_url = host_url + table_data[1].find('a')['href']
            status = table_data[2].find('strong').text
            # The submission id is the number inside the detail-page link.
            code_id = int(number_reg.search(table_data[2].find('a')['href']).group(0))
            runtime = table_data[3].text.strip()
            lang = table_data[4].text
            file_name = "%s-%s" % (status, code_id)
            file_ext = ext_dic.get(lang, '.txt')
            file_path = os.path.join(self.code_path, name)
            file_full_name = os.path.join(file_path, file_name + file_ext)
            exists = os.path.exists(file_full_name)
            lst.append(dict(
                name=name,
                questions_url=questions_url,
                status=status,
                code_id=code_id,
                runtime=runtime,
                lang=lang,
                exists=exists
            ))
        return lst

    def page_code_all(self):
        """Yield every submission dict, walking pages until an empty one."""
        page_index = 0
        while 1:
            lst = self.page_code(page_index)
            if lst:
                for data in lst:
                    yield data
            else:
                break
            page_index += 1

    def save_code(self, table_data_list):
        """Save one submission (a dict from ``page_code``) to disk.

        Skips the download when the target file already exists. Returns a
        dict with the file path and whether it pre-existed.
        """
        file_path = os.path.join(self.code_path, table_data_list['name'])
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        file_name = "%s-%s" % (table_data_list['status'], table_data_list['code_id'])
        file_ext = ext_dic.get(table_data_list['lang'], '.txt')
        file_full_name = os.path.join(file_path, file_name + file_ext)
        exists = os.path.exists(file_full_name)
        if not exists:
            comment_char = comment_char_dic.get(table_data_list['lang'], '//')
            description = self.get_question_description(table_data_list['questions_url'])
            with open(file_full_name, 'w') as file_handle:
                # Commented metadata header, then the description, then the code.
                file_handle.write(comment_char + 'Author : %s\n' % self.username)
                file_handle.write(comment_char + 'Question : %s\n' % table_data_list['name'])
                file_handle.write(comment_char + 'Link : %s\n' % table_data_list['questions_url'])
                file_handle.write(comment_char + 'Language : %s\n' % table_data_list['lang'])
                file_handle.write(comment_char + 'Status : %s\n' % table_data_list['status'])
                file_handle.write(comment_char + 'Run Time : %s\n' % table_data_list['runtime'])
                file_handle.write(comment_char + 'Description: \n')
                for line in description["description"].split("\n"):
                    # Prefix non-blank description lines with the comment char.
                    if line.strip():
                        file_handle.write(comment_char)
                    file_handle.write(line.encode(self.output_encoding))
                    file_handle.write("\n")
                file_handle.write('\n')
                file_handle.write(comment_char + 'Code : \n')
                file_handle.write(self.code(table_data_list['code_id'])
                                  .encode(self.output_encoding)
                                  .replace('\r', ''))
        return {
            "file_full_name": file_full_name,
            "exists": exists,
        }

    def get_and_save_all_codes(self):
        """Download and save every submission; yield augmented result dicts."""
        for table_data_list in self.page_code_all():
            result = dict(table_data_list)
            code_result = self.save_code(table_data_list)
            result['path'] = code_result["file_full_name"]
            result['exists'] = code_result["exists"]
            yield result
if __name__ == '__main__':
    # Credentials for a native LeetCode account login.
    USERNAME = 'YOUR USERNAME'
    PASSWORD = 'YOUR PASSWORD'
    # Alternative: log in with a GitHub account instead.
    #downloader.login_from_github(username='YOUR USERNAME', password='YOUR PASSWORD')
    from taskbar import TaskBar
    downloader = LeetcodeDownloader()
    print "Logging..."
    if downloader.login(username=USERNAME, password=PASSWORD):
        print "ok, logged in."
    else:
        print "error, logging failed."
        exit()

    def func(row):
        # Save one submission row; return the row augmented with the saved
        # file path and an 'exists' (already-downloaded) flag.
        result = dict(row)
        code_result = downloader.save_code(row)
        result['path'] = code_result["file_full_name"]
        result['exists'] = code_result["exists"]
        return result

    task_bar = TaskBar(40)
    print "Loading submissions..."
    # Build (callable, (args, kwargs)) tasks for the progress-bar runner.
    task_param_list = task_bar.processing(
        task=lambda: list((func, ([table_data_list], {})) for table_data_list in downloader.page_code_all()),
        title=" Loading submissions...",
        show_total=False
    )
    print "ok, %s submissions found in %.2fs." % (len(task_param_list), task_bar.time_cost)
    print "Downloading submissions..."
    task_bar.do_task(task_param_list)
| 41.464945 | 112 | 0.568924 | 1,306 | 11,237 | 4.682236 | 0.169985 | 0.045626 | 0.038267 | 0.032379 | 0.354211 | 0.23843 | 0.185936 | 0.122322 | 0.078168 | 0.058545 | 0 | 0.003295 | 0.297766 | 11,237 | 270 | 113 | 41.618519 | 0.771639 | 0.017442 | 0 | 0.133333 | 0 | 0 | 0.131593 | 0.003081 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.0375 | 0.025 | null | null | 0.025 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4dcff13d4501aa2f3c3df9d643bf2c4ada7cfd82 | 335 | py | Python | src/test/resources/script/jython/testReturnString.py | adchilds/jythonutil | 24e6b945cf7474358be1f43e0a72f37411289e39 | [
"CNRI-Jython"
] | 5 | 2016-02-05T19:44:57.000Z | 2017-05-26T10:26:29.000Z | src/test/resources/script/jython/testReturnString.py | adchilds/jythonutil | 24e6b945cf7474358be1f43e0a72f37411289e39 | [
"CNRI-Jython"
] | 1 | 2017-02-03T06:19:21.000Z | 2017-02-11T03:55:55.000Z | src/test/resources/script/jython/testReturnString.py | adchilds/jythonutil | 24e6b945cf7474358be1f43e0a72f37411289e39 | [
"CNRI-Jython"
] | null | null | null | import sys
if __name__ == '__main__':
    # Default both operands to the empty string; overwrite each one in turn
    # from the command line when arguments were supplied (a partial argv
    # leaves the remaining operand at its default).
    a = ''
    b = ''
    try:
        a = sys.argv[1]
        b = sys.argv[2]
    except Exception:
        pass
    # Expose the longer of the two strings via `result`; ties keep b.
    result = b if len(b) >= len(a) else a
result = a if len(a) > len(b) else b | 19.705882 | 56 | 0.552239 | 50 | 335 | 3.54 | 0.66 | 0.079096 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009259 | 0.355224 | 335 | 17 | 57 | 19.705882 | 0.810185 | 0.346269 | 0 | 0 | 0 | 0 | 0.037037 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.1 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4dd83f2bdedcce578bc2f4f15b92a56d3b2455a9 | 3,345 | py | Python | test/test_cfg/read_grammar.py | wannaphong/pycfg | ffa67958ed1c3deb73cadb3969ac086336fb1269 | [
"MIT"
] | 8 | 2017-12-18T08:51:27.000Z | 2020-11-26T02:21:06.000Z | test/test_cfg/read_grammar.py | wannaphong/pycfg | ffa67958ed1c3deb73cadb3969ac086336fb1269 | [
"MIT"
] | 1 | 2020-01-09T15:41:09.000Z | 2020-01-09T15:41:09.000Z | test/test_cfg/read_grammar.py | wannaphong/pycfg | ffa67958ed1c3deb73cadb3969ac086336fb1269 | [
"MIT"
] | 6 | 2017-06-12T16:58:40.000Z | 2019-11-27T06:55:07.000Z | '''Read grammar specifications for test cases.'''
import re
import sys
from pprint import pprint
from cfg.core import ContextFreeGrammar, Terminal, Nonterminal, Marker
from cfg.table import END_MARKER, ParseTableNormalForm
class GrammarTestCase(object):
    '''Contains a CFG and optionally a parse table.'''

    def __init__(self, sections, filename):
        # sections: dict mapping lower-case section labels ('grammar',
        # 'table', 'result', ...) to their parsed contents.
        self._sections = sections
        self.filename = filename

    def __getattr__(self, name):
        # Sections are exposed as attributes; a missing section resolves to
        # None rather than raising AttributeError.
        return self._sections.get(name)

    def __str__(self):
        return self.filename + '\n' + '\n'.join(self._section_strs())

    def _section_strs(self):
        # Render each section as "==LABEL==" followed by its value.
        for k, v in self._sections.iteritems():
            yield '''\
==%s==
%s
''' % (k.upper(), v)
# Pre-compiled patterns used while parsing grammar test-case files.
# Fix: the patterns are now raw string literals (they were plain strings,
# which is fragile for regexes and raises invalid-escape warnings on modern
# Python); the compiled patterns are byte-for-byte identical.
label_re = re.compile(r'^\s*==\s*(.*?)\s*==\s*$')  # section header: == NAME ==
comment_re = re.compile(r'^([^#]*)')               # text before a '#' comment
shift_re = re.compile(r'^sh(\d+)$')                # shift action, e.g. "sh4"
reduce_re = re.compile(r'^re(\d+)$')               # reduce action, e.g. "re7"
def read_test_case(finname):
    '''Read a grammar test case from a file.'''
    # Lines are grouped under the most recent "==LABEL==" header; anything
    # before the first header belongs to the implicit 'grammar' section.
    current_label = 'grammar'
    sections = {}
    with open(finname, 'r') as fin:
        for raw_line in fin:
            # Drop trailing '#' comments and surrounding whitespace; skip
            # lines that become empty.
            text = comment_re.match(raw_line).group(1).strip()
            if not text:
                continue
            header = label_re.match(text)
            if header:
                current_label = header.group(1).lower()
            else:
                sections.setdefault(current_label, []).append(text)

    def retype(section, parser):
        # Replace a section's raw lines with their parsed form, if present.
        if section in sections:
            sections[section] = parser(sections[section])

    def retype_table(lines):
        # Tables are parsed against the (already parsed) grammar section.
        return read_table(lines, sections['grammar'])

    retype('grammar', read_grammar)
    retype('table', retype_table)
    retype('tablea', retype_table)
    retype('tableb', retype_table)
    retype('result', read_bool)
    return GrammarTestCase(sections, finname)
def read_grammar(lines):
    '''Build a ContextFreeGrammar from the section's lines.'''
    spec = '\n'.join(lines)
    return ContextFreeGrammar(spec)
def read_table(lines, grammar):
    '''Parse an LR parse-table section against *grammar*.

    Each line has the form "state,symbol=actions". For terminal symbols (or
    the end marker) the right-hand side is a comma-separated list of "shN"
    (shift/goto), "reN" (reduce) or "acc" (accept) entries; for nonterminals
    it is a single goto state number. Raises ValueError for unknown cell
    values or symbols not in the grammar.
    '''
    terminals = grammar.terminals
    nonterminals = grammar.nonterminals
    T = ParseTableNormalForm()
    for line in lines:
        left, right = line.split('=')
        q, X = left.split(',')
        q = int(q)
        # Decide whether X names a terminal, the end marker, or a nonterminal.
        is_terminal = False
        if Terminal(X) in terminals:
            is_terminal = True
            X = Terminal(X)
        elif Marker(X) == END_MARKER:
            is_terminal = True
            X = END_MARKER
        if is_terminal:
            actions = right.split(',')
            for a in actions:
                m = shift_re.match(a)
                if m:
                    T.set_gotoshift(q, X, int(m.group(1)))
                else:
                    m = reduce_re.match(a)
                    if m:
                        T.add_reduction(q, X, int(m.group(1)))
                    elif a == 'acc':
                        T.set_accept(q, X)
                    else:
                        raise ValueError('cell value %r not recognized' % a)
        elif Nonterminal(X) in nonterminals:
            T.set_gotoshift(q, Nonterminal(X), int(right))
        else:
            raise ValueError('a symbol in the table is not in the grammar at %s,%s' % (q, X))
    return T
def read_bool(lines):
    '''Parse a boolean section: "true"/"false" (case-insensitive) or an int.'''
    text = '\n'.join(lines).strip().lower()
    if text == 'true':
        return True
    if text == 'false':
        return False
    # Anything else must be an integer; its truth value is the answer.
    return bool(int(text))
if __name__ == '__main__':
    # CLI entry point: pretty-print a parsed test-case file (Python 2).
    if len(sys.argv) != 2:
        sys.stderr.write('Usage: read_grammar.py <file>\n')
        sys.exit(1)
    print read_test_case(sys.argv[1])
| 31.261682 | 93 | 0.564425 | 423 | 3,345 | 4.319149 | 0.293144 | 0.005473 | 0.024083 | 0.01642 | 0.026273 | 0.026273 | 0 | 0 | 0 | 0 | 0 | 0.002983 | 0.298356 | 3,345 | 106 | 94 | 31.556604 | 0.775458 | 0 | 0 | 0.1 | 0 | 0 | 0.077452 | 0.007183 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.055556 | null | null | 0.022222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4ddab5e3d9aa744300fde8fef5e302f340725170 | 44,868 | py | Python | scripts/venv/lib/python2.7/site-packages/cogent/core/entity.py | sauloal/cnidaria | fe6f8c8dfed86d39c80f2804a753c05bb2e485b4 | [
"MIT"
] | 3 | 2015-11-20T08:44:42.000Z | 2016-12-14T01:40:03.000Z | scripts/venv/lib/python2.7/site-packages/cogent/core/entity.py | sauloal/cnidaria | fe6f8c8dfed86d39c80f2804a753c05bb2e485b4 | [
"MIT"
] | 1 | 2017-09-04T14:04:32.000Z | 2020-05-26T19:04:00.000Z | scripts/venv/lib/python2.7/site-packages/cogent/core/entity.py | sauloal/cnidaria | fe6f8c8dfed86d39c80f2804a753c05bb2e485b4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Provides the entities, the building blocks of the SMRCA hierachy
representation of a macromolecular structure.
The MultiEntity class is a special Entity class to hold multiple instances of
other entities. All Entities apart from the Atom can hold others and inherit
from the MultiEntity. The Entity is the most basic class to deal with
structural and molecular data. Do not use it directly since some functions
depend on methods provided by sub-classes. Classes inheriting from MultiEntity
have to provide some attributes during init e.g: self.level = a valid string
inside the SMCRA hierarchy). Holders of entities are like normal MultiEntities,
but are temporary and are outside the parent-children axes.
"""
import cogent
from cogent.core.annotation import SimpleVariable
from numpy import (sqrt, arctan2, power, array, mean, sum)
from cogent.data.protein_properties import AA_NAMES, AA_ATOM_BACKBONE_ORDER, \
AA_ATOM_REMOTE_ORDER, AREAIMOL_VDW_RADII, \
DEFAULT_AREAIMOL_VDW_RADIUS, AA_NAMES_3to1
from cogent.data.ligand_properties import HOH_NAMES, LIGAND_AREAIMOL_VDW_RADII
from operator import itemgetter, gt, ge, lt, le, eq, ne, or_, and_, contains, \
is_, is_not
from collections import defaultdict
from itertools import izip
from copy import copy, deepcopy
__author__ = "Marcin Cieslik"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Marcin Cieslik"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Marcin Cieslik"
__email__ = "mpc4p@virginia.edu"
__status__ = "Development"
# Character ordering used when comparing atom ids; '_' and ' ' sort last.
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_ '
# SMCRA hierarchy levels (top to bottom): Holder, Structure, Model, Chain,
# Residue, Atom — see the module docstring.
HIERARCHY = ['H', 'S', 'M', 'C', 'R', 'A']
# Extend the protein van der Waals radii table with ligand radii.
AREAIMOL_VDW_RADII.update(LIGAND_AREAIMOL_VDW_RADII)
# error while creating a structure (non-recoverable error)
class ConstructionError(Exception):
    """Cannot unambiguously create a structure.

    Raised on non-recoverable errors while building a structure.
    """
# warning while creating a structure
# (something wrong with the input, but recoverable)
class ConstructionWarning(Exception):
    """Input violates some construction rules (contiguity).

    Recoverable: signals suspicious input while building a structure.
    """
def sort_id_list(id_list, sort_tuple):
    """Sorts lists of id tuples. The order is defined by the PDB file
    specification.

    sort_tuple gives, for each hierarchy level, the position of that level's
    id inside each tuple (or None when absent). The list is sorted with a
    cascade of stable sorts, least-significant key first. Python 2 only
    (uses ``cmp`` and ``list.sort(cmp=...)``).
    """
    (hol_loc, str_loc, mod_loc, chn_loc, res_loc, at_loc) = sort_tuple

    # even a simple id is a tuple, this makes sorting general
    def space_last(ch_id1, ch_id2):
        # Chain comparison: the blank chain id sorts after all others.
        if ch_id1 == ' ' and ch_id2 != ' ':
            return 1
        if ch_id2 == ' ' and ch_id1 != ' ':
            return - 1
        if ch_id1 == ' ' and ch_id2 == ' ':
            return 0
        return cmp(ch_id1, ch_id2)

    def atom(at_id1, at_id2):
        # hydrogen atoms come last
        is_hydrogen1 = (at_id1[0] == 'H')
        is_hydrogen2 = (at_id2[0] == 'H')
        diff = cmp(is_hydrogen1, is_hydrogen2)
        # back-bone atoms come first
        if not diff:
            order1 = AA_ATOM_BACKBONE_ORDER.get(at_id1)
            order2 = AA_ATOM_BACKBONE_ORDER.get(at_id2)
            diff = cmp(order2, order1)
        # remoteness: (B)eta, (D)elta, (G)amma, .... o(X)t
        if not diff:
            remote1 = AA_ATOM_REMOTE_ORDER.get(at_id1[1:2])
            remote2 = AA_ATOM_REMOTE_ORDER.get(at_id2[1:2])
            diff = cmp(remote1, remote2)
        # branching comes last
        if not diff:
            diff = cmp(at_id1[2:4], at_id2[2:4])
        return diff
        # NOTE(review): the block below is unreachable — `return diff` above
        # always exits first, so the SE-vs-CE (selenium first) tie-break is
        # never applied. Preserved as-is; confirm intent before removing.
        if not diff:
            alpha1 = ALPHABET.index(at_id1[0:1])
            alpha2 = ALPHABET.index(at_id2[0:1])
            diff = cmp(alpha2, alpha1)

    def residue(res_id1, res_id2):
        # Residue names rank: amino acids (2) before hetero (1) before
        # waters (0); equal ranks fall back to lexicographic order.
        r1, r2 = 1, 1
        if res_id1 in AA_NAMES: r1 = 2
        if res_id1 in HOH_NAMES: r1 = 0
        if res_id2 in AA_NAMES: r2 = 2
        if res_id2 in HOH_NAMES: r2 = 0
        if r1 is r2:
            return cmp(res_id1, res_id2)
        else:
            return cmp(r2, r1)

    # this assumes that the implementation of sorting is stable.
    # does it work for others then cPython?
    if res_loc or res_loc is 0:
        id_list.sort(key=itemgetter(res_loc), cmp=lambda x, y: residue(x[0], y[0])) # by res_name
    if at_loc or at_loc is 0:
        id_list.sort(key=itemgetter(at_loc), cmp=lambda x, y: space_last(x[1], y[1])) # by alt_loc
    if at_loc or at_loc is 0:
        id_list.sort(key=itemgetter(at_loc), cmp=lambda x, y: atom(x[0], y[0])) # by at_id
    if res_loc or res_loc is 0:
        id_list.sort(key=itemgetter(res_loc), cmp=lambda x, y: cmp(x[2], y[2])) # by res_ic
    if res_loc or res_loc is 0:
        id_list.sort(key=itemgetter(res_loc), cmp=lambda x, y: cmp(x[1], y[1])) # by res_id
    if chn_loc or chn_loc is 0:
        id_list.sort(key=itemgetter(chn_loc), cmp=space_last) # by chain
    if mod_loc or mod_loc is 0:
        id_list.sort(key=itemgetter(mod_loc)) # by model
    if str_loc or str_loc is 0:
        id_list.sort(key=itemgetter(str_loc)) # by structure
    return id_list
def merge(dicts):
    """Merge an iterable of dictionaries into one new dict.

    Later dictionaries win on key collisions, as with successive update().
    """
    return dict((key, value) for mapping in dicts for (key, value) in mapping.items())
def unique(lists):
    """Merge multiple iterables into a sorted tuple of unique elements."""
    pooled = set().union(*lists)
    return tuple(sorted(pooled))
class Entity(dict):
    """Container object all entities inherit from it. Inherits from dict.

    An Entity keeps a parent pointer, a tuple ``id``, a ``name``, a
    ``masked`` visibility flag, a ``modified`` dirty flag and an ``xtra``
    dict for arbitrary extra properties. Must be sub-classed: several
    methods rely on attributes (e.g. ``level``, ``coords``) provided by
    subclasses.
    """

    def __init__(self, id, name=None, *args):
        # This class has to be sub-classed!
        # ``masked`` must be set before dict.__init__ because during
        # __setstate__/__getstate__ sub-entities are iterated via .values(),
        # which consults ``masked`` to decide which children to omit.
        self.masked = False
        self.parent = None  # mandatory parent attribute
        self.modified = True  # modified on creation
        self.id = (id,)  # ids are non-zero length tuples
        self.name = (name or id)  # prefer name over duplicate id
        self.xtra = {}  # mandatory xtra dict for additional properties
        dict.__init__(self, *args)  # finish init as dictionary

    def __copy__(self):
        # Shallow copy is deliberately a deep copy: children must not be
        # shared between two parents.
        return deepcopy(self)

    def __deepcopy__(self, memo):
        new_state = self.__getstate__()
        new_instance = self.__new__(type(self))
        new_instance.__setstate__(new_state)
        return new_instance

    def __getstate__(self):
        # Detach from the parent so pickling/copying does not recurse upward.
        new_state = copy(self.__dict__)  # shallow
        new_state['parent'] = None
        return new_state

    def __setstate__(self, new_state):
        self.__dict__.update(new_state)

    def __repr__(self):
        """Default representation."""
        # mandatory getLevel from sub-class
        return "<Entity id=%s, level=%s>" % (self.getId(), self.getLevel())

    def __sub__(self, entity):
        """Override "-" as Euclidean distance between coordinates."""
        return sqrt(sum(pow(self.coords - entity.coords, 2)))

    def _setId(self, id):
        # Keep ``name`` in sync with the (single-element) id tuple.
        self.name = id[0]

    def _getId(self):
        return (self.name,)

    def getId(self):
        """Return the id."""
        return self._getId()

    def getFull_id(self):
        """Return the full id (tuple of ids from the root down to self)."""
        parent = self.getParent()
        if parent:
            full_id = parent.getFull_id()
        else:
            full_id = ()  # we create a tuple on the top
        full_id = full_id + self.getId()  # merge tuples from the left
        return full_id

    def setId(self, id_=None):
        """Set the id. Calls the ``_setId`` method."""
        # Only act when the id actually changes (explicitly or because the
        # derived id drifted from the stored one).
        if (id_ and id_ != self.id) or (not id_ and (self.getId() != self.id)):
            self.id = (id_ or self.getId())
            self.setModified(True, True)
            self._setId(self.id)
            if self.parent:
                # The parent keys children by id, so its mapping must follow.
                self.parent.updateIds()

    def _setMasked(self, masked, force=False):
        if masked != self.masked or force:
            self.masked = masked  # mask or unmask
            self.setModified(True, False)  # set parents as modified

    def setMasked(self, *args, **kwargs):
        """Set masked flag (``masked``) ``True``."""
        self._setMasked(True, *args, **kwargs)

    def setUnmasked(self, *args, **kwargs):
        """Set masked flag (``masked``) ``False``."""
        self._setMasked(False, *args, **kwargs)

    def setModified(self, up=True, down=False):
        """Set modified flag (``modified``) ``True``."""
        self.modified = True
        if up and self.parent:
            self.parent.setModified(True, False)

    def setUnmodified(self, up=False, down=False):
        """Set modified flag (``modified``) ``False``."""
        self.modified = False
        if up and self.parent:
            self.parent.setUnmodified(True, False)

    def setParent(self, entity):
        """Set the parent ``Entity`` and adds oneself as the child."""
        if self.parent != entity:
            # delete old parent
            self.delParent()
            # add new parent
            self.parent = entity
            self.parent.addChild(self)
            self.setModified(False, True)

    def delParent(self):
        """Detach mutually from the parent. Sets both child and parent modified
        flags (``modified``) as ``True``."""
        if self.parent:
            self.parent.pop(self.getId())
            self.parent.setModified(True, False)
        self.parent = None
        self.setModified(False, True)

    def getModified(self):
        """Return value of the modified flag (``modified``)."""
        return self.modified

    def getMasked(self):
        """Return value of the masked flag (``masked``)."""
        return self.masked

    def setLevel(self, level):
        """Set level (``level``)."""
        self.level = level

    def getLevel(self):
        """Return level (``level``) in the hierarchy."""
        return self.level

    def setName(self, name):
        """Set name."""
        self.name = name
        self.setId()

    def getName(self):
        """Return name."""
        return self.name

    def getParent(self, level=None):
        """Return the parent ``Entity`` instance (optionally the ancestor at
        the requested hierarchy level)."""
        if not level:
            return self.parent
        elif level == self.level:
            return self
        return self.parent.getParent(level)

    def move(self, origin):
        """Subtract the origin coordinates from the coordinates (``coords``)."""
        self.coords = self.coords - origin

    def setCoords(self, coords):
        """Set the entity coordinates. Coordinates should be a
        ``numpy.array``."""
        self.coords = coords

    def getCoords(self):
        """Get the entity coordinates."""
        return self.coords

    def getScoords(self):
        """Return spherical (r, theta, phi) coordinates."""
        x, y, z = self.coords
        x2, y2, z2 = power(self.coords, 2)
        scoords = array([sqrt(x2 + y2 + z2), \
                         arctan2(sqrt(x2 + y2), z), \
                         arctan2(y, x)])
        return scoords

    def getCcoords(self):
        """Return redundant, polar, clustering-coordinates on the unit-sphere.
        This is only useful for clustering."""
        x, y, z = self.coords
        x2, y2, z2 = power(self.coords, 2)
        ccoords = array([arctan2(sqrt(y2 + z2), x), \
                         arctan2(sqrt(x2 + z2), y), \
                         arctan2(sqrt(x2 + y2), z)
                         ])
        return ccoords

    def setScoords(self):
        """Set ``entity.scoords``, see: getScoords."""
        self.scoords = self.getScoords()

    def setCcoords(self):
        """Set ``entity.ccoords``, see: getCcoords."""
        self.ccoords = self.getCcoords()
class MultiEntity(Entity):
"""The ``MultiEntity`` contains other ``Entity`` or ``MultiEntity``
instances."""
    def __init__(self, long_id, short_id=None, *args):
        self.index = HIERARCHY.index(self.level)  # index corresponding to the hierarchy level
        # ``table`` maps each lower hierarchy level to {full_id: entity}.
        self.table = dict([(level, {}) for level in HIERARCHY[self.index + 1:]])  # empty table
        Entity.__init__(self, long_id, short_id, *args)

    def __repr__(self):
        id_ = self.getId()
        return "<MultiEntity id=%s, holding=%s>" % (id_, len(self))

    def _link(self):
        """Recursively adds a parent pointer to children."""
        for child in self.itervalues(unmask=True):
            child.parent = self
            try:
                child._link()
            except AttributeError:
                # Leaf entities (Atoms) have no _link.
                pass

    def _unlink(self):
        """Recursively deletes the parent pointer from children."""
        for child in self.itervalues(unmask=True):
            child.parent = None
            try:
                child._unlink()
            except AttributeError:
                # Leaf entities (Atoms) have no _unlink.
                pass

    def __getstate__(self):
        # State is (deep-copied children, own attributes without parent).
        new_dict = copy(self.__dict__)  # shallow copy
        new_dict['parent'] = None  # remove recursion
        new_children = []
        for child in self.itervalues(unmask=True):
            new_child_instance = deepcopy(child)
            new_children.append(new_child_instance)
        return (new_children, new_dict)

    def __setstate__(self, new_state):
        new_children, new_dict = new_state
        self.__dict__.update(new_dict)
        for child in new_children:
            self.addChild(child)

    def __copy__(self):
        # Copying is always deep: children must not be shared.
        return deepcopy(self)

    def __deepcopy__(self, memo):
        new_state = self.__getstate__()
        new_instance = self.__new__(type(self))
        new_instance.__setstate__(new_state)
        return new_instance

    def __iter__(self):
        # Iteration yields (unmasked) child entities, not keys.
        return self.itervalues()
    def setSort_tuple(self, sort_tuple=None):
        """Set the ``sort_tuple`` attribute. The ``sort_tuple`` is a tuple
        needed by the ``sort_id_list`` function to correctly sort items within
        entities."""
        if sort_tuple:
            self.sort_tuple = sort_tuple
        else:  # derive positions of id components from the first child key
            sort_tuple = [None, None, None, None, None, None]
            key_lenght = len(self.keys()[0])
            stop_i = self.index + 2  # next level, open right [)
            start_i = stop_i - key_lenght  # before all Nones
            indexes = range(start_i, stop_i)  # Nones to change
            for value, index in enumerate(indexes):
                sort_tuple[index] = value
            self.sort_tuple = sort_tuple

    def getSort_tuple(self):
        """Return the ``sort_tuple`` attribute. If not set calls the
        ``setSort_tuple`` method first. See: ``setSort_tuple``."""
        if not hasattr(self, 'sort_tuple'):
            self.setSort_tuple()
        return self.sort_tuple

    # The iteration methods below skip masked children unless unmask=True.
    def itervalues(self, unmask=False):
        return (v for v in super(MultiEntity, self).itervalues() if not v.masked or unmask)

    def iteritems(self, unmask=False):
        return ((k, v) for k, v in super(MultiEntity, self).iteritems() if not v.masked or unmask)

    def iterkeys(self, unmask=False):
        return (k for k, v in super(MultiEntity, self).iteritems() if not v.masked or unmask)

    def values(self, *args, **kwargs):
        return list(self.itervalues(*args, **kwargs))

    def items(self, *args, **kwargs):
        return list(self.iteritems(*args, **kwargs))

    def keys(self, *args, **kwargs):
        return list(self.iterkeys(*args, **kwargs))

    def __contains__(self, key, *args, **kwargs):
        # Membership also honors masking (masked children are "absent").
        return key in self.keys(*args, **kwargs)

    def sortedkeys(self, *args, **kwargs):
        # Keys sorted by PDB ordering rules (see sort_id_list).
        list_ = sort_id_list(self.keys(*args, **kwargs), self.getSort_tuple())
        return list_

    def sortedvalues(self, *args, **kwargs):
        values = [self[i] for i in self.sortedkeys(*args, **kwargs)]
        return values

    def sorteditems(self, *args, **kwargs):
        items = [(i, self[i]) for i in self.sortedkeys()]
        return items
    def _setMasked(self, masked, force=False):
        """Set the masked flag (``masked``) recursively. If forced proceed even
        if the flag is already set correctly."""
        if masked != self.masked or force:  # the second condition is when
            if masked:                      # an entity has all children masked
                # we have to mask children  # but is not masked itself
                for child in self.itervalues():  # only unmasked children
                    child.setMasked()
                    child.setModified(False, False)
            else:
                # we have to unmask children
                for child in self.itervalues(unmask=True):
                    if child.masked or force:  # only masked children
                        child.setUnmasked(force=force)
                        child.setModified(False, False)
            self.masked = masked
            self.setModified(True, False)  # set parents as modified

    def setModified(self, up=True, down=True):
        """Set the modified flag (``modified``) ``True``. If down proceeds
        recursively for all children. If up proceeds recursively for all
        parents."""
        self.modified = True
        if up and self.parent:
            self.parent.setModified(True, False)
        if down:
            for child in self.itervalues(unmask=True):
                child.setModified(False, True)

    def setUnmodified(self, up=False, down=False):
        """Set the modified (``modified``) flag ``False``. If down proceeds
        recursively for all children. If up proceeds recursively for all
        parents."""
        self.modified = False
        if up and self.parent:
            self.parent.setUnmodified(True, False)
        if down:
            for child in self.itervalues(unmask=True):
                child.setUnmodified(False, True)

    def _initChild(self, child):
        """Initialize a child (during construction)."""
        # Bypasses addChild's modified-flag bookkeeping.
        child.parent = self
        self[child.getId()] = child

    def addChild(self, child):
        """Add a child."""
        child.setParent(self)
        child_id = child.getId()
        self[child_id] = child
        self.setModified(True, False)

    def delChild(self, child_id):
        """Remove a child."""
        child = self.get(child_id)
        if child:
            # delParent also removes the child from this mapping.
            child.delParent()
        self.setModified(True, False)

    def getChildren(self, ids=None, **kwargs):
        """Return a copy of the list of children (optionally only those with
        the given ids)."""
        if ids:
            children = []
            for (id_, child) in self.iteritems(**kwargs):
                if id_ in ids:
                    children.append(child)
        else:
            children = self.values(**kwargs)
        return children
    def _setTable(self, entity):
        """Recursive helper method for ``entity.setTable``."""
        for e in entity.itervalues():
            self.table[e.getLevel()].update({e.getFull_id():e})
            self._setTable(e)

    def setTable(self, force=True, unmodify=True):
        """Populate the children table (``table``) recursively with all children
        grouped into hierarchy levels. If forced is ``True`` the table will be
        updated even if the ``Entity`` instance is not modified. If unmodify is
        ``True`` the ``Entity`` modified flag (``modified``) will be set
        ``False`` afterwards."""
        if self.modified or force:
            # a table is accurate as long as the contents of a dictionary do
            # not change.
            self.delTable()
            self._setTable(self)
            if unmodify:
                self.setUnmodified()

    def delTable(self):
        """Delete all children from the children-table (``table``). This does
        not modify the hierarchy."""
        self.table = dict([(level, {}) for level in HIERARCHY[self.index + 1:]])
        self.modified = True

    def getTable(self, level):
        """Return children of given level from the children-table
        (``table``)."""
        return self.table[level]

    def updateIds(self):
        """Update self with children ids (re-key children whose id changed)."""
        ids = []
        for (id_, child) in self.iteritems():
            new_id = child.getId()
            if id_ != new_id:
                ids.append((id_, new_id))
        for (old_id, new_id) in ids:
            child = self.pop(old_id)
            self.update(((new_id, child),))
    def getData(self, attr, xtra=False, method=False, forgiving=True, sorted=False):
        """Get data from children attributes, methods and xtra dicts as a list.
        If forgiving is ``True`` remove ``None`` values from the output.
        ``Nones`` are place-holders if a child does not have the requested data.
        If xtra is True the xtra dictionary (``xtra``) will be searched, if
        method is ``True`` the child attribute will be called."""
        values = self.sortedvalues() if sorted else self.values()
        if xtra:
            # looking inside the xtra of children
            data = [child.xtra.get(attr) for child in values]  # could get None
        else:
            # looking at attributes (optionally calling them)
            data = []
            for child in values:
                try:
                    if not method:
                        data.append(getattr(child, attr))
                    else:
                        data.append(getattr(child, attr)())
                except AttributeError:
                    # Child lacks the attribute: keep a placeholder.
                    data.append(None)
        if forgiving:  # remove Nones
            data = [point for point in data if point is not None]
        return data

    def propagateData(self, function, level, attr, **kwargs):
        """Propagate data from child level to this ``Entity`` instance. The
        function defines how children data should be transformed to become
        the parents data e.g. summed."""
        # Recurse down until the children are one level above ``level``.
        if self.index <= HIERARCHY.index(level) - 2:
            for child in self.itervalues():
                child.propagateData(function, level, attr, **kwargs)
        datas = self.getData(attr, **kwargs)
        # NOTE(review): a string function name is eval'd — do not pass
        # untrusted input here.
        if isinstance(function, basestring):
            function = eval(function)
        transformed_datas = function(datas)
        if kwargs.get('xtra'):
            self.xtra[attr] = transformed_datas
        else:
            setattr(self, attr, transformed_datas)
        return transformed_datas

    def countChildren(self, *args, **kwargs):
        """Count children based on ``getData``. Additional arguments and
        keyworded arguments are passed to the ``getData`` method."""
        data = self.getData(*args, **kwargs)
        children = defaultdict(int)  # by default returns 0
        for d in data:
            children[d] += 1
        return children

    def freqChildren(self, *args, **kwargs):
        """Frequency of children based on ``countChildren``. Additional
        arguments and keyworded arguments are passed to the ``countChildren``
        method."""
        children_count = self.countChildren(*args, **kwargs)
        lenght = float(len(self))  # it could be len(children_count)?
        for (key_, value_) in children_count.iteritems():
            children_count[key_] = value_ / lenght
        return children_count

    def splitChildren(self, *args, **kwargs):
        """Splits children into groups children based on ``getData``.
        Additional arguments and keyworded arguments are passed to the
        ``getData`` method."""
        # forgiving=False keeps data aligned 1:1 with iteritems() below.
        kwargs['forgiving'] = False
        data = self.getData(*args, **kwargs)
        clusters = defaultdict(dict)  # by default returns {}
        for (key, (id_, child)) in izip(data, self.iteritems()):
            clusters[key].update({id_:child})
        return clusters

    def selectChildren(self, value, operator, *args, **kwargs):
        """Generic method to select children, based on ``getData``.
        Returns a dictionary of children indexed by ids. Compares the data item
        for each child using the operator name e.g. "eq" and a value e.g.
        "H_HOH". Additional arguments and keyworded arguments are passed to the
        ``getData`` method."""
        # forgiving=False keeps data aligned 1:1 with iteritems() below.
        kwargs['forgiving'] = False
        data = self.getData(*args, **kwargs)
        children = {}
        for (got, (id_, child)) in izip(data, self.iteritems()):
            # NOTE(review): a string operator name is eval'd — expected to be
            # one of the imported operator-module functions (eq, gt, ...).
            if isinstance(operator, basestring):
                operator = eval(operator)
            if operator(value, got):
                children.update({id_:child})
        return children
def ornamentChildren(self, *args, **kwargs):
"""Return a list of (ornament, (id, child)) tuples, based on
``getData``. Useful for sorting see: Schwartzian transform.
Forgiving is set False. Additional arguments and keyworded arguments are
passed to the ``getData`` method."""
kwargs['forgiving'] = False
data = self.getData(*args, **kwargs)
children = []
for (got, (id_, child)) in izip(data, self.iteritems()):
children.append((got, (id_, child)))
return children
def ornamentdictChildren(self, *args, **kwargs):
"""Return a dictionary of ornaments indexed by child ids, based on
``getData``. Forgiving is set False. Additional arguments and
keyworded arguments are passed to the ``getData`` method."""
kwargs['forgiving'] = False
data = self.getData(*args, **kwargs)
propertydict = {}
for (got, id_) in izip(data, self.iterkeys()):
propertydict.update(((id_, got),))
return propertydict
def stripChildren(self, *args, **kwargs):
"""Strips children based on selection criteria. See:
``selectChildren``. Additional arguments and keyworded arguments are
passed to the ``selectChildren`` method."""
children_ids = self.selectChildren(*args, **kwargs).keys()
for id_ in children_ids:
self.delChild(id_)
def maskChildren(self, *args, **kwargs):
"""Mask children based on selection criteria. See: ``selectChildren``.
Additional arguments and keyworded arguments are passed to the
``selectChildren`` method."""
children = self.selectChildren(*args, **kwargs).itervalues()
for child in children:
child.setMasked() # child.setModified child.parent.setModified
def unmaskChildren(self, *args, **kwargs):
"""Unmask children based on selection criteria. See:
``selectChildren``. Additional arguments and keyworded arguments are
passed to the ``selectChildren`` method."""
children = self.selectChildren(*args, **kwargs).itervalues()
for child in children:
child.setUnmasked() # child.setModified child.parent.setModified
def moveRecursively(self, origin):
"""Move ``Entity`` instance recursively to the origin."""
for child in self.itervalues():
try:
child.moveRecursively(origin)
except:
# Atoms do not have this
child.move(origin)
pass
self.setCoords()
def setCoordsRecursively(self):
"""Set coordinates (``coords``) recursively. Useful if any child had its
coordinates changed."""
for child in self.itervalues():
try:
child.setCoordsRecursively()
except:
#Atoms do not have this
pass
self.setCoords()
def setCoords(self, *args, **kwargs):
"""Set coordinates (``coords``) as a centroid of children coordinates.
A subset of children can be selected for the calculation. See:
``Entity.selectChildren``. Additional arguments and keyworded arguments
are passed to the ``getData`` method."""
# select only some children
if args or kwargs:
children = self.selectChildren(*args, **kwargs).values()
else:
children = self
coords = []
for child in children:
coords.append(child.getCoords())
self.coords = mean(coords, axis=0)
def getCoords(self):
"""Returns the current coordinates (``coords``). Raises an
``AttributeError`` if not set."""
try:
return self.coords
except AttributeError:
raise AttributeError, "Entity has coordinates not set."
def dispatch(self, method, *args, **kwargs):
"""Calls a method of all children with given arguments and keyworded
arguments."""
for child in self.itervalues():
getattr(child, method)(*args, **kwargs)
class Structure(MultiEntity):
"""The ``Structure`` instance contains ``Model`` instances."""
def __init__(self, id, *args, **kwargs):
self.level = 'S'
MultiEntity.__init__(self, id, *args, **kwargs)
def __repr__(self):
return '<Structure id=%s>' % self.getId()
def removeAltmodels(self):
"""Remove all models with an id != 0"""
self.stripChildren((0,), 'ne', 'id', forgiving=False)
def getDict(self):
"""See: ``Entity.getDict``."""
return {'structure':self.getId()[0]}
class Model(MultiEntity):
"""The ``Model`` instance contains ``Chain`` instances."""
def __init__(self, id, *args, **kwargs):
self.level = 'M'
MultiEntity.__init__(self, id, *args, **kwargs)
def __repr__(self):
return "<Model id=%s>" % self.getId()
def getDict(self):
"""See: ``Entity.getDict``."""
try:
from_parent = self.parent.getDict()
except AttributeError:
# we are allowed to silence this becaus a structure id is not
# required to write a proper pdb line.
from_parent = {}
from_parent.update({'model':self.getId()[0]})
return from_parent
class Chain(MultiEntity):
"""The ``Chain`` instance contains ``Residue`` instances."""
def __init__(self, id, *args, **kwargs):
self.level = 'C'
MultiEntity.__init__(self, id, *args, **kwargs)
def __repr__(self):
return "<Chain id=%s>" % self.getId()
def removeHetero(self):
"""Remove residues with the hetero flag."""
self.stripChildren('H', 'eq', 'h_flag', forgiving=False)
def removeWater(self):
"""Remove water residues."""
self.stripChildren('H_HOH', 'eq', 'name', forgiving=False)
def residueCount(self):
"""Count residues based on ``name``."""
return self.countChildren('name')
def residueFreq(self):
"""Calculate residue frequency (based on ``name``)."""
return self.freqChildren('name')
def getSeq(self, moltype ='PROTEIN'):
"""Returns a Sequence object from the ordered residues.
The "seq_type" determines allowed residue names."""
if moltype == 'PROTEIN':
valid_names = AA_NAMES
moltype = cogent.PROTEIN
elif moltype == 'DNA':
raise NotImplementedError('The sequence type: %s is not implemented' % moltype)
elif moltype == 'RNA':
raise NotImplementedError('The sequence type: %s is not implemented' % moltype)
else:
raise ValueError('The \'seq_type\' is not supported.')
aa = ResidueHolder('aa', self.selectChildren(valid_names, contains, 'name'))
aa_noic = ResidueHolder('noic', aa.selectChildren(' ', eq, 'res_ic'))
raw_seq = []
full_ids = []
for res in aa_noic.sortedvalues():
raw_seq.append(AA_NAMES_3to1[res.name])
full_ids.append(res.getFull_id()[1:])
raw_seq = "".join(raw_seq)
seq = cogent.Sequence(moltype, raw_seq, self.getName())
seq.addAnnotation(SimpleVariable, 'entity_id', 'S_id', full_ids)
return seq
def getDict(self):
"""See: ``Entity.getDict``."""
from_parent = self.parent.getDict()
from_parent.update({'chain_id':self.getId()[0]})
return from_parent
class Residue(MultiEntity):
"""The ``Residue`` instance contains ``Atom`` instances."""
def __init__(self, res_long_id, h_flag, seg_id, *args, **kwargs):
self.level = 'R'
self.seg_id = seg_id
self.h_flag = h_flag
self.res_id = res_long_id[1] #ID number
self.res_ic = res_long_id[2] #ID long NAME
MultiEntity.__init__(self, res_long_id, res_long_id[0], *args, **kwargs)
def __repr__(self):
res_name, res_id, res_ic = self.getId()[0]
full_name = (res_name, res_id, res_ic)
return "<Residue %s resseq=%s icode=%s>" % full_name
def _getId(self):
"""Return the residue full id. ``(name, res_id, res_ic)``."""
return ((self.name, self.res_id, self.res_ic),)
def _setId(self, id):
"""Set the residue id ``res_id``, name ``name`` and insertion code
``res_ic`` from a full id."""
(self.name, self.res_id, self.res_ic) = id[0]
def removeHydrogens(self):
"""Remove hydrogen atoms."""
self.stripChildren(' H', 'eq', 'element', forgiving=False)
def getSeg_id(self):
"""Return the segment id."""
return self.seg_id
def setSeg_id(self, seg_id):
"""Set the segment id. This does not change the id."""
self.seg_id = seg_id
def getIc(self):
"""Return the insertion code."""
return self.res_ic
def setIc(self, res_ic):
"""Set the insertion code."""
self.res_ic = res_ic
self.setId()
def getRes_id(self):
"""Get the id."""
return self.res_id
def setRes_id(self, res_id):
"""Set the id."""
self.res_id = res_id
self.setId()
def getH_flag(self):
"""Return the hetero flag."""
return self.h_flag
def setH_flag(self, h_flag):
"""Sets the hetero flag. A valid flag is ' ' or 'H'. If 'H' the flag
becomes part of the residue name i.e. H_XXX."""
if not h_flag in (' ', 'H'):
raise AttributeError, "Only ' ' and 'H' hetero flags allowed."
if len(self.name) == 3:
self.name = "%s_%s" % (h_flag, self.name)
elif len(self.name) == 5:
self.name = "%s_%s" % (h_flag, self.name[2:])
else:
raise ValueError, 'Non-standard residue name'
self.h_flag = h_flag
self.setId()
def getDict(self):
"""See: ``Entity.getDict``."""
from_parent = self.parent.getDict()
if self.h_flag != ' ':
at_type = 'HETATM'
else:
at_type = 'ATOM '
from_parent.update({'at_type': at_type,
'h_flag': self.h_flag,
'res_name': self.name,
'res_long_id': self.getId()[0],
'res_id': self.res_id,
'res_ic': self.res_ic,
'seg_id': self.seg_id, })
return from_parent
class Atom(Entity):
"""The ``Atom`` class contains no children."""
def __init__(self, at_long_id, at_name, ser_num, coords, occupancy, bfactor, element):
self.level = 'A'
self.index = HIERARCHY.index(self.level)
self.coords = coords
self.bfactor = bfactor
self.occupancy = occupancy
self.ser_num = ser_num
self.at_id = at_long_id[0]
self.alt_loc = at_long_id[1]
self.table = dict([(level, {}) for level in HIERARCHY[self.index + 1:]])
self.element = element
Entity.__init__(self, at_long_id, at_name)
def __nonzero__(self):
return bool(self.id)
def __repr__(self):
return "<Atom %s>" % self.getId()
def _getId(self):
"""Return the full id. The id of an atom is not its ' XX ' name
but this string after left/right spaces striping. The full id is
``(at_id, alt_loc)``."""
return ((self.at_id, self.alt_loc),)
def _setId(self, id):
"""Set the atom id ``at_id`` and alternate location ``alt_loc`` from a
full id. See: ``_getId``."""
(self.at_id, self.alt_loc) = id[0]
def setElement(self, element):
"""Set the atom element ``element``."""
self.element = element
def setName(self, name):
"""Set name and update the id."""
self.name = name
self.setAt_id(name.strip())
def setAt_id(self, at_id):
"""Set id. An atom id should be derived from the atom name. See:
``_getId``."""
self.at_id = at_id
self.setId()
def setAlt_loc(self, alt_loc):
"""Set alternate location identifier."""
self.alt_loc = alt_loc
self.setId()
def setSer_num(self, n):
"""Set serial number."""
self.ser_num = n
def setBfactor(self, bfactor):
"""Set B-factor."""
self.bfactor = bfactor
def setOccupancy(self, occupancy):
"""Set occupancy."""
self.occupancy = occupancy
def setRadius(self, radius=None, radius_type=AREAIMOL_VDW_RADII, \
default_radius=DEFAULT_AREAIMOL_VDW_RADIUS):
"""Set radius, defaults to the AreaIMol VdW radius."""
if radius:
self.radius = radius
else:
try:
self.radius = radius_type[(self.parent.name, self.name)]
except KeyError:
self.radius = default_radius
def getSer_num(self):
"""Return the serial number."""
return self.ser_num
def getBfactor(self):
"""Return the B-factor."""
return self.bfactor
def getOccupancy(self):
"""Return the occupancy."""
return self.occupancy
def getRadius(self):
"""Return the radius."""
return self.radius
def getDict(self):
"""See: ``Entity.getDict``."""
from_parent = self.parent.getDict()
from_parent.update({'at_name': self.name,
'ser_num': self.ser_num,
'coords': self.coords,
'occupancy': self.occupancy,
'bfactor': self.bfactor,
'alt_loc': self.alt_loc,
'at_long_id': self.getId()[0],
'at_id': self.at_id,
'element': self.element})
return from_parent
class Holder(MultiEntity):
"""The ``Holder`` instance exists outside the SMCRA hierarchy. Elements in
a ``Holder`` instance are indexed by the full id."""
def __init__(self, name, *args):
if not hasattr(self, 'level'):
self.level = name
MultiEntity.__init__(self, name, name, *args)
def __repr__(self):
return '<Holder level=%s name=%s>' % (self.level, self.getName())
def addChild(self, child):
"""Add a child."""
child_id = child.getFull_id()
self[child_id] = child
def delChild(self, child_id):
"""Remove a child."""
self.pop(child_id)
def updateIds(self):
"""Update self with children long ids."""
ids = []
for (id_, child) in self.iteritems():
new_id = child.getFull_id()
if id_ != new_id:
ids.append((id_, new_id))
for (old_id, new_id) in ids:
child = self.pop(old_id)
self.update(((new_id, child),))
class StructureHolder(Holder):
"""The ``StructureHolder`` contains ``Structure`` instances. See:
``Holder``."""
def __init__(self, *args):
self.level = 'H'
Holder.__init__(self, *args)
def __repr__(self):
return "<StructureHolder name=%s>" % self.getName()
class ModelHolder(Holder):
"""The ``ModelHolder`` contains ``Model`` instances. See: ``Holder``."""
def __init__(self, *args):
self.level = 'S'
Holder.__init__(self, *args)
def __repr__(self):
return "<ModelHolder name=%s>" % self.getName()
class ChainHolder(Holder):
"""The ``ChainHolder`` contains ``Chain`` instances. See: ``Holder``."""
def __init__(self, *args):
self.level = 'M'
Holder.__init__(self, *args)
def __repr__(self):
return "<ChainHolder name=%s>" % self.getName()
class ResidueHolder(Holder):
"""The ``ResidueHolder`` contains ``Residue`` instances. See: ``Holder``."""
def __init__(self, *args):
self.level = 'C'
Holder.__init__(self, *args)
def __repr__(self):
return "<ResidueHolder name=%s>" % self.getName()
class AtomHolder(Holder):
"""The ``AtomHolder`` contains ``Atom`` instances. See: ``Holder``."""
def __init__(self, *args):
self.level = 'R'
Holder.__init__(self, *args)
def __repr__(self):
return "<AtomHolder name=%s>" % self.getName()
class StructureBuilder(object):
"""Constructs a ``Structure`` object. The ``StructureBuilder`` class is used
by a parser class to parse a file into a ``Structure`` object. An instance
of a ``StructureBuilder`` has methods to create ``Entity`` instances and add
them into the SMCRA hierarchy``."""
def __init__(self):
self.structure = None
def initStructure(self, structure_id):
"""Initialize a ``Structure`` instance."""
self.structure = Structure(structure_id)
def initModel(self, model_id):
"""Initialize a ``Model`` instance and add it as a child to the
``Structure`` instance. If a model is defined twice a
``ConstructionError`` is raised."""
if not (model_id,) in self.structure:
self.model = Model(model_id)
self.model.junk = AtomHolder('junk')
self.structure._initChild(self.model)
else:
raise ConstructionError
def initChain(self, chain_id):
"""Initialize a ``Chain`` instance and add it as a child to the
``Model`` instance. If a chain is defined twice a
``ConstructionWarning`` is raised. This means that the model is not
continuous."""
if not (chain_id,) in self.model:
self.chain = Chain(chain_id)
self.model._initChild(self.chain)
else:
self.chain = self.model[(chain_id,)]
raise ConstructionWarning, "Chain %s is not continous" % chain_id
def initSeg(self, seg_id):
"""Does not create an ``Entity`` instance, but updates the segment id,
``seg_id`` which is used to initialize ``Residue`` instances."""
self.seg_id = seg_id
def initResidue(self, res_long_id, res_name):
"""Initialize a ``Residue`` instance and add it as a child to the
``Chain`` instance. If a residue is defined twice a
``ConstructionWarning`` is raised. This means that the chain is not
continuous."""
if not (res_long_id,) in self.chain:
self.residue = Residue(res_long_id, res_name, self.seg_id)
self.chain._initChild(self.residue)
else:
self.residue = self.chain[(res_long_id,)]
raise ConstructionWarning, "Residue %s%s%s is not continuous" % \
res_long_id
def initAtom(self, at_long_id, at_name, ser_num, coord, occupancy, \
bfactor, element):
"""Initialize an ``Atom`` instance and add is as child to the
``Residue`` instance. If an atom is defined twice a
``ConstructionError`` is raised and the ``Atom`` instance is added to
the ``structure.model.junk`` ``Holder`` instance."""
if not (at_long_id,) in self.residue:
self.atom = Atom(at_long_id, at_name, ser_num, coord, occupancy, \
bfactor, element)
self.residue._initChild(self.atom)
else:
full_id = (tuple(self.residue[(at_long_id,)].getFull_id()), \
ser_num)
self.model.junk._initChild(Atom(full_id, at_name, ser_num, coord, \
occupancy, bfactor, element))
raise ConstructionError, 'Atom %s%s is defined twice.' % at_long_id
def getStructure(self):
"""Update coordinates (``coords``), set the children-table (``table``)
and return the ``Structure`` instance."""
self.structure.setTable()
self.structure.setCoordsRecursively()
return self.structure
| 37.483709 | 99 | 0.584693 | 5,414 | 44,868 | 4.694496 | 0.125046 | 0.018492 | 0.009364 | 0.006059 | 0.315313 | 0.262827 | 0.218091 | 0.197671 | 0.17068 | 0.146679 | 0 | 0.005332 | 0.297762 | 44,868 | 1,196 | 100 | 37.51505 | 0.80132 | 0.047629 | 0 | 0.336801 | 0 | 0 | 0.033222 | 0.000854 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.007802 | 0.011704 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4ddb38d835903f3211b8436bd705a411ed81f133 | 3,381 | py | Python | venv/lib/python3.9/site-packages/ajsonrpc/tests/test_dispatcher.py | janten/ESP32-Paxcounter | 212317f3800ec87aef4847e7d60971d4bb9e7d70 | [
"Apache-2.0"
] | 12 | 2019-03-06T03:44:42.000Z | 2021-07-22T03:47:24.000Z | venv/lib/python3.9/site-packages/ajsonrpc/tests/test_dispatcher.py | janten/ESP32-Paxcounter | 212317f3800ec87aef4847e7d60971d4bb9e7d70 | [
"Apache-2.0"
] | 10 | 2020-10-28T10:04:58.000Z | 2021-07-21T20:47:27.000Z | venv/lib/python3.9/site-packages/ajsonrpc/tests/test_dispatcher.py | janten/ESP32-Paxcounter | 212317f3800ec87aef4847e7d60971d4bb9e7d70 | [
"Apache-2.0"
] | 4 | 2021-07-21T20:00:14.000Z | 2021-10-12T19:43:30.000Z | import unittest
from ..dispatcher import Dispatcher
class Math:
@staticmethod
def sum(a, b):
return a + b
@classmethod
def diff(cls, a, b):
return a - b
def mul(self, a, b):
return a * b
class TestDispatcher(unittest.TestCase):
def test_empty(self):
self.assertEqual(len(Dispatcher()), 0)
def test_add_function(self):
d = Dispatcher()
@d.add_function
def one():
return 1
def two():
return 2
d.add_function(two)
d.add_function(two, name="two_alias")
self.assertIn("one", d)
self.assertEqual(d["one"](), 1)
self.assertIsNotNone(one) # do not remove function from the scope
self.assertIn("two", d)
self.assertIn("two_alias", d)
def test_class(self):
d1 = Dispatcher()
d1.add_class(Math)
self.assertIn("math.sum", d1)
self.assertIn("math.diff", d1)
self.assertIn("math.mul", d1)
self.assertEqual(d1["math.sum"](3, 8), 11)
self.assertEqual(d1["math.diff"](6, 9), -3)
self.assertEqual(d1["math.mul"](2, 3), 6)
d2 = Dispatcher(Math)
self.assertNotIn("__class__", d2)
self.assertEqual(d1.keys(), d2.keys())
for method in ["math.sum", "math.diff"]:
self.assertEqual(d1[method], d2[method])
def test_class_prefix(self):
d = Dispatcher(Math, prefix="")
self.assertIn("sum", d)
self.assertNotIn("math.sum", d)
def test_object(self):
math = Math()
d1 = Dispatcher()
d1.add_object(math)
self.assertIn("math.sum", d1)
self.assertIn("math.diff", d1)
self.assertEqual(d1["math.sum"](3, 8), 11)
self.assertEqual(d1["math.diff"](6, 9), -3)
d2 = Dispatcher(math)
self.assertNotIn("__class__", d2)
self.assertEqual(d1, d2)
def test_object_prefix(self):
d = Dispatcher(Math(), prefix="")
self.assertIn("sum", d)
self.assertNotIn("math.sum", d)
def test_add_dict(self):
d = Dispatcher()
d.add_prototype({"sum": lambda *args: sum(args)}, "util.")
self.assertIn("util.sum", d)
self.assertEqual(d["util.sum"](13, -2), 11)
def test_init_from_dict(self):
d = Dispatcher({
"one": lambda: 1,
"two": lambda: 2,
})
self.assertIn("one", d)
self.assertIn("two", d)
def test_del_method(self):
d = Dispatcher()
d["method"] = lambda: ""
self.assertIn("method", d)
del d["method"]
self.assertNotIn("method", d)
def test_to_dict(self):
d = Dispatcher()
def func():
return ""
d["method"] = func
self.assertEqual(dict(d), {"method": func})
def test__getattr_function(self):
# class
self.assertEqual(Dispatcher._getattr_function(Math, "sum")(3, 2), 5)
self.assertEqual(Dispatcher._getattr_function(Math, "diff")(3, 2), 1)
self.assertEqual(Dispatcher._getattr_function(Math, "mul")(3, 2), 6)
# object
self.assertEqual(Dispatcher._getattr_function(Math(), "sum")(3, 2), 5)
self.assertEqual(Dispatcher._getattr_function(Math(), "diff")(3, 2), 1)
self.assertEqual(Dispatcher._getattr_function(Math(), "mul")(3, 2), 6)
| 27.487805 | 79 | 0.561964 | 421 | 3,381 | 4.396675 | 0.15677 | 0.145867 | 0.073474 | 0.103728 | 0.47866 | 0.420313 | 0.420313 | 0.420313 | 0.420313 | 0.420313 | 0 | 0.028842 | 0.282165 | 3,381 | 122 | 80 | 27.713115 | 0.733828 | 0.014789 | 0 | 0.26087 | 0 | 0 | 0.078148 | 0 | 0 | 0 | 0 | 0 | 0.413043 | 1 | 0.184783 | false | 0 | 0.021739 | 0.065217 | 0.293478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4de9705438995df854b9ebaf6e2d9530e21d53a7 | 3,155 | py | Python | tapioca_trello/resource_mapping/checklist.py | humrochagf/tapioca-trello | a7067a4c43b22e64cef67b68068580448a4cb420 | [
"MIT"
] | null | null | null | tapioca_trello/resource_mapping/checklist.py | humrochagf/tapioca-trello | a7067a4c43b22e64cef67b68068580448a4cb420 | [
"MIT"
] | null | null | null | tapioca_trello/resource_mapping/checklist.py | humrochagf/tapioca-trello | a7067a4c43b22e64cef67b68068580448a4cb420 | [
"MIT"
] | 1 | 2018-07-31T23:04:34.000Z | 2018-07-31T23:04:34.000Z | # -*- coding: utf-8 -*-
CHECKLIST_MAPPING = {
'checklist_retrieve': {
'resource': '/checklists/{id}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsid'
),
'methods': ['GET'],
},
'checklist_field_retrieve': {
'resource': '/checklists/{id}/{field}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidfield'
),
'methods': ['GET'],
},
'checklist_board_retrieve': {
'resource': '/checklists/{id}/board',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidboard'
),
'methods': ['GET'],
},
'checklist_card_retrieve': {
'resource': '/checklists/{id}/cards',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcards'
),
'methods': ['GET'],
},
'checklist_item_list': {
'resource': '/checklists/{id}/checkItems',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcardscheckitems'
),
'methods': ['GET'],
},
'checklist_item_retrieve': {
'resource': '/checklists/{id}/checkItems/{idCheckItem}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcardscheckitemscheckitemid'
),
'methods': ['GET'],
},
'checklist_update': {
'resource': '/checklists/{id}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsid-1'
),
'methods': ['PUT'],
},
'checklist_item_update': {
'resource': '/checklists/{id}/checkItems/{idCheckItem}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcheckitemsidcheckitem'
),
'methods': ['PUT'],
},
'checklist_name_update': {
'resource': '/checklists/{id}/name',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidname'
),
'methods': ['PUT'],
},
'checklist_create': {
'resource': '/checklists',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklists'
),
'methods': ['POST'],
},
'checklist_item_create': {
'resource': '/checklists/{id}/checkItems',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcheckitems'
),
'methods': ['POST'],
},
'checklist_delete': {
'resource': '/checklists/{id}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsid-2'
),
'methods': ['DELETE'],
},
'checklist_item_delete': {
'resource': '/checklists/{id}/checkItems/{idCheckItem}',
'docs': (
'https://developers.trello.com/v1.0/reference'
'#checklistsidcheckitemsid'
),
'methods': ['DELETE'],
},
}
| 28.944954 | 64 | 0.496672 | 235 | 3,155 | 6.570213 | 0.191489 | 0.151554 | 0.159974 | 0.210492 | 0.517487 | 0.517487 | 0.517487 | 0.517487 | 0.387953 | 0.387953 | 0 | 0.013445 | 0.316323 | 3,155 | 108 | 65 | 29.212963 | 0.702364 | 0.006656 | 0 | 0.566038 | 0 | 0 | 0.551724 | 0.189336 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1506feffa85f0e03250b9a11fac052405432fbe0 | 628 | py | Python | test.py | blodzbyte/isEven | 18e42cfdad052d34318900fdd91167a533b52210 | [
"MIT"
] | 44 | 2020-03-11T16:44:41.000Z | 2022-03-16T07:55:24.000Z | test.py | blodzbyte/isEven | 18e42cfdad052d34318900fdd91167a533b52210 | [
"MIT"
] | 9 | 2020-03-11T21:07:01.000Z | 2021-07-08T18:49:23.000Z | test.py | blodzbyte/isEven | 18e42cfdad052d34318900fdd91167a533b52210 | [
"MIT"
] | 18 | 2020-03-11T20:03:50.000Z | 2021-07-22T21:40:00.000Z | #!/usr/bin/env python3
from isEven import isEven
def testRange(min, max, evens):
print('Testing [{},{}] {}...'.format(min, max, 'evens' if evens else 'odds'))
for i in range(min, max, 2):
i = i if evens else i - 1
result = isEven(i)
if(not result and evens):
raise Exception('Test failed. Got: isEven({}) = {}. Expected: '\
'isEven({}) = {}.'.format(i, str(result), i,\
str(evens)))
print('Test passed.')
def main():
testRange(0, 10000, True)
testRange(0, 10000, False)
if __name__ == '__main__':
main()
| 29.904762 | 80 | 0.517516 | 77 | 628 | 4.116883 | 0.532468 | 0.056782 | 0.069401 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034642 | 0.31051 | 628 | 20 | 81 | 31.4 | 0.69746 | 0.033439 | 0 | 0 | 0 | 0 | 0.183168 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.0625 | 0.0625 | 0 | 0.1875 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
150bff7433b6fabe00d05feee353f17bc33f7d36 | 757 | py | Python | minoan_project/minoan_project/urls.py | mtzirkel/minoan | 3eadeb1f73acf261e2f550642432ea5c25557ecb | [
"MIT"
] | null | null | null | minoan_project/minoan_project/urls.py | mtzirkel/minoan | 3eadeb1f73acf261e2f550642432ea5c25557ecb | [
"MIT"
] | null | null | null | minoan_project/minoan_project/urls.py | mtzirkel/minoan | 3eadeb1f73acf261e2f550642432ea5c25557ecb | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.views.generic import TemplateView
from . import views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html')),
url(r'^admin/', include(admin.site.urls)),
#login
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
#home
url(r'^home/$', views.home),
)
# Uncomment the next line to serve media files in dev.
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 27.035714 | 89 | 0.698811 | 102 | 757 | 5.127451 | 0.421569 | 0.095602 | 0.080306 | 0.068834 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173052 | 757 | 27 | 90 | 28.037037 | 0.835463 | 0.24967 | 0 | 0 | 0 | 0 | 0.154804 | 0.05516 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.461538 | 0 | 0.461538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
12781452042b292ed356843d47c2a5e60478909f | 7,998 | py | Python | parsers/sales_order.py | njncalub/logistiko | 74b1d17bc76538de6f5f70c7eca927780d6b4113 | [
"MIT"
] | null | null | null | parsers/sales_order.py | njncalub/logistiko | 74b1d17bc76538de6f5f70c7eca927780d6b4113 | [
"MIT"
] | null | null | null | parsers/sales_order.py | njncalub/logistiko | 74b1d17bc76538de6f5f70c7eca927780d6b4113 | [
"MIT"
] | null | null | null | import csv
from core.exceptions import InvalidFileException
def load_so_item_from_file(path, db_service):
    """Load sales order items from a CSV export.

    The first row must be a header whose columns match ``expected_headers``
    exactly and in order; every following row is turned into a dict keyed by
    those column names and handed to ``process_so_item_data``.

    Args:
        path: Filesystem path to the CSV file to read.
        db_service: Database service passed through to
            ``process_so_item_data``.

    Raises:
        InvalidFileException: If a required header column is missing or
            misnamed.
        IndexError: If a data row has fewer columns than expected.
    """
    # Column names, in the exact order they must appear in the file.
    expected_headers = (
        'id_sales_order_item',
        'bob_id_sales_order_item',
        'fk_sales_order',
        'fk_sales_order_item_status',
        'fk_delivery_type',
        'unit_price',
        'tax_amount',
        'paid_price',
        'name',
        'sku',
        'created_at',
        'updated_at',
        'last_status_change',
        'original_unit_price',
        'shipping_type',
        'real_delivery_date',
        'bob_id_supplier',
        'is_marketplace',
    )
    error_msg = 'Missing required header: {}'
    processed = 0  # data rows only; the header row is not counted
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file)
        for i, row in enumerate(csv_reader, 1):
            # Raises IndexError for short rows, matching the strictness of
            # reading each column by fixed position.
            data = {header: row[idx]
                    for idx, header in enumerate(expected_headers)}
            if i == 1:  # check that the header values line up
                for header in expected_headers:
                    if data[header] != header:
                        raise InvalidFileException(error_msg.format(header))
            else:
                process_so_item_data(data=data, db_service=db_service)
                processed += 1
    # Report the number of data rows; previously the header row was
    # included in the count, and an empty file raised NameError here.
    print(f'Processed {processed} sales order item(s).')
def load_so_item_status_from_file(path, db_service):
    """Load sales order item statuses from a CSV file into the database.

    The first row must be a header whose cells exactly match the expected
    column names (in order); every following row is persisted through
    ``process_so_item_status_data``.

    Args:
        path: path of the CSV file to read.
        db_service: service object used to persist each parsed row.

    Raises:
        InvalidFileException: if any header cell does not match.
    """
    columns = (
        'id_sales_order_item_status',
        'fk_oms_function',
        'status',
        'desc',
        'deprecated',
        'updated_at',
    )
    error_msg = 'Missing required header: {}'
    with open(path) as csv_file:
        for i, row in enumerate(csv.reader(csv_file), 1):
            data = {name: row[idx] for idx, name in enumerate(columns)}
            if i == 1:
                # Header row: each cell must equal its column name; checked
                # in column order so the first mismatch raises.
                for name in columns:
                    if data[name] != name:
                        raise InvalidFileException(error_msg.format(name))
            else:
                process_so_item_status_data(data=data, db_service=db_service)
    # NOTE(review): `i` also counts the header row, so the printed total is
    # one more than the number of processed records — confirm intent.
    print(f'Processed {i} sales order item status.')
def load_so_item_status_history_from_file(path, db_service):
    """Load sales order item status history rows from a CSV file.

    The first row must be a header whose cells exactly match the expected
    column names (in order); every following row is persisted through
    ``process_so_item_status_history_data``.

    Args:
        path: path of the CSV file to read.
        db_service: service object used to persist each parsed row.

    Raises:
        InvalidFileException: if any header cell does not match.
    """
    columns = (
        'id_sales_order_item_status_history',
        'fk_sales_order_item',
        'fk_sales_order_item_status',
        'created_at',
    )
    error_msg = 'Missing required header: {}'
    with open(path) as csv_file:
        for i, row in enumerate(csv.reader(csv_file), 1):
            data = {name: row[idx] for idx, name in enumerate(columns)}
            if i == 1:
                # Header row: each cell must equal its column name; checked
                # in column order so the first mismatch raises.
                for name in columns:
                    if data[name] != name:
                        raise InvalidFileException(error_msg.format(name))
            else:
                process_so_item_status_history_data(data=data,
                                                    db_service=db_service)
    # NOTE(review): `i` also counts the header row, so the printed total is
    # one more than the number of processed records — confirm intent.
    print(f'Processed {i} sales order item status history.')
def process_so_item_data(data, db_service):
    """Persist one parsed sales order item row via the database service.

    The CSV export encodes a missing delivery date as the literal string
    'NULL'; that sentinel is translated to a real ``None`` before insert.
    """
    delivery_date = data['real_delivery_date']
    data['real_delivery_date'] = None if delivery_date == 'NULL' else delivery_date
    db_service.add_so_item(**data)
def process_so_item_status_data(data, db_service):
    """Persist one parsed sales order item status row via the db service."""
    db_service.add_so_item_status(**data)
def process_so_item_status_history_data(data, db_service):
    """Persist one parsed sales order item status history row."""
    db_service.add_so_item_status_history(**data)
| 46.77193 | 79 | 0.542136 | 857 | 7,998 | 4.697783 | 0.113186 | 0.086935 | 0.107799 | 0.229508 | 0.754098 | 0.628664 | 0.541232 | 0.480378 | 0.4461 | 0.387481 | 0 | 0.008192 | 0.358965 | 7,998 | 170 | 80 | 47.047059 | 0.777063 | 0.013003 | 0 | 0.342282 | 0 | 0 | 0.239164 | 0.068441 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040268 | false | 0 | 0.013423 | 0 | 0.053691 | 0.020134 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1279a170c86c50a1d9aa504d29a7b4fbc15ef3a6 | 2,350 | py | Python | tools/pca_outcore.py | escorciav/deep-action-proposals | c14f512febc1abd0ec40bd3188a83e4ee3913535 | [
"MIT"
] | 28 | 2017-03-19T12:02:22.000Z | 2021-07-08T13:49:41.000Z | tools/pca_outcore.py | escorciav/deep-action-proposals | c14f512febc1abd0ec40bd3188a83e4ee3913535 | [
"MIT"
] | 2 | 2018-05-07T07:43:15.000Z | 2018-12-14T16:06:48.000Z | tools/pca_outcore.py | escorciav/deep-action-proposals | c14f512febc1abd0ec40bd3188a83e4ee3913535 | [
"MIT"
] | 7 | 2017-03-19T11:51:21.000Z | 2020-01-07T11:17:48.000Z | #!/usr/bin/env python
"""
PCA done via matrix multiplication out-of-core.
"""
import argparse
import time
import h5py
import hickle as hkl
import numpy as np
def input_parse():
    """Build the command-line parser for the out-of-core PCA tool.

    Returns:
        argparse.ArgumentParser: parser with two positional arguments
        (`dsfile`, `pcafile`) and an optional `--log_loop` verbosity knob.
    """
    arg_parser = argparse.ArgumentParser(
        description='Compute PCA with A.T * A computation out of core')
    arg_parser.add_argument('dsfile', help='HDF5-file with features')
    arg_parser.add_argument('pcafile', help='HDF5-file with PCA results')
    arg_parser.add_argument('-ll', '--log_loop', type=int, default=500,
                            help='Verbose in terms of number of videos')
    return arg_parser
def main(dsfile, pcafile, t_size=16, t_stride=8, source='c3d_features',
         log_loop=100):
    """Compute a PCA basis out-of-core from features stored in an HDF5 file.

    Two passes are made over the dataset: one to accumulate the feature
    mean, one to accumulate the Gram matrix A.T @ A of the mean-centered
    features.  The eigen-decomposition is then obtained with an in-memory
    SVD and serialized with hickle.

    Args:
        dsfile: HDF5 file with one group per video containing `source`.
        pcafile: output file for the PCA results (hickle format).
        t_size: kept for interface compatibility; unused here.
        t_stride: kept for interface compatibility; unused here.
        source: dataset name holding the (n_frames, feat_dim) features.
        log_loop: print progress every `log_loop` videos.
    """
    # Ported to Python 3: print statements -> print(), iteritems -> values,
    # and keys() is materialized because it is a non-indexable view.
    print(time.ctime(), 'start: loading hdf5')
    fid = h5py.File(dsfile, 'r')
    video_names = list(fid.keys())
    feat_dim = fid[video_names[0]][source].shape[1]
    print(time.ctime(), 'finish: loading hdf5')

    # Pass 1: global feature mean.
    print(time.ctime(), 'start: compute mean')
    x_mean, n = np.zeros((1, feat_dim), dtype=np.float32), 0
    for v in fid.values():
        feat = v[source][:]
        n += feat.shape[0]
        x_mean += feat.sum(axis=0)
    x_mean /= n
    print(time.ctime(), 'finish: compute mean')

    # Pass 2: accumulate A.T @ A of the centered features one video at a
    # time so the full matrix A never has to fit in memory.  (A dead inner
    # helper `compute_ATA` duplicating this loop was removed.)
    print(time.ctime(), 'start: out-of-core matrix multiplication')
    j, n_videos = 0, len(video_names)
    ATA = np.zeros((feat_dim, feat_dim), dtype=np.float32)
    for v in fid.values():
        feat_ = v[source][:] - x_mean
        ATA += np.dot(feat_.T, feat_)
        j += 1
        if j % log_loop == 0:
            print(time.ctime(), 'Iteration {}/{}'.format(j, n_videos))
    print(time.ctime(), 'finish: out-of-core matrix multiplication')

    # The (feat_dim x feat_dim) Gram matrix is small enough for a direct SVD.
    print(time.ctime(), 'start: SVD in memory')
    U, S, _ = np.linalg.svd(ATA)
    print(time.ctime(), 'finish: SVD in memory')

    print(time.ctime(), 'serializing ...')
    hkl.dump({'x_mean': x_mean, 'U': U, 'S': S, 'n_samples': n}, pcafile)
if __name__ == '__main__':
    # Parse CLI arguments and forward them to main() as keyword arguments.
    p = input_parse()
    args = p.parse_args()
    main(**vars(args))
| 30.519481 | 73 | 0.609362 | 353 | 2,350 | 3.898017 | 0.311615 | 0.065407 | 0.101744 | 0.055233 | 0.154797 | 0.097384 | 0.097384 | 0.097384 | 0.097384 | 0.056686 | 0 | 0.018456 | 0.239149 | 2,350 | 76 | 74 | 30.921053 | 0.751119 | 0.010213 | 0 | 0.071429 | 0 | 0 | 0.188355 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.089286 | null | null | 0.178571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
127b40e7a10ad49a4f232756467391a18976528f | 1,968 | py | Python | gamry_parser/cv.py | bcliang/gamry-parser | c1dfcf73d973c88ee496f0aa256d99f642ab6013 | [
"MIT"
] | 6 | 2019-03-14T21:21:13.000Z | 2022-03-04T19:21:32.000Z | gamry_parser/cv.py | bcliang/gamry-parser | c1dfcf73d973c88ee496f0aa256d99f642ab6013 | [
"MIT"
] | 34 | 2019-03-11T04:21:51.000Z | 2022-01-10T21:45:38.000Z | gamry_parser/cv.py | bcliang/gamry-parser | c1dfcf73d973c88ee496f0aa256d99f642ab6013 | [
"MIT"
] | 5 | 2019-08-11T15:38:30.000Z | 2021-04-24T20:06:09.000Z | import gamry_parser as parser
class CyclicVoltammetry(parser.GamryParser):
    """Load a Cyclic Voltammetry experiment generated in Gamry EXPLAIN format."""

    def get_v_range(self):
        """Retrieve the programmed voltage scan ranges.

        Returns:
            tuple: (voltage limit 1, voltage limit 2), both in V.
        """
        assert self.loaded, "DTA file not loaded. Run CyclicVoltammetry.load()"
        # Both limits must be present; check in order so the first missing
        # one raises with its own message.
        for limit in ("VLIMIT1", "VLIMIT2"):
            assert limit in self.header.keys(), \
                "DTA header file missing {} specification".format(limit)
        return self.header["VLIMIT1"], self.header["VLIMIT2"]

    def get_scan_rate(self):
        """Retrieve the programmed scan rate.

        Returns:
            float: the scan rate, in mV/s.
        """
        assert self.loaded, "DTA file not loaded. Run CyclicVoltammetry.load()"
        key = "SCANRATE"
        assert key in self.header.keys(), \
            "DTA header file missing SCANRATE specification"
        return self.header[key]

    def get_curve_data(self, curve: int = 0):
        """Retrieve relevant cyclic voltammetry experimental data.

        Args:
            curve (int, optional): curve number to return. Defaults to 0.

        Returns:
            pandas.DataFrame with columns:
                - Vf: potential, in V
                - Im: current, in A
        """
        assert self.loaded, "DTA file not loaded. Run CyclicVoltammetry.load()"
        assert curve >= 0, "Invalid curve ({}). Indexing starts at 0".format(curve)
        assert curve < self.curve_count, \
            "Invalid curve ({}). File contains {} total curves.".format(
                curve, self.curve_count)
        selected = self.curves[curve]
        return selected[["Vf", "Im"]]
| 29.373134 | 83 | 0.571646 | 215 | 1,968 | 5.190698 | 0.362791 | 0.053763 | 0.043011 | 0.051075 | 0.263441 | 0.263441 | 0.263441 | 0.263441 | 0.166667 | 0.166667 | 0 | 0.009132 | 0.332317 | 1,968 | 66 | 84 | 29.818182 | 0.840183 | 0.279472 | 0 | 0.259259 | 1 | 0 | 0.33979 | 0.058111 | 0 | 0 | 0 | 0 | 0.296296 | 1 | 0.111111 | false | 0 | 0.037037 | 0 | 0.296296 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
127d60f439a2eeaeea97213b05b97e925b002613 | 15,790 | py | Python | osprofiler/tests/unit/drivers/test_ceilometer.py | charliebr30/osprofiler | cffca4e29e373e3f09f2ffdd458761183a851569 | [
"Apache-2.0"
] | null | null | null | osprofiler/tests/unit/drivers/test_ceilometer.py | charliebr30/osprofiler | cffca4e29e373e3f09f2ffdd458761183a851569 | [
"Apache-2.0"
] | 1 | 2017-04-15T22:16:06.000Z | 2017-04-15T22:16:06.000Z | osprofiler/tests/unit/drivers/test_ceilometer.py | shwsun/osprofiler | 46d29fc5ab8a4068217e399883f39cdd443a7500 | [
"Apache-2.0"
] | 1 | 2020-02-17T09:48:43.000Z | 2020-02-17T09:48:43.000Z | # Copyright 2016 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from osprofiler.drivers.ceilometer import Ceilometer
from osprofiler.tests import test
class CeilometerParserTestCase(test.TestCase):
    """Tests for the Ceilometer driver: trace-tree assembly and reports."""

    def setUp(self):
        super(CeilometerParserTestCase, self).setUp()
        # The connection string is parsed but never dialed: each test
        # replaces self.ceilometer.client with a mock before use.
        self.ceilometer = Ceilometer("ceilometer://",
                                     ceilometer_api_version="2")

    def test_build_empty_tree(self):
        """An empty notification map produces an empty list of roots."""
        self.assertEqual([], self.ceilometer._build_tree({}))

    def test_build_complex_tree(self):
        """Children are nested under their parents and ordered by start time."""
        test_input = {
            "2": {"parent_id": "0", "trace_id": "2", "info": {"started": 1}},
            "1": {"parent_id": "0", "trace_id": "1", "info": {"started": 0}},
            "21": {"parent_id": "2", "trace_id": "21", "info": {"started": 6}},
            "22": {"parent_id": "2", "trace_id": "22", "info": {"started": 7}},
            "11": {"parent_id": "1", "trace_id": "11", "info": {"started": 1}},
            "113": {"parent_id": "11", "trace_id": "113",
                    "info": {"started": 3}},
            "112": {"parent_id": "11", "trace_id": "112",
                    "info": {"started": 2}},
            "114": {"parent_id": "11", "trace_id": "114",
                    "info": {"started": 5}}
        }

        # Roots "1" and "2" (parent "0") come first, each with its subtree
        # sorted by the children's "started" timestamps.
        expected_output = [
            {
                "parent_id": "0",
                "trace_id": "1",
                "info": {"started": 0},
                "children": [
                    {
                        "parent_id": "1",
                        "trace_id": "11",
                        "info": {"started": 1},
                        "children": [
                            {"parent_id": "11", "trace_id": "112",
                             "info": {"started": 2}, "children": []},
                            {"parent_id": "11", "trace_id": "113",
                             "info": {"started": 3}, "children": []},
                            {"parent_id": "11", "trace_id": "114",
                             "info": {"started": 5}, "children": []}
                        ]
                    }
                ]
            },
            {
                "parent_id": "0",
                "trace_id": "2",
                "info": {"started": 1},
                "children": [
                    {"parent_id": "2", "trace_id": "21",
                     "info": {"started": 6}, "children": []},
                    {"parent_id": "2", "trace_id": "22",
                     "info": {"started": 7}, "children": []}
                ]
            }
        ]

        result = self.ceilometer._build_tree(test_input)
        self.assertEqual(expected_output, result)

    def test_get_report_empty(self):
        """With no events returned, get_report yields an empty skeleton."""
        self.ceilometer.client = mock.MagicMock()
        self.ceilometer.client.events.list.return_value = []

        expected = {
            "info": {
                "name": "total",
                "started": 0,
                "finished": None,
                "last_trace_started": None
            },
            "children": [],
            "stats": {},
        }

        base_id = "10"
        self.assertEqual(expected, self.ceilometer.get_report(base_id))

    def test_get_report(self):
        """Five raw Ceilometer events fold into a two-branch trace report."""
        self.ceilometer.client = mock.MagicMock()
        results = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock(),
                   mock.MagicMock(), mock.MagicMock()]
        self.ceilometer.client.events.list.return_value = results

        # Event 0: wsgi-start (POST) opening trace 06320327...
        results[0].to_dict.return_value = {
            "traits": [
                {
                    "type": "string",
                    "name": "base_id",
                    "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                },
                {
                    "type": "string",
                    "name": "host",
                    "value": "ubuntu"
                },
                {
                    "type": "string",
                    "name": "method",
                    "value": "POST"
                },
                {
                    "type": "string",
                    "name": "name",
                    "value": "wsgi-start"
                },
                {
                    "type": "string",
                    "name": "parent_id",
                    "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                },
                {
                    "type": "string",
                    "name": "project",
                    "value": "keystone"
                },
                {
                    "type": "string",
                    "name": "service",
                    "value": "main"
                },
                {
                    "type": "string",
                    "name": "timestamp",
                    "value": "2015-12-23T14:02:22.338776"
                },
                {
                    "type": "string",
                    "name": "trace_id",
                    "value": "06320327-2c2c-45ae-923a-515de890276a"
                }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.415793",
            "event_type": "profiler.main",
            "message_id": "65fc1553-3082-4a6f-9d1e-0e3183f57a47"}

        # Event 1: wsgi-stop for trace 016c97fd... (its start is event 4).
        results[1].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "wsgi-stop"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.380405"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "016c97fd-87f3-40b2-9b55-e431156b694b"
                    }
                ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.406052",
            "event_type": "profiler.main",
            "message_id": "3256d9f1-48ba-4ac5-a50b-64fa42c6e264"}

        # Event 2: db-start nested under the wsgi trace 06320327...
        results[2].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "db.params",
                        "value": "[]"
                    },
                    {
                        "type": "string",
                        "name": "db.statement",
                        "value": "SELECT 1"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "db-start"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "06320327-2c2c-45ae-923a-515de890276a"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.395365"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"
                    }
                ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.984161",
            "event_type": "profiler.main",
            "message_id": "60368aa4-16f0-4f37-a8fb-89e92fdf36ff"}

        # Event 3: db-stop closing the db span 1baf1d24...
        results[3].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "db-stop"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "06320327-2c2c-45ae-923a-515de890276a"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.415486"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"
                    }
                ],
            "raw": {},
            "generated": "2015-12-23T10:41:39.019378",
            "event_type": "profiler.main",
            "message_id": "3fbeb339-55c5-4f28-88e4-15bee251dd3d"}

        # Event 4: wsgi-start (GET) opening trace 016c97fd...
        results[4].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "method",
                        "value": "GET"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "wsgi-start"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.427444"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "016c97fd-87f3-40b2-9b55-e431156b694b"
                    }
                ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.360409",
            "event_type": "profiler.main",
            "message_id": "57b971a9-572f-4f29-9838-3ed2564c6b5b"}

        # NOTE(review): the second wsgi span has a negative duration (-47)
        # because its "stop" timestamp precedes its "start" — presumably a
        # deliberate fixture quirk preserved by the parser; confirm.
        expected = {"children": [
            {"children": [{"children": [],
                           "info": {"finished": 76,
                                    "host": "ubuntu",
                                    "meta.raw_payload.db-start": {},
                                    "meta.raw_payload.db-stop": {},
                                    "name": "db",
                                    "project": "keystone",
                                    "service": "main",
                                    "started": 56,
                                    "exception": "None"},
                           "parent_id": "06320327-2c2c-45ae-923a-515de890276a",
                           "trace_id": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"}
                          ],
             "info": {"finished": 0,
                      "host": "ubuntu",
                      "meta.raw_payload.wsgi-start": {},
                      "name": "wsgi",
                      "project": "keystone",
                      "service": "main",
                      "started": 0},
             "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4",
             "trace_id": "06320327-2c2c-45ae-923a-515de890276a"},
            {"children": [],
             "info": {"finished": 41,
                      "host": "ubuntu",
                      "meta.raw_payload.wsgi-start": {},
                      "meta.raw_payload.wsgi-stop": {},
                      "name": "wsgi",
                      "project": "keystone",
                      "service": "main",
                      "started": 88,
                      "exception": "None"},
             "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4",
             "trace_id": "016c97fd-87f3-40b2-9b55-e431156b694b"}],
            "info": {
                "finished": 88,
                "name": "total",
                "started": 0,
                "last_trace_started": 88
            },
            "stats": {"db": {"count": 1, "duration": 20},
                      "wsgi": {"count": 2, "duration": -47}},
        }

        base_id = "10"
        result = self.ceilometer.get_report(base_id)

        # The driver must query Ceilometer exactly once, filtered on base_id.
        expected_filter = [{"field": "base_id", "op": "eq", "value": base_id}]
        self.ceilometer.client.events.list.assert_called_once_with(
            expected_filter, limit=100000)
        self.assertEqual(expected, result)
| 37.240566 | 79 | 0.338252 | 1,067 | 15,790 | 4.895033 | 0.218369 | 0.084243 | 0.11794 | 0.038292 | 0.657094 | 0.628183 | 0.551982 | 0.508137 | 0.498181 | 0.366073 | 0 | 0.115461 | 0.522799 | 15,790 | 423 | 80 | 37.328605 | 0.577704 | 0.037935 | 0 | 0.443299 | 0 | 0 | 0.259423 | 0.087309 | 0 | 0 | 0 | 0 | 0.012887 | 1 | 0.012887 | false | 0 | 0.007732 | 0 | 0.023196 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12810e363b2fde4bb2f563894e88d9b033fc5d56 | 2,666 | py | Python | utils/tools.py | alipay/Pyraformer | 84af4dbd93b7b96975b5034f0dde412005260123 | [
"Apache-2.0"
] | 7 | 2022-03-24T03:42:14.000Z | 2022-03-27T16:27:31.000Z | utils/tools.py | alipay/Pyraformer | 84af4dbd93b7b96975b5034f0dde412005260123 | [
"Apache-2.0"
] | 1 | 2022-03-17T08:54:42.000Z | 2022-03-17T08:54:42.000Z | utils/tools.py | alipay/Pyraformer | 84af4dbd93b7b96975b5034f0dde412005260123 | [
"Apache-2.0"
] | 1 | 2022-03-29T16:33:44.000Z | 2022-03-29T16:33:44.000Z | from torch.nn.modules import loss
import torch
import numpy as np
def MAE(pred, true):
    """Mean absolute error between predictions and ground truth."""
    abs_err = np.abs(pred - true)
    return np.mean(abs_err)
def MSE(pred, true):
    """Mean squared error between predictions and ground truth."""
    sq_err = (pred - true) ** 2
    return np.mean(sq_err)
def RMSE(pred, true):
    """Root mean squared error (square root of the MSE)."""
    return np.sqrt(np.mean((pred - true) ** 2))
def MAPE(pred, true):
    """Mean absolute percentage error, expressed as a fraction."""
    rel_err = (pred - true) / true
    return np.mean(np.abs(rel_err))
def MSPE(pred, true):
    """Mean squared percentage error."""
    rel_err = (pred - true) / true
    return np.mean(np.square(rel_err))
def metric(pred, true):
    """Compute all five regression metrics at once.

    Returns:
        tuple: (mae, mse, rmse, mape, mspe).
    """
    return (MAE(pred, true), MSE(pred, true), RMSE(pred, true),
            MAPE(pred, true), MSPE(pred, true))
class StandardScaler():
    """Zero-mean / unit-variance scaler for numpy arrays or torch tensors.

    Statistics are estimated with ``fit`` over axis 0 and stored as numpy
    values; when a torch tensor is transformed, they are converted to
    tensors on the input's device and dtype.
    """

    def __init__(self):
        # Identity transform until fit() is called.
        self.mean = 0.
        self.std = 1.

    def fit(self, data):
        """Estimate per-feature mean and std from `data` (along axis 0)."""
        self.mean = data.mean(0)
        self.std = data.std(0)

    def _stats_for(self, data):
        # Return (mean, std) matching the input's framework: torch tensors
        # on data.device for tensor input, the stored numpy values otherwise.
        if torch.is_tensor(data):
            mean = torch.from_numpy(self.mean).type_as(data).to(data.device)
            std = torch.from_numpy(self.std).type_as(data).to(data.device)
            return mean, std
        return self.mean, self.std

    def transform(self, data):
        """Normalize `data` with the fitted statistics."""
        mean, std = self._stats_for(data)
        return (data - mean) / std

    def inverse_transform(self, data):
        """Undo `transform`, mapping normalized data back to raw scale."""
        mean, std = self._stats_for(data)
        return (data * std) + mean
class TopkMSELoss(torch.nn.Module):
    """MSE loss that keeps only the k largest per-sample errors.

    The elementwise squared errors are averaged over the last two
    dimensions to get one loss per sample, then only the ``topk``
    worst samples are returned.
    """

    def __init__(self, topk) -> None:
        super().__init__()
        self.topk = topk
        # reduction='none' keeps elementwise errors for per-sample pooling.
        self.criterion = torch.nn.MSELoss(reduction='none')

    def forward(self, output, label):
        per_sample = self.criterion(output, label).mean(2).mean(1)
        worst_k, _ = torch.topk(per_sample, self.topk)
        return worst_k
class SingleStepLoss(torch.nn.Module):
    """Gaussian negative log-likelihood and squared error over valid labels.

    Positions with zero labels are dropped when ``ignore_zero`` is set;
    otherwise all non-negative labels are kept.  Optionally only the
    ``topk`` largest values of each quantity are returned.
    """

    def __init__(self, ignore_zero):
        super().__init__()
        self.ignore_zero = ignore_zero

    def forward(self, mu, sigma, labels, topk=0):
        # Build the mask selecting which positions contribute to the loss.
        if self.ignore_zero:
            mask = (labels != 0)
        else:
            mask = (labels >= 0)
        mu_sel, sigma_sel, labels_sel = mu[mask], sigma[mask], labels[mask]

        gaussian = torch.distributions.normal.Normal(mu_sel, sigma_sel)
        likelihood = -gaussian.log_prob(labels_sel)

        residual = labels_sel - mu_sel
        se = residual * residual

        # Keep only the k hardest positions when a valid k is requested.
        if 0 < topk < len(likelihood):
            likelihood = torch.topk(likelihood, topk)[0]
            se = torch.topk(se, topk)[0]
        return likelihood, se
def AE_loss(mu, labels, ignore_zero):
    """Absolute error over valid labels.

    Zero labels are dropped when ``ignore_zero`` is set; otherwise all
    non-negative labels are kept.
    """
    mask = (labels != 0) if ignore_zero else (labels >= 0)
    return torch.abs(labels[mask] - mu[mask])
| 28.361702 | 112 | 0.62003 | 376 | 2,666 | 4.287234 | 0.199468 | 0.079404 | 0.052109 | 0.049628 | 0.344913 | 0.332506 | 0.318859 | 0.318859 | 0.318859 | 0.225806 | 0 | 0.007925 | 0.242686 | 2,666 | 93 | 113 | 28.666667 | 0.79049 | 0.013878 | 0 | 0.176471 | 0 | 0 | 0.001526 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.220588 | false | 0 | 0.044118 | 0.073529 | 0.470588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1282edeb2a30864dc3a5aa0e406d5fae2795f292 | 1,974 | py | Python | webScraping/Instagram/2a_selenium_corriere.py | PythonBiellaGroup/MaterialeSerate | 58b45ecda7b9a8a298b9ca966d2806618a277372 | [
"MIT"
] | 12 | 2021-12-12T22:19:52.000Z | 2022-03-18T11:45:17.000Z | webScraping/Instagram/2a_selenium_corriere.py | PythonGroupBiella/MaterialeLezioni | 58b45ecda7b9a8a298b9ca966d2806618a277372 | [
"MIT"
] | 1 | 2022-03-23T13:58:33.000Z | 2022-03-23T14:05:08.000Z | webScraping/Instagram/2a_selenium_corriere.py | PythonGroupBiella/MaterialeLezioni | 58b45ecda7b9a8a298b9ca966d2806618a277372 | [
"MIT"
] | 5 | 2021-11-30T19:38:41.000Z | 2022-01-30T14:50:44.000Z | # use selenium to scrape headlines from corriere.it
# pip install selenium
from re import L
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pandas as pd
import time
import sys
HOME = "https://corriere.it"
# open Firefox
driver = webdriver.Firefox()
# navigate to corriere.it
driver.get(HOME)
# In order to extract the information that you’re looking to scrape,
# you need to locate the element’s XPath.
# An XPath is a syntax used for finding any element on a webpage.
# We can see the headline
#<a class="has-text-black" href="https://www.corriere.it/sport/calcio/coppa-italia/22_aprile_19/inter-milan-formazioni-news-risultato-f607f438-bfef-11ec-9f78-c9d279c21b38.shtml">Inter-Milan, doppio Lautaro e Gosens, nerazzurri in finale di Coppa Italia </a>
# --> [@class=”name”]
# all great but we need to sort out this coxokie pop-up
#driver.find_element_by_xpath("//*[@id='_cpmt-accept']").click()
#WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, '_cpmt-accept'))).click()
#WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "div#_cpmt-buttons button#_cpmt-accept"))).click()
time.sleep(5)
# carefully look at the env, we have an iframe here
cookie_iframe = driver.find_element_by_xpath("//iframe[@id='_cpmt-iframe']")
driver.switch_to.frame(cookie_iframe)
print(cookie_iframe)
#driver.switch_to.frame(driver.find_element(By.XPATH("//iframe[@id='_cpmt-iframe']")))
button = driver.find_element_by_id("_cpmt-accept").click()
# back to the main class
driver.get(HOME)
# elements --> find_all
headlines = driver.find_elements_by_xpath('//h4[@class="title-art-hp is-medium is-line-h-106"]')
# here we get all the headlines from the corriere
# we can get the text
for headline in headlines:
print(headline.text) | 44.863636 | 258 | 0.766971 | 312 | 1,974 | 4.737179 | 0.451923 | 0.040595 | 0.056834 | 0.051421 | 0.20636 | 0.147497 | 0.147497 | 0.147497 | 0.147497 | 0.090663 | 0 | 0.018203 | 0.109422 | 1,974 | 44 | 259 | 44.863636 | 0.822526 | 0.592705 | 0 | 0.095238 | 0 | 0.047619 | 0.140845 | 0.067862 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.428571 | 0.095238 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
12848f59193336131bb837186f98da6abb8ba010 | 1,665 | py | Python | tests/test_api.py | bh-chaker/wetterdienst | b0d51bb4c7392eb47834e4978e26882d74b22e35 | [
"MIT"
] | 155 | 2020-07-03T05:09:22.000Z | 2022-03-28T06:57:39.000Z | tests/test_api.py | bh-chaker/wetterdienst | b0d51bb4c7392eb47834e4978e26882d74b22e35 | [
"MIT"
] | 453 | 2020-07-02T21:21:52.000Z | 2022-03-31T21:35:36.000Z | tests/test_api.py | bh-chaker/wetterdienst | b0d51bb4c7392eb47834e4978e26882d74b22e35 | [
"MIT"
] | 21 | 2020-09-07T12:13:27.000Z | 2022-03-26T16:26:09.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2018-2021, earthobservations developers.
# Distributed under the MIT License. See LICENSE for more info.
import pytest
from wetterdienst import Wetterdienst
@pytest.mark.remote
@pytest.mark.parametrize(
"provider,kind,kwargs",
[
# German Weather Service (DWD)
(
"dwd",
"observation",
{"parameter": "kl", "resolution": "daily", "period": "recent"},
),
("dwd", "forecast", {"parameter": "large", "mosmix_type": "large"}),
# Environment and Climate Change Canada
("eccc", "observation", {"parameter": "daily", "resolution": "daily"}),
],
)
@pytest.mark.parametrize("si_units", (False, True))
def test_api(provider, kind, kwargs, si_units):
""" Test main wetterdienst API """
# Build API
api = Wetterdienst(provider, kind)
# Discover parameters
assert api.discover()
# All stations
request = api(**kwargs, si_units=si_units).all()
stations = request.df
# Check stations DataFrame columns
assert set(stations.columns).issuperset(
{
"station_id",
"from_date",
"to_date",
"height",
"latitude",
"longitude",
"name",
"state",
}
)
# Check that there are actually stations
assert not stations.empty
# Query first DataFrame from values
values = next(request.values.query()).df
# TODO: DWD Forecast has no quality
assert set(values.columns).issuperset(
{"station_id", "parameter", "date", "value", "quality"}
)
assert not values.empty
| 26.015625 | 79 | 0.587988 | 169 | 1,665 | 5.733728 | 0.556213 | 0.028896 | 0.043344 | 0.053664 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007475 | 0.276877 | 1,665 | 63 | 80 | 26.428571 | 0.797342 | 0.25045 | 0 | 0 | 0 | 0 | 0.209756 | 0 | 0 | 0 | 0 | 0.015873 | 0.128205 | 1 | 0.025641 | false | 0 | 0.051282 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12867ea275e82f412c64f544501dc211d18fb6b3 | 2,761 | py | Python | crowd_anki/export/anki_exporter_wrapper.py | katrinleinweber/CrowdAnki | c78d837e082365d69bde5b1361b1dd4d11cd3d63 | [
"MIT"
] | 391 | 2016-08-31T21:55:07.000Z | 2022-03-30T16:30:12.000Z | crowd_anki/export/anki_exporter_wrapper.py | katrinleinweber/CrowdAnki | c78d837e082365d69bde5b1361b1dd4d11cd3d63 | [
"MIT"
] | 150 | 2016-09-01T00:35:35.000Z | 2022-03-30T23:26:48.000Z | crowd_anki/export/anki_exporter_wrapper.py | katrinleinweber/CrowdAnki | c78d837e082365d69bde5b1361b1dd4d11cd3d63 | [
"MIT"
] | 51 | 2016-09-04T17:02:39.000Z | 2022-02-04T11:49:10.000Z | from pathlib import Path
from .anki_exporter import AnkiJsonExporter
from ..anki.adapters.anki_deck import AnkiDeck
from ..config.config_settings import ConfigSettings
from ..utils import constants
from ..utils.notifier import AnkiModalNotifier, Notifier
from ..utils.disambiguate_uuids import disambiguate_note_model_uuids
EXPORT_FAILED_TITLE = "Export failed"
class AnkiJsonExporterWrapper:
"""
Wrapper designed to work with standard export dialog in anki.
"""
key = "CrowdAnki JSON representation"
ext = constants.ANKI_EXPORT_EXTENSION
hideTags = True
includeTags = True
directory_export = True
def __init__(self, collection,
deck_id: int = None,
json_exporter: AnkiJsonExporter = None,
notifier: Notifier = None):
self.includeMedia = True
self.did = deck_id
self.count = 0 # Todo?
self.collection = collection
self.anki_json_exporter = json_exporter or AnkiJsonExporter(collection, ConfigSettings.get_instance())
self.notifier = notifier or AnkiModalNotifier()
# required by anki exporting interface with its non-PEP-8 names
# noinspection PyPep8Naming
def exportInto(self, directory_path):
if self.did is None:
self.notifier.warning(EXPORT_FAILED_TITLE, "CrowdAnki export works only for specific decks. "
"Please use CrowdAnki snapshot if you want to export "
"the whole collection.")
return
deck = AnkiDeck(self.collection.decks.get(self.did, default=False))
if deck.is_dynamic:
self.notifier.warning(EXPORT_FAILED_TITLE, "CrowdAnki does not support export for dynamic decks.")
return
# Clean up duplicate note models. See
# https://github.com/Stvad/CrowdAnki/wiki/Workarounds-%E2%80%94-Duplicate-note-model-uuids.
disambiguate_note_model_uuids(self.collection)
# .parent because we receive name with random numbers at the end (hacking around internals of Anki) :(
export_path = Path(directory_path).parent
self.anki_json_exporter.export_to_directory(deck, export_path, self.includeMedia,
create_deck_subdirectory=ConfigSettings.get_instance().export_create_deck_subdirectory)
self.count = self.anki_json_exporter.last_exported_count
def get_exporter_id(exporter):
return f"{exporter.key} (*{exporter.ext})", exporter
def exporters_hook(exporters_list):
exporter_id = get_exporter_id(AnkiJsonExporterWrapper)
if exporter_id not in exporters_list:
exporters_list.append(exporter_id)
| 40.014493 | 139 | 0.680913 | 312 | 2,761 | 5.833333 | 0.413462 | 0.032967 | 0.023077 | 0.032967 | 0.049451 | 0.049451 | 0.049451 | 0 | 0 | 0 | 0 | 0.003865 | 0.250272 | 2,761 | 68 | 140 | 40.602941 | 0.875362 | 0.138718 | 0 | 0.044444 | 0 | 0 | 0.104794 | 0 | 0 | 0 | 0 | 0.014706 | 0 | 1 | 0.088889 | false | 0 | 0.155556 | 0.022222 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
128a2d7a634e13b30d2d38fc5ac9815e890ebcfe | 943 | py | Python | demo2/demo2_consume2.py | YuYanzy/kafka-python-demo | fc01ac29230b41fe1821f6e5a9d7226dea9688fe | [
"Apache-2.0"
] | 3 | 2021-05-07T01:48:37.000Z | 2021-09-24T20:53:51.000Z | demo2/demo2_consume2.py | YuYanzy/kafka-python-demo | fc01ac29230b41fe1821f6e5a9d7226dea9688fe | [
"Apache-2.0"
] | null | null | null | demo2/demo2_consume2.py | YuYanzy/kafka-python-demo | fc01ac29230b41fe1821f6e5a9d7226dea9688fe | [
"Apache-2.0"
] | 1 | 2021-05-08T08:46:01.000Z | 2021-05-08T08:46:01.000Z | # -*- coding: utf-8 -*-
# @Author : Ecohnoch(xcy)
# @File : demo2_consume.py
# @Function : TODO
import kafka
demo2_config = {
'kafka_host': 'localhost:9092',
'kafka_topic': 'demo2',
'kafka_group_id': 'demo2_group1'
}
def consume():
    """Subscribe to the demo2 topic and print each message's key, value and position.

    Blocks forever iterating the consumer.

    Bug fix: Kafka message keys and values are raw ``bytes`` and may be ``None``
    (e.g. messages produced without a key).  The original
    ``str(msg.key, encoding='utf-8')`` raised ``TypeError`` on ``None``; such
    fields are now passed through as ``None``.
    """
    consumer = kafka.KafkaConsumer(demo2_config['kafka_topic'],
                                   group_id=demo2_config['kafka_group_id'],
                                   bootstrap_servers=[demo2_config['kafka_host']])
    print('link kafka ok.')
    for msg in consumer:
        # msg.key, msg.value, msg.topic, msg.partition, msg.offset
        this_key = msg.key.decode('utf-8') if msg.key is not None else None
        this_val = msg.value.decode('utf-8') if msg.value is not None else None
        print(this_key, this_val, 'topic: {}, partition: {}, offset: {}'.format(msg.topic, msg.partition, msg.offset))
if __name__ == '__main__':
    # Run the consumer loop when executed as a script.
    consume()
| 29.46875 | 118 | 0.604454 | 115 | 943 | 4.643478 | 0.4 | 0.082397 | 0.11985 | 0.074906 | 0.108614 | 0.108614 | 0 | 0 | 0 | 0 | 0 | 0.021277 | 0.252386 | 943 | 31 | 119 | 30.419355 | 0.73617 | 0.160127 | 0 | 0 | 0 | 0 | 0.215561 | 0 | 0 | 0 | 0 | 0.032258 | 0 | 1 | 0.052632 | false | 0 | 0.052632 | 0 | 0.105263 | 0.105263 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
128e7777e186dad8ff8ca443386abd102aa7f54e | 1,492 | py | Python | Weather Station using DHT Sensor with Raspberry Pi and ThingSpeak Platform/Weather Station - ThingSpeak - Raspberry Pi.py | MeqdadDev/ai-robotics-cv-iot-mini-projects | 0c591bc495c95aa95d436e51f38e55bf510349ac | [
"MIT"
] | null | null | null | Weather Station using DHT Sensor with Raspberry Pi and ThingSpeak Platform/Weather Station - ThingSpeak - Raspberry Pi.py | MeqdadDev/ai-robotics-cv-iot-mini-projects | 0c591bc495c95aa95d436e51f38e55bf510349ac | [
"MIT"
] | null | null | null | Weather Station using DHT Sensor with Raspberry Pi and ThingSpeak Platform/Weather Station - ThingSpeak - Raspberry Pi.py | MeqdadDev/ai-robotics-cv-iot-mini-projects | 0c591bc495c95aa95d436e51f38e55bf510349ac | [
"MIT"
] | 1 | 2022-03-29T07:41:23.000Z | 2022-03-29T07:41:23.000Z | '''
IoT Mini Project
Weather Station using DHT Sensor and Raspberry Pi with ThingSpeak Platform
Code Sample: Interfacing DHT22 with Raspberry Pi and sending the data to an IoT Platform (ThingSpeak Platform)
'''
from time import sleep
# import Adafruit_DHT # Not supported library
import adafruit_dht
from board import *
import requests
# After creating your account on ThingSpeak platform, put your channel id below
channel_id = 12345
write_key = 'WriteYourKeyAsString.......' # Put your write key here
# D4 = GPIO4 / D17 = GPIO17 ...etc.
SENSOR_PIN = D17
def get_measurements():
    """Read one sample from the DHT22 and return (temperature_C, humidity_pct).

    Prints both readings before returning. May raise (e.g. RuntimeError) when
    the sensor read fails; callers are expected to handle that.
    """
    sensor = adafruit_dht.DHT22(SENSOR_PIN, use_pulseio=False)
    readings = (sensor.temperature, sensor.humidity)
    print("Humidity= {:.2f}".format(readings[1]))
    print("Temperature= {:.2f}°C".format(readings[0]))
    return readings
def sendData(temp, humidity):
    """Push one temperature/humidity sample to the ThingSpeak update API.

    Args:
        temp: temperature value for field1.
        humidity: humidity value for field2.

    Raises:
        requests.RequestException: on network failure, timeout, or HTTP error
            status — surfaced so the caller's error handler can report it.
    """
    url = 'https://api.thingspeak.com/update'
    params = {'key': write_key, 'field1': temp, 'field2': humidity}
    # Bug fix: requests has no default timeout, so a network stall would hang
    # the sampling loop forever. Also raise on HTTP errors instead of silently
    # discarding the response.
    res = requests.get(url, params=params, timeout=10)
    res.raise_for_status()
if __name__ == "__main__":
    # Sample the sensor and push to ThingSpeak indefinitely.
    while True:
        # 15 seconds is the minimum update interval for a free ThingSpeak account
        sleep(15)
        try:
            temperature, humidity = get_measurements()
        except Exception:
            # Bug fix: originally execution fell through to sendData even when the
            # read failed, pushing stale values (or raising NameError on the first
            # iteration). Skip this cycle instead. `except Exception` (not bare
            # `except`) also lets Ctrl+C terminate the loop.
            print("Error: Can't get the sensor values, check out your wiring connection.")
            continue
        try:
            sendData(temperature, humidity)
        except Exception:
            print("Error: Can't push the sensor values to ThingSpeak server.")
| 29.84 | 110 | 0.690349 | 191 | 1,492 | 5.298429 | 0.528796 | 0.075099 | 0.033597 | 0.037549 | 0.039526 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026587 | 0.218499 | 1,492 | 49 | 111 | 30.44898 | 0.84048 | 0 | 0 | 0.137931 | 0 | 0 | 0.257032 | 0.026188 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.137931 | null | null | 0.137931 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
128f728bec79cfe03c54bf8f06695117449e7c5a | 5,771 | py | Python | python/ucloud/import_data.py | oldthreefeng/miscellany | 8d3c7a14b53929d752c7356c85ae6681000cd526 | [
"MIT"
] | 1 | 2019-01-04T07:44:08.000Z | 2019-01-04T07:44:08.000Z | python/ucloud/import_data.py | oldthreefeng/miscellany | 8d3c7a14b53929d752c7356c85ae6681000cd526 | [
"MIT"
] | null | null | null | python/ucloud/import_data.py | oldthreefeng/miscellany | 8d3c7a14b53929d752c7356c85ae6681000cd526 | [
"MIT"
] | 2 | 2018-12-10T12:55:38.000Z | 2019-01-04T07:43:55.000Z | #!/usr/bin/python2
import sys
import os
import redis
import time
import datetime
# Keys discovered in the source DB, bucketed by Redis value type.
# Filled by read_type_keys() and consumed by the import_* functions.
string_keys = []
hash_keys = []
list_keys = []
set_keys = []
zset_keys = []
def import_string(source, dest):
print "Begin Import String Type"
keys_count = len(string_keys)
print "String Key Count is:", keys_count
pipeSrc = source.pipeline(transaction=False)
pipeDst = dest.pipeline(transaction=False)
index = 0
pipe_size = 1000
while index < keys_count:
old_index = index
num = 0
while (index < keys_count) and (num < pipe_size):
pipeSrc.get(string_keys[index])
index += 1
num += 1
results = pipeSrc.execute()
for value in results:
pipeDst.set(string_keys[old_index], value)
old_index += 1
pipeDst.execute()
def import_hash(source, dest):
    """Copy every hash key from source to dest, field by field, in pipelined batches."""
    print "Begin Import Hash Type"
    keys_count = len(hash_keys)
    print "Hash Key Count is:", keys_count
    pipeSrc = source.pipeline(transaction=False)
    pipeDst = dest.pipeline(transaction=False)
    for key in hash_keys:
        hkeys = source.hkeys(key)
        # NOTE: re-binds keys_count to this hash's field count (shadows the total above)
        keys_count = len(hkeys)
        index = 0
        pipe_size = 1000
        while index < keys_count:
            old_index = index
            num = 0
            # queue up to pipe_size HGETs on the source pipeline
            while (index < keys_count) and (num < pipe_size):
                pipeSrc.hget(key, hkeys[index])
                index += 1
                num += 1
            results = pipeSrc.execute()
            # results come back in queue order, so hkeys[old_index] pairs with each value
            for value in results:
                pipeDst.hset(key, hkeys[old_index], value)
                old_index += 1
            pipeDst.execute()
def import_set(source, dest):
    """Copy every set key from source to dest via pipelined SADD (batches of 1000)."""
    print "Begin Import Set Type"
    keys_count = len(set_keys)
    print "Set Key Count is:", keys_count
    pipeDst = dest.pipeline(transaction=False)
    for key in set_keys:
        sValues = source.smembers(key)
        value_count = len(sValues)
        index = 0
        pipe_size = 1000
        while index < value_count:
            # old_index is assigned but unused here (copied from the other importers)
            old_index = index
            num = 0
            # pop() drains members from the fetched set as they are queued
            while (index < value_count) and (num < pipe_size):
                pipeDst.sadd(key, sValues.pop())
                index += 1
                num += 1
            pipeDst.execute()
def import_zset(source, dest):
    """Copy every sorted-set key from source to dest, preserving member scores."""
    print "Begin Import ZSet Type"
    keys_count = len(zset_keys)
    print "ZSet Key Count is:", keys_count
    pipeSrc = source.pipeline(transaction=False)
    pipeDst = dest.pipeline(transaction=False)
    for key in zset_keys:
        zset_size = source.zcard(key)
        index = 0
        pipe_size = 1000
        while index < zset_size:
            # ZRANGE's end index is inclusive, so each fetch returns up to
            # pipe_size + 1 members; index advances by the actual count fetched.
            members = source.zrange(key, index, index + pipe_size)
            index += len(members)
            for member in members:
                pipeSrc.zscore(key, member)
            scores = pipeSrc.execute()
            i = 0
            for member in members:
                # NOTE(review): zadd(key, member, score) is the legacy redis-py 2.x
                # call signature; redis-py 3.x expects zadd(key, {member: score}).
                # Confirm the installed client version.
                pipeDst.zadd(key, member, scores[i])
                i += 1
            pipeDst.execute()
def import_list(source, dest):
print "Begin Import List Type"
keys_count = len(list_keys)
print "List Key Count is:", keys_count
pipeDst = dest.pipeline(transaction=False)
for key in list_keys:
list_size = source.llen(key)
index = 0
pipe_size = 1000
while index < list_size:
results = source.lrange(key, index, index + pipe_size)
index += len(results)
for value in results:
pipeDst.rpush(key, value)
pipeDst.execute()
def read_type_keys(source):
    """Scan all keys in source and bucket them into the module-level per-type lists."""
    keys = source.keys()
    keys_count = len(keys)
    print "Key Count is:", keys_count
    pipe = source.pipeline(transaction=False)
    # for key in keys:
    index = 0
    pipe_size = 5000
    while index < keys_count:
        old_index = index
        num = 0
        # queue up to pipe_size TYPE commands
        while (index < keys_count) and (num < pipe_size):
            pipe.type(keys[index])
            index += 1
            num += 1
        results = pipe.execute()
        # results come back in queue order, so keys[old_index] pairs with each reply
        for type in results:  # NOTE: loop variable shadows the builtin `type`
            if type == "string":
                string_keys.append(keys[old_index])
            elif type == "list":
                list_keys.append(keys[old_index])
            elif type == "hash":
                hash_keys.append(keys[old_index])
            elif type == "set":
                set_keys.append(keys[old_index])
            elif type == "zset":
                zset_keys.append(keys[old_index])
            else:
                print keys[old_index], " is not find when TYPE"
            old_index += 1
if __name__ == '__main__':
config = {
"source": ['10.4.1.91:0', '10.4.13.124:0', '10.4.12.16:0', '10.4.2.250:0'],
"dest": ['127.0.0.1:11', '127.0.0.1:12', '127.0.0.1:2', '127.0.0.1:1']
}
start = datetime.datetime.now()
for group in zip(config["source"], config["dest"]):
print group
SrcIP = group[0].split(':')[0]
SrcPort = 6379
DstIP = group[1].split(':')[0]
DstPort = 6379
DstDB = group[1].split(':')[1]
source = redis.Redis(host=SrcIP, port=SrcPort)
dest = redis.Redis(host=DstIP, port=DstPort, db=DstDB)
print "Begin Read Keys"
read_type_keys(source)
print "String Key Count is:", len(string_keys)
print "Set Key Count is:", len(set_keys)
print "ZSet Key Count is:", len(zset_keys)
print "List Key Count is:", len(list_keys)
print "Hash Key Count is:", len(hash_keys)
import_string(source, dest)
import_hash(source, dest)
import_list(source, dest)
import_set(source, dest)
import_zset(source, dest)
stop = datetime.datetime.now()
diff = stop - start
print "Finish, token time:", str(diff)
| 30.21466 | 83 | 0.562468 | 727 | 5,771 | 4.325997 | 0.145805 | 0.054372 | 0.034976 | 0.026709 | 0.533545 | 0.440064 | 0.384738 | 0.309698 | 0.279809 | 0.279809 | 0 | 0.032017 | 0.328886 | 5,771 | 190 | 84 | 30.373684 | 0.780015 | 0.005892 | 0 | 0.337349 | 0 | 0 | 0.088579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.120482 | null | null | 0.120482 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1296680de0a376242d8b5859461295d893d5f13c | 4,180 | py | Python | local_test/test_pullparser.py | rmoskal/e-springpad | d2c1dfbae63a29737d9cfdee571704b7a5e85bd5 | [
"MIT"
] | 1 | 2017-01-10T17:12:25.000Z | 2017-01-10T17:12:25.000Z | local_test/test_pullparser.py | rmoskal/e-springpad | d2c1dfbae63a29737d9cfdee571704b7a5e85bd5 | [
"MIT"
] | null | null | null | local_test/test_pullparser.py | rmoskal/e-springpad | d2c1dfbae63a29737d9cfdee571704b7a5e85bd5 | [
"MIT"
] | null | null | null | __author__ = 'rob'
import unittest
import logging
import evernotebookparser
from xml.etree import ElementTree
import re
class TestNotebookParser(unittest.TestCase):
    """Tests for NotebookParser2 against the Quotes.enex export fixture."""

    def setUp(self):
        # Parser under test; requires ../Quotes.enex to exist on disk.
        self.o = evernotebookparser.NotebookParser2("../Quotes.enex")

    def test_parsing2(self):
        # The Quotes notebook is expected to contain exactly 32 notes.
        results = [];
        self.o.get_items(lambda x: results.append(x))
        self.assertEquals(32,len(results))

    def test_re(self):
        data = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">
<en-note>Barthes, Roland<br clear="none"/> Sade, Fourier, Loyola: p.7.<br clear="none"/>
<br clear="none"/>Motto: It is a matter of bringing into daily life a fragment of the unintelligible formulas that emanate from a text we admire.
<br clear="none"/></en-note>"""
        # NOTE(review): the offsets 133/410 depend on the literal's exact
        # whitespace — verify after any reformatting of this file.
        self.assertEquals(data.find('<en-note>'),133)
        self.assertEquals(data.find('</en-note>'),410)
        self.assertEquals(data[133],'<')
        # extract() should strip the envelope, leaving just the note body.
        data = evernotebookparser.extract(data)
        self.assertTrue(data.startswith("Barthes"))

    def test_construction1(self):
        results = [];
        self.o.get_items(lambda x: results.append(x))
        item = results[0];
        self.assertEquals(["B"],item['tags'])
        self.assertTrue(item['content'].startswith("Barthes"))

    def test_construction2(self):
        results = [];
        self.o.get_items(lambda x: results.append(x))
        item = results[1];
        self.assertEquals(['O'],item['tags'])
class TestNotebookParser2(unittest.TestCase):
    """Tests for NotebookParser2 against the test.enex export fixture."""

    def setUp(self):
        self.o = evernotebookparser.NotebookParser2("../test.enex")

    def _collect(self):
        # Gather all parsed items through the callback-based API.
        items = []
        self.o.get_items(items.append)
        return items

    def test_parsing2(self):
        self.assertEquals(2, len(self._collect()))

    def test_construction1(self):
        first = self._collect()[0]
        self.assertTrue(first['content'].startswith("<div>"))
        self.assertFalse("url" in first)

    def test_construction2(self):
        second = self._collect()[1]
        self.assertTrue(second['content'].startswith("<div>"))
        self.assertTrue("url" in second)
        self.assertEquals(second['url'], "http://mostmedia.com")
class TestNotebookMac(unittest.TestCase):
    """Tests for NotebookParser2 against the Travel.enex export fixture."""

    def setUp(self):
        self.o = evernotebookparser.NotebookParser2("../Travel.enex")

    def _collect(self):
        # Gather all parsed items through the callback-based API.
        items = []
        self.o.get_items(items.append)
        return items

    def test_parsing2(self):
        self.assertEquals(4, len(self._collect()))

    def test_construction1(self):
        first = self._collect()[0]
        self.assertTrue(first['content'].startswith("<div>"))
        self.assertFalse("url" in first)

    def test_construction2(self):
        second = self._collect()[1]
        self.assertTrue(second['content'].startswith("<div>"))
class TestDavids(unittest.TestCase):
def setUp(self):
self.o = evernotebookparser.NotebookParser2("../recipes.enex")
def test_parsing2(self):
results = [];
self.o.get_items(lambda x: results.append(x))
self.assertEquals(49,len(results))
def test_construction1(self):
results = [];
self.o.get_items(lambda x: results.append(x))
item = results[0];
print item['content']
#self.assertTrue(item['content'].startswith("<div>"))
#self.assertFalse("url" in item)
def test_construction2(self):
results = [];
self.o.get_items(lambda x: results.append(x))
item = results[1];
#self.assertTrue(item['content'].startswith("<div>"))
| 32.403101 | 157 | 0.570335 | 457 | 4,180 | 5.153173 | 0.227571 | 0.03397 | 0.076433 | 0.081529 | 0.695117 | 0.680255 | 0.654777 | 0.653079 | 0.653079 | 0.540977 | 0 | 0.015111 | 0.28756 | 4,180 | 129 | 158 | 32.403101 | 0.775688 | 0.032297 | 0 | 0.606742 | 0 | 0.033708 | 0.152362 | 0.005936 | 0 | 0 | 0 | 0 | 0.213483 | 0 | null | null | 0 | 0.05618 | null | null | 0.011236 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12990c8712d2523d8e2f0753d7b1faee0bbfa287 | 353 | py | Python | plots_lib/architecture_config.py | cmimprota/ASL-SIFT | e6e489e9cc06746e2ab8cd11193fc9fc0112e5df | [
"Zlib"
] | 1 | 2021-12-30T14:59:43.000Z | 2021-12-30T14:59:43.000Z | plots_lib/architecture_config.py | cmimprota/ASL-SIFT | e6e489e9cc06746e2ab8cd11193fc9fc0112e5df | [
"Zlib"
] | null | null | null | plots_lib/architecture_config.py | cmimprota/ASL-SIFT | e6e489e9cc06746e2ab8cd11193fc9fc0112e5df | [
"Zlib"
] | 1 | 2021-04-12T11:13:32.000Z | 2021-04-12T11:13:32.000Z | config = dict()
config['fixed_cpu_frequency'] = "@ 3700 MHz"
config['frequency'] = 3.7e9
config['maxflops_sisd'] = 2
config['maxflops_sisd_fma'] = 4
config['maxflops_simd'] = 16
config['maxflops_simd_fma'] = 32
config['roofline_beta'] = 64 # According to WikiChip (Skylake)
config['figure_size'] = (20,9)
config['save_folder'] = '../all_plots/' | 29.416667 | 69 | 0.691218 | 48 | 353 | 4.833333 | 0.666667 | 0.241379 | 0.155172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058442 | 0.127479 | 353 | 12 | 70 | 29.416667 | 0.694805 | 0.087819 | 0 | 0 | 0 | 0 | 0.454829 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12a26d1b84cfd62fa98cec13a5aa4a115ddadb78 | 779 | py | Python | bin/print_data_structure.py | JohanComparat/pyEmerge | 9b5bfa01959d48ea41221609b8f375f27e3e39ff | [
"Unlicense"
] | null | null | null | bin/print_data_structure.py | JohanComparat/pyEmerge | 9b5bfa01959d48ea41221609b8f375f27e3e39ff | [
"Unlicense"
] | null | null | null | bin/print_data_structure.py | JohanComparat/pyEmerge | 9b5bfa01959d48ea41221609b8f375f27e3e39ff | [
"Unlicense"
] | null | null | null | import sys
ii = int(sys.argv[1])
env = sys.argv[2]
# python3 print_data_structure.py 22 MD10
import glob
import os
import numpy as n
import EmergeIterate
iterate = EmergeIterate.EmergeIterate(ii, env)
iterate.open_snapshots()
def print_attr(h5item):
for attr in h5item:
print(attr, h5item[attr])
def print_all_key(h5item):
for key in h5item.keys():
print('========================================')
print(key, h5item[key])
print('- - - - - - - - - - - - - - - - - - - - ')
print_attr(h5item[key])
def print_data_structure(h5item):
print('+ + + + + + + HEADER + + + + + + + + +')
print_attr(h5item.attrs)
print('\n')
print('+ + + + + + + DATA + + + + + + + + + +')
print_all_key(h5item)
print_data_structure(iterate.f0)
| 23.606061 | 55 | 0.56611 | 92 | 779 | 4.641304 | 0.380435 | 0.084309 | 0.140515 | 0.079625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02946 | 0.215661 | 779 | 32 | 56 | 24.34375 | 0.669394 | 0.050064 | 0 | 0 | 0 | 0 | 0.217096 | 0.054274 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.2 | 0 | 0.32 | 0.56 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
12a4188c00b7c8a1abdb2f2f512a6ed7085ea497 | 1,291 | py | Python | tests/test_coders.py | GlobalFishingWatch/pipe-tools | 34dff591997bb2c25e018df86d13a9d42972032b | [
"Apache-2.0"
] | 1 | 2018-05-26T20:10:51.000Z | 2018-05-26T20:10:51.000Z | tests/test_coders.py | GlobalFishingWatch/pipe-tools | 34dff591997bb2c25e018df86d13a9d42972032b | [
"Apache-2.0"
] | 37 | 2017-10-22T12:00:59.000Z | 2022-02-08T19:17:58.000Z | tests/test_coders.py | GlobalFishingWatch/pipe-tools | 34dff591997bb2c25e018df86d13a9d42972032b | [
"Apache-2.0"
] | null | null | null | import pytest
import six
import ujson
import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline as _TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.coders import typecoders
from apache_beam.typehints import Dict, Union
from pipe_tools.coders import JSONDictCoder
from pipe_tools.coders import JSONDict
from pipe_tools.generator import MessageGenerator
class MyType():
pass
@pytest.mark.filterwarnings('ignore:Using fallback coder:UserWarning')
@pytest.mark.filterwarnings('ignore:The compiler package is deprecated and removed in Python 3.x.:DeprecationWarning')
class TestCoders():
def test_JSONDictCoder(self):
records = [
{},
{'a': 1, 'b': 2, 'c': None},
{"test":None},
]
coder = JSONDictCoder()
for r in records:
assert r == coder.decode(coder.encode(r))
def test_type_hints(self):
messages = MessageGenerator()
source = beam.Create(messages)
assert source.get_output_type() == Dict[six.binary_type, Union[float, int]]
with _TestPipeline() as p:
result = (
p | beam.Create(messages)
)
p.run() | 26.346939 | 118 | 0.676995 | 157 | 1,291 | 5.43949 | 0.496815 | 0.070258 | 0.081967 | 0.07377 | 0.131148 | 0.0726 | 0 | 0 | 0 | 0 | 0 | 0.003036 | 0.234702 | 1,291 | 49 | 119 | 26.346939 | 0.861336 | 0 | 0 | 0 | 0 | 0 | 0.102941 | 0.017802 | 0 | 0 | 0 | 0 | 0.085714 | 1 | 0.057143 | false | 0.028571 | 0.342857 | 0 | 0.457143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
12a668f147490b052289202d9372f523023dc419 | 3,820 | py | Python | yeti/core/model/stix/sro.py | yeti-platform/TibetanBrownBear | 8ab520bd199a63e404b3a6a5b49a29f277384e8e | [
"Apache-2.0"
] | 9 | 2018-01-15T22:44:24.000Z | 2021-05-28T11:13:03.000Z | yeti/core/model/stix/sro.py | yeti-platform/TibetanBrownBear | 8ab520bd199a63e404b3a6a5b49a29f277384e8e | [
"Apache-2.0"
] | 140 | 2018-01-12T10:07:47.000Z | 2021-08-02T23:03:49.000Z | yeti/core/model/stix/sro.py | yeti-platform/TibetanBrownBear | 8ab520bd199a63e404b3a6a5b49a29f277384e8e | [
"Apache-2.0"
] | 11 | 2018-01-16T19:49:35.000Z | 2022-01-18T16:30:34.000Z | """Detail Yeti's Entity object structure."""
import json
from yeti.core.errors import ValidationError
from .base import StixObject
class StixSRO(StixObject):
def __init__(self, db_from, db_to, attributes):
self._db_from = db_from
self._db_to = db_to
super().__init__(**attributes)
@classmethod
def get(cls, key):
"""Fetches the most recent version of a STIX Relationship given its
STIX ID.
Args:
key: The STIX ID of the Relationship to fetch.
Returns:
A STIX Relationship object.
"""
all_versions = cls.filter({'attributes.id': key})
if not all_versions:
return None
winner = all_versions[0]
for version in all_versions:
if version.modified > winner.modified:
winner = version
return winner
def all_versions(self):
"""Returns all versions of a STIX object given its key.
Returns:
A list of STIX objects.
"""
return super().filter({'attributes.id': self.id}, latest=False)
def dump(self, destination='db'):
"""Dumps an Entity object into its STIX JSON representation.
Args:
destination: Since STIX2 uses IDs as means to identify a single object
we need to transform the object depending on whether it is being
sent to the database or to a web client.
Returns:
The Entity's JSON representation in dictionary form.
"""
attributes = json.loads(self._stix_object.serialize())
if destination == 'db':
return {
'id': None,
'_from': self._db_from,
'_to': self._db_to,
'attributes': attributes
}
return attributes
@classmethod
def load_stix(cls, args):
"""Translate information from the backend into a valid STIX definition.
Will instantiate a STIX object from that definition.
Args:
args: The dictionary to use to create the STIX object.
strict: Unused, kept to be consistent with overriden method
Returns:
The corresponding STIX objet.
Raises:
ValidationError: If a STIX object could not be instantiated from the
serialized data.
"""
if isinstance(args, list):
return [cls.load_stix(item) for item in args]
subclass = cls.get_final_datatype(args['attributes'])
db_id = args.pop('_id', None)
db_from = args.pop('_from')
db_to = args.pop('_to')
args.pop('_rev', None)
stix_rel = args['attributes']
try:
obj = subclass(db_from, db_to, stix_rel)
if db_id:
obj._arango_id = db_id # pylint: disable=protected-access
except Exception as err:
raise ValidationError(str(err))
return obj
@property
def type(self):
return self._stix_object.type
@property
def id(self):
return self._stix_object.id
@property
def created_by_ref(self):
return self._stix_object.created_by_ref
@property
def created(self):
return self._stix_object.created
@property
def modified(self):
return self._stix_object.modified
@property
def revoked(self):
return self._stix_object.revoked
@property
def labels(self):
return self._stix_object.labels
@property
def external_references(self):
return self._stix_object.external_references
@property
def object_marking_refs(self):
return self._stix_object.object_marking_refs
@property
def granular_markings(self):
return self._stix_object.granular_markings
| 28.296296 | 80 | 0.606545 | 457 | 3,820 | 4.894967 | 0.326039 | 0.067054 | 0.068842 | 0.080465 | 0.113545 | 0.027716 | 0 | 0 | 0 | 0 | 0 | 0.000766 | 0.316492 | 3,820 | 134 | 81 | 28.507463 | 0.855994 | 0.274869 | 0 | 0.155844 | 0 | 0 | 0.03357 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.194805 | false | 0 | 0.038961 | 0.12987 | 0.467532 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
12a832b1e6427f5514100a7f00be3d2042f2ed0f | 207 | py | Python | LeetCode_1304.py | xulu199705/LeetCode | 9a654a10117a93f9ad9728d6b86eb3713185545e | [
"MIT"
] | null | null | null | LeetCode_1304.py | xulu199705/LeetCode | 9a654a10117a93f9ad9728d6b86eb3713185545e | [
"MIT"
] | null | null | null | LeetCode_1304.py | xulu199705/LeetCode | 9a654a10117a93f9ad9728d6b86eb3713185545e | [
"MIT"
] | null | null | null | from typing import List
class Solution:
def sumZero(self, n: int) -> List[int]:
ans = [x for x in range(-(n//2), n//2 + 1)]
if n % 2 == 0:
ans.remove(0)
return ans
| 18.818182 | 51 | 0.492754 | 33 | 207 | 3.090909 | 0.666667 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045113 | 0.357488 | 207 | 10 | 52 | 20.7 | 0.721805 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
12aabf7a6ed3903e5b3fb7b076bf621fe0068180 | 1,318 | py | Python | nipype/interfaces/ants/tests/test_auto_ImageMath.py | TRO-HIT/nipype | c453eac5d7efdd4e19a9bcc8a7f3d800026cc125 | [
"Apache-2.0"
] | null | null | null | nipype/interfaces/ants/tests/test_auto_ImageMath.py | TRO-HIT/nipype | c453eac5d7efdd4e19a9bcc8a7f3d800026cc125 | [
"Apache-2.0"
] | null | null | null | nipype/interfaces/ants/tests/test_auto_ImageMath.py | TRO-HIT/nipype | c453eac5d7efdd4e19a9bcc8a7f3d800026cc125 | [
"Apache-2.0"
] | 1 | 2020-12-16T16:36:48.000Z | 2020-12-16T16:36:48.000Z | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..utils import ImageMath
def test_ImageMath_inputs():
input_map = dict(
args=dict(argstr="%s",),
copy_header=dict(usedefault=True,),
dimension=dict(argstr="%d", position=1, usedefault=True,),
environ=dict(nohash=True, usedefault=True,),
num_threads=dict(nohash=True, usedefault=True,),
op1=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
op2=dict(argstr="%s", position=-1,),
operation=dict(argstr="%s", mandatory=True, position=3,),
output_image=dict(
argstr="%s",
extensions=None,
keep_extension=True,
name_source=["op1"],
name_template="%s_maths",
position=2,
),
)
inputs = ImageMath.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_ImageMath_outputs():
output_map = dict(output_image=dict(extensions=None,),)
outputs = ImageMath.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 34.684211 | 77 | 0.618361 | 157 | 1,318 | 5.076433 | 0.401274 | 0.075282 | 0.069009 | 0.060226 | 0.331242 | 0.198243 | 0.138018 | 0.138018 | 0.138018 | 0.138018 | 0 | 0.007944 | 0.235964 | 1,318 | 37 | 78 | 35.621622 | 0.783515 | 0.038695 | 0 | 0.066667 | 1 | 0 | 0.018182 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 1 | 0.066667 | false | 0 | 0.033333 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12b1527e01e27cdb3f79857b70a9797275320e0e | 1,372 | py | Python | spacy/lang/th/__init__.py | snosrap/spaCy | 3f68bbcfec44ef55d101e6db742d353b72652129 | [
"MIT"
] | 1 | 2019-05-17T02:43:33.000Z | 2019-05-17T02:43:33.000Z | spacy/lang/th/__init__.py | snosrap/spaCy | 3f68bbcfec44ef55d101e6db742d353b72652129 | [
"MIT"
] | 49 | 2021-10-01T10:15:30.000Z | 2021-12-27T14:36:05.000Z | spacy/lang/th/__init__.py | snosrap/spaCy | 3f68bbcfec44ef55d101e6db742d353b72652129 | [
"MIT"
] | 1 | 2019-10-01T08:27:20.000Z | 2019-10-01T08:27:20.000Z | from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from ...language import Language, BaseDefaults
from ...tokens import Doc
from ...util import DummyTokenizer, registry, load_config_from_str
from ...vocab import Vocab
DEFAULT_CONFIG = """
[nlp]
[nlp.tokenizer]
@tokenizers = "spacy.th.ThaiTokenizer"
"""
@registry.tokenizers("spacy.th.ThaiTokenizer")
def create_thai_tokenizer():
def thai_tokenizer_factory(nlp):
return ThaiTokenizer(nlp.vocab)
return thai_tokenizer_factory
class ThaiTokenizer(DummyTokenizer):
def __init__(self, vocab: Vocab) -> None:
try:
from pythainlp.tokenize import word_tokenize
except ImportError:
raise ImportError(
"The Thai tokenizer requires the PyThaiNLP library: "
"https://github.com/PyThaiNLP/pythainlp"
) from None
self.word_tokenize = word_tokenize
self.vocab = vocab
def __call__(self, text: str) -> Doc:
words = list(self.word_tokenize(text))
spaces = [False] * len(words)
return Doc(self.vocab, words=words, spaces=spaces)
class ThaiDefaults(BaseDefaults):
config = load_config_from_str(DEFAULT_CONFIG)
lex_attr_getters = LEX_ATTRS
stop_words = STOP_WORDS
class Thai(Language):
lang = "th"
Defaults = ThaiDefaults
__all__ = ["Thai"]
| 24.945455 | 69 | 0.68586 | 159 | 1,372 | 5.672956 | 0.36478 | 0.039911 | 0.031042 | 0.037694 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.220845 | 1,372 | 54 | 70 | 25.407407 | 0.843779 | 0 | 0 | 0 | 0 | 0 | 0.131195 | 0.033528 | 0 | 0 | 0 | 0 | 0 | 1 | 0.102564 | false | 0 | 0.230769 | 0.025641 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
12b22d55acd96929800d8872484a4576f43f6f08 | 6,223 | py | Python | cloudrunner_server/plugins/clouds/docker_host.py | ttrifonov/cloudrunner-server | 3b2426c8d9987e78425899010b534afc7734d8d4 | [
"Apache-2.0"
] | 2 | 2016-03-31T08:45:29.000Z | 2021-04-28T15:18:45.000Z | cloudrunner_server/plugins/clouds/docker_host.py | ttrifonov/cloudrunner-server | 3b2426c8d9987e78425899010b534afc7734d8d4 | [
"Apache-2.0"
] | null | null | null | cloudrunner_server/plugins/clouds/docker_host.py | ttrifonov/cloudrunner-server | 3b2426c8d9987e78425899010b534afc7734d8d4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# /*******************************************************
# * Copyright (C) 2013-2014 CloudRunner.io <info@cloudrunner.io>
# *
# * Proprietary and confidential
# * This file is part of CloudRunner Server.
# *
# * CloudRunner Server can not be copied and/or distributed
# * without the express permission of CloudRunner.io
# *******************************************************/
import json
import os
import requests
import tempfile
from cloudrunner import VAR_DIR
from .base import BaseCloudProvider, CR_SERVER
HEADERS = {'Content-Type': 'application/json'}
class Docker(BaseCloudProvider):
def __init__(self, profile, log=None):
super(Docker, self).__init__(profile, log)
prefix = "%s-%s" % (self.profile.owner.org.name, self.profile.id)
self._path = os.path.join(VAR_DIR, "tmp", "creds", prefix)
if ":" in self.profile.username:
self.server_address = self.profile.username
else:
self.server_address = "%s:2376" % self.profile.username
try:
os.makedirs(self._path)
except:
pass
_, self._cert_path = tempfile.mkstemp(dir=self._path,
suffix='pem',
text=True)
_, self._key_path = tempfile.mkstemp(dir=self._path,
suffix='pem',
text=True)
with open(self._cert_path, 'w') as f:
f.write(self.profile.password)
with open(self._key_path, 'w') as f:
f.write(self.profile.arguments)
def _cleanup(self):
os.unlink(self._cert_path)
os.unlink(self._key_path)
def create_machine(self, name, image=None, server=CR_SERVER,
ports=None, privileged=None,
volumes=None, **kwargs):
self.log.info("Registering Docker machine [%s::%s] for [%s] at [%s]" %
(name, image, CR_SERVER, self.server_address))
priv = privileged in ['1', 'true', 'True', True]
# cmd = PROVISION % dict(server=server,
# name=name,
# api_key=self.api_key)
exposed_ports, port_bindings = {}, {}
_ports = [p.strip() for p in ports.split(",") if p.strip()]
for port in _ports:
cont_port, _, host_port = port.partition(":")
exposed = "%s/tcp" % cont_port
exposed_ports[exposed] = {}
if host_port:
host_port = host_port
port_bindings[exposed] = [{
'HostPort': host_port
}]
else:
port_bindings[exposed] = [{
'HostPort': None
}]
volumes_desc, binds = {}, []
_volumes = [v.strip() for v in volumes.split(",") if v.strip()]
for _vol in _volumes:
mnt_host, _, mnt_cont = _vol.partition(":")
if not mnt_cont:
mnt_cont = mnt_host
mnt_host = ''
volumes_desc[mnt_cont] = {}
if mnt_host:
binds.append("%s:%s" % (mnt_host, mnt_cont))
else:
binds.append("%s:%s" % (mnt_cont, mnt_cont))
env = ["SERVER_ID=%s" % CR_SERVER, "ORG_ID=%s" % self.api_key]
create_data = dict(Hostname=name, Image=image, Env=env,
ExposedPorts=exposed_ports,
Volumes=volumes_desc,
Privileged=priv,
Tty=True,
OpenStdin=True,)
# Cmd=[cmd],
# Entrypoint=['/bin/curl'])
create_url = "https://%s/containers/create" % self.server_address
try:
server_ids = []
res = requests.post(create_url, data=json.dumps(create_data),
cert=(self._cert_path,
self._key_path),
headers=HEADERS,
verify=False)
if res.status_code >= 300:
self.log.error("FAILURE %s(%s)" %
(res.status_code, res.content))
return self.FAIL, [], {}
start_data = dict(PortBindings=port_bindings,
Binds=binds,
Privileged=priv,
Detach=False,
Tty=False)
server_id = res.json()['Id']
self.log.info("Started docker instance %s" % server_id)
server_ids.append(server_id)
start_url = "https://%s/containers/%s/start" % (
self.server_address,
server_id)
res = requests.post(start_url, data=json.dumps(start_data),
cert=(self._cert_path,
self._key_path),
headers=HEADERS,
verify=False)
meta = dict(server_address=self.server_address)
except Exception, ex:
self.log.exception(ex)
raise
finally:
self._cleanup()
return self.OK, server_ids, meta
def delete_machine(self, server_ids, **kwargs):
    """Force-delete the given docker containers on the remote daemon.

    Issues ``DELETE /containers/<id>?force=true`` for every id in
    *server_ids* over the daemon's TLS-protected remote API, using the
    client certificate/key configured on this instance.

    Returns ``self.OK`` when every delete succeeded and ``self.FAIL`` if
    any delete came back with an HTTP status >= 300.  Request-level
    exceptions are logged and the loop continues, so one bad container
    does not abort cleanup of the rest.
    """
    ret = self.OK
    for server_id in server_ids:
        try:
            delete_url = "https://%s/containers/%s?force=true" % (
                self.server_address, server_id)
            res = requests.delete(delete_url, cert=(self._cert_path,
                                                    self._key_path),
                                  headers=HEADERS,
                                  verify=False)
            if res.status_code >= 300:
                self.log.error("FAILURE %s(%s)" %
                               (res.status_code, res.content))
                ret = self.FAIL
        # FIX: `except Exception as ex` (PEP 3110) replaces the legacy
        # `except Exception, ex` comma form, which is valid in Python 2.6+
        # and required by Python 3.
        except Exception as ex:
            self.log.error(ex)
    return ret
| 39.138365 | 78 | 0.472602 | 617 | 6,223 | 4.567261 | 0.273906 | 0.031228 | 0.042229 | 0.020227 | 0.221434 | 0.17885 | 0.17885 | 0.1533 | 0.135557 | 0.135557 | 0 | 0.006196 | 0.403503 | 6,223 | 158 | 79 | 39.386076 | 0.752963 | 0.097381 | 0 | 0.28 | 0 | 0 | 0.057857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.016 | 0.048 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12b887c446ea424a4bd8fd55a07bceb06b1c0206 | 1,656 | py | Python | test.py | Tweetsched/tweetsched-publisher | c639670fc9658251a02b8946b34dfae3f3145a72 | [
"MIT"
] | 1 | 2018-08-28T14:04:15.000Z | 2018-08-28T14:04:15.000Z | test.py | Tweetsched/tweetsched-publisher | c639670fc9658251a02b8946b34dfae3f3145a72 | [
"MIT"
] | null | null | null | test.py | Tweetsched/tweetsched-publisher | c639670fc9658251a02b8946b34dfae3f3145a72 | [
"MIT"
] | null | null | null | from base64 import b64encode
from app import app
import unittest
from mock import patch
import os
import json
from twython import Twython
class TestApp(unittest.TestCase):
    """Black-box tests for the tweet-publishing HTTP API."""

    def setUp(self):
        """Create a test client and seed the credentials the app reads."""
        self.app = app.test_client()
        os.environ['SERVICE_KEY'] = 'test-key'
        os.environ['SERVICE_PASS'] = 'test-secret'
        os.environ['APP_KEY'] = 'test-key'
        os.environ['APP_SECRET'] = 'test-secret'
        os.environ['OAUTH_TOKEN'] = 'test-oauth-token'
        os.environ['OAUTH_TOKEN_SECRET'] = 'test-oauth-token-secret'

    def _auth_headers(self):
        """Build the HTTP Basic-auth header from the service credentials."""
        credentials = (os.environ['SERVICE_KEY'] + ':' + os.environ['SERVICE_PASS']).encode('utf-8')
        return {'Authorization': 'Basic ' + b64encode(credentials).decode()}

    @patch('app.Twython.update_status')
    def test_publish_tweet(self, update_status_mock):
        """POSTing a tweet returns 200 and publishes exactly once via Twython."""
        update_status_mock.return_value = True
        payload = json.dumps({'id': 3, 'message': 'test tweet', 'profileId': '1'})
        response = self.app.post('/api/v1/tweets',
                                 data=payload,
                                 content_type='application/json',
                                 headers=self._auth_headers())
        self.assertEqual(response.status_code, 200)
        self.assertEqual(update_status_mock.call_count, 1)
        update_status_mock.assert_called_once()

    def test_404(self):
        """Unknown routes return 404 even with valid credentials."""
        response = self.app.get('/i-am-not-found', headers=self._auth_headers())
        self.assertEqual(response.status_code, 404)
# Allow running this test module directly: `python test.py`.
if __name__ == '__main__':
    unittest.main()
| 35.234043 | 100 | 0.607488 | 198 | 1,656 | 4.883838 | 0.373737 | 0.093071 | 0.099276 | 0.058945 | 0.354705 | 0.304033 | 0.304033 | 0.219235 | 0.219235 | 0.219235 | 0 | 0.018563 | 0.251812 | 1,656 | 46 | 101 | 36 | 0.761905 | 0 | 0 | 0.153846 | 0 | 0 | 0.199879 | 0.028986 | 0 | 0 | 0 | 0 | 0.102564 | 1 | 0.076923 | false | 0.076923 | 0.179487 | 0 | 0.282051 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12b9be88a391697f2894a2c7dcc4147754edbf99 | 1,227 | py | Python | website/models/post.py | LKKTGB/lkkpomia | 0a814ed6d28757e07d6392ca27c914e68f0b3bda | [
"MIT"
] | null | null | null | website/models/post.py | LKKTGB/lkkpomia | 0a814ed6d28757e07d6392ca27c914e68f0b3bda | [
"MIT"
] | 5 | 2020-04-26T09:03:33.000Z | 2022-02-02T13:00:39.000Z | website/models/post.py | LKKTGB/lkkpomia | 0a814ed6d28757e07d6392ca27c914e68f0b3bda | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup
from django.db import models
from django.utils.translation import ugettext_lazy as _
from taggit.managers import TaggableManager
class Post(models.Model):
    """A post whose body is stored as HTML and tagged via django-taggit."""

    title = models.CharField(_('post_title'), max_length=100)
    body = models.TextField(_('post_body'))
    tags = TaggableManager(_('post_tags'), help_text=_('post_tags_help_text'))
    create_time = models.DateTimeField(_('post_create_time'), auto_now_add=True)
    update_time = models.DateTimeField(_('post_update_time'), auto_now=True)

    class Meta:
        verbose_name = _('post')
        verbose_name_plural = _('posts')

    @staticmethod
    def autocomplete_search_fields():
        """Lookup expressions used by admin autocomplete search."""
        return ('id__iexact', 'title__icontains',)

    def __str__(self):
        return self.title

    @property
    def cover_url(self):
        """URL of the first image found in the body, or None when absent."""
        images = BeautifulSoup(self.body, 'html.parser').findAll('img')
        if not images:
            return None
        return images[0]['src']

    @property
    def summary(self):
        """Text of the first non-empty paragraph in the body, or None.

        <br> tags are converted to newlines before the text is extracted.
        """
        parsed = BeautifulSoup(self.body, 'html.parser')
        for line_break in parsed.find_all("br"):
            line_break.replace_with("\n")
        paragraphs = [tag for tag in parsed.findAll('p') if tag.text.strip()]
        if paragraphs:
            return paragraphs[0].text
        return None
| 32.289474 | 80 | 0.667482 | 159 | 1,227 | 4.880503 | 0.490566 | 0.025773 | 0.030928 | 0.041237 | 0.100515 | 0.100515 | 0.100515 | 0 | 0 | 0 | 0 | 0.006198 | 0.211084 | 1,227 | 37 | 81 | 33.162162 | 0.795455 | 0 | 0 | 0.133333 | 0 | 0 | 0.119804 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.133333 | 0.066667 | 0.633333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
12bc9ffc8a5d1fd39d7381b5bb5f4a16fad4749b | 14,579 | py | Python | plugins/modules/nsxt_transport_node_collections.py | madhukark/ansible-for-nsxt | f75c698e24073305a968ce2f70739fee77a14bb2 | [
"BSD-2-Clause"
] | null | null | null | plugins/modules/nsxt_transport_node_collections.py | madhukark/ansible-for-nsxt | f75c698e24073305a968ce2f70739fee77a14bb2 | [
"BSD-2-Clause"
] | null | null | null | plugins/modules/nsxt_transport_node_collections.py | madhukark/ansible-for-nsxt | f75c698e24073305a968ce2f70739fee77a14bb2 | [
"BSD-2-Clause"
] | 1 | 2021-12-03T08:26:09.000Z | 2021-12-03T08:26:09.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 VMware, Inc.
# SPDX-License-Identifier: BSD-2-Clause OR GPL-3.0-only
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Python 2/3 compatible semantics for this module.
from __future__ import absolute_import, division, print_function
__metaclass__ = type

# Maturity/support metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nsxt_transport_node_collections
short_description: Create transport node collection by attaching Transport Node Profile to cluster.
description: When transport node collection is created the hosts which are part
of compute collection will be prepared automatically i.e. NSX Manager
attempts to install the NSX components on hosts. Transport nodes for these
hosts are created using the configuration specified in transport node
profile.
version_added: "2.7"
author: Rahul Raghuvanshi
options:
hostname:
description: Deployed NSX manager hostname.
required: true
type: str
username:
description: The username to authenticate with the NSX manager.
required: true
type: str
password:
description: The password to authenticate with the NSX manager.
required: true
type: str
cluster_name:
description: CLuster Name
required: false
type: str
compute_manager_name:
description: Cluster Manager Name
required: false
type: str
description:
description: Description
required: true
type: str
display_name:
description: Display name
required: true
type: str
resource_type:
description: "Must be set to the value TransportNodeCollection"
required: true
type: str
state:
choices:
- present
- absent
description: "State can be either 'present' or 'absent'.
'present' is used to create or update resource.
'absent' is used to delete resource."
required: true
transport_node_profile_name:
description: Transport Node Profile Names
required: true
type: str
'''
EXAMPLES = '''
- name: Create transport node collection
nsxt_transport_node_collections:
hostname: "{{hostname}}"
username: "{{username}}"
password: "{{password}}"
validate_certs: False
display_name: "TNC1"
resource_type: "TransportNodeCollection"
description: "Transport Node Collections 1"
compute_manager_name: "VC1"
cluster_name: "cl1"
transport_node_profile_name: "TNP1"
state: present
'''
RETURN = '''# '''
import json, time
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.vmware_nsxt import vmware_argument_spec, request
from ansible.module_utils._text import to_native
import ssl
import socket
import hashlib
def get_transport_node_collections_params(args=None):
    """Strip connection/control keys and None-valued entries from *args*.

    Mutates the given dict in place (removing the Ansible connection
    parameters that must not be sent in the NSX request body, then any
    keys whose value is None) and returns the same dict for convenience.
    """
    args_to_remove = ['state', 'username', 'password', 'port', 'hostname', 'validate_certs']
    for key in args_to_remove:
        args.pop(key, None)
    # Snapshot the keys so we can delete while iterating; use `is None`
    # (identity) rather than the original `== None` equality test.
    for key, value in list(args.items()):
        if value is None:
            args.pop(key, None)
    return args
def get_transport_node_collections(module, manager_url, mgr_username, mgr_password, validate_certs):
    """List every transport node collection known to the NSX manager."""
    endpoint_url = manager_url + '/transport-node-collections'
    try:
        _, response = request(endpoint_url,
                              headers=dict(Accept='application/json'),
                              url_username=mgr_username,
                              url_password=mgr_password,
                              validate_certs=validate_certs,
                              ignore_errors=True)
    except Exception as err:
        module.fail_json(msg='Error accessing transport-node-collections. Error [%s]' % (to_native(err)))
    return response
def get_id_from_display_name(module, manager_url, mgr_username, mgr_password, validate_certs, endpoint, display_name, exit_if_not_found=True):
    """Resolve *display_name* to an NSX object id by listing *endpoint*.

    Fails the module when the name is unknown, unless *exit_if_not_found*
    is False, in which case None is returned implicitly.
    """
    try:
        _, listing = request(manager_url + endpoint,
                             headers=dict(Accept='application/json'),
                             url_username=mgr_username,
                             url_password=mgr_password,
                             validate_certs=validate_certs,
                             ignore_errors=True)
    except Exception as err:
        module.fail_json(msg='Error accessing id for display name %s. Error [%s]' % (display_name, to_native(err)))
    for item in listing['results']:
        if 'display_name' in item and item['display_name'] == display_name:
            return item['id']
    if exit_if_not_found:
        module.fail_json(msg='No id exist with display name %s' % display_name)
def get_transport_node_collection_from_display_name(module, manager_url, mgr_username, mgr_password, validate_certs, display_name):
    """Return the collection dict whose display_name matches, else None."""
    all_collections = get_transport_node_collections(module, manager_url, mgr_username, mgr_password, validate_certs)
    for candidate in all_collections['results']:
        if 'display_name' in candidate and candidate['display_name'] == display_name:
            return candidate
    return None
def wait_till_delete(id, module, manager_url, mgr_username, mgr_password, validate_certs):
    # Block until the transport node collection is gone: poll the resource
    # every 10s; the loop only exits when the GET raises an exception.
    # NOTE(review): with ignore_errors=True a 404 may not raise at all --
    # confirm the request() helper's error behavior before relying on this.
    try:
        while True:
            (rc, resp) = request(manager_url+ '/transport-node-collections/%s'% id, headers=dict(Accept='application/json'),
                                 url_username=mgr_username, url_password=mgr_password, validate_certs=validate_certs, ignore_errors=True)
            time.sleep(10)
    except Exception as err:
        # Any failure is treated as "deleted"; short grace period, then return.
        time.sleep(5)
    return
def get_transport_node_profile_id(module, manager_url, mgr_username, mgr_password, validate_certs, transport_node_profile_name):
    """Resolve a transport node profile display name to its NSX id."""
    profile_endpoint = "/transport-node-profiles"
    try:
        return get_id_from_display_name(module, manager_url, mgr_username, mgr_password,
                                        validate_certs, profile_endpoint,
                                        transport_node_profile_name)
    except Exception as err:
        module.fail_json(msg='Error accessing id for display name %s. Error [%s]' % (transport_node_profile_name, to_native(err)))
def get_compute_collection_id(module, manager_url, mgr_username, mgr_password, validate_certs, manager_name, cluster_name):
    """Find the external_id of the cluster's compute collection.

    Matches a compute collection both by its display name (*cluster_name*)
    and by the owning compute manager (*manager_name*), since the same
    cluster name may exist under several vCenters.
    """
    try:
        _, listing = request(manager_url + '/fabric/compute-collections',
                             headers=dict(Accept='application/json'),
                             url_username=mgr_username,
                             url_password=mgr_password,
                             validate_certs=validate_certs,
                             ignore_errors=True)
        compute_manager_id = get_id_from_display_name(module, manager_url, mgr_username, mgr_password, validate_certs,
                                                      "/fabric/compute-managers", manager_name)
    except Exception as err:
        module.fail_json(msg='Error accessing compute collection id for manager %s, cluster %s. Error [%s]' % (manager_name, cluster_name, to_native(err)))
    for collection in listing['results']:
        name_matches = 'display_name' in collection and collection['display_name'] == cluster_name
        if name_matches and collection['origin_id'] == compute_manager_id:
            return collection['external_id']
    module.fail_json(msg='No compute collection id exist with cluster name %s for compute manager %s' % (cluster_name, manager_name))
def update_params_with_id(module, manager_url, mgr_username, mgr_password, validate_certs, transport_node_collection_params):
    """Swap name-based parameters for the corresponding NSX ids, in place.

    Removes compute_manager_name/cluster_name/transport_node_profile_name
    from the dict and inserts compute_collection_id and
    transport_node_profile_id instead; returns the same dict.
    """
    manager_name = transport_node_collection_params.pop('compute_manager_name', None)
    cluster_name = transport_node_collection_params.pop('cluster_name', None)
    transport_node_collection_params['compute_collection_id'] = get_compute_collection_id(
        module, manager_url, mgr_username, mgr_password, validate_certs,
        manager_name, cluster_name)

    profile_name = transport_node_collection_params.pop('transport_node_profile_name', None)
    transport_node_collection_params['transport_node_profile_id'] = get_transport_node_profile_id(
        module, manager_url, mgr_username, mgr_password, validate_certs,
        profile_name)
    return transport_node_collection_params
def check_for_update(module, manager_url, mgr_username, mgr_password, validate_certs, transport_node_collection_with_ids):
    """Return True when an existing collection on the same compute
    collection uses a different transport node profile (i.e. a PUT is
    needed rather than a POST)."""
    existing = get_transport_node_collection_from_display_name(
        module, manager_url, mgr_username, mgr_password, validate_certs,
        transport_node_collection_with_ids['display_name'])
    if existing is None:
        return False
    same_compute_collection = (existing['compute_collection_id'] ==
                               transport_node_collection_with_ids['compute_collection_id'])
    different_profile = (existing['transport_node_profile_id'] !=
                         transport_node_collection_with_ids['transport_node_profile_id'])
    return same_compute_collection and different_profile
def main():
    """Ansible module entry point: create/update (state=present) or delete
    (state=absent) a transport node collection on the NSX manager."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(display_name=dict(required=True, type='str'),
                         description=dict(required=True, type='str'),
                         resource_type=dict(required=True, type='str'),
                         transport_node_profile_name=dict(required=True, type='str'),
                         compute_manager_name=dict(required=False, type='str'),
                         cluster_name=dict(required=False, type='str'),
                         state=dict(required=True, choices=['present', 'absent']))

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    # Copy of module params with connection/control keys and None values removed.
    transport_node_collections_params = get_transport_node_collections_params(module.params.copy())
    state = module.params['state']
    mgr_hostname = module.params['hostname']
    mgr_username = module.params['username']
    mgr_password = module.params['password']
    validate_certs = module.params['validate_certs']
    display_name = module.params['display_name']
    manager_url = 'https://{}/api/v1'.format(mgr_hostname)

    # Look up any existing collection with this display name to decide
    # between create and update.
    transport_node_collections_dict = get_transport_node_collection_from_display_name (module, manager_url, mgr_username, mgr_password, validate_certs, display_name)

    transport_node_collection_id, revision = None, None
    if transport_node_collections_dict:
        transport_node_collection_id = transport_node_collections_dict['id']
        revision = transport_node_collections_dict['_revision']

    if state == 'present':
        # Translate name-based params (manager/cluster/profile) into NSX ids.
        body = update_params_with_id(module, manager_url, mgr_username, mgr_password, validate_certs, transport_node_collections_params)
        updated = check_for_update(module, manager_url, mgr_username, mgr_password, validate_certs, body)
        headers = dict(Accept="application/json")
        headers['Content-Type'] = 'application/json'

        if not updated:
            # add the transport_node_collections
            request_data = json.dumps(transport_node_collections_params)
            if module.check_mode:
                module.exit_json(changed=True, debug_out=str(request_data), id='12345')
            try:
                if transport_node_collection_id:
                    # Same name and same profile already attached: no-op.
                    module.exit_json(changed=False, id=transport_node_collection_id,
                                     message="transport-node-collection with display_name %s already exist on cluster %s." % (module.params['display_name'], module.params['cluster_name']))

                (rc, resp) = request(manager_url+ '/transport-node-collections', data=request_data, headers=headers, method='POST',
                                     url_username=mgr_username, url_password=mgr_password, validate_certs=validate_certs, ignore_errors=True)
            except Exception as err:
                module.fail_json(msg="Failed to add transport_node_collections. Request body [%s]. Error[%s]." % (request_data, to_native(err)))
            module.exit_json(changed=True, id=resp["id"], body= str(resp), message="transport-node-collection created for cluster %s." % module.params['cluster_name'])
        else:
            # Existing collection points at a different profile: update in place.
            if module.check_mode:
                module.exit_json(changed=True, debug_out=str(json.dumps(transport_node_collections_params)), id=transport_node_collection_id)
            transport_node_collections_params['_revision'] = revision # update current revision
            request_data = json.dumps(transport_node_collections_params)
            id = transport_node_collection_id
            try:
                (rc, resp) = request(manager_url+ '/transport-node-collections/%s' % id, data=request_data, headers=headers, method='PUT',
                                     url_username=mgr_username, url_password=mgr_password, validate_certs=validate_certs, ignore_errors=True)
            except Exception as err:
                module.fail_json(msg="Failed to update transport_node_collections with id %s. Request body [%s]. Error[%s]." % (id, request_data, to_native(err)))
            module.exit_json(changed=True, id=resp["id"], body= str(resp), message="transport-node-collection with Compute collection fabric template id %s updated." % id)

    elif state == 'absent':
        # delete the array
        id = transport_node_collection_id
        if id is None:
            module.exit_json(changed=False, msg='No transport-node-collection exist with display_name %s' % display_name)
        if module.check_mode:
            module.exit_json(changed=True, debug_out=str(json.dumps(transport_node_collections_params)), id=id)
        try:
            (rc, resp) = request(manager_url + "/transport-node-collections/%s" % id, method='DELETE',
                                 url_username=mgr_username, url_password=mgr_password, validate_certs=validate_certs)
        except Exception as err:
            module.fail_json(msg="Failed to delete transport-node-collection with name %s. Error[%s]." % (display_name, to_native(err)))
        # Deletion is asynchronous on the manager; block until the resource is gone.
        wait_till_delete(id, module, manager_url, mgr_username, mgr_password, validate_certs)
        module.exit_json(changed=True, id=id, message="transport-node-collection with name %s deleted." % display_name)


if __name__ == '__main__':
    main()
| 52.442446 | 183 | 0.721243 | 1,834 | 14,579 | 5.43566 | 0.153217 | 0.108236 | 0.078443 | 0.060187 | 0.531949 | 0.473267 | 0.414485 | 0.391313 | 0.372254 | 0.36463 | 0 | 0.002201 | 0.189794 | 14,579 | 277 | 184 | 52.631769 | 0.841771 | 0.065574 | 0 | 0.241379 | 0 | 0 | 0.29867 | 0.060557 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043103 | false | 0.12931 | 0.034483 | 0 | 0.12931 | 0.00431 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12c3f8688909dadef43a9224619f1323d1d373b9 | 972 | py | Python | exercicios-Python/ex042.py | pedrosimoes-programmer/exercicios-python | 150de037496d63d76086678d87425a8ccfc74573 | [
"MIT"
] | null | null | null | exercicios-Python/ex042.py | pedrosimoes-programmer/exercicios-python | 150de037496d63d76086678d87425a8ccfc74573 | [
"MIT"
] | null | null | null | exercicios-Python/ex042.py | pedrosimoes-programmer/exercicios-python | 150de037496d63d76086678d87425a8ccfc74573 | [
"MIT"
] | null | null | null | #Refaça o DESAFIO 035 dos triângulos, acrescentando o recurso de mostrar que tipo de triângulo será formado:
#- EQUILÁTERO: todos os lados iguais
#- ISÓSCELES: dois lados iguais, um diferente
#- ESCALENO: todos os lados diferentes
print('-' * 20, 'Programa Analisador de Triângulos', '-' * 20)
seg1 = float(input('Digite o valor do primeiro segmento: '))
seg2 = float(input('Digite o valor do segundo segmento: '))
seg3 = float(input('Digite o valor do terceiro segmento: '))
if seg1 < seg2 + seg3 and seg2 < seg1 + seg3 and seg3 < seg1 + seg2:
if seg1 == seg2 and seg3: # outra possibilidade --> seg1 == seg2 == seg3:
print('Os segmentos PODEM formar um triângulo do tipo EQUILÁTERO!')
elif seg1 != seg2 != seg3 != seg1:
print('Os segmentos acima PODEM formar um triângulo do tipo ESCALENO!')
else:
print('Os segmentos acima PODEM formar um triângulo do tipo ISÓSCELES!')
else:
print('Os segmentos NÃO PODEM formar um triângulo!')
| 54 | 108 | 0.700617 | 136 | 972 | 5.007353 | 0.389706 | 0.058737 | 0.093979 | 0.129222 | 0.290749 | 0.290749 | 0.143906 | 0.143906 | 0.143906 | 0.143906 | 0 | 0.037179 | 0.197531 | 972 | 17 | 109 | 57.176471 | 0.835897 | 0.276749 | 0 | 0.153846 | 0 | 0 | 0.531519 | 0 | 0 | 0 | 0 | 0.058824 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.384615 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12c9169d04a1b953b055c11fb6f8b67fa66071ff | 344 | py | Python | core/jobs/urls.py | InKyrNet/inkyrnet | fdb5c8def9b74049c4b48f2fccf5d52b040a4435 | [
"MIT"
] | null | null | null | core/jobs/urls.py | InKyrNet/inkyrnet | fdb5c8def9b74049c4b48f2fccf5d52b040a4435 | [
"MIT"
] | 4 | 2021-06-04T21:36:18.000Z | 2021-09-22T17:44:09.000Z | core/jobs/urls.py | InKyrNet/inkyrnet | fdb5c8def9b74049c4b48f2fccf5d52b040a4435 | [
"MIT"
] | null | null | null | from django.urls import path
from .views import *
from django_filters.views import FilterView
app_name = 'jobs'
urlpatterns = [
path('', FilterView.as_view(filterset_class=JobFilter,
template_name='jobs/job_list.html'), name='index'),
path('companies/', CompanyListView.as_view(), name='companies'),
]
| 28.666667 | 83 | 0.674419 | 40 | 344 | 5.625 | 0.6 | 0.088889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.197674 | 344 | 11 | 84 | 31.272727 | 0.815217 | 0 | 0 | 0 | 0 | 0 | 0.133721 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
12ca7aec9c936b7e376b5d6d2ed2e6e550f43708 | 8,570 | py | Python | src/rprblender/__init__.py | ralic/RadeonProRenderBlenderAddon | 310c650d4230289ac5d5407cc24a13b4c7ce0a90 | [
"Apache-2.0"
] | 1 | 2021-03-29T05:55:49.000Z | 2021-03-29T05:55:49.000Z | src/rprblender/__init__.py | ralic/RadeonProRenderBlenderAddon | 310c650d4230289ac5d5407cc24a13b4c7ce0a90 | [
"Apache-2.0"
] | 1 | 2021-04-03T09:39:28.000Z | 2021-04-03T09:39:28.000Z | src/rprblender/__init__.py | isabella232/RadeonProRenderBlenderAddon | ff4ede164c1e1e909f182be709422bc8c8878b1c | [
"Apache-2.0"
] | null | null | null | #**********************************************************************
# Copyright 2020 Advanced Micro Devices, Inc
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#********************************************************************
import traceback
import bpy
bl_info = {
"name": "Radeon ProRender",
"author": "AMD",
"version": (3, 1, 0),
"blender": (2, 80, 0),
"location": "Info header, render engine menu",
"description": "Radeon ProRender rendering plugin for Blender 2.8x",
"warning": "",
"tracker_url": "",
"wiki_url": "",
"category": "Render"
}
version_build = ""
from .utils import logging, version_updater
from .utils import install_libs
from .engine.engine import Engine
from . import (
nodes,
properties,
ui,
operators,
material_library,
)
from .engine.render_engine import RenderEngine
from .engine.render_engine_2 import RenderEngine2
from .engine.preview_engine import PreviewEngine
from .engine.viewport_engine import ViewportEngine
from .engine.viewport_engine_2 import ViewportEngine2
from .engine.animation_engine import AnimationEngine, AnimationEngine2
from .engine.render_engine_hybrid import RenderEngine as RenderEngineHybrid
from .engine.viewport_engine_hybrid import ViewportEngine as ViewportEngineHybrid
from .engine.animation_engine_hybrid import AnimationEngine as AnimationEngineHybrid
log = logging.Log(tag='init')
log("Loading RPR addon {}".format(bl_info['version']))
render_engine_cls = {
'FULL': RenderEngine,
'HIGH': RenderEngineHybrid,
'MEDIUM': RenderEngineHybrid,
'LOW': RenderEngineHybrid,
'FULL2': RenderEngine2,
}
animation_engine_cls = {
'FULL': AnimationEngine,
'HIGH': AnimationEngineHybrid,
'MEDIUM': AnimationEngineHybrid,
'LOW': AnimationEngineHybrid,
'FULL2': AnimationEngine2,
}
viewport_engine_cls = {
'FULL': ViewportEngine,
'HIGH': ViewportEngineHybrid,
'MEDIUM': ViewportEngineHybrid,
'LOW': ViewportEngineHybrid,
'FULL2': ViewportEngine2,
}
class RPREngine(bpy.types.RenderEngine):
"""
Main class of Radeon ProRender render engine for Blender v2.80+
"""
bl_idname = "RPR"
bl_label = "Radeon ProRender"
bl_use_preview = True
bl_use_shading_nodes = True
bl_use_shading_nodes_custom = False
bl_info = "Radeon ProRender rendering plugin"
engine: Engine = None
def __del__(self):
if isinstance(self.engine, ViewportEngine):
self.engine.stop_render()
log('__del__', self.as_pointer())
# final render
def update(self, data, depsgraph):
""" Called for final render """
log('update', self.as_pointer())
# TODO: We create for every view layer separate Engine. We should improve this by implementing sync_update()
try:
if self.is_preview:
engine_cls = PreviewEngine
elif self.is_animation:
engine_cls = animation_engine_cls[depsgraph.scene.rpr.render_quality]
else:
engine_cls = render_engine_cls[depsgraph.scene.rpr.render_quality]
self.engine = engine_cls(self)
self.engine.sync(depsgraph)
except Exception as e:
log.error(e, 'EXCEPTION:', traceback.format_exc())
self.error_set(f"ERROR | {e}. Please see log for more details.")
def render(self, depsgraph):
""" Called with final render and preview """
log("render", self.as_pointer())
try:
self.engine.render()
except Exception as e:
log.error(e, 'EXCEPTION:', traceback.format_exc())
self.error_set(f"ERROR | {e}. Please see log for more details.")
# This has to be called in the end of render due to possible memory leak RPRBLND-1635
# Important to call it in this function, not in __del__()
self.engine.stop_render()
# viewport render
def view_update(self, context, depsgraph):
""" Called when data is updated for viewport """
log('view_update', self.as_pointer())
try:
# if there is no engine set, create it and do the initial sync
engine_cls = viewport_engine_cls[depsgraph.scene.rpr.render_quality]
if self.engine and type(self.engine) == engine_cls:
self.engine.sync_update(context, depsgraph)
return
if self.engine:
self.engine.stop_render()
self.engine = engine_cls(self)
self.engine.sync(context, depsgraph)
except Exception as e:
log.error(e, 'EXCEPTION:', traceback.format_exc())
def view_draw(self, context, depsgraph):
""" called when viewport is to be drawn """
log('view_draw', self.as_pointer())
try:
self.engine.draw(context)
except Exception as e:
log.error(e, 'EXCEPTION:', traceback.format_exc())
# view layer AOVs
def update_render_passes(self, render_scene=None, render_layer=None):
"""
Update 'Render Layers' compositor node with active render passes info.
Called by Blender.
"""
aovs = properties.view_layer.RPR_ViewLayerProperites.aovs_info
cryptomatte_aovs = properties.view_layer.RPR_ViewLayerProperites.cryptomatte_aovs_info
scene = render_scene if render_scene else bpy.context.scene
layer = render_layer if render_scene else bpy.context.view_layer
def do_register_pass(aov):
pass_channel = aov['channel']
pass_name = aov['name']
pass_channels_size = len(pass_channel)
# convert from channel to blender type
blender_type = 'VALUE'
if pass_channel in ('RGB', 'RGBA'):
blender_type = 'COLOR'
elif pass_channel in {'XYZ', 'UVA'}:
blender_type = 'VECTOR'
self.register_pass(scene, layer,
pass_name, pass_channels_size, pass_channel, blender_type)
for index, enabled in enumerate(layer.rpr.enable_aovs):
if enabled:
do_register_pass(aovs[index])
if layer.rpr.crytomatte_aov_material:
for i in range(3):
do_register_pass(cryptomatte_aovs[i])
if layer.rpr.crytomatte_aov_object:
for i in range(3,6):
do_register_pass(cryptomatte_aovs[i])
@bpy.app.handlers.persistent
def on_version_update(*args, **kwargs):
""" On scene loading update old RPR data to current version """
log("on_version_update")
addon_version = bl_info['version']
if version_updater.is_scene_from_2_79(addon_version):
version_updater.update_2_79_scene()
@bpy.app.handlers.persistent
def on_save_pre(*args, **kwargs):
""" Handler on saving a blend file (before) """
log("on_save_pre")
# Save current plugin version in scene
bpy.context.scene.rpr.saved_addon_version = bl_info['version']
@bpy.app.handlers.persistent
def on_load_pre(*args, **kwargs):
""" Handler on loading a blend file (before) """
log("on_load_pre")
utils.clear_temp_dir()
def register():
""" Register all addon classes in Blender """
log("register")
install_libs.ensure_boto3()
bpy.utils.register_class(RPREngine)
material_library.register()
properties.register()
operators.register()
nodes.register()
ui.register()
bpy.app.handlers.save_pre.append(on_save_pre)
bpy.app.handlers.load_pre.append(on_load_pre)
bpy.app.handlers.version_update.append(on_version_update)
def unregister():
""" Unregister all addon classes from Blender """
log("unregister")
bpy.app.handlers.version_update.remove(on_version_update)
bpy.app.handlers.load_pre.remove(on_load_pre)
bpy.app.handlers.save_pre.remove(on_save_pre)
ui.unregister()
nodes.unregister()
operators.unregister()
properties.unregister()
material_library.unregister()
bpy.utils.unregister_class(RPREngine)
| 31.391941 | 116 | 0.655076 | 1,030 | 8,570 | 5.265049 | 0.256311 | 0.025816 | 0.023234 | 0.013277 | 0.248018 | 0.174442 | 0.094966 | 0.073391 | 0.059746 | 0.059746 | 0 | 0.006847 | 0.233139 | 8,570 | 272 | 117 | 31.507353 | 0.81832 | 0.192532 | 0 | 0.139535 | 0 | 0 | 0.09004 | 0 | 0 | 0 | 0 | 0.003676 | 0 | 1 | 0.069767 | false | 0.069767 | 0.087209 | 0 | 0.209302 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12cc0f45c792a01e3a5bd5c42c13138e07ace531 | 1,561 | py | Python | plot_metric_err_vs_dim.py | wchen459/design_embeddings_jmd_2016 | 30dfec40b14c81e6cbe1c57efc2abe1a28dbdd5f | [
"MIT"
] | 9 | 2017-07-13T19:17:48.000Z | 2022-03-17T02:19:06.000Z | plot_metric_err_vs_dim.py | wchen459/design_embeddings_jmd_2016 | 30dfec40b14c81e6cbe1c57efc2abe1a28dbdd5f | [
"MIT"
] | null | null | null | plot_metric_err_vs_dim.py | wchen459/design_embeddings_jmd_2016 | 30dfec40b14c81e6cbe1c57efc2abe1a28dbdd5f | [
"MIT"
] | 2 | 2018-08-31T22:46:03.000Z | 2020-06-19T16:17:38.000Z | """
Plots reconstruction error vs semantic space dimensionality
Usage: python metric_err_vs_dim.py
Author(s): Wei Chen (wchen459@umd.edu)
"""
import matplotlib.pyplot as plt
import numpy as np
plt.rc("font", size=18)
examples = ['glass', 'sf_linear', 'sf_s_nonlinear', 'sf_v_nonlinear']
titles = {'glass': 'Glass',
'sf_linear': 'Superformula (linear)',
'sf_s_nonlinear': 'Superformula (slightly nonlinear)',
'sf_v_nonlinear': 'Superformula (very nonlinear)'}
n = len(examples)
x = range(1, 6)
for i in range(n):
plt.figure()
plt.xticks(np.arange(min(x), max(x)+1, dtype=np.int))
plt.xlabel('Semantic space dimensionality')
plt.ylabel('Reconstruction error')
plt.xlim(0.5, 5.5)
errs = np.zeros((3,5))
for j in x:
# Read reconstruction errors in rec_err.txt
txtfile = open('./results/'+examples[i]+'/n_samples = 115/n_control_points = 20/semantic_dim = '
+str(j)+'/rec_err.txt', 'r')
k = 0
for line in txtfile:
errs[k, j-1] = float(line)
k += 1
line_pca, = plt.plot(x, errs[0], '-ob', label='PCA')
line_kpca, = plt.plot(x, errs[1], '-vg', label='Kernel PCA')
line_ae, = plt.plot(x, errs[2], '-sr', label='Autoencoder')
plt.legend(handles=[line_pca, line_kpca, line_ae], fontsize=16)
plt.title(titles[examples[i]])
fig_name = 'err_vs_dim_'+examples[i]+'.png'
plt.tight_layout()
plt.savefig('./results/'+fig_name, dpi=300)
print fig_name+' saved!'
| 31.22 | 104 | 0.606022 | 224 | 1,561 | 4.080357 | 0.486607 | 0.02954 | 0.026258 | 0.039387 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024917 | 0.2287 | 1,561 | 49 | 105 | 31.857143 | 0.734219 | 0.026265 | 0 | 0 | 0 | 0 | 0.266909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.060606 | null | null | 0.030303 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12ccbde3bf71864760496c1e1f0963111fba9314 | 638 | py | Python | test/environments/instances/8x8/gen.py | Multi-Agent-Research-Group/hog2 | 544d7c0e933fd69025944a0a3abcf9a40e59f0be | [
"MIT"
] | 5 | 2020-08-03T09:43:26.000Z | 2022-01-11T08:28:30.000Z | test/environments/instances/8x8/gen.py | Multi-Agent-Research-Group/hog2 | 544d7c0e933fd69025944a0a3abcf9a40e59f0be | [
"MIT"
] | null | null | null | test/environments/instances/8x8/gen.py | Multi-Agent-Research-Group/hog2 | 544d7c0e933fd69025944a0a3abcf9a40e59f0be | [
"MIT"
] | 7 | 2017-07-31T13:01:28.000Z | 2021-05-16T10:15:49.000Z | #!/usr/bin/python
import random
import os
import errno
for i in range(100):
s=set()
g=set()
while len(s) < 50:
s.add((random.randint(0,7),random.randint(0,7)))
while len(g) < 50:
g.add((random.randint(0,7),random.randint(0,7)))
start=list(s)
goal=list(g)
for size in range(21,50):
if not os.path.exists("./%d"%size):
try:
os.makedirs("./%d"%size)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
with open("./%d/%d.csv"%(size,i), "w") as f:
for j in range(size):
f.write("%d,%d %d,%d\n"%(start[j][0],start[j][1],goal[j][0],goal[j][1]))
| 22.785714 | 80 | 0.548589 | 114 | 638 | 3.070175 | 0.429825 | 0.148571 | 0.16 | 0.171429 | 0.188571 | 0.188571 | 0.188571 | 0.188571 | 0.188571 | 0 | 0 | 0.046748 | 0.22884 | 638 | 27 | 81 | 23.62963 | 0.664634 | 0.025078 | 0 | 0 | 0 | 0 | 0.05314 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.136364 | 0 | 0.136364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12d2af7e340f2c0b16013db0e187eff0a983f2ec | 14,028 | py | Python | stashboard/handlers/site.py | kelnos/stashboard | 5f92ed14b8cf17f4b1be8441005b187e97ca74b8 | [
"MIT"
] | 1 | 2015-02-24T23:30:06.000Z | 2015-02-24T23:30:06.000Z | stashboard/handlers/site.py | ratchetio/stashboard | f8e4e6d175f48701a154e4baca10de2a4a577ab4 | [
"MIT"
] | null | null | null | stashboard/handlers/site.py | ratchetio/stashboard | f8e4e6d175f48701a154e4baca10de2a4a577ab4 | [
"MIT"
] | null | null | null | # The MIT License
#
# Copyright (c) 2008 William T. Katz
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
__author__ = 'Kyle Conroy'
import datetime
import calendar
import logging
import os
import re
import string
import urllib
import urlparse
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext import db
from datetime import date, timedelta
from django.conf import settings
from django.template.loader import render_to_string
from django.utils import simplejson as json
from time import mktime
from models import List, Status, Service, Event, Profile
import xml.etree.ElementTree as et
from utils import authorized
from wsgiref.handlers import format_date_time
def default_template_data():
    """Build the template-value dict shared by every page.

    Always includes the site title, report url and twitter handle from
    settings; when a user is logged in, their user object, a logout url
    and an admin flag are added as well.
    @return: dict of template values
    """
    values = {
        "title": settings.SITE_NAME,
        "report_url": settings.REPORT_URL,
        "twitter_handle": settings.TWITTER_HANDLE,
    }

    current_user = users.get_current_user()
    if current_user is not None:
        values.update({
            "user": current_user,
            "logout_url": users.create_logout_url("/"),
            "admin": users.is_current_user_admin(),
        })
    return values
def get_past_days(num):
    """Return the `num` dates preceding today, newest first.

    @param num: number of past days to return
    @return: list of datetime.date, [yesterday, day before, ...]
    """
    # Compute today once so all entries are consistent even if midnight
    # passes mid-call; also avoid shadowing the module-level ``date`` name
    # imported from datetime (the original bound a local ``date``).
    today = datetime.date.today()
    return [today - datetime.timedelta(days=offset)
            for offset in range(1, num + 1)]
class BaseHandler(webapp.RequestHandler):
    """Common base for Stashboard handlers: template rendering plus a
    simple memcache-backed page cache. Subclasses that use retrieve()
    must implement a data() method returning the cacheable payload.
    """

    def render(self, template_values, filename):
        """Render the named Django template and write it to the response.
        @param template_values: dict passed to the template
        @param filename: template file name
        """
        self.response.out.write(render_to_string(filename, template_values))

    def retrieve(self, key):
        """ Helper for loading data from memcache """
        # "__all_pages__" is a registry of every cached page key, kept so
        # all cached pages can be invalidated as a group elsewhere.
        all_pages = memcache.get("__all_pages__")
        if all_pages is None:
            all_pages = {}
        # Only trust the cached value if the key is tracked in the registry
        item = memcache.get(key) if all_pages.has_key(key) else None
        if item is not None:
            return item
        else:
            # Cache miss: compute via the subclass's data() and store it,
            # then record the key in the registry.
            item = self.data()
            if not memcache.set(key, item):
                logging.error("Memcache set failed on %s" % key)
            else:
                all_pages[key] = 1
                if not memcache.set("__all_pages__", all_pages):
                    logging.error("Memcache set failed on __all_pages__")
            return item

    def not_found(self):
        """Respond with status 404 and render the generic 404 page."""
        self.error(404)
        self.render(default_template_data(), "404.html")
class NotFoundHandler(BaseHandler):
    """Catch-all handler for unknown URLs: responds 404 with the
    generic error page."""

    def get(self):
        self.error(404)
        self.render(default_template_data(), "404.html")
class UnauthorizedHandler(BaseHandler):
    """Handler for unauthorized requests: responds 403 with the generic
    error page.

    Subclasses BaseHandler rather than webapp.RequestHandler: render()
    is defined on BaseHandler, so the original base class made every
    request raise AttributeError instead of rendering the page.
    BaseHandler itself extends webapp.RequestHandler, so callers and
    the URL routing table are unaffected.
    """

    def get(self):
        self.error(403)
        self.render(default_template_data(), "404.html")
class RootHandler(BaseHandler):
    """Front page: shows every service with its current status and a
    five-day history strip."""

    def data(self):
        """Build the cacheable front-page payload (see BaseHandler.retrieve)."""
        services = []
        default_status = Status.get_default()
        for service in Service.all().order("list").order("name").fetch(100):
            # Current status comes from the latest open event, if any
            event = service.current_event()
            if event is not None:
                status = event.status
            else:
                status = default_status

            # Look one day ahead so today's history bucket is included
            today = date.today() + timedelta(days=1)
            current, = service.history(1, default_status, start=today)
            # "has_issues": informational history exists while the service
            # still reports the default (OK) status
            has_issues = (current["information"] and
                          status.key() == default_status.key())

            service_dict = {
                "slug": service.slug,
                "name": service.name,
                "url": service.url(),
                "status": status,
                "has_issues": has_issues,
                "history": service.history(5, default_status),
            }
            services.append(service_dict)

        return {
            "days": get_past_days(5),
            "statuses": Status.all().fetch(100),
            "services": services,
        }

    def get(self):
        td = default_template_data()
        # Served through the memcache layer; data() is only called on a miss
        td.update(self.retrieve("frontpage"))
        #td.update(self.data())
        self.render(td, 'index.html')
class ListHandler(BaseHandler):
    """Front-page view restricted to the services of a single list."""

    # List entity for the current request; set in get() before retrieve()
    # triggers data()
    list = None

    def data(self):
        """Build the cacheable payload for the selected list."""
        services = []
        default_status = Status.get_default()
        query = Service.all().filter("list =", self.list).order("name")
        for service in query.fetch(100):
            # Current status comes from the latest open event, if any
            event = service.current_event()
            if event is not None:
                status = event.status
            else:
                status = default_status

            # Look one day ahead so today's history bucket is included
            today = date.today() + timedelta(days=1)
            current, = service.history(1, default_status, start=today)
            has_issues = (current["information"] and
                          status.key() == default_status.key())

            service_dict = {
                "slug": service.slug,
                "name": service.name,
                "url": service.url(),
                "status": status,
                "has_issues": has_issues,
                "history": service.history(5, default_status),
            }
            services.append(service_dict)

        return {
            "days": get_past_days(5),
            "statuses": Status.all().fetch(100),
            "services": services,
        }

    def get(self, list_slug):
        self.list = List.get_by_slug(list_slug)
        if self.list is None:
            self.not_found()
            return

        td = default_template_data()
        # Cache key is namespaced by the list slug
        td.update(self.retrieve("list"+list_slug))
        #td.update(self.data())
        self.render(td, 'index.html')
class ListListHandler(BaseHandler):
    """Front-page view filtered by multiple lists ('filter' URL params)
    and optionally by status slugs ('status' URL params)."""

    # Request-scoped filters; populated in get() before retrieve()
    lists = []
    statuses = []

    def data(self):
        """Build the cacheable payload for the selected lists/statuses."""
        services = []
        default_status = Status.get_default()
        # Resolve slug strings to List entities, dropping unknown slugs
        lists = []
        for list in self.lists:
            l = List.get_by_slug(list)
            if l is not None:
                lists.append(l)
        for service in Service.all().filter("list IN", lists).order("name").fetch(100):
            event = service.current_event()
            if event is not None:
                status = event.status
            else:
                status = default_status

            # Apply the optional status filter
            if len(self.statuses) and not status.slug in self.statuses: continue

            # Look one day ahead so today's history bucket is included
            today = date.today() + timedelta(days=1)
            current, = service.history(1, default_status, start=today)
            has_issues = (current["information"] and
                          status.key() == default_status.key())

            service_dict = {
                "slug": service.slug,
                "name": service.name,
                "url": service.url(),
                "status": status,
                "has_issues": has_issues,
                "history": service.history(5, default_status),
            }
            services.append(service_dict)

        return {
            "days": get_past_days(5),
            "statuses": Status.all().fetch(100),
            "services": services,
        }

    def get(self):
        self.lists = self.request.get_all('filter')
        self.lists.sort()
        self.statuses = self.request.get_all('status')
        self.statuses.sort()

        td = default_template_data()
        # Sorting above makes the cache key order-independent
        td.update(self.retrieve("list"+"_".join(self.statuses)+"_".join(self.lists)))
        #td.update(self.data())
        self.render(td, 'index.html')
class ListSummaryHandler(BaseHandler):
    """Summary page: for each service list, shows the "highest" current
    status (by status name) among the list's services."""

    def data(self):
        """Build the cacheable per-list summary payload."""
        lists = {}
        default_status = Status.get_default()
        for service in Service.all().order("list").fetch(100):
            if service.list is None:
                # Services without a list cannot contribute to a per-list
                # summary. The original mis-parenthesized condition
                # (`a and b or c`) dereferenced service.list.slug even
                # when list was None, raising AttributeError.
                continue

            event = service.current_event()
            if event is not None:
                status = event.status
            else:
                status = default_status

            slug = service.list.slug
            # Record the list on first sight, or replace the stored entry
            # when this service's status name sorts higher.
            if slug not in lists or lists[slug]["status"].name < status.name:
                lists[slug] = {"list": service.list, "status": status}

        return {"lists": lists.items()}

    def get(self):
        td = default_template_data()
        td.update(self.retrieve("summary"))
        self.render(td, 'summary.html')
class ServiceHandler(BaseHandler):
    """Displays the event history for one service, optionally restricted
    to a year, month, or day parsed from the URL."""

    def get(self, service_slug, year=None, month=None, day=None):
        service = Service.get_by_slug(service_slug)
        if not service:
            self.not_found()
            return

        try:
            # Narrow the window to the most specific date part provided
            if day:
                start_date = date(int(year), int(month), int(day))
                end_date = start_date + timedelta(days=1)
            elif month:
                start_date = date(int(year), int(month), 1)
                days = calendar.monthrange(start_date.year,
                                           start_date.month)[1]
                end_date = start_date + timedelta(days=days)
            elif year:
                start_date = date(int(year), 1, 1)
                end_date = start_date + timedelta(days=365)
            else:
                start_date = None
                end_date = None
        except ValueError:
            # Invalid date components in the URL. not_found() takes no
            # arguments; the original called not_found(404), which raised
            # TypeError instead of rendering the 404 page.
            self.not_found()
            return

        events = service.events

        if start_date and end_date:
            events.filter('start >= ', start_date).filter('start <', end_date)

        td = default_template_data()
        td["service"] = service
        td["events"] = events.order("-start").fetch(500)
        self.render(td, 'service.html')
class BaseDocumentationHandler(BaseHandler):
    """Renders the public documentation overview page."""

    def get(self):
        td = default_template_data()
        td["selected"] = "overview"
        self.render(td, 'publicdoc/index.html')
class DocumentationHandler(BaseHandler):
    """Serves individual public documentation pages by slug."""

    # Whitelist of valid documentation page slugs; the template is
    # resolved as publicdoc/<slug>.html. (The original listed
    # "status-images" twice; membership testing is unaffected, but the
    # duplicate was dead weight.)
    pages = [
        "events",
        "services",
        "service-lists",
        "status-images",
        "statuses",
    ]

    def get(self, page):
        td = default_template_data()
        if page not in self.pages:
            # Unknown slug: render the 404 template
            self.render({}, '404.html')
            return
        td["selected"] = page
        self.render(td, "publicdoc/%s.html" % page)
class CredentialsRedirectHandler(BaseHandler):
    """Redirects the legacy credentials URL to /admin/credentials."""

    def get(self):
        self.redirect("/admin/credentials")
class RSSHandler(BaseHandler):
    """ Feed of the last settings.RSS_NUM_EVENTS_TO_FETCH events """

    def get(self):
        self.response.headers['Content-Type'] = "application/rss+xml; charset=utf-8"
        host = self.request.headers.get('host', 'nohost')
        base_url = self.request.scheme + "://" + host

        # NOTE(review): 'events' is assigned but never used below
        events = []
        query = Event.all().order("-start")

        # Filter query by requested services, if specified in the 'service' URL parameter.
        service_list = []
        for service_arg in self.request.get_all('services'):
            # Each parameter may itself be a comma-separated list of slugs
            service_list.extend(service_arg.split(','))
        service_list = map(lambda serv_slug: Service.get_by_slug(serv_slug), service_list)
        # filter out any non-existent services
        service_list = filter(lambda service: not service is None, service_list)

        # Build a human-readable description of which services are included
        service_string = 'all services'
        if len(service_list) > 0:
            query.filter('service IN', service_list)
            if len(service_list) == 1:
                service_string = 'the %s service' % service_list[0].name
            elif len(service_list) == 2:
                service_string = 'the %s and %s services' % (service_list[0].name, service_list[1].name)
            else:
                service_string = 'the %s, and %s services' % (', '.join([service.name for service in service_list[:-1]]), service_list[-1].name)

        # Create the root 'rss' element
        rss_xml = et.Element('rss')
        rss_xml.set('version', '2.0')

        # Create the channel element and its metadata elements
        channel = et.SubElement(rss_xml, 'channel')
        title = et.SubElement(channel, 'title')
        title.text = '%s Service Events' % settings.SITE_NAME
        description = et.SubElement(channel, 'description')
        description.text = 'This feed shows the last %d events on %s on %s.' % (settings.RSS_NUM_EVENTS_TO_FETCH, service_string, settings.SITE_NAME)
        link = et.SubElement(channel, 'link')
        link.text = base_url

        # Create each of the feed events.
        # Maps each RSS item tag to a function producing its text from an
        # Event entity (Python 2 tuple-parameter lambda syntax).
        item_subelements = {
            'title': lambda(event): '[%s - %s] %s' % (event.service.name, event.status.name, unicode(event.message)),
            'description': lambda(event): '%s' % unicode(event.message),
            'link': lambda(event): '%s/services/%s' % (base_url, event.service.slug),
            'category': lambda(event): event.service.name,
            'pubDate': lambda(event): format_date_time(mktime(event.start.timetuple())),
            'guid': lambda(event): '%s/api/v1/services/%s/events/%s' % (base_url, event.service.slug, unicode(event.key()))
        }
        for event in query.fetch(settings.RSS_NUM_EVENTS_TO_FETCH):
            item = et.SubElement(channel, 'item')
            for tag, text_func in item_subelements.iteritems():
                subelement = et.SubElement(item, tag)
                subelement.text = text_func(event)

        # Emit the XML declaration manually, then the serialized tree
        self.response.out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        self.response.out.write(et.tostring(rss_xml))
| 32.852459 | 149 | 0.590961 | 1,644 | 14,028 | 4.91545 | 0.189173 | 0.027224 | 0.025863 | 0.018191 | 0.350699 | 0.31704 | 0.289816 | 0.264819 | 0.255661 | 0.230293 | 0 | 0.009108 | 0.295623 | 14,028 | 426 | 150 | 32.929577 | 0.808724 | 0.09923 | 0 | 0.390728 | 0 | 0 | 0.087875 | 0.002481 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.069536 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12d68f272974ae7982471fbca3af702e552c3c1f | 597 | py | Python | ejercicios_python/Clase05/practica5-9.py | hcgalvan/UNSAM-Python-programming | c4b3f5ae0702dc03ea6010cb8051c7eec6aef42f | [
"MIT"
] | null | null | null | ejercicios_python/Clase05/practica5-9.py | hcgalvan/UNSAM-Python-programming | c4b3f5ae0702dc03ea6010cb8051c7eec6aef42f | [
"MIT"
] | null | null | null | ejercicios_python/Clase05/practica5-9.py | hcgalvan/UNSAM-Python-programming | c4b3f5ae0702dc03ea6010cb8051c7eec6aef42f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 15 08:32:03 2021
@author: User
"""
import numpy as np
import matplotlib.pyplot as plt
a = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
print(a)
print(a[0])
print(a.ndim) #te dice la cantidad de ejes (o dimensiones) del arreglo
print(a.shape) #Te va a dar una tupla de enteros que indican la cantidad de elementos en cada eje.
print(a.size)
#%%
vec_fila = a[np.newaxis, :]
print(vec_fila.shape, a.shape)
#%%
print(a.sum())
print(a.min())
print(a.max())
#%%
print(a)
print(a.max(axis=1))
print(a.max(axis=0))
#%%
print(np.random.random(3)) | 22.111111 | 98 | 0.649916 | 116 | 597 | 3.327586 | 0.586207 | 0.170984 | 0.069948 | 0.062176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062868 | 0.147404 | 597 | 27 | 99 | 22.111111 | 0.695481 | 0.365159 | 0 | 0.117647 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.117647 | 0 | 0.117647 | 0.764706 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
12d99705dd6d38a5113e0f5059a5a16ef3ce2532 | 231 | py | Python | LeetCode/Product and Sum/Subtract_Product_And_Sum.py | GSri30/Competetive_programming | 0dc1681500a80b6f0979d0dc9f749357ee07bcb8 | [
"MIT"
] | 22 | 2020-01-03T17:32:00.000Z | 2021-11-07T09:31:44.000Z | LeetCode/Product and Sum/Subtract_Product_And_Sum.py | GSri30/Competetive_programming | 0dc1681500a80b6f0979d0dc9f749357ee07bcb8 | [
"MIT"
] | 10 | 2020-09-30T09:41:18.000Z | 2020-10-11T11:25:09.000Z | LeetCode/Product and Sum/Subtract_Product_And_Sum.py | GSri30/Competetive_programming | 0dc1681500a80b6f0979d0dc9f749357ee07bcb8 | [
"MIT"
] | 25 | 2019-10-14T19:25:01.000Z | 2021-05-26T08:12:20.000Z | class Solution:
def subtractProductAndSum(self, n: int) -> int:
x = n
add = 0
mul = 1
while x > 0 :
add += x%10
mul *= x%10
x = x//10
return mul - add
| 21 | 51 | 0.402597 | 29 | 231 | 3.206897 | 0.517241 | 0.096774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.077586 | 0.497836 | 231 | 10 | 52 | 23.1 | 0.724138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.3 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12dbd5bf3d381ee625187e0ae26efd79aef7f23a | 1,128 | py | Python | test/office_schema.py | chrismaille/marshmallow-pynamodb | 1e799041ff1053a6aa67ce72729e7262cb0f746f | [
"MIT"
] | 3 | 2020-05-17T15:04:27.000Z | 2021-08-12T14:27:15.000Z | test/office_schema.py | chrismaille/marshmallow-pynamodb | 1e799041ff1053a6aa67ce72729e7262cb0f746f | [
"MIT"
] | 2 | 2020-05-06T00:11:49.000Z | 2022-02-23T11:45:54.000Z | test/office_schema.py | chrismaille/marshmallow-pynamodb | 1e799041ff1053a6aa67ce72729e7262cb0f746f | [
"MIT"
] | 1 | 2020-04-30T19:34:22.000Z | 2020-04-30T19:34:22.000Z | from test.office_model import Headquarters, Office
from marshmallow import fields
from pynamodb.attributes import DiscriminatorAttribute
from marshmallow_pynamodb import ModelSchema
class OfficeSchema(ModelSchema):
    """Office Schema for the PynamoDB Office Model.

    We override the PynamoDB NumberSetAttribute and UnicodeSetAttribute
    fields with marshmallow List fields to maintain list order (set
    attributes are unordered).
    """

    numbers = fields.List(fields.Integer)
    departments = fields.List(fields.String)
    security_number = fields.Str(allow_none=True)
    # Discriminator used by PynamoDB for single-table model inheritance
    cls = DiscriminatorAttribute()

    class Meta:
        """Schema Model Meta Class."""

        model = Office
class HQSchema(OfficeSchema):
    """Model Schema with parent-Schema field introspection.

    Fields are introspected from parent marshmallow ModelSchemas
    (here: OfficeSchema).
    """

    class Meta:
        model = Headquarters
class HeadquartersSchema(ModelSchema):
    """Model Schema with parent-Model field introspection.

    Fields are introspected from parent PynamoDB Models
    (here: the Office Model via Headquarters).
    """

    class Meta:
        model = Headquarters
| 22.117647 | 63 | 0.721631 | 117 | 1,128 | 6.923077 | 0.42735 | 0.040741 | 0.039506 | 0.051852 | 0.123457 | 0.123457 | 0.123457 | 0 | 0 | 0 | 0 | 0 | 0.212766 | 1,128 | 50 | 64 | 22.56 | 0.912162 | 0.389184 | 0 | 0.294118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.235294 | 0 | 0.823529 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
12ddf9c1d17cbd9db7aea277570f0278393c93a6 | 1,599 | py | Python | energy_demand/initalisations/initialisations.py | willu47/energy_demand | 59a2712f353f47e3dc237479cc6cc46666b7d0f1 | [
"MIT"
] | null | null | null | energy_demand/initalisations/initialisations.py | willu47/energy_demand | 59a2712f353f47e3dc237479cc6cc46666b7d0f1 | [
"MIT"
] | null | null | null | energy_demand/initalisations/initialisations.py | willu47/energy_demand | 59a2712f353f47e3dc237479cc6cc46666b7d0f1 | [
"MIT"
] | null | null | null | """Helper initialising functions
"""
#pylint: disable=I0011, C0321, C0301, C0103, C0325, R0902, R0913, no-member, E0213
def init_fuel_tech_p_by(all_enduses_with_fuels, nr_of_fueltypes):
    """Helper function to define stocks for all enduse and fueltype

    Parameters
    ----------
    all_enduses_with_fuels : dict
        Provided fuels (only the keys, the enduse names, are used)
    nr_of_fueltypes : int
        Nr of fueltypes

    Returns
    -------
    fuel_tech_p_by : dict
        Nested dict {enduse: {fueltype_nr: {}}} of independent empty dicts
    """
    fuel_tech_p_by = {}
    for enduse in all_enduses_with_fuels:
        # BUGFIX: dict.fromkeys(range(n), {}) makes every fueltype key
        # alias ONE shared dict object, so a later write for one fueltype
        # appeared under all of them. Build an independent dict per key.
        fuel_tech_p_by[enduse] = {fueltype: {} for fueltype in range(nr_of_fueltypes)}
    return fuel_tech_p_by
def dict_zero(first_level_keys):
    """Build a one-level dictionary mapping every key to zero.

    Parameters
    ----------
    first_level_keys : list
        First level data

    Returns
    -------
    dict
        Dictionary with each input key initialised to 0
    """
    return {key: 0 for key in first_level_keys}
def service_type_tech_by_p(lu_fueltypes, fuel_tech_p_by):
    """Initialise the per-fueltype technology service shares with zeros.

    Parameters
    ----------
    lu_fueltypes : dict
        Look-up dictionary (values are fueltype integers)
    fuel_tech_p_by : dict
        Fuel fraction per technology for base year

    Returns
    -------
    dict
        {fueltype_int: {technology: 0}} for every fueltype in the look-up
    """
    return {fueltype_int: dict.fromkeys(fuel_tech_p_by[fueltype_int].keys(), 0)
            for fueltype_int in lu_fueltypes.values()}
| 24.984375 | 104 | 0.676048 | 218 | 1,599 | 4.59633 | 0.330275 | 0.063872 | 0.071856 | 0.087824 | 0.137725 | 0.093812 | 0 | 0 | 0 | 0 | 0 | 0.027687 | 0.23202 | 1,599 | 63 | 105 | 25.380952 | 0.788274 | 0.497186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0 | 0 | 0.461538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12e000a4e8578ea58e111e55e0187884ea14b784 | 26,842 | py | Python | Lib/site-packages/wx-3.0-msw/wx/tools/Editra/src/util.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | [
"bzip2-1.0.6"
] | 5 | 2019-03-11T14:30:31.000Z | 2021-12-04T14:11:54.000Z | Lib/site-packages/wx-3.0-msw/wx/tools/Editra/src/util.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | [
"bzip2-1.0.6"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | Lib/site-packages/wx-3.0-msw/wx/tools/Editra/src/util.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | [
"bzip2-1.0.6"
] | 2 | 2019-12-02T01:39:10.000Z | 2021-02-13T22:41:00.000Z | ###############################################################################
# Name: util.py #
# Purpose: Misc utility functions used through out Editra #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
This file contains various helper functions and utilities that the program uses.
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: util.py 72623 2012-10-06 19:33:06Z CJP $"
__revision__ = "$Revision: 72623 $"
#--------------------------------------------------------------------------#
# Imports
import os
import sys
import mimetypes
import encodings
import codecs
import urllib2
import wx
# Editra Libraries
import ed_glob
import ed_event
import ed_crypt
import dev_tool
import syntax.syntax as syntax
import syntax.synglob as synglob
import ebmlib
_ = wx.GetTranslation
#--------------------------------------------------------------------------#
class DropTargetFT(wx.PyDropTarget):
    """Drop target capable of accepting dropped files and text
    @todo: has some issues with the clipboard on windows under certain
           conditions. They are not fatal but need fixing.

    """
    def __init__(self, window, textcallback=None, filecallback=None):
        """Initializes the Drop target
        @param window: window to receive drop objects
        @keyword textcallback: Callback for when text is dropped
        @keyword filecallback: Callback for when file(s) are dropped

        """
        super(DropTargetFT, self).__init__()

        # Attributes
        self.window = window
        # _data bundles the composite data object, its text/file members,
        # and the optional callbacks under short keys
        self._data = dict(data=None, fdata=None, tdata=None,
                          tcallb=textcallback, fcallb=filecallback)
        self._tmp = None    # drag image shown while dragging text
        self._lastp = None  # last drag position (for image redraw)

        # Setup
        self.InitObjects()

    def CreateDragString(self, txt):
        """Creates a bitmap of the text that is being dragged
        @param txt: text being dragged
        @todo: possibly set colors to match highlighting of text
        @todo: generalize this to be usable by other widgets besides stc

        """
        # Only StyledTextCtrl windows are supported for the drag image
        if not isinstance(self.window, wx.stc.StyledTextCtrl):
            return

        stc = self.window
        txt = txt.split(stc.GetEOLChar())
        # Find the widest line to size the bitmap
        longest = (0, 0)
        for line in txt:
            ext = stc.GetTextExtent(line)
            if ext[0] > longest[0]:
                longest = ext

        # One draw position per line, stacked vertically
        cords = [ (0, x * longest[1]) for x in range(len(txt)) ]
        try:
            mdc = wx.MemoryDC(wx.EmptyBitmap(longest[0] + 5,
                                             longest[1] * len(txt), 32))
            mdc.SetBackgroundMode(wx.TRANSPARENT)
            mdc.SetTextForeground(stc.GetDefaultForeColour())
            mdc.SetFont(stc.GetDefaultFont())
            mdc.DrawTextList(txt, cords)
            self._tmp = wx.DragImage(mdc.GetAsBitmap())
        except wx.PyAssertionError, msg:
            Log("[droptargetft][err] %s" % str(msg))

    def InitObjects(self):
        """Initializes the text and file data objects
        @postcondition: all data objects are initialized

        """
        # Composite object accepts either text (preferred) or files
        self._data['data'] = wx.DataObjectComposite()
        self._data['tdata'] = wx.TextDataObject()
        self._data['fdata'] = wx.FileDataObject()
        self._data['data'].Add(self._data['tdata'], True)
        self._data['data'].Add(self._data['fdata'], False)
        self.SetDataObject(self._data['data'])

    def OnEnter(self, x_cord, y_cord, drag_result):
        """Called when a drag starts
        @param x_cord: x cord of enter point
        @param y_cord: y cord of enter point
        @param drag_result: wxDrag value
        @return: result of drop object entering window

        """
        # GetData seems to happen automatically on msw, calling it again
        # causes this to fail the first time.
        if wx.Platform in ['__WXGTK__', '__WXMSW__']:
            return wx.DragCopy

        if wx.Platform == '__WXMAC__':
            try:
                self.GetData()
            except wx.PyAssertionError:
                return wx.DragError

        self._lastp = (x_cord, y_cord)
        files = self._data['fdata'].GetFilenames()
        text = self._data['tdata'].GetText()

        if len(files):
            # Files: show the copy cursor instead of a drag image
            self.window.SetCursor(wx.StockCursor(wx.CURSOR_COPY_ARROW))
        else:
            self.CreateDragString(text)

        return drag_result

    def OnDrop(self, x_cord=0, y_cord=0):
        """Gets the drop cords
        @keyword x_cord: x cord of drop object
        @keyword y_cord: y cord of drop object
        @todo: implement snapback when drop is out of range

        """
        # Discard the drag image and cached position
        self._tmp = None
        self._lastp = None
        return True

    def OnDragOver(self, x_cord, y_cord, drag_result):
        """Called when the cursor is moved during a drag action
        @param x_cord: x cord of mouse
        @param y_cord: y cord of mouse
        @param drag_result: Drag result value
        @return: result of drag over
        @todo: For some reason the caret position changes which can be seen
               by the brackets getting highlighted. However the actual caret
               is not moved.

        """
        stc = self.window
        if self._tmp is None:
            # No drag image (file drop, or image creation failed)
            if hasattr(stc, 'DoDragOver'):
                val = stc.DoDragOver(x_cord, y_cord, drag_result)
                self.ScrollBuffer(stc, x_cord, y_cord)
            drag_result = wx.DragCopy
        else:
            # A drag image was created
            if hasattr(stc, 'DoDragOver'):
                point = wx.Point(x_cord, y_cord)
                # Hide the image while the control repaints, then move it
                self._tmp.BeginDrag(point - self._lastp, stc)
                self._tmp.Hide()
                stc.DoDragOver(x_cord, y_cord, drag_result)
                self._tmp.Move(point)
                self._tmp.Show()
                self._tmp.RedrawImage(self._lastp, point, True, True)
                self._lastp = point
                self.ScrollBuffer(stc, x_cord, y_cord)
            drag_result = wx.DragCopy

        return drag_result

    def OnData(self, x_cord, y_cord, drag_result):
        """Gets and processes the dropped data
        @param x_cord: x coordinate
        @param y_cord: y coordinate
        @param drag_result: wx Drag result value
        @postcondition: dropped data is processed

        """
        self.window.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
        if self.window.HasCapture():
            self.window.ReleaseMouse()

        try:
            data = self.GetData()
        except wx.PyAssertionError:
            # Retrieval failed: report via a status event and cancel
            wx.PostEvent(self.window.GetTopLevelParent(), \
                         ed_event.StatusEvent(ed_event.edEVT_STATUS, -1,
                                              _("Unable to accept dropped file "
                                                "or text")))
            data = False
            drag_result = wx.DragCancel

        if data:
            files = self._data['fdata'].GetFilenames()
            text = self._data['tdata'].GetText()
            # Prefer the file callback, then the text callback, then let
            # the window itself handle the dropped text
            if len(files) > 0 and self._data['fcallb'] is not None:
                self._data['fcallb'](files)
            elif len(text) > 0:
                if self._data['tcallb'] is not None:
                    self._data['tcallb'](text)
                elif hasattr(self.window, 'DoDropText'):
                    self.window.DoDropText(x_cord, y_cord, text)
        # Reset the data objects for the next drop
        self.InitObjects()
        return drag_result

    def OnLeave(self):
        """Handles the event of when the drag object leaves the window
        @postcondition: Cursor is set back to normal state

        """
        self.window.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
        if self.window.HasCapture():
            self.window.ReleaseMouse()

        if self._tmp is not None:
            try:
                self._tmp.EndDrag()
            except wx.PyAssertionError, msg:
                Log("[droptargetft][err] %s" % str(msg))

    @staticmethod
    def ScrollBuffer(stc, x_cord, y_cord):
        """Scroll the buffer as the dragged text is moved towards the
        ends.
        @param stc: StyledTextCtrl
        @param x_cord: int (x position)
        @param y_cord: int (y position)
        @note: currently does not work on wxMac

        """
        try:
            cline = stc.PositionFromPoint(wx.Point(x_cord, y_cord))
            if cline != wx.stc.STC_INVALID_POSITION:
                cline = stc.LineFromPosition(cline)
                fline = stc.GetFirstVisibleLine()
                lline = stc.GetLastVisibleLine()
                # Scroll one line when within two lines of either edge
                if (cline - fline) < 2:
                    stc.ScrollLines(-1)
                elif lline - cline < 2:
                    stc.ScrollLines(1)
                else:
                    pass
        except wx.PyAssertionError, msg:
            Log("[droptargetft][err] ScrollBuffer: %s" % msg)
#---- End FileDropTarget ----#
class EdClipboard(ebmlib.CycleCache):
    """Local clipboard cycle cache for multi-item copy/paste support.
    @todo: make into a singleton

    """
    def GetNext(self):
        """Get the next item in the cache
        @return: next cached text item

        """
        # Initialize the clipboard if it hasn't been loaded yet and
        # there is something in the system clipboard
        if self.GetCurrentSize() == 0:
            txt = GetClipboardText()
            if txt is not None:
                self.Put(txt)
        return super(EdClipboard, self).GetNext()

    def IsAtIndex(self, txt):
        """Is the passed in phrase at the current cycle index in the
        cache. Used to check if index should be reset or to continue in
        the cycle.
        @param txt: selected text
        @return: bool

        """
        # Locals renamed so the ``next`` builtin is not shadowed
        prev_item = self.PeekPrev()
        next_item = self.PeekNext()
        return txt in (prev_item, next_item)

    def Put(self, txt):
        """Put some text in the clipboard
        @param txt: Text to put in the system clipboard

        """
        prev_item = self.PeekPrev()
        next_item = self.PeekNext()
        # Skip empty strings and immediate duplicates of the neighbors
        if len(txt) and txt not in (prev_item, next_item):
            self.PutItem(txt)
#---- Misc Common Function Library ----#
# Used for holding the primary selection on mac/msw
FAKE_CLIPBOARD = None
def GetClipboardText(primary=False):
    """Get text from the clipboard, optionally from the primary selection.
    @keyword primary: read the X11 primary selection instead of the
                      regular clipboard (emulated on mac/msw)
    @return: str or None

    """
    use_primary = primary and wx.Platform == '__WXGTK__'
    if use_primary:
        wx.TheClipboard.UsePrimarySelection(True)
    elif primary:
        # Fake the primary selection on mac/msw with a module-level global
        global FAKE_CLIPBOARD
        return FAKE_CLIPBOARD

    retrieved = None
    text_obj = wx.TextDataObject()
    if wx.TheClipboard.IsOpened() or wx.TheClipboard.Open():
        if wx.TheClipboard.GetData(text_obj):
            retrieved = text_obj.GetText()
        wx.TheClipboard.Close()

    if use_primary:
        # Restore normal clipboard mode on gtk
        wx.TheClipboard.UsePrimarySelection(False)
    return retrieved
def SetClipboardText(txt, primary=False):
    """Copies text to the clipboard
    @param txt: text to put in clipboard
    @keyword primary: Set txt as primary selection (x11)
    @return: bool (True if the text was stored)

    """
    # Check if using primary selection
    use_primary = primary and wx.Platform == '__WXGTK__'
    if use_primary:
        wx.TheClipboard.UsePrimarySelection(True)
    elif primary:
        # Fake the primary selection on mac/msw with a module-level global
        global FAKE_CLIPBOARD
        FAKE_CLIPBOARD = txt
        return True

    data_o = wx.TextDataObject()
    data_o.SetText(txt)
    if wx.TheClipboard.IsOpened() or wx.TheClipboard.Open():
        wx.TheClipboard.SetData(data_o)
        wx.TheClipboard.Close()
        if use_primary:
            # Restore normal clipboard mode on gtk
            wx.TheClipboard.UsePrimarySelection(False)
        return True
    return False
def FilterFiles(file_list):
    """Filters a list of paths and returns a list of paths
    that can probably be opened in the editor.
    @param file_list: list of files/folders to filter for good files in
    @return: list of paths that are not binary files
    """
    checker = ebmlib.FileTypeChecker()
    return [path for path in file_list if not checker.IsBinary(path)]
def GetFileType(fname):
    """Get what the type of the file is as Editra sees it
    in a formatted string.
    @param fname: file path
    @return: string (formatted/translated filetype)
    """
    if os.path.isdir(fname):
        return _("Folder")
    ext = fname.split('.')[-1]
    eguess = syntax.GetTypeFromExt(ext)
    if eguess != synglob.LANG_TXT:
        # Recognized source file type
        return _("%s Source File") % eguess
    if ext == 'txt':
        return _("Text Document")
    # Fall back on the mimetype database for unrecognized extensions
    mtype = mimetypes.guess_type(fname)[0]
    if mtype is not None:
        return mtype
    return _("Unknown")
def GetFileReader(file_name, enc='utf-8'):
    """Returns a file stream reader object for reading the
    supplied file name. It returns a file reader using the encoding
    (enc) which defaults to utf-8. If lookup of the reader fails on
    the host system it will return the raw binary file handle instead.
    @param file_name: name of file to get a reader for
    @keyword enc: encoding to use for reading the file
    @return: file reader, or -1 if the file could not be opened
    """
    try:
        # Fix: use open() instead of the Python-2-only file() builtin,
        # for consistency with GetFileWriter and forward compatibility.
        file_h = open(file_name, "rb")
    except (IOError, OSError):
        dev_tool.DEBUGP("[file_reader] Failed to open file %s" % file_name)
        return -1
    try:
        reader = codecs.getreader(enc)(file_h)
    except (LookupError, IndexError, ValueError):
        # Unknown encoding; fall back on the raw binary handle
        dev_tool.DEBUGP('[file_reader] Failed to get %s Reader' % enc)
        reader = file_h
    return reader
def GetFileWriter(file_name, enc='utf-8'):
    """Returns a file stream writer object for writing to the
    supplied file name. It returns a file writer in the supplied
    encoding if the host system supports it, otherwise it will return
    the raw binary file handle. The default will try and return a utf-8
    writer.
    @param file_name: path of file to get writer for
    @keyword enc: encoding to write text to file with
    @return: file writer, or -1 if the file could not be opened
    """
    try:
        file_h = open(file_name, "wb")
    except (IOError, OSError):
        # Fix: also catch OSError for consistency with GetFileReader
        dev_tool.DEBUGP("[file_writer][err] Failed to open file %s" % file_name)
        return -1
    try:
        writer = codecs.getwriter(enc)(file_h)
    except (LookupError, IndexError, ValueError):
        # Unknown encoding; fall back on the raw binary handle
        dev_tool.DEBUGP('[file_writer][err] Failed to get %s Writer' % enc)
        writer = file_h
    return writer
# TODO: DEPRECATED - remove once callers migrate to ebmlib
# Backwards-compatible alias kept so existing callers keep working
GetFileManagerCmd = ebmlib.GetFileManagerCmd
def GetUserConfigBase():
    """Get the base user configuration directory path
    @return: path string, always terminated with os.sep
    """
    cbase = ed_glob.CONFIG['CONFIG_BASE']
    if cbase is None:
        cbase = wx.StandardPaths_Get().GetUserDataDir()
        on_gtk = wx.Platform == '__WXGTK__'
        if on_gtk and u'.config' not in cbase and not os.path.exists(cbase):
            # No existing configuration; prefer the xdg config location
            parent, cfgdir = os.path.split(cbase)
            xdg_base = os.path.join(parent, '.config')
            if os.path.exists(xdg_base):
                cbase = os.path.join(xdg_base, cfgdir.lstrip(u'.'))
    return cbase + os.sep
def HasConfigDir(loc=u""):
    """ Checks if the user has a config directory and returns True
    if the config directory exists or False if it does not.
    @keyword loc: subdirectory name under the user config base
    @return: whether config dir in question exists on an expected path
    """
    return os.path.exists(os.path.join(GetUserConfigBase(), loc))
def MakeConfigDir(name):
    """Makes a user config directory
    @param name: name of config directory to make in user config dir
    """
    cbase = GetUserConfigBase()
    try:
        # Use os.path.join for consistency with HasConfigDir; cbase is
        # already os.sep terminated so plain concatenation was equivalent.
        os.mkdir(os.path.join(cbase, name))
    except (OSError, IOError):
        # Best-effort: the directory may already exist
        pass
def RepairConfigState(path):
    """Repair the state of profile path, updating and creating it
    if it does not exist.
    @param path: path of profile
    @return: a valid profile path
    """
    if os.path.isabs(path) and os.path.exists(path):
        return path
    # Profile is missing or relative; rebuild the config directory and
    # fall back on the stored profile path
    CreateConfigDir()
    import profiler
    return profiler.Profile_Get("MYPROFILE")
def CreateConfigDir():
    """ Creates the user config directory its default sub
    directories and any of the default config files.
    @postcondition: all default configuration files/folders are created
    """
    #---- Resolve Paths ----#
    config_dir = GetUserConfigBase()
    profile_dir = os.path.join(config_dir, u"profiles")
    dest_file = os.path.join(profile_dir, u"default.ppb")
    # Extra config subdirectories created alongside the profiles
    ext_cfg = [u"cache", u"styles", u"plugins"]
    #---- Create Directories ----#
    if not os.path.exists(config_dir):
        os.mkdir(config_dir)
    if not os.path.exists(profile_dir):
        os.mkdir(profile_dir)
    for cfg in ext_cfg:
        if not HasConfigDir(cfg):
            MakeConfigDir(cfg)
    # Deferred import — presumably avoids a circular import at module
    # load time; TODO confirm
    import profiler
    profiler.TheProfile.LoadDefaults()
    # Point the active profile at the freshly created default and persist it
    profiler.Profile_Set("MYPROFILE", dest_file)
    profiler.TheProfile.Write(dest_file)
    profiler.UpdateProfileLoader()
def ResolvConfigDir(config_dir, sys_only=False):
    """Checks for a user config directory and if it is not
    found it then resolves the absolute path of the executables
    directory from the relative execution path. This is then used
    to find the location of the specified directory as it relates
    to the executable directory, and returns that path as a
    string.
    @param config_dir: name of config directory to resolve
    @keyword sys_only: only get paths of system config directory or user one
    @return: resolved directory path, terminated with os.sep
    @note: This method is probably much more complex than it needs to be but
          the code has proven itself.
    """
    # Try to get a User config directory
    if not sys_only:
        user_config = GetUserConfigBase()
        user_config = os.path.join(user_config, config_dir)
        if os.path.exists(user_config):
            return user_config + os.sep
    # Check if the system install path has already been resolved once before
    if ed_glob.CONFIG['INSTALL_DIR'] != u"":
        tmp = os.path.join(ed_glob.CONFIG['INSTALL_DIR'], config_dir)
        tmp = os.path.normpath(tmp) + os.sep
        if os.path.exists(tmp):
            return tmp
        else:
            del tmp
    # The following lines are used only when Editra is being run as a
    # source package. If the found path does not exist then Editra is
    # running as as a built package.
    if not hasattr(sys, 'frozen'):
        # Resolve relative to this module's location on disk
        path = __file__
        if not ebmlib.IsUnicode(path):
            path = path.decode(sys.getfilesystemencoding())
        path = os.sep.join(path.split(os.sep)[:-2])
        path = path + os.sep + config_dir + os.sep
        if os.path.exists(path):
            if not ebmlib.IsUnicode(path):
                # Python 2 unicode(); this file targets Python 2
                path = unicode(path, sys.getfilesystemencoding())
            return path
    # If we get here we need to do some platform dependent lookup
    # to find everything.
    path = sys.argv[0]
    if not ebmlib.IsUnicode(path):
        path = unicode(path, sys.getfilesystemencoding())
    # If it is a link get the real path
    if os.path.islink(path):
        path = os.path.realpath(path)
    # Tokenize path
    pieces = path.split(os.sep)
    if wx.Platform == u'__WXMSW__':
        # On Windows the exe is in same dir as config directories
        pro_path = os.sep.join(pieces[:-1])
        if os.path.isabs(pro_path):
            pass
        elif pro_path == u"":
            # Launched from the current working directory
            pro_path = os.getcwd()
            pieces = pro_path.split(os.sep)
            pro_path = os.sep.join(pieces[:-1])
        else:
            pro_path = os.path.abspath(pro_path)
    elif wx.Platform == u'__WXMAC__':
        # On OS X the config directories are in the applet under Resources
        stdpath = wx.StandardPaths_Get()
        pro_path = stdpath.GetResourcesDir()
        pro_path = os.path.join(pro_path, config_dir)
    else:
        pro_path = os.sep.join(pieces[:-2])
        if pro_path.startswith(os.sep):
            pass
        elif pro_path == u"":
            # Launched from the current working directory
            pro_path = os.getcwd()
            pieces = pro_path.split(os.sep)
            if pieces[-1] not in [ed_glob.PROG_NAME.lower(), ed_glob.PROG_NAME]:
                pro_path = os.sep.join(pieces[:-1])
        else:
            pro_path = os.path.abspath(pro_path)
    if wx.Platform != u'__WXMAC__':
        # Mac already appended config_dir under Resources above
        pro_path = pro_path + os.sep + config_dir + os.sep
    path = os.path.normpath(pro_path) + os.sep
    # Make sure path is unicode
    if not ebmlib.IsUnicode(path):
        path = unicode(path, sys.getdefaultencoding())
    return path
def GetResources(resource):
    """Returns a list of resource directories from a given toplevel config dir
    @param resource: config directory name
    @return: list of resource directories that exist under the given
             resource path, or -1 when the resource path does not exist
    """
    rec_dir = ResolvConfigDir(resource)
    if not os.path.exists(rec_dir):
        # Preserved legacy sentinel; callers check for -1
        return -1
    return [ rec.title() for rec in os.listdir(rec_dir)
             if os.path.isdir(rec_dir + rec) and rec[0] != u"." ]
def GetResourceFiles(resource, trim=True, get_all=False,
                     suffix=None, title=True):
    """Gets a list of resource files from a directory and trims the
    file extentions from the names if trim is set to True (default).
    If the get_all parameter is set to True the function will return
    a set of unique items by looking up both the user and system level
    files and combining them, the default behavior returns the user
    level files if they exist or the system level files if the
    user ones do not exist.
    @param resource: name of config directory to look in (i.e cache)
    @keyword trim: trim file extensions or not
    @keyword get_all: get a set of both system/user files or just user level
    @keyword suffix: Get files that have the specified suffix or all (default)
    @keyword title: Titlize the results
    @return: sorted list of unique file names, or -1 when the resource
             directory does not exist
    """
    rec_dir = ResolvConfigDir(resource)
    if get_all:
        # Also look up the system-level resource directory
        rec_dir2 = ResolvConfigDir(resource, True)
    rec_list = list()
    if not os.path.exists(rec_dir):
        return -1
    else:
        recs = os.listdir(rec_dir)
        if get_all and os.path.exists(rec_dir2):
            recs.extend(os.listdir(rec_dir2))
        for rec in recs:
            # Only keep entries that are files in either directory
            if os.path.isfile(rec_dir + rec) or \
               (get_all and os.path.isfile(rec_dir2 + rec)):
                # If a suffix was specified only keep files that match
                if suffix is not None:
                    if not rec.endswith(suffix):
                        continue
                # Trim the last part of an extension if one exists
                if trim:
                    rec = ".".join(rec.split(u".")[:-1]).strip()
                # Make the resource name a title if requested
                if title and len(rec):
                    rec = rec[0].upper() + rec[1:]
                if len(rec):
                    rec_list.append(rec)
        rec_list.sort()
        # De-duplicate entries found in both user and system directories
        return list(set(rec_list))
def GetAllEncodings():
    """Get all encodings found on the system
    @return: sorted list of unique encoding name strings
    """
    unique = set(encodings.aliases.aliases.values())
    # Codec helper entries are not real encodings; filter them out
    return sorted(enc for enc in unique if not enc.endswith('codec'))
def Log(msg, *args):
    """Push the message to the apps log
    @param msg: message string to log
    @param args: optional positional arguments to use as a printf formatting
                 to the message.
    """
    try:
        wx.GetApp().GetLog()(msg, args)
    except Exception:
        # Best-effort logging: the app or its log may not exist yet.
        # Fix: catch Exception instead of a bare except so that
        # KeyboardInterrupt and SystemExit are not swallowed.
        pass
def GetProxyOpener(proxy_set):
    """Get a urlopener for use with a proxy
    @param proxy_set: proxy settings to use
    @return: urllib2 opener configured with an authenticated http proxy
    """
    Log("[util][info] Making proxy opener with %s" % str(proxy_set))
    # Copy so the caller's settings are not mutated
    proxy_info = dict(proxy_set)
    auth_str = "%(uname)s:%(passwd)s@%(url)s"
    url = proxy_info['url']
    if url.startswith('http://'):
        # Move the scheme in front of the credentials
        auth_str = "http://" + auth_str
        proxy_info['url'] = url.replace('http://', '')
    else:
        pass
    if len(proxy_info.get('port', '')):
        auth_str = auth_str + ":%(port)s"
    # Decrypt the stored password before substituting it into the url
    proxy_info['passwd'] = ed_crypt.Decrypt(proxy_info['passwd'],
                                            proxy_info['pid'])
    # Mask the password when logging the formatted request
    Log("[util][info] Formatted proxy request: %s" % \
        (auth_str.replace('%(passwd)s', '****') % proxy_info))
    proxy = urllib2.ProxyHandler({"http" : auth_str % proxy_info})
    opener = urllib2.build_opener(proxy, urllib2.HTTPHandler)
    return opener
#---- GUI helper functions ----#
def SetWindowIcon(window):
    """Sets the given windows icon to be the programs
    application icon.
    @param window: window to set app icon for
    """
    try:
        if wx.Platform == "__WXMSW__":
            icon_name, icon_type = u"editra.ico", wx.BITMAP_TYPE_ICO
        else:
            icon_name, icon_type = u"editra.png", wx.BITMAP_TYPE_PNG
        ed_icon = ed_glob.CONFIG['SYSPIX_DIR'] + icon_name
        window.SetIcon(wx.Icon(ed_icon, icon_type))
    finally:
        # NOTE: finally/pass is a no-op kept from the original; any
        # errors still propagate to the caller
        pass
#-----------------------------------------------------------------------------#
class IntValidator(wx.PyValidator):
    """A Generic integer validator"""
    def __init__(self, min_=0, max_=0):
        """Initialize the validator
        @keyword min_: min value to accept
        @keyword max_: max value to accept
        @note: when min_ == max_ (the default) no range check is applied
        """
        wx.PyValidator.__init__(self)
        self._min = min_
        self._max = max_
        # Event management
        self.Bind(wx.EVT_CHAR, self.OnChar)
    def Clone(self):
        """Clones the current validator
        @return: clone of this object
        """
        return IntValidator(self._min, self._max)
    def Validate(self, win):
        """Validate a window's value
        @param win: window to validate
        @return: bool, True if the value is an unsigned integer that is
                 within the configured range (when a range was given)
        """
        val = win.GetValue()
        if not val.isdigit():
            return False
        # Fix: the stored min/max bounds were previously never checked.
        # A configured range (min_ != max_) is now enforced; the default
        # (0, 0) keeps the original digits-only behavior.
        if self._min != self._max:
            return self._min <= int(val) <= self._max
        return True
    def OnChar(self, event):
        """Process values as they are entered into the control
        @param event: event that called this handler
        """
        key = event.GetKeyCode()
        # Allow control keys, delete, keys above the ascii range, and digits
        if key < wx.WXK_SPACE or key == wx.WXK_DELETE or \
           key > 255 or chr(key) in '0123456789':
            event.Skip()
            return
        # Reject anything else, beeping unless validators are silenced
        if not wx.Validator_IsSilent():
            wx.Bell()
        return
| 33.891414 | 80 | 0.595671 | 3,376 | 26,842 | 4.628555 | 0.208531 | 0.012671 | 0.008639 | 0.00768 | 0.238577 | 0.199347 | 0.167797 | 0.149814 | 0.137463 | 0.112633 | 0 | 0.005092 | 0.297593 | 26,842 | 791 | 81 | 33.93426 | 0.823698 | 0.075255 | 0 | 0.300448 | 0 | 0 | 0.057785 | 0.002846 | 0 | 0 | 0 | 0.00885 | 0.011211 | 0 | null | null | 0.026906 | 0.035874 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12e101d3d1c0a3624036d3fc55bbec2095eca800 | 2,690 | py | Python | tests/test_user.py | munniomer/Send-IT-Api-v1 | 17041c987638c7e47c7c2ebed29bf7e2b5156bed | [
"CNRI-Python",
"OML"
] | null | null | null | tests/test_user.py | munniomer/Send-IT-Api-v1 | 17041c987638c7e47c7c2ebed29bf7e2b5156bed | [
"CNRI-Python",
"OML"
] | null | null | null | tests/test_user.py | munniomer/Send-IT-Api-v1 | 17041c987638c7e47c7c2ebed29bf7e2b5156bed | [
"CNRI-Python",
"OML"
] | 1 | 2019-02-05T07:44:19.000Z | 2019-02-05T07:44:19.000Z | import unittest
from app import create_app
import json
from tests.basetest import BaseTest
class TestUSer(BaseTest):
"""User tests class"""
def test_user_registration(self):
"tests if new user can register"
respon = self.client.post("/api/v1/user/register", json=self.new_user)
self.assertEqual(respon.status_code, 201)
def test_if_name_city_valid(self):
"""Tests if names and city are valid"""
respon = self.client.post(
"/api/v1/user/register", json=self.new_user1, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('PLease check if your fname, lname or city is empty or contains numbers',
str(respon.data))
def test_if_email_valid(self):
"""Tests if email is valid"""
respon = self.client.post(
"/api/v1/user/register", json=self.new_user2, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('Please enter a valid emai',
str(respon.data))
def test_if_email_exist(self):
"""Tests if email is valid"""
self.client.post(
"/api/v1/user/register", json=self.new_user6, content_type='application/json')
respon = self.client.post(
"/api/v1/user/register", json=self.new_user6, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('That email exists. use a unique email',
str(respon.data))
def test_if_phone_valid(self):
"""Tests if email is exists"""
respon = self.client.post(
"/api/v1/user/register", json=self.new_user3, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('Please enter a valid phone number ',
str(respon.data))
def test_if_password_valid(self):
"""Tests if passwords are empty or less than 3"""
respon = self.client.post(
"/api/v1/user/register", json=self.new_user4, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('Please check if your password or confirm password are empty or less than 3',
str(respon.data))
def test_if_password_match(self):
"""Tests if passwords match"""
respon = self.client.post(
"/api/v1/user/register", json=self.new_user5, content_type='application/json')
self.assertEqual(respon.status_code, 400)
self.assertIn('confirm password does not match password',
str(respon.data))
| 42.03125 | 99 | 0.637546 | 349 | 2,690 | 4.787966 | 0.22063 | 0.067026 | 0.067026 | 0.081388 | 0.716936 | 0.698384 | 0.621185 | 0.552962 | 0.552962 | 0.552962 | 0 | 0.018803 | 0.248699 | 2,690 | 63 | 100 | 42.698413 | 0.808016 | 0.0829 | 0 | 0.425532 | 0 | 0 | 0.239643 | 0.068237 | 0 | 0 | 0 | 0 | 0.276596 | 1 | 0.148936 | false | 0.085106 | 0.085106 | 0 | 0.255319 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12e805833151bd1898679d1e39b89a2e7fde7f1c | 2,600 | py | Python | custom_components/panasonic_cc/__init__.py | shyne99/panasonic_cc | ec7912e4067ebd0c08ea2a16c123c50d69a2fca6 | [
"MIT"
] | null | null | null | custom_components/panasonic_cc/__init__.py | shyne99/panasonic_cc | ec7912e4067ebd0c08ea2a16c123c50d69a2fca6 | [
"MIT"
] | null | null | null | custom_components/panasonic_cc/__init__.py | shyne99/panasonic_cc | ec7912e4067ebd0c08ea2a16c123c50d69a2fca6 | [
"MIT"
] | null | null | null | """Platform for the Panasonic Comfort Cloud."""
from datetime import timedelta
import logging
from typing import Any, Dict
import asyncio
from async_timeout import timeout
import voluptuous as vol
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_USERNAME, CONF_PASSWORD)
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers import discovery
from .const import TIMEOUT
from .panasonic import PanasonicApiDevice
_LOGGER = logging.getLogger(__name__)
# Integration domain; also the key used in hass.data
DOMAIN = "panasonic_cc"
# Schema for YAML-based configuration: username/password are required
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_USERNAME): cv.string,
                vol.Required(CONF_PASSWORD): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
# Key under which discovered device wrappers are stored in hass.data
PANASONIC_DEVICES = "panasonic_devices"
# Platforms forwarded for each config entry
COMPONENT_TYPES = ["climate", "sensor", "switch"]
def setup(hass, config):
    # Legacy synchronous setup hook; intentionally a no-op because all
    # initialization happens in async_setup/async_setup_entry.
    # NOTE(review): returns None (falsy) — confirm Home Assistant does
    # not treat this as a setup failure when YAML configuration exists.
    pass
async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
    """Ensure this integration's storage exists in hass.data."""
    if DOMAIN not in hass.data:
        hass.data[DOMAIN] = {}
    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Establish connection with Comfort Cloud.

    Discovers devices through the pcomfortcloud API and forwards entry
    setup to each supported platform.
    """
    # Imported lazily so the dependency is only loaded when an entry exists
    import pcomfortcloud
    conf = entry.data
    if PANASONIC_DEVICES not in hass.data:
        hass.data[PANASONIC_DEVICES] = []
    username = conf[CONF_USERNAME]
    password = conf[CONF_PASSWORD]
    # NOTE(review): verifySsl=False disables certificate verification —
    # security trade-off, confirm this is required by the cloud endpoint
    api = pcomfortcloud.Session(username, password, verifySsl=False)
    # get_devices is blocking, so run it in the executor pool
    devices = await hass.async_add_executor_job(api.get_devices)
    for device in devices:
        try:
            api_device = PanasonicApiDevice(hass, api, device)
            await api_device.update()
            hass.data[PANASONIC_DEVICES].append(api_device)
        except Exception as e:
            # One broken device should not abort setup of the others
            _LOGGER.warning(f"Failed to setup device: {device['name']} ({e})")
    if hass.data[PANASONIC_DEVICES]:
        # Forward the entry to each platform only when devices were found
        for component in COMPONENT_TYPES:
            hass.async_create_task(
                hass.config_entries.async_forward_entry_setup(entry, component)
            )
    return True
async def async_unload_entry(hass, config_entry):
    """Unload a config entry and all of its forwarded platforms."""
    unload_tasks = [
        hass.config_entries.async_forward_entry_unload(config_entry, component)
        for component in COMPONENT_TYPES
    ]
    await asyncio.wait(unload_tasks)
    # Drop the cached device wrappers
    hass.data.pop(PANASONIC_DEVICES)
    return True
| 27.368421 | 83 | 0.699231 | 298 | 2,600 | 5.922819 | 0.342282 | 0.063456 | 0.022096 | 0.040793 | 0.10085 | 0.074788 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22 | 2,600 | 94 | 84 | 27.659574 | 0.870316 | 0.015769 | 0 | 0.044776 | 0 | 0 | 0.038572 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014925 | false | 0.074627 | 0.238806 | 0 | 0.298507 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12ec838b4e6e3d1f8f2bea5549297c2e3c075ade | 2,484 | py | Python | test/core/bad_ssl/gen_build_yaml.py | Akrog/grpc | 14800b0c1acc2d10d4fd0826731ecae2cb448143 | [
"Apache-2.0"
] | 3 | 2020-10-07T14:20:21.000Z | 2021-10-08T14:49:17.000Z | test/core/bad_ssl/gen_build_yaml.py | Akrog/grpc | 14800b0c1acc2d10d4fd0826731ecae2cb448143 | [
"Apache-2.0"
] | 1 | 2021-03-04T02:33:56.000Z | 2021-03-04T02:33:56.000Z | test/core/bad_ssl/gen_build_yaml.py | Akrog/grpc | 14800b0c1acc2d10d4fd0826731ecae2cb448143 | [
"Apache-2.0"
] | 5 | 2021-02-19T09:46:00.000Z | 2022-03-13T17:33:34.000Z | #!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the end2end tests."""
import collections
import yaml
TestOptions = collections.namedtuple('TestOptions', 'flaky cpu_cost')
default_test_options = TestOptions(False, 1.0)
# maps test names to options
BAD_CLIENT_TESTS = {
'cert': default_test_options._replace(cpu_cost=0.1),
# Disabling this test because it does not link correctly as written
# 'alpn': default_test_options._replace(cpu_cost=0.1),
}
def main():
json = {
'#':
'generated with test/bad_ssl/gen_build_json.py',
'libs': [{
'name': 'bad_ssl_test_server',
'build': 'private',
'language': 'c',
'src': ['test/core/bad_ssl/server_common.cc'],
'headers': ['test/core/bad_ssl/server_common.h'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['grpc_test_util', 'grpc', 'gpr']
}],
'targets': [{
'name': 'bad_ssl_%s_server' % t,
'build': 'test',
'language': 'c',
'run': False,
'src': ['test/core/bad_ssl/servers/%s.cc' % t],
'vs_proj_dir': 'test/bad_ssl',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['bad_ssl_test_server', 'grpc_test_util', 'grpc', 'gpr']
} for t in sorted(BAD_CLIENT_TESTS.keys())] + [{
'name': 'bad_ssl_%s_test' % t,
'cpu_cost': BAD_CLIENT_TESTS[t].cpu_cost,
'build': 'test',
'language': 'c',
'src': ['test/core/bad_ssl/bad_ssl_test.cc'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['grpc_test_util', 'grpc', 'gpr']
} for t in sorted(BAD_CLIENT_TESTS.keys())]
}
print yaml.dump(json)
if __name__ == '__main__':
main()
| 35.485714 | 76 | 0.595813 | 320 | 2,484 | 4.421875 | 0.4375 | 0.046643 | 0.039576 | 0.039576 | 0.279859 | 0.24947 | 0.222615 | 0.185866 | 0.137809 | 0.137809 | 0 | 0.009194 | 0.255636 | 2,484 | 69 | 77 | 36 | 0.756084 | 0.289855 | 0 | 0.272727 | 0 | 0 | 0.349671 | 0.096234 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.045455 | null | null | 0.022727 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12ed9629940a31dc96db1b6d58b951b990da8233 | 3,723 | py | Python | infoblox_netmri/api/remote/models/device_password_log_remote.py | IngmarVG-IB/infoblox-netmri | b0c725fd64aee1890d83917d911b89236207e564 | [
"Apache-2.0"
] | null | null | null | infoblox_netmri/api/remote/models/device_password_log_remote.py | IngmarVG-IB/infoblox-netmri | b0c725fd64aee1890d83917d911b89236207e564 | [
"Apache-2.0"
] | null | null | null | infoblox_netmri/api/remote/models/device_password_log_remote.py | IngmarVG-IB/infoblox-netmri | b0c725fd64aee1890d83917d911b89236207e564 | [
"Apache-2.0"
] | null | null | null | from ..remote import RemoteModel
from infoblox_netmri.utils.utils import check_api_availability
class DevicePasswordLogRemote(RemoteModel):
    """Remote model for entries of the DevicePasswordLog table.

    |  ``DevicePwLogID:`` The internal NetMRI identifier for the device password log.
    |  ``attribute type:`` number
    |  ``DataSourceID:`` The internal NetMRI identifier for the collector NetMRI that collected this data record.
    |  ``attribute type:`` number
    |  ``DeviceID:`` The internal NetMRI identifier for the device from which device password log table information was collected.
    |  ``attribute type:`` number
    |  ``DevicePwLogTimestamp:`` The date and time this record was collected or calculated.
    |  ``attribute type:`` datetime
    |  ``DevicePwLogProtocol:`` The protocol of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogPassword:`` The password of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogSNMPAuthProto:`` The SNMP password is authenticated for the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogSNMPPrivProto:`` The SNMP private password protocol of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogStatus:`` The status of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogPasswordSecure:`` The password of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogUsernameSecure:`` The username of the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogEnablePasswordSecure:`` The password is enabled for device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogSNMPAuthPWSecure:`` The SNMP password is authenticated for the device password log.
    |  ``attribute type:`` string
    |  ``DevicePwLogSNMPPrivPWSecure:`` The SNMP private password of the device password log.
    |  ``attribute type:`` string
    |  ``SecureVersion:`` The encryption version of the username and passwords.
    |  ``attribute type:`` number
    """

    # Attribute names exposed by this remote model
    properties = (
        "DevicePwLogID",
        "DataSourceID",
        "DeviceID",
        "DevicePwLogTimestamp",
        "DevicePwLogProtocol",
        "DevicePwLogPassword",
        "DevicePwLogSNMPAuthProto",
        "DevicePwLogSNMPPrivProto",
        "DevicePwLogStatus",
        "DevicePwLogPasswordSecure",
        "DevicePwLogUsernameSecure",
        "DevicePwLogEnablePasswordSecure",
        "DevicePwLogSNMPAuthPWSecure",
        "DevicePwLogSNMPPrivPWSecure",
        "SecureVersion",
    )

    @property
    @check_api_availability
    def data_source(self):
        """The collector NetMRI that collected this data record.
        ``attribute type:`` model
        """
        return self.broker.data_source(DevicePwLogID=self.DevicePwLogID)

    @property
    @check_api_availability
    def device(self):
        """The device from which this data was collected.
        ``attribute type:`` model
        """
        return self.broker.device(DevicePwLogID=self.DevicePwLogID)

    @property
    @check_api_availability
    def infradevice(self):
        """The device from which this data was collected.
        ``attribute type:`` model
        """
        return self.broker.infradevice(DevicePwLogID=self.DevicePwLogID)
| 31.285714 | 130 | 0.608649 | 314 | 3,723 | 7.181529 | 0.251592 | 0.103769 | 0.090466 | 0.126829 | 0.48204 | 0.468293 | 0.429712 | 0.384479 | 0.294013 | 0.228825 | 0 | 0 | 0.299221 | 3,723 | 119 | 131 | 31.285714 | 0.864316 | 0.551168 | 0 | 0.193548 | 0 | 0 | 0.251282 | 0.134066 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096774 | false | 0.129032 | 0.064516 | 0 | 0.322581 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
12f2a17d10d6e7d8016a1adfcae38305fb8b1df9 | 2,386 | py | Python | franka_lcas_experiments/script/load_model_rtp.py | arsh09/franka_ros_lcas | b6211125436849d5c7def8ad96a384cc34f2f121 | [
"Apache-2.0"
] | 2 | 2021-11-09T00:50:43.000Z | 2021-11-15T09:50:47.000Z | franka_lcas_experiments/script/load_model_rtp.py | arsh09/franka_ros_lcas | b6211125436849d5c7def8ad96a384cc34f2f121 | [
"Apache-2.0"
] | null | null | null | franka_lcas_experiments/script/load_model_rtp.py | arsh09/franka_ros_lcas | b6211125436849d5c7def8ad96a384cc34f2f121 | [
"Apache-2.0"
] | 1 | 2021-11-17T13:24:23.000Z | 2021-11-17T13:24:23.000Z | import numpy as np
import os, sys
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from tensorflow.keras.models import Model
import tensorflow as tf
from PIL import Image
from utils_rtp import ProMP
class Predictor:
    """Predicts joint trajectories from RGB images.

    Wraps a saved autoencoder (for the image latent space) and a saved
    regression model (latent -> ProMP weights), and projects predicted
    weights through the ProMP basis matrix to get a trajectory.
    """
    def __init__(self, encoder_model_path, predictor_model_path):
        """Load both models and precompute the ProMP basis matrix.
        :param encoder_model_path: path of the saved autoencoder model
        :param predictor_model_path: path of the saved weight regressor
        """
        self.all_phi = self.promp_train()
        encoder_model = tf.keras.models.load_model(encoder_model_path)
        # Expose only the bottleneck (latent) layer of the autoencoder
        self.encoder = Model(encoder_model.input,
                             encoder_model.get_layer("bottleneck").output)
        self.exp_model = tf.keras.models.load_model(predictor_model_path,
                                                    compile=False)

    def promp_train(self, n_dof=7):
        """Build the block-diagonal ProMP basis matrix.
        Fix/generalization: the original hard-coded 7 joints and an
        8-column basis width through 7 repetitive hstack/vstack calls;
        np.kron(eye(n), phi) builds the identical block-diagonal matrix
        for any joint count and any basis width.
        :param n_dof: number of degrees of freedom (default 7)
        :return: tf.float32 block-diagonal basis tensor
        """
        phi = ProMP().basis_func_gauss_glb()
        block_diag = np.kron(np.eye(n_dof), phi)
        return tf.cast(block_diag, tf.float32)

    def preprocess_image(self, image):
        """Resize a PIL image to the 256x256 network input size."""
        return np.asarray(image.resize((256, 256)))

    def predict(self, image_numpy):
        """Predict a joint trajectory for an RGB image batch.
        :param image_numpy: image array with 0-255 pixel values —
            assumes a leading batch dimension; TODO confirm with callers
        :return: squeezed trajectory array (basis matrix @ weights)
        """
        # image_numpy = np.expand_dims(image_numpy, axis=0)
        latent_img = self.encoder.predict(image_numpy / 255)
        q_val_pred = self.exp_model.predict(latent_img)
        traj_pred = np.matmul(self.all_phi, np.transpose(q_val_pred)).squeeze()
        return traj_pred  # np.reshape(traj_pred, (-1, 150))
if __name__ == "__main__":
    # Demo entry point: hard-coded local paths; adjust per environment
    ENCODED_MODEL_PATH = "/home/arshad/Documents/reach_to_palpate_validation_models/encoded_model_regions"
    PREDICTOR_MODEL = "/home/arshad/Documents/reach_to_palpate_validation_models/model_cnn_rgb_1"
    # Input image previously saved as a numpy array
    image = np.load( "/home/arshad/catkin_ws/image_xy_rtp.npy" )
    predictor = Predictor(ENCODED_MODEL_PATH, PREDICTOR_MODEL)
    traj = predictor.predict(image)
    # Persist the predicted joint trajectory for the robot-side consumer
    np.save("/home/arshad/catkin_ws/predicted_joints_values_rtp.npy", traj)
    print ("\n Predicted ProMPs weights for RTP task. Joint trajectory is saved in the file. \n Press 'p' to display the trajectory...")
| 40.440678 | 138 | 0.690696 | 343 | 2,386 | 4.568513 | 0.35277 | 0.191449 | 0.191449 | 0.153159 | 0.264837 | 0.259732 | 0.104659 | 0.06254 | 0 | 0 | 0 | 0.017544 | 0.187762 | 2,386 | 58 | 139 | 41.137931 | 0.791022 | 0.033948 | 0 | 0 | 0 | 0.02439 | 0.177237 | 0.106429 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097561 | false | 0 | 0.146341 | 0.02439 | 0.341463 | 0.02439 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12f80c5f985c410a5af8bdf06f87e46b6aa396c4 | 1,241 | py | Python | parsers/parsers_base.py | xm4dn355x/async_test | 92e7ec6a693ff4850ed603c0f4f0fa83e63b4e49 | [
"MIT"
] | null | null | null | parsers/parsers_base.py | xm4dn355x/async_test | 92e7ec6a693ff4850ed603c0f4f0fa83e63b4e49 | [
"MIT"
] | null | null | null | parsers/parsers_base.py | xm4dn355x/async_test | 92e7ec6a693ff4850ed603c0f4f0fa83e63b4e49 | [
"MIT"
] | null | null | null | #
# Общие функции для всех парсеров
#
# Автор: Никитенко Михаил
# Лицензия: MIT License
#
from time import sleep
import requests
def get_htmls(urls):
    """Fetch every URL in the list and return the HTML documents.

    :param urls: list of URL strings
    :type urls: list
    :return: list with one HTML document per input URL
    """
    htmls = []
    for url in urls:
        htmls.append(get_html(url))
        # Pause between requests to be polite to the server
        sleep(1)
    return htmls
def get_html(url):
    """Download a single URL and return the response body.

    :param url: URL to request
    :type url: str
    :return: HTML text of the HTTP response
    """
    print(f"get_html url={url}")
    # A custom User-Agent header is sent; some sites reject the default one.
    response = requests.get(url, headers={'User-Agent': 'Custom'})
    print(response)  # e.g. <Response [200]>
    return response.text
if __name__ == '__main__':
    # Library module of shared parser helpers; nothing to run directly.
    pass
12f867945891bf95b1fd61c639ac565c8cecb9f9 | 16,303 | py | Python | smbspider/smbspider.py | vonahi/pentesting_scripts | 233b07a13e631cd121985465c083327f2fe372b6 | [
"MIT"
] | 13 | 2019-09-18T17:15:22.000Z | 2022-02-20T00:28:35.000Z | smbspider/smbspider.py | vonahi/pentesting_scripts | 233b07a13e631cd121985465c083327f2fe372b6 | [
"MIT"
] | null | null | null | smbspider/smbspider.py | vonahi/pentesting_scripts | 233b07a13e631cd121985465c083327f2fe372b6 | [
"MIT"
] | 4 | 2019-07-24T10:03:41.000Z | 2021-11-22T06:19:54.000Z | #!/usr/bin/python
#
# This post-exploitation script can be used to spider numerous systems
# to identify sensitive and/or confidential data. A good scenario to
# use this script is when you have admin credentials to tons of
# Windows systems, and you want to look for files containing data such
# as PII, network password documents, etc. For the most part,
# this script uses smbclient, parses the results, and prints
# out the results in a nice format for you.
#
# Author: Alton Johnson <alton@vonahi.io
# Version: 2.4
# Updated: 01/23/2014
#
import commands, time, getopt, re, os
from sys import argv
# Wall-clock reference captured at import; used to report total runtime at exit.
start_time = time.time()
class colors:
    """ANSI escape sequences used to colour terminal output."""

    # Bright variants for emphasis, plus the attribute-reset sequence.
    red = "\x1b[1;31m"
    green = "\x1b[1;32m"
    blue = "\x1b[1;34m"
    norm = "\x1b[0;00m"
# ASCII-art start-up banner, built line by line so the backslash-heavy
# artwork stays readable in source form.
banner = "\n " + "*" * 56
banner += "\n * _ *"
banner += "\n * | | // \\\\ *"
banner += "\n * ___ _ __ ___ | |__ _\\\\()//_ *"
banner += "\n * / __| '_ ` _ \| '_ \ / // \\\\ \ *"
banner += "\n * \__ \ | | | | | |_) | |\__/| *"
banner += "\n * |___/_| |_| |_|_.__/ *"
banner += "\n * *"
banner += "\n * SMB Spider v2.4, Alton Johnson (alton@vonahi.io) *"
banner += "\n " + "*" * 56 + "\n"
def help():
    # Print the banner plus the full option summary, then exit the script.
    # NOTE: Python 2 print statements -- this script targets Python 2.x.
    print banner
    print " Usage: %s <OPTIONS>" % argv[0]
    print colors.red + "\n Target(s) (required): \n" + colors.norm
    print "\t -h <host>\t Provide IP address or a text file containing IPs."
    print "\t\t\t Supported formats: IP, smb://ip/share, \\\\ip\\share\\"
    print colors.red + "\n Credentials (required): \n" + colors.norm
    print "\t -u <user>\t Specify a valid username to authenticate to the system(s)."
    print "\t -p <pass>\t Specify the password which goes with the username."
    print "\t -P <hash>\t Use -P to provide password hash if cleartext password isn't known."
    print "\t -d <domain>\t If using a domain account, provide domain name."
    print colors.green + "\n Shares (optional):\n" + colors.norm
    print "\t -s <share>\t Specify shares (separate by comma) or specify \"profile\" to spider user profiles."
    print "\t -f <file>\t Specify a list of shares from a file."
    print colors.green + "\n Other (optional):\n" + colors.norm
    print "\t -w \t\t Avoid verbose output. Output successful spider results to smbspider_host_share_user.txt."
    print "\t\t\t This option is HIGHLY recommended if numerous systems are being scanned."
    print "\t -n \t\t ** Ignore authentication check prior to spidering."
    print "\t -g <file> \t Grab (download) files that match strings provided in text file. (Case sensitive.)"
    print "\t\t\t ** Examples: *assword.doc, *assw*.doc, pass*.xls, etc."
    print colors.norm
    exit()
def start(argv):
    # Parse command-line options, validate them, then construct a spider
    # instance and begin spidering all unique target systems.
    if len(argv) < 1:
        help()
    try:
        opts, args = getopt.getopt(argv, "u:p:d:h:s:f:P:wng:")
    except getopt.GetoptError, err:
        print colors.red + "\n [-] Error: " + str(err) + colors.norm
    # set default variables to prevent errors later in script
    sensitive_strings = []
    smb_user = ""
    smb_pass = ""
    smb_domain = ""
    smb_host = []
    smb_share = ["profile"]     # default: spider user profile folders on C$
    pth = False                 # pass-the-hash mode (-P)
    output = False              # write results to files instead of stdout (-w)
    unique_systems = []
    ignorecheck = False         # skip credential validation (-n)
    inputfile = False           # True when -h pointed at a file of targets
    # parse through arguments
    for opt, arg in opts:
        if opt == "-u":
            smb_user = arg
        elif opt == "-p":
            smb_pass = arg
        elif opt == "-d":
            smb_domain = arg
        elif opt == "-h":
            # -h accepts either a file of targets or a single target string
            try:
                smb_host = open(arg).read().split('\n')
                inputfile = True
            except:
                if "\\\\" in arg and "\\" not in arg[-1:]:
                    # NOTE(review): replacing "\\" with "\\" is a no-op at
                    # runtime -- possibly intended to normalise separators.
                    test = arg[2:].replace("\\","\\")
                    smb_host.append("\\\\%s\\" % test)
                else:
                    smb_host.append(arg)
        elif opt == "-f":
            smb_share = open(arg).read().split()
        elif opt == "-s":
            smb_share = arg.split(',')
        elif opt == "-P":
            # strip the ":::" suffix of pwdump-style hash lines
            if arg[-3:] == ":::":
                arg = arg[:-3]
            smb_pass = arg
            pth = True
        elif opt == "-w":
            output = True
        elif opt == "-n":
            ignorecheck = True
        elif opt == "-g":
            sensitive_strings = open(arg).read().split("\n")[:-1]
    # check options before proceeding
    if (not smb_user or not smb_pass or not smb_host):
        print colors.red + "\n [-] " + colors.norm + "Error: Please check to ensure that all required options are provided."
        help()
    if pth:
        # pass-the-hash requires the pth-smbclient wrapper to be installed
        result = commands.getoutput("pth-smbclient")
        if "not found" in result.lower():
            print colors.red + "\n [-] " + colors.norm + "Error: The passing-the-hash package was not found. Therefore, you cannot pass hashes."
            print "Please run \"apt-get install passing-the-hash\" to fix this error and try running the script again.\n"
            exit()
    # make smb_domain, smb_user, and smb_pass one variable
    if smb_domain:
        credentials = smb_domain + "\\\\" + smb_user + " " + smb_pass
    else:
        credentials = smb_user + " " + smb_pass
    # extract the bare host name from \\host\share or smb://host/share forms
    for system in smb_host:
        if "\\" in system or "//" in system:
            if "\\" in system:
                sys = system[system.find("\\")+2:]
                sys = sys[:sys.find("\\")]
            else:
                sys = system[system.find("/")+2:]
                sys = sys[:sys.find("/")]
            if sys not in unique_systems:
                unique_systems.append(sys)
        else:
            unique_systems.append(system)
    # start spidering
    print banner
    unique_systems = [i for i in unique_systems if i != ''] # remove blank elements from list
    print " [*] Spidering %s system(s)..." % len(unique_systems)
    begin = spider(credentials, smb_host, smb_share, pth, output, ignorecheck, inputfile, sensitive_strings)
    begin.start_spidering()
class spider:
    """Spiders SMB shares on one or more hosts by shelling out to
    (pth-)smbclient and parsing its directory listings."""
    def __init__(self, credentials, hosts, shares, pth, output, ignorecheck, inputfile, sensitive_strings):
        self.list_of_hosts = hosts
        self.list_of_shares = shares
        self.credentials = credentials      # "user pass" (or "DOMAIN\\user pass") handed to smbclient -U
        self.smb_host = ""                  # host currently being spidered
        self.smb_share = ""                 # share currently being spidered
        self.skip_host = ""                 # host flagged as bad; skipped on subsequent iterations
        self.pth = pth                      # True -> use pth-smbclient (pass-the-hash)
        self.outputfile = output            # True -> write results to files instead of stdout
        self.blacklisted = []               # hosts that were skipped
        self.ignorecheck = ignorecheck      # True -> don't abort on LOGON_FAIL
        self.inputfile = inputfile          # True -> targets came from a file
        self.smb_download = True            # collect file paths for later download
        self.file_locations = []            # full UNC paths of files found while spidering
        self.sensitive_strings = sensitive_strings  # -g patterns to download
        self.profile = False                # True when spidering user profiles on C$
    def start_spidering(self):
        # Walk every host/share combination, invoking spider_host() for each,
        # then kick off downloads of any files matched by -g patterns.
        share = ""
        self.total_hosts = 0
        empty_share_error = colors.red + " [-] " + colors.norm + "Error: Empty share detected for host %s. Skipping share."
        # Sanity pass: embedded-share targets are incompatible with -s/-f lists.
        for test_host in self.list_of_hosts:
            temp = test_host
            if ("//" in temp or "\\\\" in temp) and self.list_of_shares[0] != "profile":
                print colors.red + " [-] " + colors.norm + "Error: You cannot specify a share if your target(s) contains \\\\<ip>\\<share> or //<ip>/<share>\n"
                exit()
        for host in self.list_of_hosts:
            self.total_hosts += 1
            # NOTE(review): the first replace's result is immediately
            # overwritten -- slashes survive in tmp_share; looks unintended.
            tmp_share = host.replace("/","")
            tmp_share = host.replace("\\","")
            orig_host = host # ensures that we can check the original host value later on if we need to
            if "\\\\" in host: # this checks to see if host is in the format of something like \\192.168.0.1\C$
                host = host[2:]
                host = host[:host.find("\\")]
            elif "smb://" in host: # this checks to see if the host contains a format such as smb://192.168.0.1/C$
                host = host[6:]
                host = host[:host.find("/")]
            if self.skip_host == host:
                self.blacklisted.append(host)
                continue
            if len(self.list_of_shares) == 1 and ("//" in orig_host or "\\\\" in orig_host):
                # Share name is embedded in the target string itself.
                if "//" in orig_host:
                    share = orig_host[orig_host.rfind("/")+1:]
                elif "\\\\" in orig_host:
                    if orig_host[-1] == "\\":
                        temp = orig_host[:-1]
                    # NOTE(review): if orig_host does not end with "\\", temp
                    # still holds a stale value from the loop above -- verify.
                    share = temp[temp.rfind("\\")+1:]
                self.smb_host = host
                self.smb_share = share
            else:
                for share in self.list_of_shares:
                    if self.skip_host == host:
                        self.blacklisted.append(host)
                        break
                    self.smb_host = host
                    self.smb_share = share
            tmp_share = tmp_share.replace(self.smb_host,"")
            tmp_share = tmp_share.replace("smb:///","")
            if len(tmp_share) == 0 and (self.smb_share != "profile" and len(self.smb_share) == 0):
                print empty_share_error % self.smb_host
                continue
            if len(self.list_of_shares) > 1:
                # Multiple shares: spider each one on this host in turn.
                for x in self.list_of_shares:
                    self.smb_share = x
                    print "\n [*] Attempting to spider smb://%s/%s" % (self.smb_host, self.smb_share.replace("profile","<user profiles>"))
                    self.spider_host()
            else:
                print "\n [*] Attempting to spider smb://%s/%s " % (self.smb_host, self.smb_share.replace("profile","<user profiles>"))
                self.spider_host()
            # Progress report; the remaining count is adjusted when targets
            # came from a file (trailing blank line inflates the list).
            if self.list_of_shares[0] == "profile":
                if self.inputfile:
                    print " [*] Finished with smb://%s/<user profiles>. [Remaining: %s] " % (self.smb_host, str(len(self.list_of_hosts)-self.total_hosts-1))
                else:
                    print " [*] Finished with smb://%s/<user profiles>. [Remaining: %s] " % (self.smb_host, str(len(self.list_of_hosts)-self.total_hosts))
            else:
                print " [*] Finished with smb://%s/%s. [Remaining: %s] " % (self.smb_host, self.smb_share, str(len(self.list_of_hosts)-self.total_hosts))
        if self.smb_download: self.start_downloading()
def start_downloading(self):
if len(self.sensitive_strings) == 0: return
print "\n" + colors.blue + " [*] " + colors.norm + "Attempting to download files that were deemed sensitive."
if not os.path.exists('smbspider-downloads'):
os.makedirs('smbspider-downloads')
for f in self.file_locations:
host = f[2:]
host = str(host[:host.find("\\")])
share = f[len(host)+3:]
share = share[:share.find("\\")]
full_path = f.replace("\\\\%s\\%s\\" % (host, share), "").strip()
file_name = full_path[full_path.rfind("\\")+1:]
for s in self.sensitive_strings:
if s in file_name:
result = commands.getoutput("%s -c \"get \\\"%s\\\" \\\"%s_%s\\\"\" //%s/%s -U %s " % (self.smbclient(), full_path.replace("\\","\\\\"), \
host,file_name, host, share, self.credentials))
print colors.blue + " [*] " + colors.norm + "Downloaded: %s from smb://%s/%s" % (file_name, host, share)
commands.getoutput("mv \"%s_%s\" \"smbspider-downloads/%s\"" % (host, file_name, host, file_name))
else:
temp_file = s.split("*")
all_match = 0
for tmp in temp_file:
if tmp in full_path:
all_match = 1
else:
all_match = 0
break
if all_match == 1:
result = commands.getoutput("%s -c \"get \\\"%s\\\" \\\"%s_%s\\\"\" //%s/%s -U %s " % (self.smbclient(), full_path.replace("\\","\\\\"), \
host,file_name, host, share, self.credentials))
print colors.blue + " [*] " + colors.norm + "Downloaded: %s from smb://%s/%s" % (file_name, host, share)
commands.getoutput("mv \"%s_%s\" \"smbspider-downloads/%s_%s\"" % (host, file_name, host, file_name))
    def parse_result(self, result):
        # Parse raw smbclient listing output: strip noise lines, then emit
        # (or log) the full UNC path of every file that was listed.
        ############################################################
        # this small section removes all of the unnecessary crap. a bit ugly, i know! :x
        errors = ["O_SUCH_F","ACCESS_DEN",
            "US_OBJECT_NAME_IN", "US_INVALID_NETWORK_RE", "CT_NAME_NOT",
            "not present","CONNECTION_REFUSED"
            ]
        result = result.split('\n')
        purge = []
        # Substrings identifying smbclient chatter rather than file entries.
        trash = [" . ", " .. ", "Domain=", " D", "blocks of size",
            "wrapper called", "Substituting user supplied"]
        for num in range(0,len(result)):
            for d in trash:
                if d in result[num] or len(result[num]) < 2:
                    purge.append(num)
        # Delete from the end so earlier indices stay valid.
        purge = list(set(purge))
        purge = sorted(purge, reverse=True)
        for i in purge:
            del result[i]
        ############################################################
        directory = ""
        filename = ""
        file_locations = []
        file_change = False
        for x in result:
            # Lines starting with "\" are directory headers; anything else
            # is a file entry within the most recent directory.
            if x[0] == "\\":
                directory = x
                file_change = False
            else:
                filename = x[2:]
                filename = filename[:filename.find(" ")]
                file_change = True
            fail = 0
            if not file_change: continue
            for error in errors:
                if error in filename:
                    fail = 1
            if fail == 0 and len(filename) > 0:
                if not self.outputfile:
                    file_complete_path = "\\\\%s\%s" % (self.smb_host,self.smb_share) + directory + "\\" + filename
                    print colors.blue + " [*] " + colors.norm + file_complete_path
                else:
                    # -w mode: append the hit to a per-host/share/user file.
                    if not os.path.exists('smbspider'):
                        os.makedirs('smbspider')
                    if self.profile:
                        lawl_share = "profile"
                    else:
                        lawl_share = self.smb_share
                    output = open("smbspider/smbspider_%s_%s_%s.txt" % (self.smb_host, lawl_share, self.credentials.split()[0]), 'a')
                    file_complete_path = colors.blue + " [*] " + colors.norm + "\\\\%s\%s" % (self.smb_host,lawl_share) + directory + "\\" + filename + "\n"
                    output.write(file_complete_path)
                    output.close()
                if self.smb_download:
                    # Remember the bare UNC path for start_downloading().
                    self.file_locations.append(file_complete_path[file_complete_path.find("\\\\"):])
def fingerprint_fs(self):
result = commands.getoutput("%s -c \"ls Users\\*\" //%s/C$ -U %s" % (self.smbclient(), self.smb_host, self.credentials)).split()
if self.check_errors(result[-1]):
return "error"
if "NT_STATUS_OBJECT_NAME_NOT_FOUND" in result:
return "old"
else:
return "new"
    def find_users(self, result):
        # Extract user-profile directory names from raw smbclient listing
        # output of C$\Users\* (or Documents and Settings\*).
        result = result.split('\n')
        purge = []
        users = []
        for num in range(0,len(result)): # cleans some stuff up a bit.
            if " . " in result[num] or " .. " in result[num] or "Domain=" in result[num]\
            or len(result[num]) < 2 or "blocks of size" in result[num]:
                purge.append(num)
        # Delete noise lines from the end so indices stay valid.
        purge = sorted(purge, reverse=True)
        for i in purge:
            del result[i]
        # clean up users list a little bit
        for i in result:
            # Take everything before the directory-attribute column, then trim
            # to the last "word" character so trailing padding is dropped.
            user = i[:i.find(" D")]
            user = user[2:user.rfind(re.sub(r'\W+', '', user)[-1])+1]
            users.append(user)
        return users
    def check_errors(self, result):
        # Inspect smbclient output for known failure markers. Returns True
        # when the caller should skip this host/share; may exit() outright on
        # bad credentials (unless -n / ignorecheck was given).
        access_error = {
            "UNREACHABLE":" [-] Error [%s]: Check to ensure that host is online and that share is accessible." % self.smb_host,
            "UNSUCCESSFUL":" [-] Error [%s]: Check to ensure that host is online and that share is accessible." % self.smb_host,
            "TIMEOUT":" [-] Error [%s]: Check to ensure that host is online and that share is accessible." % self.smb_host,
            "LOGON_SERVER":" [-] Error %s Cannot contact logon server. Skipping host." % self.smb_host
        }
        for err in access_error:
            if err in result:
                print colors.red + access_error[err] + colors.norm
                # Blacklist the host so remaining shares are skipped too.
                self.skip_host = self.smb_host
                return True
        if "LOGON_FAIL" in result.split()[-1] and not self.ignorecheck:
            print colors.red + " [-] " + colors.norm + "Error [%s]: Invalid credentials. Please correct credentials and try again." % self.smb_host
            exit()
        elif "ACCESS_DENIED" in result.split()[-1]:
            print colors.red + " [-] " + colors.norm + "Error [%s]: Valid credentials, but no access. Try another account." % self.smb_host
        elif "BAD_NETWORK" in result.split()[-1] or "CONNECTION_REFUSED" in result.split()[-1]:
            print colors.red + " [-] " + colors.norm + "Error: Invalid share -> smb://%s/%s" % (self.smb_host,self.smb_share)
            return True
def smbclient(self):
if self.pth:
return "pth-smbclient"
else:
return "smbclient"
    def spider_host(self):
        # Spider one host/share. The pseudo-share "profile" maps to the user
        # profile folders on C$, chosen per the detected filesystem layout.
        if self.smb_share.lower() == "profile":
            self.smb_share = "C$"
            self.profile = True
            if self.fingerprint_fs() == "error":
                return
            elif self.fingerprint_fs() == "old":
                # XP-era layout: Documents and Settings\<user>\<folder>
                folders = ['My Documents','Desktop','Documents']
                result = commands.getoutput("%s -c \"ls \\\"Documents and Settings\\*\" //%s/C$ -U %s" % (self.smbclient(), self.smb_host, self.credentials))
                if self.check_errors(result):
                    return
                users = self.find_users(result)
                for user in users:
                    for folder in folders:
                        result = commands.getoutput("%s -c \"recurse;ls \\\"Documents and Settings\\%s\\%s\" //%s/C$ -U %s"\
                            % (self.smbclient(), user, folder, self.smb_host, self.credentials))
                        self.parse_result(result)
            else:
                # Vista+ layout: Users\<user>\<folder>
                folders = ['Documents','Desktop','Music','Videos','Downloads','Pictures']
                result = commands.getoutput("%s -c \"ls \\\"Users\\*\" //%s/C$ -U %s" % (self.smbclient(), self.smb_host, self.credentials))
                if self.check_errors(result):
                    return
                users = self.find_users(result)
                for user in users:
                    for folder in folders:
                        result = commands.getoutput("%s -c \"recurse;ls \\\"Users\\%s\\%s\" //%s/C$ -U %s" % (self.smbclient(), user, folder, self.smb_host, self.credentials))
                        self.parse_result(result)
        else:
            # Ordinary named share: one recursive listing of the whole share.
            result = commands.getoutput("%s -c \"recurse;ls\" \"//%s/%s\" -U %s" % (self.smbclient(), self.smb_host, self.smb_share, self.credentials))
            if self.check_errors(result):
                return
            self.parse_result(result)
if __name__ == "__main__":
    # Entry point: run the option parser/spider, handling Ctrl-C gracefully
    # and reporting any other error before exiting.
    try:
        start(argv[1:])
    except KeyboardInterrupt:
        print "\nExiting. Interrupted by user (ctrl-c)."
        exit()
    except Exception, err:
        print err
        exit()
    print "\n-----"
    print "Completed in: %.1fs" % (time.time() - start_time)
| 38.541371 | 157 | 0.626326 | 2,339 | 16,303 | 4.238563 | 0.164173 | 0.031773 | 0.029958 | 0.018156 | 0.353843 | 0.311781 | 0.271031 | 0.234819 | 0.210006 | 0.181864 | 0 | 0.008337 | 0.198 | 16,303 | 422 | 158 | 38.632701 | 0.749904 | 0.067227 | 0 | 0.241935 | 0 | 0.016129 | 0.259545 | 0.007569 | 0.005376 | 0 | 0 | 0 | 0 | 0 | null | null | 0.02957 | 0.005376 | null | null | 0.129032 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12fd58577de1528a698dc2d572273da89af94b00 | 217 | py | Python | serempre_todo/utils/choices.py | pygabo/Serempre | 6b29e337abd8d1b3f71ee889d318a2d473d6c744 | [
"MIT"
] | null | null | null | serempre_todo/utils/choices.py | pygabo/Serempre | 6b29e337abd8d1b3f71ee889d318a2d473d6c744 | [
"MIT"
] | null | null | null | serempre_todo/utils/choices.py | pygabo/Serempre | 6b29e337abd8d1b3f71ee889d318a2d473d6c744 | [
"MIT"
] | null | null | null | TASK_STATUS = [
('TD', 'To Do'),
('IP', 'In Progress'),
('QA', 'Testing'),
('DO', 'Done'),
]
TASK_PRIORITY = [
('ME', 'Medium'),
('HI', 'Highest'),
('HG', 'High'),
('LO', 'Lowest'),
]
| 15.5 | 26 | 0.40553 | 22 | 217 | 3.909091 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.271889 | 217 | 13 | 27 | 16.692308 | 0.544304 | 0 | 0 | 0 | 0 | 0 | 0.304147 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12fea94d07f9c12bbbce2e89b9de91f96defafac | 1,330 | py | Python | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/demos/SelectionDialog.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 3 | 2017-09-26T03:09:14.000Z | 2022-03-20T11:12:34.000Z | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/demos/SelectionDialog.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/demos/SelectionDialog.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 2 | 2019-10-05T23:02:41.000Z | 2020-06-25T20:21:02.000Z | title = 'Pmw.SelectionDialog demonstration'
# Import Pmw from this directory tree.
import sys
sys.path[:0] = ['../../..']
import Tkinter
import Pmw
class Demo:
    """Pmw demo: a button that pops up a SelectionDialog and prints the
    user's choice. (Python 2 / Tkinter-classic syntax.)"""
    def __init__(self, parent):
        # Create the dialog.
        self.dialog = Pmw.SelectionDialog(parent,
            title = 'My SelectionDialog',
            buttons = ('OK', 'Cancel'),
            defaultbutton = 'OK',
            scrolledlist_labelpos = 'n',
            label_text = 'What do you think of Pmw?',
            scrolledlist_items = ('Cool man', 'Cool', 'Good', 'Bad', 'Gross'),
            command = self.execute)
        # Hidden until the launch button is pressed.
        self.dialog.withdraw()

        # Create button to launch the dialog.
        w = Tkinter.Button(parent, text = 'Show selection dialog',
            command = self.dialog.activate)
        w.pack(padx = 8, pady = 8)

    def execute(self, result):
        # Dialog callback: report which button was pressed and the current
        # list selection (if any), then dismiss the dialog.
        sels = self.dialog.getcurselection()
        if len(sels) == 0:
            print 'You clicked on', result, '(no selection)'
        else:
            print 'You clicked on', result, sels[0]
        self.dialog.deactivate(result)
######################################################################
# Create demo in root window for testing.
if __name__ == '__main__':
    root = Tkinter.Tk()
    Pmw.initialise(root)
    root.title(title)
    # Stand-alone runs get an Exit button at the bottom of the window.
    exitButton = Tkinter.Button(root, text = 'Exit', command = root.destroy)
    exitButton.pack(side = 'bottom')
    widget = Demo(root)
    root.mainloop()
| 27.708333 | 76 | 0.619549 | 158 | 1,330 | 5.120253 | 0.531646 | 0.061805 | 0.037083 | 0.042027 | 0.05686 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00466 | 0.193233 | 1,330 | 47 | 77 | 28.297872 | 0.749301 | 0.098496 | 0 | 0 | 0 | 0 | 0.177936 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.088235 | null | null | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
12ff9748e2c126e4060dc274380a9e865c327195 | 778 | py | Python | py3plex/algorithms/infomap/examples/python/example-simple.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | 1 | 2020-02-20T07:37:02.000Z | 2020-02-20T07:37:02.000Z | py3plex/algorithms/infomap/examples/python/example-simple.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | null | null | null | py3plex/algorithms/infomap/examples/python/example-simple.py | awesome-archive/Py3plex | a099acb992441c1630208ba13694acb8e2a38895 | [
"BSD-3-Clause"
] | null | null | null | from infomap import infomap
infomapWrapper = infomap.Infomap("--two-level")
# Add weight as an optional third argument
infomapWrapper.addLink(0, 1)
infomapWrapper.addLink(0, 2)
infomapWrapper.addLink(0, 3)
infomapWrapper.addLink(1, 0)
infomapWrapper.addLink(1, 2)
infomapWrapper.addLink(2, 1)
infomapWrapper.addLink(2, 0)
infomapWrapper.addLink(3, 0)
infomapWrapper.addLink(3, 4)
infomapWrapper.addLink(3, 5)
infomapWrapper.addLink(4, 3)
infomapWrapper.addLink(4, 5)
infomapWrapper.addLink(5, 4)
infomapWrapper.addLink(5, 3)
infomapWrapper.run()
tree = infomapWrapper.tree
print("Found %d modules with codelength: %f" % (tree.numTopModules(), tree.codelength()))
print("\n#node module")
for node in tree.leafIter():
print("%d %d" % (node.physIndex, node.moduleIndex()))
| 25.096774 | 89 | 0.75964 | 104 | 778 | 5.682692 | 0.365385 | 0.497462 | 0.111675 | 0.077834 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.039829 | 0.096401 | 778 | 30 | 90 | 25.933333 | 0.800853 | 0.051414 | 0 | 0 | 0 | 0 | 0.089796 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.045455 | 0.136364 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4206719b66d7095a812ba8babe145ead4c49882e | 1,325 | py | Python | test/test_edge.py | jbschwartz/spatial | 04dc619ae024ebb4f516cd6483f835421c7d84b1 | [
"MIT"
] | 1 | 2022-01-02T22:03:09.000Z | 2022-01-02T22:03:09.000Z | test/test_edge.py | jbschwartz/spatial | 04dc619ae024ebb4f516cd6483f835421c7d84b1 | [
"MIT"
] | null | null | null | test/test_edge.py | jbschwartz/spatial | 04dc619ae024ebb4f516cd6483f835421c7d84b1 | [
"MIT"
] | null | null | null | import unittest
from spatial import Edge, Vector3
class TestEdge(unittest.TestCase):
    """Unit tests for spatial.Edge: construction, direction-agnostic
    equality, length, and the endpoint-to-endpoint vector."""

    def setUp(self) -> None:
        # Symmetric endpoints about the origin keep expectations simple.
        self.start = Vector3(1, 2, 3)
        self.end = Vector3(-1, -2, -3)
        self.middle = Vector3(0, 0, 0)

        self.edge = Edge(self.start, self.end)

    def test__init__accepts_endpoints(self) -> None:
        self.assertEqual(self.edge.start, self.start)
        self.assertEqual(self.edge.end, self.end)

    def test__eq__returns_true_for_edges_regardless_of_direction(self) -> None:
        same_edge = Edge(self.start, self.end)
        self.assertEqual(self.edge, same_edge)

        # Reversed endpoints still compare equal (edges are undirected).
        opposite_edge = Edge(self.end, self.start)
        self.assertEqual(self.edge, opposite_edge)

        other_edge = Edge(self.start, self.middle)
        self.assertNotEqual(other_edge, self.edge)

    def test__eq__returns_notimplemented_for_incompatible_types(self) -> None:
        self.assertTrue(self.edge.__eq__(2) == NotImplemented)
        self.assertTrue(self.edge.__eq__("string") == NotImplemented)

    def test_length_returns_the_length_of_the_edge(self) -> None:
        self.assertEqual(self.edge.length, (self.start - self.end).length())

    def test_vector_returns_the_vector_between_the_edges_endpoints(self) -> None:
        self.assertEqual(self.edge.vector, self.end - self.start)
| 36.805556 | 81 | 0.695849 | 178 | 1,325 | 4.88764 | 0.258427 | 0.091954 | 0.089655 | 0.158621 | 0.367816 | 0.256322 | 0.091954 | 0 | 0 | 0 | 0 | 0.013023 | 0.188679 | 1,325 | 35 | 82 | 37.857143 | 0.796279 | 0 | 0 | 0 | 0 | 0 | 0.004528 | 0 | 0 | 0 | 0 | 0 | 0.36 | 1 | 0.24 | false | 0 | 0.08 | 0 | 0.36 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4207202cb690f62fcf73ad7c61a82a12bebf477d | 419 | py | Python | src/login/migrations/0017_auto_20191006_1716.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | src/login/migrations/0017_auto_20191006_1716.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | src/login/migrations/0017_auto_20191006_1716.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 2.0.7 on 2019-10-06 11:46
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: change Login.logout's default to the
    # datetime.datetime.now callable (evaluated each time a row is saved).

    dependencies = [
        ('login', '0016_auto_20191006_1715'),
    ]

    operations = [
        migrations.AlterField(
            model_name='login',
            name='logout',
            field=models.DateTimeField(default=datetime.datetime.now),
        ),
    ]
| 20.95 | 70 | 0.620525 | 45 | 419 | 5.688889 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101307 | 0.26969 | 419 | 19 | 71 | 22.052632 | 0.735294 | 0.107399 | 0 | 0 | 1 | 0 | 0.104839 | 0.061828 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4209d56bec0f4b46b06778591fc9cb1f2f7511a5 | 3,140 | py | Python | swagger_server/models/linecode_r_matrix.py | garagonc/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | 3 | 2019-06-24T09:02:21.000Z | 2020-01-30T10:37:46.000Z | swagger_server/models/linecode_r_matrix.py | linksmart/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | null | null | null | swagger_server/models/linecode_r_matrix.py | linksmart/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server.models.impedance import Impedance # noqa: F401,E501
from swagger_server import util
class LinecodeRMatrix(Model):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    def __init__(self, phase_r: Impedance=None, phase_s: Impedance=None, phase_t: Impedance=None):  # noqa: E501
        """LinecodeRMatrix - a model defined in Swagger

        :param phase_r: The phase_r of this LinecodeRMatrix.  # noqa: E501
        :type phase_r: Impedance
        :param phase_s: The phase_s of this LinecodeRMatrix.  # noqa: E501
        :type phase_s: Impedance
        :param phase_t: The phase_t of this LinecodeRMatrix.  # noqa: E501
        :type phase_t: Impedance
        """
        # Attribute name -> declared Swagger type (used by the deserializer).
        self.swagger_types = {
            'phase_r': Impedance,
            'phase_s': Impedance,
            'phase_t': Impedance
        }

        # Python attribute name -> JSON key on the wire.
        self.attribute_map = {
            'phase_r': 'phase_R',
            'phase_s': 'phase_S',
            'phase_t': 'phase_T'
        }

        self._phase_r = phase_r
        self._phase_s = phase_s
        self._phase_t = phase_t

    @classmethod
    def from_dict(cls, dikt) -> 'LinecodeRMatrix':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Linecode_R_Matrix of this LinecodeRMatrix.  # noqa: E501
        :rtype: LinecodeRMatrix
        """
        return util.deserialize_model(dikt, cls)

    @property
    def phase_r(self) -> Impedance:
        """Gets the phase_r of this LinecodeRMatrix.

        :return: The phase_r of this LinecodeRMatrix.
        :rtype: Impedance
        """
        return self._phase_r

    @phase_r.setter
    def phase_r(self, phase_r: Impedance):
        """Sets the phase_r of this LinecodeRMatrix.

        :param phase_r: The phase_r of this LinecodeRMatrix.
        :type phase_r: Impedance
        """
        self._phase_r = phase_r

    @property
    def phase_s(self) -> Impedance:
        """Gets the phase_s of this LinecodeRMatrix.

        :return: The phase_s of this LinecodeRMatrix.
        :rtype: Impedance
        """
        return self._phase_s

    @phase_s.setter
    def phase_s(self, phase_s: Impedance):
        """Sets the phase_s of this LinecodeRMatrix.

        :param phase_s: The phase_s of this LinecodeRMatrix.
        :type phase_s: Impedance
        """
        self._phase_s = phase_s

    @property
    def phase_t(self) -> Impedance:
        """Gets the phase_t of this LinecodeRMatrix.

        :return: The phase_t of this LinecodeRMatrix.
        :rtype: Impedance
        """
        return self._phase_t

    @phase_t.setter
    def phase_t(self, phase_t: Impedance):
        """Sets the phase_t of this LinecodeRMatrix.

        :param phase_t: The phase_t of this LinecodeRMatrix.
        :type phase_t: Impedance
        """
        self._phase_t = phase_t
| 26.610169 | 112 | 0.62293 | 396 | 3,140 | 4.699495 | 0.174242 | 0.07093 | 0.180548 | 0.029554 | 0.494358 | 0.361096 | 0.240193 | 0.212789 | 0.132187 | 0 | 0 | 0.012618 | 0.293312 | 3,140 | 117 | 113 | 26.837607 | 0.826048 | 0.426115 | 0 | 0.214286 | 0 | 0 | 0.052174 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.190476 | false | 0 | 0.142857 | 0 | 0.452381 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
420dab6ca09e09f7cbafe716ac539156b5dcaa62 | 773 | py | Python | setup.py | atait/klayout-gadgets | a8d9655e547fc4531982bbe55e632009bad39096 | [
"MIT"
] | 13 | 2018-12-02T23:32:29.000Z | 2022-02-11T19:28:49.000Z | setup.py | atait/klayout-gadgets | a8d9655e547fc4531982bbe55e632009bad39096 | [
"MIT"
] | 3 | 2019-01-15T23:59:59.000Z | 2020-12-04T16:30:48.000Z | setup.py | atait/klayout-gadgets | a8d9655e547fc4531982bbe55e632009bad39096 | [
"MIT"
] | 1 | 2020-12-01T22:56:03.000Z | 2020-12-01T22:56:03.000Z | from setuptools import setup
def readme():
    """Return the contents of README.md for use as the long description."""
    with open('README.md') as handle:
        return handle.read()
# Package metadata for lygadgets: bridges klayout, its standalone module,
# and regular Python environments.
setup(name='lygadgets',
      version='0.1.31',
      description='Tools to make klayout, the standalone, and python environments work better together',
      long_description=readme(),
      long_description_content_type='text/markdown',
      author='Alex Tait',
      author_email='alexander.tait@nist.gov',
      license='MIT',
      packages=['lygadgets'],
      install_requires=['future', 'xmltodict'],
      # Ship klayout macro files (*.lym) alongside the Python sources.
      package_data={'': ['*.lym']},
      include_package_data=True,
      # Console commands for linking/unlinking packages into klayout.
      entry_points={'console_scripts': [
          'lygadgets_link=lygadgets.command_line:cm_link_any',
          'lygadgets_unlink=lygadgets.command_line:cm_unlink_any',
      ]},
      )
| 29.730769 | 104 | 0.654592 | 89 | 773 | 5.47191 | 0.752809 | 0.061602 | 0.082136 | 0.090349 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006515 | 0.205692 | 773 | 25 | 105 | 30.92 | 0.786645 | 0 | 0 | 0 | 0 | 0 | 0.390686 | 0.161708 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | true | 0 | 0.047619 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
420ed2750c333b6a9c2bf33a7391b56504549e6c | 4,639 | py | Python | stackalytics/get_metric.py | yaoice/python_demo | 024f42f9cfce757bdaddf24202d8547801f0e8f6 | [
"Apache-2.0"
] | null | null | null | stackalytics/get_metric.py | yaoice/python_demo | 024f42f9cfce757bdaddf24202d8547801f0e8f6 | [
"Apache-2.0"
] | 2 | 2021-02-08T20:17:39.000Z | 2021-06-01T21:49:12.000Z | stackalytics/get_metric.py | yaoice/python_demo | 024f42f9cfce757bdaddf24202d8547801f0e8f6 | [
"Apache-2.0"
] | null | null | null | #/usr/bin/env python
import httplib2
import json
import sys
from prettytable import PrettyTable
from config import field
class BaseStackalytics(object):
    """Base class implementing the singleton pattern: every instantiation
    of a given class returns the same shared instance."""

    # Lazily-created shared instance (one per subclass that assigns it).
    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # BUG FIX: object.__new__ accepts no extra arguments; forwarding
            # *args/**kwargs (as the original did) is a TypeError on Python 3
            # and a DeprecationWarning on Python 2. Constructor arguments are
            # still accepted here for subclass signatures, but only the class
            # itself is passed through.
            cls._instance = super(BaseStackalytics, cls).__new__(cls)
        return cls._instance
class Stackalytics(BaseStackalytics):
    """Thin HTTP client for the stackalytics.com JSON API (singleton)."""

    def __init__(self, prefix):
        """prefix: base URL, e.g. "http://stackalytics.com"."""
        super(Stackalytics, self).__init__()
        self._prefix = prefix
        self._http_instance = self.get_http_instance()

    def get_http_instance(self):
        # httplib2 client with an on-disk response cache in ./.cache
        return httplib2.Http(".cache")

    def get_metrics(self, url):
        """GET *url* (relative to the prefix); return (response, content).

        Exits the process with status 1 if the host cannot be resolved.
        """
        try:
            return self._http_instance.request(self._prefix + url,
                                               "GET",
                                               headers={'Accept': 'application/json'})
        except httplib2.ServerNotFoundError:
            # Fix: converted from the py2-only print statement to the
            # function form, which is valid on both Python 2 and 3.
            print("Url {} not found".format(url))
            sys.exit(1)
def main():
    """Collect per-company and per-engineer Stackalytics metrics for the
    configured release and print one PrettyTable per project type.

    All query parameters (release, project types, companies, metrics,
    engineer ids) come from the imported ``field`` config dict.
    """
    company_statistics = {}
    engineer_statistics = {}
    stackalytics = Stackalytics("http://stackalytics.com")

    # Per-company metrics; an empty 'stats' list means no activity -> 0.
    for project_type in field['project_type']:
        company_statistics[project_type] = {}
        for company in field['company']:
            company_statistics[project_type][company] = {}
            for metric in field['metric']:
                company_statistics[project_type][company][metric] = {}
                url = "/api/1.0/stats/companies?release={}&metric={}&project_type={}&company={}".format(
                    field['release'], metric, project_type, company)
                resp, content = stackalytics.get_metrics(url)
                stats = json.loads(content)['stats']
                try:
                    metric_dict = stats[0]
                except IndexError:
                    metric_dict = {'id': company, 'metric': 0}
                company_statistics[project_type][company][metric] = metric_dict

    # Per-engineer metrics, scoped to the owner company from the config.
    # NOTE(review): 'owercompany' is the literal key used in config.py and
    # the '?&release' double separator is accepted by the API; both kept.
    for project_type in field['project_type']:
        engineer_statistics[project_type] = {}
        for engineer in field['engineers']['ids']:
            engineer_statistics[project_type][engineer] = {}
            for metric in field['metric']:
                engineer_statistics[project_type][engineer][metric] = {}
                engineers_url = "/api/1.0/stats/engineers?&release={}&metric={}"\
                    "&project_type={}&company={}&user_id={}".format(
                        field['release'], metric, project_type,
                        field['engineers']['owercompany'], engineer)
                engineers_resp, engineers_content = stackalytics.get_metrics(engineers_url)
                engineers_stats = json.loads(engineers_content)['stats']
                try:
                    engineers_metric_dict = engineers_stats[0]
                except IndexError:
                    engineers_metric_dict = {'id': engineer, 'metric': 0}
                engineer_statistics[project_type][engineer][metric] = engineers_metric_dict

    # Fix: py2-only print statements converted to the function form, which
    # behaves identically here and is valid on both Python 2 and 3.
    engineer_table_field = ['metric'] + [engineer for engineer in field['engineers']['ids']]
    for project_type in field['project_type']:
        print("{} {} project by tencent individual:".format(field['release'], project_type))
        table = PrettyTable(engineer_table_field)
        for metric in field['metric']:
            table.add_row([metric] + [engineer_statistics[project_type][engineer][metric]['metric'] for engineer in field['engineers']['ids']])
        print(table)

    table_field = ['metric'] + [company.replace('%20', ' ') for company in field['company']]
    for project_type in field['project_type']:
        print("{} {} project by company:".format(field['release'], project_type))
        table = PrettyTable(table_field)
        for metric in field['metric']:
            table.add_row([metric] + [company_statistics[project_type][company][metric]['metric'] for company in field['company']])
        print(table)
    # print company_statistics
# Script entry point: main()'s return value becomes the process exit status.
if __name__ == '__main__':
    sys.exit(main())
| 43.764151 | 143 | 0.527053 | 416 | 4,639 | 5.629808 | 0.194712 | 0.112724 | 0.089667 | 0.059778 | 0.456447 | 0.340734 | 0.272417 | 0.081127 | 0.081127 | 0.081127 | 0 | 0.004757 | 0.365596 | 4,639 | 105 | 144 | 44.180952 | 0.79103 | 0.010131 | 0 | 0.231707 | 0 | 0 | 0.114597 | 0.033987 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.060976 | null | null | 0.060976 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
42144545d417abe762a3d9307033d86aace5b332 | 805 | py | Python | ontask/migrations/0004_remove_old_migration_refs.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 33 | 2017-12-02T04:09:24.000Z | 2021-11-07T08:41:57.000Z | ontask/migrations/0004_remove_old_migration_refs.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 189 | 2017-11-16T04:06:29.000Z | 2022-03-11T23:35:59.000Z | ontask/migrations/0004_remove_old_migration_refs.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 30 | 2017-11-30T03:35:44.000Z | 2022-01-31T03:08:08.000Z | # Generated by Django 2.2.4 on 2019-08-24 06:02
from django.db import connection as con, migrations
from psycopg2 import sql
def remove_old_migration_refs(apps, schema_editor):
    """Delete django_migrations rows left over from the pre-consolidation apps.

    Uses psycopg2's sql composition so the app label is inserted as a safely
    quoted literal rather than via string interpolation.
    """
    delete_stmt = 'DELETE FROM django_migrations WHERE app={0}'
    old_apps = [
        'action', 'core', 'dataops', 'logs', 'oauth', 'ontask_oauth',
        'profiles', 'scheduler', 'table', 'workflow']
    with con.cursor() as cursor:
        for app_name in old_apps:
            query = sql.SQL(delete_stmt).format(sql.Literal(app_name))
            cursor.execute(query)
class Migration(migrations.Migration):
    # Must run after the site-preferences transfer migration.
    dependencies = [
        ('ontask', '0003_transfer_siteprefs'),
    ]

    # Forward-only data migration (no reverse_code): purge stale rows from
    # django_migrations for the apps merged into 'ontask'.
    operations = [
        migrations.RunPython(code=remove_old_migration_refs),
    ]
| 26.833333 | 78 | 0.650932 | 97 | 805 | 5.14433 | 0.608247 | 0.04008 | 0.072144 | 0.088176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034202 | 0.237267 | 805 | 29 | 79 | 27.758621 | 0.778502 | 0.055901 | 0 | 0 | 1 | 0 | 0.184697 | 0.030343 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.105263 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
42149897d0b37e2db558007492da879e2a80968d | 639 | py | Python | scripts/tfloc_summary.py | lldelisle/bx-python | 19ab41e0905221e3fcaaed4b74faf2d7cda0d15a | [
"MIT"
] | 122 | 2015-07-01T12:00:22.000Z | 2022-03-02T09:27:35.000Z | scripts/tfloc_summary.py | lldelisle/bx-python | 19ab41e0905221e3fcaaed4b74faf2d7cda0d15a | [
"MIT"
] | 64 | 2015-11-06T21:03:18.000Z | 2022-03-24T00:55:27.000Z | scripts/tfloc_summary.py | lldelisle/bx-python | 19ab41e0905221e3fcaaed4b74faf2d7cda0d15a | [
"MIT"
] | 60 | 2015-10-05T19:19:36.000Z | 2021-11-19T20:53:54.000Z | #!/usr/bin/env python
"""
Read TFLOC output from stdin and write out a summary in which the nth line
contains the number of sites found in the nth alignment of the input.
TODO: This is very special case, should it be here?
"""
import sys
from collections import defaultdict
# Tally sites per alignment index, then print one count per line up to the
# highest index seen (missing indices print 0).
counts = defaultdict(int)
max_index = -1
for raw in sys.stdin:
    lead = raw[0]
    if lead.isdigit():
        # A numeric line names the alignment the following sites belong to.
        current_index = int(raw)
        if current_index > max_index:
            max_index = current_index
    elif lead == "'":
        counts[current_index] += 1
    else:
        raise ValueError("Invalid input line " + raw)
for idx in range(max_index + 1):
    print(counts.get(idx, 0))
| 22.821429 | 74 | 0.674491 | 100 | 639 | 4.24 | 0.59 | 0.075472 | 0.042453 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012146 | 0.226917 | 639 | 27 | 75 | 23.666667 | 0.846154 | 0.341158 | 0 | 0 | 0 | 0 | 0.048426 | 0 | 0 | 0 | 0 | 0.037037 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0.071429 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
421c88021499b88620b09442779453fef21cf565 | 1,212 | py | Python | task_manager/users/forms.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | task_manager/users/forms.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | task_manager/users/forms.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import (
AuthenticationForm,
UserCreationForm,
UsernameField,
)
# Resolve the active user model once at import time (honours AUTH_USER_MODEL).
User = get_user_model()
class UserLoginForm(AuthenticationForm):
    """Login form whose username/password widgets carry Tailwind CSS classes.

    The redundant ``__init__`` override, which only delegated to the parent,
    has been removed; behaviour is unchanged.
    """

    username = UsernameField(widget=forms.TextInput(
        attrs={'class': 'bg-gray-100 rounded-lg p-2'}))
    password = forms.CharField(widget=forms.PasswordInput(
        attrs={
            'class': 'bg-gray-100 rounded-lg p-2',
        }
    ))
class UserSignUpForm(UserCreationForm):
    """Sign-up form whose fields carry Tailwind CSS classes.

    The redundant ``__init__`` override, which only delegated to the parent,
    has been removed; behaviour is unchanged.
    """

    username = forms.CharField(
        widget=forms.TextInput(attrs={"class": "bg-gray-100 rounded-lg p-2"})
    )
    password1 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={
                "class": "bg-gray-100 rounded-lg p-2",
            }
        )
    )
    password2 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={
                "class": "bg-gray-100 rounded-lg p-2",
            }
        )
    )
| 26.347826 | 77 | 0.605611 | 127 | 1,212 | 5.622047 | 0.291339 | 0.077031 | 0.084034 | 0.112045 | 0.571429 | 0.498599 | 0.42577 | 0.42577 | 0.42577 | 0.42577 | 0 | 0.024691 | 0.264851 | 1,212 | 45 | 78 | 26.933333 | 0.776655 | 0 | 0 | 0.236842 | 0 | 0 | 0.127888 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0.131579 | 0.078947 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4222c98b7de332bf9b4c1cc8bba790b9eea99314 | 1,021 | py | Python | wiiu.py | RN-JK/UBIART-Texture-Decoder | 71e190c12b1b8813dcda1f26cd115d9f89cc7619 | [
"MIT"
] | null | null | null | wiiu.py | RN-JK/UBIART-Texture-Decoder | 71e190c12b1b8813dcda1f26cd115d9f89cc7619 | [
"MIT"
] | null | null | null | wiiu.py | RN-JK/UBIART-Texture-Decoder | 71e190c12b1b8813dcda1f26cd115d9f89cc7619 | [
"MIT"
] | 1 | 2021-11-29T05:57:55.000Z | 2021-11-29T05:57:55.000Z | import os, glob
try:
os.mkdir("output")
except:
pass
wiiudir="input/wiiu"
try:
os.makedirs(wiiudir)
print('The directories have been made.')
input('Insert your textures in input/wiiu and then run the tool again to convert it.')
except:
pass
dir = 'input/temp'
try:
os.makedirs(dir)
except:
pass
try:
for ckdtextures in os.listdir(wiiudir):
with open(wiiudir+'/'+ckdtextures,'rb') as f:
f.read(44)
data = f.read()
dds=open('input/temp/'+ckdtextures.replace('.tga.ckd','.gtx').replace('.png.ckd','.gtx'),'wb')
dds.write(data)
dds.close()
except:
pass
try:
for gtx in os.listdir(dir):
print('making '+gtx.replace(".gtx","")+'...')
os.system("texconv2 -i input/temp/"+gtx+" -o output/"+gtx.replace(".gtx",".dds"))
except:
pass
filelist = glob.glob(os.path.join(dir, "*"))
for f in filelist:
os.remove(f)
os.rmdir(dir) | 18.563636 | 103 | 0.5524 | 134 | 1,021 | 4.208955 | 0.462687 | 0.088652 | 0.046099 | 0.056738 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004087 | 0.281097 | 1,021 | 55 | 104 | 18.563636 | 0.764305 | 0 | 0 | 0.405405 | 0 | 0 | 0.238636 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.135135 | 0.027027 | 0 | 0.027027 | 0.054054 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
4224f59023f612daa74db320160910b42cc05439 | 3,897 | py | Python | push-package.py | OpenTrustGroup/scripts | 31ca2ca5bae055113c6f92a2eb75b0c7528902b3 | [
"BSD-3-Clause"
] | null | null | null | push-package.py | OpenTrustGroup/scripts | 31ca2ca5bae055113c6f92a2eb75b0c7528902b3 | [
"BSD-3-Clause"
] | null | null | null | push-package.py | OpenTrustGroup/scripts | 31ca2ca5bae055113c6f92a2eb75b0c7528902b3 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import errno
import json
import os
import subprocess
import sys
import tempfile
# Default destination root on the device, and the default local build
# output directory containing the products to push.
DEFAULT_DST_ROOT = '/system'
DEFAULT_OUT_DIR = 'out/debug-x64'
def netaddr_cmd(out_dir, device):
    """Build the argv list that asks the netaddr tool for *device*'s address.

    The tool lives in the zircon build tree relative to *out_dir*.
    """
    tool = os.path.join(out_dir, '../build-zircon/tools/netaddr')
    return [tool, '--fuchsia', device]
def mkdir_p(path):
    """Create *path* like ``mkdir -p``: parents are created as needed and an
    already-existing directory is not an error."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # Re-raise anything except "directory already exists".
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def parse_package_manifest(paths, dst_root):
    """Parse ``dst=src`` manifest lines from each file in *paths*.

    Returns a list of [device_dst, local_src] pairs, with device paths
    rooted under *dst_root*. Raises ValueError on a malformed line.
    """
    entries = []
    for manifest_path in paths:
        with open(manifest_path) as manifest:
            for line in manifest:
                parts = line.rstrip().split('=')
                if len(parts) != 2:
                    raise ValueError('Malformed manifest entry: ' + line)
                device_dst = os.path.join(dst_root, parts[0].lstrip('/'))
                entries.append([device_dst, parts[1]])
    return entries
def update_device(device, batch_file, verbose, out_dir):
    """Resolve *device*'s address via netaddr, then run the sftp batch in
    *batch_file* against it.

    Returns the sftp exit status, or 1 if the address lookup failed.
    """
    ssh_config_path = os.path.join(out_dir, 'ssh-keys', 'ssh_config')
    try:
        netaddr = netaddr_cmd(out_dir, device)
        # NOTE(review): on Python 3 check_output returns bytes, so this
        # concatenation assumes Python 2 — confirm before porting.
        ipv6 = '[' + subprocess.check_output(netaddr).strip() + ']'
    except subprocess.CalledProcessError:
        # netaddr prints its own errors, no need to add another one here.
        return 1
    with open(os.devnull, 'w') as devnull:
        status = subprocess.call(
            ['sftp', '-F', ssh_config_path, '-b', batch_file, ipv6],
            stdout=sys.stdout if verbose else devnull)
    if status != 0:
        # Fix: replaced py2-only `print >> sys.stderr` with an explicit
        # stderr write that works on both Python 2 and 3.
        sys.stderr.write('error: sftp failed\n')
    return status
def scp_everything(devices, package_data, out_dir, name_filter, verbose):
    """Push every (dst, src) pair in *package_data* to each device via sftp.

    *name_filter*, when given, restricts the push to files whose basename
    contains the filter string. Always returns 0.
    """
    count = 0
    # Temporary batch file for sftp. Fix: opened in text mode so writing
    # str works on Python 3 as well (the default mode is binary); the
    # py2-only `print >> f` statements became explicit writes.
    with tempfile.NamedTemporaryFile('w') as f:
        # Create a directory tree that mirrors what we want on the device.
        for dst_path, src_path in package_data:
            if name_filter is not None and name_filter not in os.path.basename(
                    dst_path):
                continue
            # must "rm" the file first because memfs requires it
            f.write('-rm %s\n' % dst_path)
            f.write('put -P %s %s\n' % (src_path, dst_path))
            count += 1
        f.flush()
        for device in devices:
            if update_device(device, f.name, verbose, out_dir) == 0:
                print('Updated %d files on "%s".' % (count, device))
            else:
                print('Update FAILED on "%s"' % device)
    return 0
def main():
    """Parse command-line arguments, read the manifests and push the files."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'package_files',
        nargs='+',
        help='Files containing manifest data. For example, '
             '(e.g. out/debug-x64/package/modular*/system_manifest)')
    parser.add_argument('-d', '--device', default=[':'], help='Device to update')
    parser.add_argument(
        '-o', '--out-dir',
        metavar='DIR',
        default=DEFAULT_OUT_DIR,
        help='Directory containing build products')
    parser.add_argument(
        '-t', '--dst-root',
        metavar='PATH',
        default=DEFAULT_DST_ROOT,
        help='Path on device to the directory to copy package products')
    parser.add_argument(
        '-f', '--filter',
        metavar='FILTER',
        help='Push products with a name that contains FILTER')
    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Display copy filenames')
    args = parser.parse_args()

    # Fall back to the defaults if an empty value was supplied explicitly.
    out_dir = args.out_dir or DEFAULT_OUT_DIR
    dst_root = args.dst_root or DEFAULT_DST_ROOT

    package_data = parse_package_manifest(args.package_files, dst_root)
    return scp_everything(
        args.device, package_data, out_dir, args.filter, args.verbose)
# Script entry point: main()'s return value becomes the process exit status.
if __name__ == '__main__':
    sys.exit(main())
| 26.691781 | 79 | 0.647164 | 535 | 3,897 | 4.560748 | 0.353271 | 0.034426 | 0.041803 | 0.013115 | 0.062295 | 0.044262 | 0.027869 | 0 | 0 | 0 | 0 | 0.0077 | 0.233513 | 3,897 | 145 | 80 | 26.875862 | 0.809173 | 0.100334 | 0 | 0.084112 | 0 | 0 | 0.162997 | 0.021733 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.009346 | 0.065421 | null | null | 0.046729 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
422eaaa92344214317cacbe394deaa82d7096b9d | 6,552 | py | Python | endpoints/v2/errors.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 2,027 | 2019-11-12T18:05:48.000Z | 2022-03-31T22:25:04.000Z | endpoints/v2/errors.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 496 | 2019-11-12T18:13:37.000Z | 2022-03-31T10:43:45.000Z | endpoints/v2/errors.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 249 | 2019-11-12T18:02:27.000Z | 2022-03-22T12:19:19.000Z | import bitmath
class V2RegistryException(Exception):
    """Base class for Docker Registry HTTP API v2 error responses.

    Carries the machine-readable error code, a human message, optional
    structured detail and the HTTP status to return.
    """

    def __init__(
        self,
        error_code_str,
        message,
        detail,
        http_status_code=400,
        repository=None,
        scopes=None,
        is_read_only=False,
    ):
        super(V2RegistryException, self).__init__(message)
        # Attributes read by the v2 endpoint error handlers.
        self.http_status_code = http_status_code
        self.repository = repository
        self.scopes = scopes
        self.is_read_only = is_read_only
        # Internal state rendered into the JSON body by as_dict().
        self._error_code_str = error_code_str
        self._detail = detail

    def as_dict(self):
        """Return the JSON-serializable error payload for the response body."""
        payload = {
            "code": self._error_code_str,
            "message": str(self),
            "detail": {} if self._detail is None else self._detail,
        }
        if self.is_read_only:
            payload["is_readonly"] = True
        return payload
class BlobUnknown(V2RegistryException):
    """BLOB_UNKNOWN (404): the requested blob is not in the registry."""

    def __init__(self, detail=None):
        super().__init__("BLOB_UNKNOWN", "blob unknown to registry", detail, 404)
class BlobUploadInvalid(V2RegistryException):
    """BLOB_UPLOAD_INVALID (400): the blob upload request is malformed."""

    def __init__(self, detail=None):
        super().__init__("BLOB_UPLOAD_INVALID", "blob upload invalid", detail)
class BlobUploadUnknown(V2RegistryException):
    """BLOB_UPLOAD_UNKNOWN (404): the referenced upload session does not exist."""

    def __init__(self, detail=None):
        super().__init__("BLOB_UPLOAD_UNKNOWN", "blob upload unknown to registry", detail, 404)
class DigestInvalid(V2RegistryException):
    """DIGEST_INVALID (400): uploaded content does not match the given digest."""

    def __init__(self, detail=None):
        super().__init__("DIGEST_INVALID", "provided digest did not match uploaded content", detail)
class ManifestBlobUnknown(V2RegistryException):
    """MANIFEST_BLOB_UNKNOWN (400): a blob referenced by the manifest is missing."""

    def __init__(self, detail=None):
        super().__init__("MANIFEST_BLOB_UNKNOWN", "manifest blob unknown to registry", detail)
class ManifestInvalid(V2RegistryException):
    """MANIFEST_INVALID: the manifest failed validation (status overridable)."""

    def __init__(self, detail=None, http_status_code=400):
        super().__init__("MANIFEST_INVALID", "manifest invalid", detail, http_status_code)
class ManifestUnknown(V2RegistryException):
    """MANIFEST_UNKNOWN (404): no manifest exists for the requested reference."""

    def __init__(self, detail=None):
        super().__init__("MANIFEST_UNKNOWN", "manifest unknown", detail, 404)
class TagExpired(V2RegistryException):
    """TAG_EXPIRED (404): the requested tag exists but has expired."""

    def __init__(self, message=None, detail=None):
        super().__init__("TAG_EXPIRED", message or "Tag has expired", detail, 404)
class ManifestUnverified(V2RegistryException):
    """MANIFEST_UNVERIFIED (400): the manifest signature could not be verified."""

    def __init__(self, detail=None):
        super().__init__("MANIFEST_UNVERIFIED", "manifest failed signature verification", detail)
class NameInvalid(V2RegistryException):
    """NAME_INVALID (400): the repository name is not a valid v2 name."""

    def __init__(self, detail=None, message=None):
        super().__init__("NAME_INVALID", message or "invalid repository name", detail)
class NameUnknown(V2RegistryException):
    """NAME_UNKNOWN (404): the repository does not exist in this registry."""

    def __init__(self, detail=None):
        super().__init__("NAME_UNKNOWN", "repository name not known to registry", detail, 404)
class SizeInvalid(V2RegistryException):
    """SIZE_INVALID (400): declared content length did not match the body."""

    def __init__(self, detail=None):
        super().__init__("SIZE_INVALID", "provided length did not match content length", detail)
class TagAlreadyExists(V2RegistryException):
    """TAG_ALREADY_EXISTS (409): the tag was already pushed."""

    def __init__(self, detail=None):
        super().__init__("TAG_ALREADY_EXISTS", "tag was already pushed", detail, 409)
class TagInvalid(V2RegistryException):
    """TAG_INVALID (400): the manifest's tag did not match the request URI."""

    def __init__(self, detail=None):
        super().__init__("TAG_INVALID", "manifest tag did not match URI", detail)
class LayerTooLarge(V2RegistryException):
    """Raised when an uploaded blob exceeds the registry's size limit.

    When both sizes are known, the message and detail include the
    human-readable sizes (via bitmath) and the raw byte counts.
    """

    def __init__(self, uploaded=None, max_allowed=None):
        detail = {}
        message = "Uploaded blob is larger than allowed by this registry"
        if uploaded is not None and max_allowed is not None:
            detail = {
                "reason": "%s is greater than maximum allowed size %s" % (uploaded, max_allowed),
                "max_allowed": max_allowed,
                "uploaded": uploaded,
            }

            up_str = bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}")
            max_str = bitmath.Byte(max_allowed).best_prefix().format("{value:.2f} {unit}")
            message = "Uploaded blob of %s is larger than %s allowed by this registry" % (
                up_str,
                max_str,
            )

        # Bug fix: the parent constructor was never invoked, so the computed
        # message/detail were discarded and the exception carried no error
        # code or HTTP status. BLOB_UPLOAD_INVALID (default 400) matches the
        # registry API error code set used by the other upload errors here.
        super(LayerTooLarge, self).__init__("BLOB_UPLOAD_INVALID", message, detail)
class Unauthorized(V2RegistryException):
    """UNAUTHORIZED (401): caller lacks access; carries the repository and
    scopes so the handler can build an auth challenge."""

    def __init__(self, detail=None, repository=None, scopes=None):
        super().__init__(
            "UNAUTHORIZED",
            "access to the requested resource is not authorized",
            detail,
            401,
            repository=repository,
            scopes=scopes,
        )
class Unsupported(V2RegistryException):
    """UNSUPPORTED (405): the requested operation is not supported."""

    def __init__(self, detail=None, message=None):
        super().__init__("UNSUPPORTED", message or "The operation is unsupported.", detail, 405)
class InvalidLogin(V2RegistryException):
    """UNAUTHORIZED (401): the supplied credentials are invalid."""

    def __init__(self, message=None):
        super().__init__("UNAUTHORIZED", message or "Specified credentials are invalid", {}, 401)
class InvalidRequest(V2RegistryException):
    """INVALID_REQUEST (400): generic malformed-request error."""

    def __init__(self, message=None):
        super().__init__("INVALID_REQUEST", message or "Invalid request", {}, 400)
class NamespaceDisabled(V2RegistryException):
    """DENIED (405): the target namespace has been disabled."""

    def __init__(self, message=None):
        default = "This namespace is disabled. Please contact your system administrator."
        super().__init__("DENIED", message or default, {}, 405)
class BlobDownloadGeoBlocked(V2RegistryException):
    """DENIED (403): the client's region is geo-ip blocked for blob pulls."""

    def __init__(self, detail=None):
        message = (
            "The region from which you are pulling has been geo-ip blocked. "
            "Please contact the namespace owner."
        )
        super().__init__("DENIED", message, detail, 403)
class ReadOnlyMode(V2RegistryException):
    """DENIED (405): registry is in read-only mode; writes are suspended."""

    def __init__(self, detail=None):
        message = (
            "System is currently read-only. Pulls will succeed but all write operations "
            "are currently suspended."
        )
        super().__init__("DENIED", message, detail, 405, is_read_only=True)
| 32.435644 | 100 | 0.654609 | 675 | 6,552 | 5.985185 | 0.216296 | 0.039851 | 0.062624 | 0.163366 | 0.303465 | 0.265099 | 0.196287 | 0.027723 | 0.027723 | 0 | 0 | 0.014419 | 0.248474 | 6,552 | 201 | 101 | 32.597015 | 0.806052 | 0 | 0 | 0.147651 | 0 | 0 | 0.200397 | 0.003205 | 0 | 0 | 0 | 0 | 0 | 1 | 0.161074 | false | 0 | 0.006711 | 0 | 0.328859 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
423075718e222b99f83bdb4ab73a14063da9d0ee | 37,354 | py | Python | ui/staff.py | AryaStarkSakura/Stylized-Neural-Painting | 0502c9f12eb582fe2ebd0ffdc7008dc81cefa74c | [
"CC0-1.0"
] | null | null | null | ui/staff.py | AryaStarkSakura/Stylized-Neural-Painting | 0502c9f12eb582fe2ebd0ffdc7008dc81cefa74c | [
"CC0-1.0"
] | null | null | null | ui/staff.py | AryaStarkSakura/Stylized-Neural-Painting | 0502c9f12eb582fe2ebd0ffdc7008dc81cefa74c | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'staff.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
MainWindow.setStyleSheet("QListWidget, QListView, QTreeWidget, QTreeView,QFrame {\n"
" outline: 0px;\n"
"}\n"
"/*设置左侧选项的最小最大宽度,文字颜色和背景颜色*/\n"
"QListWidget {\n"
" min-width: 200px;\n"
" max-width: 200px;\n"
" color: white;\n"
" background-color:#2f4050\n"
"}\n"
"#head\n"
"{\n"
"background:white;\n"
"border-radius:30px;\n"
"}\n"
"#head_2\n"
"{\n"
"background:#CCFFCC;\n"
"border:1px solid;\n"
"border-color:#CCFFCC;\n"
"border-radius:60px;\n"
"}\n"
"#Search\n"
"{\n"
"border-radius:5px;\n"
"background:#293846;\n"
"border:0.5px solid;\n"
"border-color:white;\n"
"\n"
"}\n"
"QListWidget::item\n"
"{\n"
"height:60;\n"
"background-color:#293846;\n"
"}\n"
"#frame\n"
"{\n"
"background-color:#2f4050\n"
"\n"
"}\n"
"/*被选中时的背景颜色和左边框颜色*/\n"
"QListWidget::item:selected {\n"
" background: rgb(52, 52, 52);\n"
" border-left: 2px solid rgb(9, 187, 7);\n"
"}\n"
"/*鼠标悬停颜色*/\n"
"HistoryPanel::item:hover {\n"
" background: rgb(52, 52, 52);\n"
"}\n"
"/*右侧的层叠窗口的背景颜色*/\n"
"QStackedWidget {\n"
" background: white;\n"
"}\n"
"/*模拟的页面*/\n"
"#frame > QLabel\n"
"{\n"
"color:white;\n"
"}\n"
"#frame_2\n"
"{\n"
"background-color:#CCFFCC;\n"
"}\n"
"#page_2 > QLineEdit,QDateEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"#page_4 > QLineEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"QLineEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"\n"
"\n"
"")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
self.stackedWidget.setGeometry(QtCore.QRect(190, 0, 611, 601))
self.stackedWidget.setStyleSheet("background-color:#FFFFFF\n"
"")
self.stackedWidget.setObjectName("stackedWidget")
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.split = QtWidgets.QFrame(self.page)
self.split.setGeometry(QtCore.QRect(10, 210, 600, 2))
self.split.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split.setFrameShape(QtWidgets.QFrame.HLine)
self.split.setFrameShadow(QtWidgets.QFrame.Raised)
self.split.setObjectName("split")
self.head_2 = QtWidgets.QToolButton(self.page)
self.head_2.setGeometry(QtCore.QRect(260, 30, 121, 121))
self.head_2.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("./pictures/staff3.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.head_2.setIcon(icon)
self.head_2.setIconSize(QtCore.QSize(100, 100))
self.head_2.setObjectName("head_2")
self.name = QtWidgets.QLabel(self.page)
self.name.setGeometry(QtCore.QRect(260, 160, 131, 31))
self.name.setAlignment(QtCore.Qt.AlignCenter)
self.name.setObjectName("name")
self.label = QtWidgets.QLabel(self.page)
self.label.setGeometry(QtCore.QRect(190, 240, 61, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label.setFont(font)
self.label.setObjectName("label")
self.label_3 = QtWidgets.QLabel(self.page)
self.label_3.setGeometry(QtCore.QRect(190, 290, 51, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.page)
self.label_4.setGeometry(QtCore.QRect(190, 340, 71, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_5 = QtWidgets.QLabel(self.page)
self.label_5.setGeometry(QtCore.QRect(190, 390, 61, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.label_6 = QtWidgets.QLabel(self.page)
self.label_6.setGeometry(QtCore.QRect(190, 440, 71, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.label_7 = QtWidgets.QLabel(self.page)
self.label_7.setGeometry(QtCore.QRect(190, 490, 81, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.sname = QtWidgets.QLabel(self.page)
self.sname.setGeometry(QtCore.QRect(300, 250, 131, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sname.setFont(font)
self.sname.setObjectName("sname")
self.ssex = QtWidgets.QLabel(self.page)
self.ssex.setGeometry(QtCore.QRect(300, 300, 81, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.ssex.setFont(font)
self.ssex.setObjectName("ssex")
self.stime = QtWidgets.QLabel(self.page)
self.stime.setGeometry(QtCore.QRect(300, 350, 91, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.stime.setFont(font)
self.stime.setObjectName("stime")
self.srole = QtWidgets.QLabel(self.page)
self.srole.setGeometry(QtCore.QRect(300, 400, 81, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.srole.setFont(font)
self.srole.setObjectName("srole")
self.sphone = QtWidgets.QLabel(self.page)
self.sphone.setGeometry(QtCore.QRect(300, 450, 141, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sphone.setFont(font)
self.sphone.setObjectName("sphone")
self.sidcard = QtWidgets.QLabel(self.page)
self.sidcard.setGeometry(QtCore.QRect(300, 500, 181, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sidcard.setFont(font)
self.sidcard.setObjectName("sidcard")
self.label_8 = QtWidgets.QLabel(self.page)
self.label_8.setGeometry(QtCore.QRect(190, 540, 81, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.sidcard_2 = QtWidgets.QLabel(self.page)
self.sidcard_2.setGeometry(QtCore.QRect(300, 550, 181, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sidcard_2.setFont(font)
self.sidcard_2.setObjectName("sidcard_2")
self.stackedWidget.addWidget(self.page)
self.page_3 = QtWidgets.QWidget()
self.page_3.setObjectName("page_3")
self.searchTable = QtWidgets.QTableWidget(self.page_3)
self.searchTable.setGeometry(QtCore.QRect(0, 240, 611, 361))
self.searchTable.setStyleSheet("")
self.searchTable.setObjectName("searchTable")
self.searchTable.setColumnCount(9)
self.searchTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(8, item)
self.frame_2 = QtWidgets.QFrame(self.page_3)
self.frame_2.setGeometry(QtCore.QRect(10, 30, 611, 211))
self.frame_2.setStyleSheet("background-color:rgb(255, 249, 246)")
self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.searchName = QtWidgets.QLineEdit(self.frame_2)
self.searchName.setGeometry(QtCore.QRect(170, 40, 181, 41))
self.searchName.setStyleSheet("border-radius:10px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#CCCCFF;\n"
"")
self.searchName.setObjectName("searchName")
self.searchNB = QtWidgets.QToolButton(self.frame_2)
self.searchNB.setGeometry(QtCore.QRect(370, 40, 101, 41))
self.searchNB.setStyleSheet("background-color:rgb(255, 249, 246);\n"
"border:0px;\n"
"\n"
"border-radius:5px")
self.searchNB.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("./pictures/search.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.searchNB.setIcon(icon1)
self.searchNB.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.searchNB.setObjectName("searchNB")
self.label_74 = QtWidgets.QLabel(self.frame_2)
self.label_74.setGeometry(QtCore.QRect(310, 149, 151, 40))
self.label_74.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_74.setObjectName("label_74")
self.modifyvalue = QtWidgets.QLineEdit(self.frame_2)
self.modifyvalue.setGeometry(QtCore.QRect(430, 160, 111, 21))
self.modifyvalue.setStyleSheet("border-radius:5px")
self.modifyvalue.setText("")
self.modifyvalue.setObjectName("modifyvalue")
self.commitTableModify = QtWidgets.QPushButton(self.frame_2)
self.commitTableModify.setGeometry(QtCore.QRect(170, 155, 121, 31))
self.commitTableModify.setStyleSheet("#commitTableModify{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitTableModify:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitTableModify.setObjectName("commitTableModify")
self.label_78 = QtWidgets.QLabel(self.frame_2)
self.label_78.setGeometry(QtCore.QRect(360, 10, 231, 21))
font = QtGui.QFont()
font.setPointSize(8)
self.label_78.setFont(font)
self.label_78.setObjectName("label_78")
self.commitTableDel = QtWidgets.QPushButton(self.frame_2)
self.commitTableDel.setGeometry(QtCore.QRect(170, 110, 121, 31))
self.commitTableDel.setStyleSheet("#commitTableDel{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitTableDel:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitTableDel.setObjectName("commitTableDel")
self.split_3 = QtWidgets.QFrame(self.page_3)
self.split_3.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_3.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_3.setFrameShape(QtWidgets.QFrame.HLine)
self.split_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_3.setObjectName("split_3")
self.toolButton_2 = QtWidgets.QToolButton(self.page_3)
self.toolButton_2.setGeometry(QtCore.QRect(20, 0, 101, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_2.setFont(font)
self.toolButton_2.setStyleSheet("border:none")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap("./pictures/search1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_2.setIcon(icon2)
self.toolButton_2.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_2.setObjectName("toolButton_2")
self.line = QtWidgets.QFrame(self.page_3)
self.line.setGeometry(QtCore.QRect(10, 230, 601, 16))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.stackedWidget.addWidget(self.page_3)
self.page_2 = QtWidgets.QWidget()
self.page_2.setObjectName("page_2")
self.label_9 = QtWidgets.QLabel(self.page_2)
self.label_9.setGeometry(QtCore.QRect(100, 60, 101, 40))
self.label_9.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_9.setObjectName("label_9")
self.split_2 = QtWidgets.QFrame(self.page_2)
self.split_2.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_2.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_2.setFrameShape(QtWidgets.QFrame.HLine)
self.split_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_2.setObjectName("split_2")
self.label_10 = QtWidgets.QLabel(self.page_2)
self.label_10.setGeometry(QtCore.QRect(100, 260, 101, 41))
self.label_10.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.page_2)
self.label_11.setGeometry(QtCore.QRect(100, 110, 101, 41))
self.label_11.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.page_2)
self.label_12.setGeometry(QtCore.QRect(100, 310, 101, 41))
self.label_12.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.page_2)
self.label_13.setGeometry(QtCore.QRect(100, 160, 101, 41))
self.label_13.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_13.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.page_2)
self.label_14.setGeometry(QtCore.QRect(100, 360, 101, 41))
self.label_14.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_14.setObjectName("label_14")
self.label_15 = QtWidgets.QLabel(self.page_2)
self.label_15.setGeometry(QtCore.QRect(100, 210, 101, 41))
self.label_15.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_15.setObjectName("label_15")
self.label_16 = QtWidgets.QLabel(self.page_2)
self.label_16.setGeometry(QtCore.QRect(100, 410, 101, 41))
self.label_16.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_16.setObjectName("label_16")
self.label_17 = QtWidgets.QLabel(self.page_2)
self.label_17.setGeometry(QtCore.QRect(100, 460, 101, 41))
self.label_17.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_17.setObjectName("label_17")
self.inputsid = QtWidgets.QLineEdit(self.page_2)
self.inputsid.setGeometry(QtCore.QRect(220, 70, 221, 21))
self.inputsid.setObjectName("inputsid")
self.inputname = QtWidgets.QLineEdit(self.page_2)
self.inputname.setGeometry(QtCore.QRect(220, 120, 221, 21))
self.inputname.setObjectName("inputname")
self.inputuser = QtWidgets.QLineEdit(self.page_2)
self.inputuser.setGeometry(QtCore.QRect(220, 270, 221, 21))
self.inputuser.setObjectName("inputuser")
self.inputpwd = QtWidgets.QLineEdit(self.page_2)
self.inputpwd.setGeometry(QtCore.QRect(220, 320, 221, 21))
self.inputpwd.setObjectName("inputpwd")
self.inputrole = QtWidgets.QLineEdit(self.page_2)
self.inputrole.setGeometry(QtCore.QRect(220, 370, 221, 21))
self.inputrole.setObjectName("inputrole")
self.inputidcard = QtWidgets.QLineEdit(self.page_2)
self.inputidcard.setGeometry(QtCore.QRect(220, 420, 221, 21))
self.inputidcard.setObjectName("inputidcard")
self.inputphone = QtWidgets.QLineEdit(self.page_2)
self.inputphone.setGeometry(QtCore.QRect(220, 470, 221, 21))
self.inputphone.setObjectName("inputphone")
self.toolButton_3 = QtWidgets.QToolButton(self.page_2)
self.toolButton_3.setGeometry(QtCore.QRect(20, 0, 111, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_3.setFont(font)
self.toolButton_3.setStyleSheet("border:none\n"
"")
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap("./pictures/insert.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_3.setIcon(icon3)
self.toolButton_3.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_3.setObjectName("toolButton_3")
self.commitAdd = QtWidgets.QPushButton(self.page_2)
self.commitAdd.setGeometry(QtCore.QRect(200, 530, 211, 31))
self.commitAdd.setStyleSheet("#commitAdd{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitAdd:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitAdd.setObjectName("commitAdd")
self.inputdate = QtWidgets.QDateEdit(self.page_2)
self.inputdate.setGeometry(QtCore.QRect(220, 220, 221, 22))
self.inputdate.setDateTime(QtCore.QDateTime(QtCore.QDate(2020, 1, 1), QtCore.QTime(0, 0, 0)))
self.inputdate.setObjectName("inputdate")
self.inputfemale = QtWidgets.QRadioButton(self.page_2)
self.inputfemale.setGeometry(QtCore.QRect(320, 170, 115, 19))
self.inputfemale.setObjectName("inputfemale")
self.inputmale = QtWidgets.QRadioButton(self.page_2)
self.inputmale.setGeometry(QtCore.QRect(220, 170, 81, 19))
self.inputmale.setObjectName("inputmale")
self.stackedWidget.addWidget(self.page_2)
self.page_4 = QtWidgets.QWidget()
self.page_4.setObjectName("page_4")
self.split_4 = QtWidgets.QFrame(self.page_4)
self.split_4.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_4.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_4.setFrameShape(QtWidgets.QFrame.HLine)
self.split_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_4.setObjectName("split_4")
self.toolButton_4 = QtWidgets.QToolButton(self.page_4)
self.toolButton_4.setGeometry(QtCore.QRect(20, 0, 111, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_4.setFont(font)
self.toolButton_4.setStyleSheet("border:none\n"
"")
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap("./pictures/delete.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_4.setIcon(icon4)
self.toolButton_4.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_4.setObjectName("toolButton_4")
self.deleteTable = QtWidgets.QTableWidget(self.page_4)
self.deleteTable.setGeometry(QtCore.QRect(10, 260, 601, 341))
self.deleteTable.setStyleSheet("")
self.deleteTable.setObjectName("deleteTable")
self.deleteTable.setColumnCount(9)
self.deleteTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(8, item)
self.desid = QtWidgets.QLineEdit(self.page_4)
self.desid.setGeometry(QtCore.QRect(250, 90, 221, 21))
self.desid.setObjectName("desid")
self.label_18 = QtWidgets.QLabel(self.page_4)
self.label_18.setGeometry(QtCore.QRect(150, 80, 91, 40))
self.label_18.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_18.setObjectName("label_18")
self.dename = QtWidgets.QLineEdit(self.page_4)
self.dename.setGeometry(QtCore.QRect(250, 130, 221, 21))
self.dename.setObjectName("dename")
self.label_19 = QtWidgets.QLabel(self.page_4)
self.label_19.setGeometry(QtCore.QRect(150, 120, 91, 41))
self.label_19.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_19.setObjectName("label_19")
self.deidcard = QtWidgets.QLineEdit(self.page_4)
self.deidcard.setGeometry(QtCore.QRect(250, 170, 221, 21))
self.deidcard.setObjectName("deidcard")
self.label_20 = QtWidgets.QLabel(self.page_4)
self.label_20.setGeometry(QtCore.QRect(150, 160, 81, 41))
self.label_20.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_20.setObjectName("label_20")
self.commitDe = QtWidgets.QPushButton(self.page_4)
self.commitDe.setGeometry(QtCore.QRect(240, 210, 93, 28))
self.commitDe.setStyleSheet("#commitDe{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitDe:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitDe.setObjectName("commitDe")
self.label_21 = QtWidgets.QLabel(self.page_4)
self.label_21.setGeometry(QtCore.QRect(210, 35, 211, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.stackedWidget.addWidget(self.page_4)
self.listWidget = QtWidgets.QListWidget(self.centralwidget)
self.listWidget.setGeometry(QtCore.QRect(0, 200, 204, 400))
self.listWidget.setObjectName("listWidget")
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap("./pictures/staff5.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon5)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap("./pictures/staff2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon6)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap("./pictures/staff4.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon7)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
item.setIcon(icon5)
self.listWidget.addItem(item)
self.frame = QtWidgets.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(0, 0, 204, 211))
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.head = QtWidgets.QToolButton(self.frame)
self.head.setGeometry(QtCore.QRect(60, 20, 60, 60))
self.head.setText("")
self.head.setIcon(icon)
self.head.setIconSize(QtCore.QSize(60, 60))
self.head.setObjectName("head")
self.welcome = QtWidgets.QLabel(self.frame)
self.welcome.setGeometry(QtCore.QRect(30, 90, 110, 20))
self.welcome.setText("")
self.welcome.setAlignment(QtCore.Qt.AlignCenter)
self.welcome.setObjectName("welcome")
self.label_2 = QtWidgets.QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(40, 140, 121, 16))
font = QtGui.QFont()
font.setPointSize(8)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.Search = QtWidgets.QLineEdit(self.frame)
self.Search.setGeometry(QtCore.QRect(20, 170, 145, 25))
font = QtGui.QFont()
font.setFamily("微软雅黑")
font.setPointSize(7)
self.Search.setFont(font)
self.Search.setStyleSheet("")
self.Search.setObjectName("Search")
self.toolButton = QtWidgets.QToolButton(self.frame)
self.toolButton.setGeometry(QtCore.QRect(170, 170, 21, 20))
self.toolButton.setStyleSheet("background-color:#2f4050;\n"
"border:0px;\n"
"\n"
"border-radius:5px")
self.toolButton.setText("")
self.toolButton.setIcon(icon1)
self.toolButton.setIconSize(QtCore.QSize(15, 15))
self.toolButton.setObjectName("toolButton")
self.role = QtWidgets.QLabel(self.frame)
self.role.setGeometry(QtCore.QRect(30, 120, 110, 15))
font = QtGui.QFont()
font.setPointSize(7)
self.role.setFont(font)
self.role.setText("")
self.role.setAlignment(QtCore.Qt.AlignCenter)
self.role.setObjectName("role")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.stackedWidget.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install every user-visible string of the window.

    Auto-generated by pyuic5 from a Qt Designer .ui file — regenerate
    from the .ui source instead of hand-editing.  All strings pass
    through QCoreApplication.translate so Qt's translation system can
    substitute localized text at runtime.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
    # Personal-information page: field labels and placeholder sample values.
    self.name.setText(_translate("MainWindow", "csa "))
    self.label.setText(_translate("MainWindow", "姓名:"))
    self.label_3.setText(_translate("MainWindow", "性别:"))
    self.label_4.setText(_translate("MainWindow", "申请时间:"))
    self.label_5.setText(_translate("MainWindow", "权限:"))
    self.label_6.setText(_translate("MainWindow", "手机号:"))
    self.label_7.setText(_translate("MainWindow", "身份证号:"))
    self.sname.setText(_translate("MainWindow", "邵嘉毅"))
    self.ssex.setText(_translate("MainWindow", "男"))
    self.stime.setText(_translate("MainWindow", "2019-12-12"))
    self.srole.setText(_translate("MainWindow", "1"))
    self.sphone.setText(_translate("MainWindow", "2332121323"))
    self.sidcard.setText(_translate("MainWindow", "1111111111111111111"))
    self.label_8.setText(_translate("MainWindow", "用户号:"))
    self.sidcard_2.setText(_translate("MainWindow", "1"))
    # "Search user" page (page_3): column headers of the search result table.
    item = self.searchTable.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "用户编号"))
    item = self.searchTable.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "姓名"))
    item = self.searchTable.horizontalHeaderItem(2)
    item.setText(_translate("MainWindow", "性别"))
    item = self.searchTable.horizontalHeaderItem(3)
    item.setText(_translate("MainWindow", "登记申请时间"))
    item = self.searchTable.horizontalHeaderItem(4)
    item.setText(_translate("MainWindow", "账户名"))
    item = self.searchTable.horizontalHeaderItem(5)
    item.setText(_translate("MainWindow", "密码"))
    item = self.searchTable.horizontalHeaderItem(6)
    item.setText(_translate("MainWindow", "权限"))
    item = self.searchTable.horizontalHeaderItem(7)
    item.setText(_translate("MainWindow", "身份证号"))
    item = self.searchTable.horizontalHeaderItem(8)
    item.setText(_translate("MainWindow", "手机号"))
    # Search/modify/delete controls on the search page.
    self.searchName.setPlaceholderText(_translate("MainWindow", "搜索用户姓名"))
    self.label_74.setText(_translate("MainWindow", "选中部分修改为:"))
    self.modifyvalue.setPlaceholderText(_translate("MainWindow", "修改值"))
    self.commitTableModify.setText(_translate("MainWindow", "确认修改"))
    self.label_78.setText(_translate("MainWindow", "*选中表格内可以进行修改和删除操作"))
    self.commitTableDel.setText(_translate("MainWindow", "确认删除"))
    self.toolButton_2.setText(_translate("MainWindow", "查询用户"))
    # "Add user" page (page_2): form labels and input placeholders.
    self.label_9.setText(_translate("MainWindow", "用户编号:"))
    self.label_10.setText(_translate("MainWindow", "账户名:"))
    self.label_11.setText(_translate("MainWindow", "用户姓名:"))
    self.label_12.setText(_translate("MainWindow", "密码:"))
    self.label_13.setText(_translate("MainWindow", "用户性别:"))
    self.label_14.setText(_translate("MainWindow", "权限:"))
    self.label_15.setText(_translate("MainWindow", "登记入职时间:"))
    self.label_16.setText(_translate("MainWindow", "身份证:"))
    self.label_17.setText(_translate("MainWindow", "手机号:"))
    self.inputsid.setPlaceholderText(_translate("MainWindow", "编号"))
    self.inputname.setPlaceholderText(_translate("MainWindow", "姓名"))
    self.inputuser.setPlaceholderText(_translate("MainWindow", "账号名"))
    self.inputpwd.setPlaceholderText(_translate("MainWindow", "密码"))
    self.inputrole.setPlaceholderText(_translate("MainWindow", "权限"))
    self.inputidcard.setPlaceholderText(_translate("MainWindow", "身份证"))
    self.inputphone.setPlaceholderText(_translate("MainWindow", "手机号"))
    self.toolButton_3.setText(_translate("MainWindow", "增添用户"))
    self.commitAdd.setText(_translate("MainWindow", "确认录入"))
    self.inputfemale.setText(_translate("MainWindow", "女"))
    self.inputmale.setText(_translate("MainWindow", "男"))
    # "Delete user" page (page_4): headers of the deletion table and form labels.
    self.toolButton_4.setText(_translate("MainWindow", "删除用户"))
    item = self.deleteTable.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "用户编号"))
    item = self.deleteTable.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "姓名"))
    item = self.deleteTable.horizontalHeaderItem(2)
    item.setText(_translate("MainWindow", "性别"))
    item = self.deleteTable.horizontalHeaderItem(3)
    item.setText(_translate("MainWindow", "登记入职时间"))
    item = self.deleteTable.horizontalHeaderItem(4)
    item.setText(_translate("MainWindow", "账户名"))
    item = self.deleteTable.horizontalHeaderItem(5)
    item.setText(_translate("MainWindow", "密码"))
    item = self.deleteTable.horizontalHeaderItem(6)
    item.setText(_translate("MainWindow", "权限"))
    item = self.deleteTable.horizontalHeaderItem(7)
    item.setText(_translate("MainWindow", "身份证号"))
    item = self.deleteTable.horizontalHeaderItem(8)
    item.setText(_translate("MainWindow", "手机号"))
    self.desid.setPlaceholderText(_translate("MainWindow", "编号"))
    self.label_18.setText(_translate("MainWindow", "用户编号:"))
    self.dename.setPlaceholderText(_translate("MainWindow", "姓名"))
    self.label_19.setText(_translate("MainWindow", "用户姓名:"))
    self.deidcard.setPlaceholderText(_translate("MainWindow", "身份证"))
    self.label_20.setText(_translate("MainWindow", "身份证:"))
    self.commitDe.setText(_translate("MainWindow", "确认删除"))
    self.label_21.setText(_translate("MainWindow", "选择要删除的用户:"))
    # Sidebar navigation list: disable sorting while renaming items so the
    # item indices stay stable, then restore the previous setting.
    __sortingEnabled = self.listWidget.isSortingEnabled()
    self.listWidget.setSortingEnabled(False)
    item = self.listWidget.item(0)
    item.setText(_translate("MainWindow", " 个人信息"))
    item = self.listWidget.item(1)
    item.setText(_translate("MainWindow", " 查询用户*"))
    item = self.listWidget.item(2)
    item.setText(_translate("MainWindow", " 增添用户*"))
    item = self.listWidget.item(3)
    item.setText(_translate("MainWindow", " 删除用户*"))
    self.listWidget.setSortingEnabled(__sortingEnabled)
    self.label_2.setText(_translate("MainWindow", "*表示需要最高权限"))
    self.Search.setPlaceholderText(_translate("MainWindow", "搜索"))
| 46.750939 | 101 | 0.591235 | 3,742 | 37,354 | 5.810262 | 0.095671 | 0.047604 | 0.071843 | 0.02962 | 0.474519 | 0.396836 | 0.282219 | 0.18278 | 0.170454 | 0.110109 | 0 | 0.052539 | 0.282058 | 37,354 | 798 | 102 | 46.809524 | 0.758185 | 0.004792 | 0 | 0.311203 | 1 | 0 | 0.118489 | 0.032313 | 0 | 0 | 0 | 0 | 0 | 1 | 0.002766 | false | 0 | 0.001383 | 0 | 0.005533 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
423268278bdfbc38d38322d8349807e008e76abd | 1,262 | py | Python | sun.py | funxiun/AstroAlgorithms4Python | 98098956daba2706c993fa6370d8cdfa4013cb8d | [
"Unlicense"
] | 7 | 2018-09-29T11:35:40.000Z | 2022-01-11T14:06:44.000Z | sun.py | funxiun/AstroAlgorithms4Python | 98098956daba2706c993fa6370d8cdfa4013cb8d | [
"Unlicense"
] | null | null | null | sun.py | funxiun/AstroAlgorithms4Python | 98098956daba2706c993fa6370d8cdfa4013cb8d | [
"Unlicense"
] | 8 | 2018-09-29T11:36:01.000Z | 2021-10-17T15:25:55.000Z | '''Meeus: Astronomical Algorithms (2nd ed.), chapter 25'''
import math
from nutation_ecliptic import ecliptic
from constants import AU
def coordinates(jd):
    """Equatorial coordinates of the Sun for Julian Day *jd*.

    Converts the Sun's ecliptic longitude (latitude assumed zero) to
    right ascension and declination using the obliquity of the ecliptic.
    Returns (ra, dec) in degrees; ra comes straight from atan2, so it
    may be negative.
    """
    sun_lon = math.radians(longitude(jd))
    obliquity = math.radians(ecliptic(jd))
    sin_lon = math.sin(sun_lon)
    ra = math.atan2(math.cos(obliquity) * sin_lon, math.cos(sun_lon))
    dec = math.asin(math.sin(obliquity) * sin_lon)
    return math.degrees(ra), math.degrees(dec)
def longitude(jd):
    """True (geometric) longitude of the Sun in degrees, per Meeus ch. 25.

    jd -- Julian Day number.
    The result is mean longitude plus the equation of center and is NOT
    normalized to [0, 360).
    """
    T = (jd - 2451545) / 36525.
    # Mean longitude and mean anomaly of the Sun (converted to radians).
    mean_lon = math.radians(280.46646 + 36000.76983 * T + 0.0003032 * T ** 2)
    anomaly = math.radians(357.52911 + 35999.05029 * T - 0.0001537 * T ** 2)
    # Equation of center: periodic correction from the mean anomaly.
    equation_of_center = math.radians(
        (1.914602 - 0.004817 * T - 0.000014 * T ** 2) * math.sin(anomaly)
        + (0.019993 - 0.000101 * T) * math.sin(2 * anomaly)
        + 0.000289 * math.sin(3 * anomaly))
    return math.degrees(mean_lon + equation_of_center)
def distance(jd, km=True):
    """Earth-Sun distance for Julian Day *jd*, per Meeus ch. 25.

    Returns kilometres by default; pass km=False to get the distance in
    astronomical units instead.
    """
    T = (jd - 2451545) / 36525.
    # Eccentricity of Earth's orbit (slowly decreasing with time).
    eccentricity = 0.016708634 - 0.000042037 * T - 0.0000001267 * T ** 2
    # Mean anomaly plus the equation of center gives the true anomaly.
    anomaly = math.radians(357.52911 + 35999.05029 * T - 0.0001537 * T ** 2)
    center = math.radians(
        (1.914602 - 0.004817 * T - 0.000014 * T ** 2) * math.sin(anomaly)
        + (0.019993 - 0.000101 * T) * math.sin(2 * anomaly)
        + 0.000289 * math.sin(3 * anomaly))
    true_anomaly = anomaly + center
    # Radius vector (AU) from the ellipse equation.
    radius = 1.000001018 * (1 - eccentricity ** 2) / (1 + eccentricity * math.cos(true_anomaly))
    if km:
        radius *= AU
    return radius
| 26.291667 | 126 | 0.62916 | 220 | 1,262 | 3.604545 | 0.322727 | 0.079445 | 0.037831 | 0.032787 | 0.320303 | 0.320303 | 0.320303 | 0.320303 | 0.320303 | 0.320303 | 0 | 0.233941 | 0.173534 | 1,262 | 47 | 127 | 26.851064 | 0.526366 | 0.098257 | 0 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.12 | 0 | 0.36 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4233e43b1aa8c3735bfa71a29e6ebbf01825729f | 5,681 | py | Python | test/paths.py | cychitivav/kobuki_navigation | 9da1ad425b8804b49005720594e9837295eb9976 | [
"MIT"
] | null | null | null | test/paths.py | cychitivav/kobuki_navigation | 9da1ad425b8804b49005720594e9837295eb9976 | [
"MIT"
] | null | null | null | test/paths.py | cychitivav/kobuki_navigation | 9da1ad425b8804b49005720594e9837295eb9976 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import numpy as np
import cv2
from matplotlib import pyplot as plt
import networkx as nx
def rotate_image(image, angle):
    """Rotate the occupancy-grid map about its centre and clean it up.

    After the cubic-interpolated rotation:
      * every pixel outside the central [100, 924] square is set to 205
        (the grey "unknown" value), and
      * interior pixels with at least 5 of their 8 neighbours darker
        than 43 (occupied) are forced to 0, filling small interpolation
        gaps along walls.

    image : 2-D grayscale array (assumed roughly 1024x1024 -- the
            100/924 bounds are hard-coded for that map size; TODO confirm).
    angle : rotation angle in degrees.
    Returns a cleaned 2-D array of the same shape.
    """
    image_center = tuple(np.array(image.shape[0:2]) / 2)
    rot_mat = cv2.getRotationMatrix2D(image_center, angle, 1)
    vertical = cv2.warpAffine(image, rot_mat, image.shape[0:2], flags=cv2.INTER_CUBIC)

    # Count, for every pixel, how many of its 8 neighbours are "occupied"
    # (value < 43).  Shifted-slice sums replace the original per-pixel
    # Python double loop (~8 million iterations on a 1024x1024 map) with
    # a handful of vectorized numpy operations; results are identical.
    dark = (vertical < 43.0).astype(np.uint8)
    neighbors = np.zeros(vertical.shape, dtype=np.uint8)
    neighbors[1:-1, 1:-1] = (
        dark[:-2, :-2] + dark[:-2, 1:-1] + dark[:-2, 2:]
        + dark[1:-1, :-2] + dark[1:-1, 2:]
        + dark[2:, :-2] + dark[2:, 1:-1] + dark[2:, 2:])

    im = vertical.copy()
    interior = np.zeros(vertical.shape, dtype=bool)
    interior[100:925, 100:925] = True
    im[~interior] = 205                     # outside the map proper: unknown
    im[interior & (neighbors >= 5)] = 0     # mostly-surrounded pixels: occupied
    return im
# NOTE(review): this script uses a Python 2 print statement below
# (`print plan`), so the file targets Python 2.
if __name__ == "__main__":
    # --- Pre-processing: load the occupancy-grid map and straighten it ---
    image = cv2.imread('map/map.pgm', 0)
    rotated = rotate_image(image, -7.66)
    #cv2.imwrite('map/rotated.pgm', rotated)
    # Keep only free space (value > 245), open to remove speckles, then
    # thin the free space down to a one-pixel-wide skeleton.
    _, th = cv2.threshold(rotated, 245, 255, cv2.THRESH_BINARY)
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3,3))
    op = cv2.morphologyEx(th, cv2.MORPH_OPEN, kernel)
    skel = cv2.ximgproc.thinning(op)
    plt.figure()
    plt.subplot(1,3,1)
    plt.imshow(image, cmap='gray')
    plt.axis('off')
    plt.title('Original')
    plt.subplot(1,3,2)
    plt.imshow(rotated, cmap='gray')
    plt.axis('off')
    plt.title('Rotada')
    plt.subplot(1,3,3)
    plt.imshow(skel, cmap='gray')
    plt.axis('off')
    plt.title('Adelgazada')
    # --- Graph nodes: Harris corners of the skeleton become waypoints ---
    base = cv2.dilate(skel, None, iterations=12)
    path = cv2.cvtColor(base, cv2.COLOR_GRAY2RGB)
    corners = cv2.cornerHarris(skel,7,7,0.04)
    corners = cv2.dilate(corners, None)
    _, corners = cv2.threshold(corners,0.001,255,cv2.THRESH_BINARY)
    corners = np.uint8(corners)
    contours, _ = cv2.findContours(corners,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
    path[corners>0.0]=[0,255,0]
    cv2.drawContours(path,contours,-1,(255,0,0),1)
    G = nx.Graph()
    points = []
    for i, c in enumerate(contours):
        # calculate moments for each contour
        M = cv2.moments(c)
        # calculate x,y coordinate of center
        cX = int(round(M["m10"] / M["m00"]))
        cY = int(round(M["m01"] / M["m00"]))
        path[cY,cX]=[0,0,255]
        G.add_node(i, pos=(cX,cY))
        points.append((cX,cY))
        # Label each waypoint with its node index on the debug image.
        font = cv2.FONT_HERSHEY_SIMPLEX
        fontScale = 0.4
        fontColor = (0,0,255)
        thickness = 1
        path = cv2.putText(path, str(i), (cX,cY), font, fontScale, fontColor, thickness)
    plt.figure()
    plt.subplot(1,2,1)
    plt.imshow(base,cmap='gray')
    plt.axis('off')
    plt.title('Imagen base')
    plt.subplot(1,2,2)
    plt.imshow(path)
    plt.axis('off')
    plt.title('Esquinas')
    # --- Graph edges: two waypoints are connected iff the straight line
    # between them stays on the dilated skeleton, i.e. drawing the line
    # adds no new non-black pixels to the image.
    noBlack = cv2.countNonZero(cv2.cvtColor(path,cv2.COLOR_BGR2GRAY))
    for i, p1 in enumerate(points):
        for j, p2 in enumerate(points):
            if p1 == p2: continue
            test_img = cv2.line(path.copy(), p1, p2, (234,0,234), 1)
            # Recount to see if the images are the same
            if cv2.countNonZero(cv2.cvtColor(test_img,cv2.COLOR_BGR2GRAY)) == noBlack:
                # path = cv2.line(path, p1, p2, (234,0,234), 1)
                G.add_edge(i,j,weight=np.hypot(p1[0]-p2[0], p1[1]-p2[1]))
    plt.figure()
    nx.draw(G,with_labels=True)
    # --- Start pose (fixed) and a random goal inside the map bounds ---
    x_0, y_0 = [492,500]
    x_f = np.random.randint(487) + 277
    y_f = np.random.randint(448) + 368
    path[y_0+1,x_0+1] = (255,0,0)
    path[y_f+1,x_f+1] = (255,0,0)
    # Eroded free-space mask gives clearance from walls; the start and
    # goal are linked to every waypoint visible through that free space,
    # using the same line-drawing/pixel-count trick as above.
    _, th = cv2.threshold(rotated, 245, 255, cv2.THRESH_BINARY)
    ero = cv2.erode(th,None,iterations=10)
    th = ero.copy()
    noBlack = cv2.countNonZero(th)
    for i, p in enumerate(points):
        test_img = cv2.line(th.copy(), (x_0,y_0), p, 234, 1)
        # Recount to see if the images are the same
        if cv2.countNonZero(test_img) == noBlack:
            # path = cv2.line(path, p1, p2, (234,0,234), 1)
            G.add_edge('p_0',i,weight=np.hypot(p[0]-x_0, y_0-p[1]))
    for i, p in enumerate(points):
        test_img = cv2.line(th.copy(), (x_f,y_f), p, 234, 1)
        # Recount to see if the images are the same
        if cv2.countNonZero(test_img) == noBlack:
            # path = cv2.line(path, p1, p2, (234,0,234), 1)
            G.add_edge('p_f',i,weight=np.hypot(p[0]-x_f, y_f-p[1]))
    # NOTE(review): shortest_path is called without weight='weight', so
    # the edge weights computed above are ignored and the plan minimizes
    # hop count, not distance — confirm whether that is intended.
    plan = nx.shortest_path(G,'p_0','p_f')
    print plan
    # Draw the planned route in yellow, segment by segment.
    for i in range(len(plan)-1):
        if i == 0:
            path = cv2.line(path, (x_0,y_0), points[plan[i+1]], (251,229,78), 1)
        elif i == len(plan)-2:
            path = cv2.line(path, points[plan[i]], (x_f,y_f), (251,229,78), 1)
        else:
            path = cv2.line(path, points[plan[i]], points[plan[i+1]], (251,229,78), 1)
    plt.figure()
    plt.imshow(ero,cmap='gray')
    plt.axis('off')
    plt.title('Imagen erosionada')
    plt.show()
| 31.38674 | 88 | 0.520155 | 827 | 5,681 | 3.490931 | 0.232164 | 0.010391 | 0.030481 | 0.033253 | 0.357465 | 0.332525 | 0.329754 | 0.272948 | 0.236231 | 0.207135 | 0 | 0.091124 | 0.329695 | 5,681 | 181 | 89 | 31.38674 | 0.667017 | 0.068474 | 0 | 0.206349 | 0 | 0 | 0.026689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.031746 | null | null | 0.007937 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
423dba72ede1b75a23e84d734d1a416227c1565d | 2,116 | py | Python | DeepBrainSeg/readers/nib.py | JasperHG90/DeepBrainSeg | 92cf5f758f115e7ac51202966a1287fb58c09d78 | [
"MIT"
] | 130 | 2019-04-09T02:35:44.000Z | 2022-02-26T15:53:19.000Z | DeepBrainSeg/readers/nib.py | koriavinash1/DeepMedX | 02fcee6d7b21b16e7f1e28089f24be56ef6b9383 | [
"MIT"
] | 11 | 2019-09-18T03:55:29.000Z | 2021-01-03T13:11:20.000Z | DeepBrainSeg/readers/nib.py | koriavinash1/DeepMedX | 02fcee6d7b21b16e7f1e28089f24be56ef6b9383 | [
"MIT"
] | 38 | 2018-11-28T01:34:41.000Z | 2022-01-17T03:53:47.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# author: Avinash Kori
# contact: koriavinash1@gmail.com
# MIT License
# Copyright (c) 2020 Avinash Kori
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import tempfile
from time import time
import datetime
import numpy as np
import nibabel as nib
class nib_loader(object):
    """Thin reader/writer for NIfTI volumes backed by nibabel.

    load_vol() caches the affine of the most recently loaded image so
    that write_vol() can save a modified volume in the same spatial
    frame as the original.
    """

    def __init__(self):
        # self.patient / self.affine are only set by load_vol();
        # write_vol() must be called after at least one load_vol().
        pass

    def load_vol(self, path):
        """Load a NIfTI volume.

        path : patient data path
        Returns the voxel data as a numpy array; the image object and
        its affine are kept on self for later write_vol() calls.
        """
        self.patient = nib.load(path)
        self.affine = self.patient.affine
        return self.patient.get_data()

    def write_vol(self, path, volume):
        """Save *volume* as a uint8 NIfTI using the cached affine.

        path   : path to write the data
        volume : array-like modified volume (cast to uint8)
        Returns True on success, False if saving failed.
        """
        try:
            data = np.uint8(volume)
            img = nib.Nifti1Image(data, self.affine)
            img.set_data_dtype(np.uint8)
            nib.save(img, path)
            return True
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            return False
| 30.666667 | 80 | 0.676749 | 290 | 2,116 | 4.903448 | 0.517241 | 0.061885 | 0.018284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005714 | 0.255671 | 2,116 | 68 | 81 | 31.117647 | 0.897143 | 0.626654 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.136364 | false | 0.045455 | 0.272727 | 0 | 0.590909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
423fee1037a4130b27a1927c09025e289e851a6f | 1,491 | py | Python | utils_test.py | lostsquirrel/words | aaa4bb2b3a9c8c7c7300e29ec73f39cff4409b8d | [
"MIT"
] | null | null | null | utils_test.py | lostsquirrel/words | aaa4bb2b3a9c8c7c7300e29ec73f39cff4409b8d | [
"MIT"
] | null | null | null | utils_test.py | lostsquirrel/words | aaa4bb2b3a9c8c7c7300e29ec73f39cff4409b8d | [
"MIT"
] | null | null | null | import json
import unittest
from utils import CustomEncoder, Paging, ValidationError, generate_uuid, Validator
class UtilsTest(unittest.TestCase):
def test_uuid(self):
print(generate_uuid())
self.assertEqual(len(generate_uuid()), 32)
def test_valiate(self):
form = dict(
a=1,
b=2,
c=3
)
v = Validator().rule("a").rule("b").rule("c").rule("d", False, 4)
_a, _b, _c, _d = v.validate_form(form)
self.assertEqual(_a, 1)
self.assertEqual(_b, 2)
self.assertEqual(_c, 3)
self.assertEqual(_d, 4)
def test_validate_none_form(self):
v = Validator().rule("page", False, 1).rule("per_page", False, 10)
page, per_page = v.validate_form(None)
self.assertEqual(page, 1)
self.assertEqual(per_page, 10)
def test_validate_none_form_required(self):
v = Validator().rule("page")
try:
v.validate_form(None)
except ValidationError as e:
print(e)
try:
v.validate_form(dict(size=2))
except ValidationError as e:
print(e)
def test_extend(self):
try:
[].extend(None)
except TypeError as e:
print(e)
def test_paging(self):
p = Paging(101, 1, 10)
print(json.dumps(p.__dict__))
def test_json_encode(self):
p = Paging(101, 1, 10)
print(CustomEncoder().encode(p))
| 26.625 | 82 | 0.564051 | 187 | 1,491 | 4.31016 | 0.278075 | 0.060794 | 0.064516 | 0.033499 | 0.269231 | 0.157568 | 0.054591 | 0 | 0 | 0 | 0 | 0.028293 | 0.312542 | 1,491 | 56 | 83 | 26.625 | 0.758049 | 0 | 0 | 0.222222 | 1 | 0 | 0.013405 | 0 | 0 | 0 | 0 | 0 | 0.155556 | 1 | 0.155556 | false | 0 | 0.066667 | 0 | 0.244444 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4252097259c5f8f2219e8a65c81337c134ef50fa | 1,151 | py | Python | src/clean_property_file.py | wmaciel/van-crime | e70d0310f41de3a1b54572f6c6bf01083e56e0ab | [
"MIT"
] | 2 | 2016-03-03T00:14:59.000Z | 2016-08-21T14:28:02.000Z | src/clean_property_file.py | wmaciel/van-crime | e70d0310f41de3a1b54572f6c6bf01083e56e0ab | [
"MIT"
] | null | null | null | src/clean_property_file.py | wmaciel/van-crime | e70d0310f41de3a1b54572f6c6bf01083e56e0ab | [
"MIT"
] | null | null | null | __author__ = 'walthermaciel'
import pandas as pd
import numpy as np
def load_csv(path):
    """Load a property-tax CSV and average assessed values per address.

    Ported from Python 2 (``print`` statements are a SyntaxError on any
    modern interpreter); output is unchanged.

    :param path: path to a property-tax report CSV containing at least the
        columns PID, TAX_ASSESSMENT_YEAR, CURRENT_LAND_VALUE, STREET_NAME
        and TO_CIVIC_NUMBER.
    :returns: DataFrame with one row per (STREET_NAME, STREET_NUMBER) pair
        and the numeric columns (PID, YEAR, VALUE) averaged.
    """
    # Load
    print('Loading', path)
    df = pd.read_csv(path)

    # Remove unwanted columns
    print('Dropping unwanted columns')
    df = df[['PID', 'TAX_ASSESSMENT_YEAR', 'CURRENT_LAND_VALUE', 'STREET_NAME', 'TO_CIVIC_NUMBER']]
    df.columns = ['PID', 'YEAR', 'VALUE', 'STREET_NAME', 'STREET_NUMBER']

    # Remove unwanted rows (empty strings count as missing)
    print('Removing null rows')
    df.replace('', np.nan, inplace=True)
    df.dropna(inplace=True)

    # Compute average value for each property at the same address
    print('Computing average value for same address properties')
    g_df = df.groupby(['STREET_NAME', 'STREET_NUMBER']).mean()
    df = g_df.reset_index()
    return df
def main():
    """Clean each yearly property-tax CSV (2006-2015) and save averages.

    Ported from Python 2: ``print`` statements and ``xrange`` replaced by
    their Python 3 equivalents; behavior and output paths are unchanged.
    """
    for year in range(2006, 2016):
        print(year)
        path_in = '../data/property_tax_06_15/property_tax_report_csv' + str(year) + '.csv'
        df = load_csv(path_in)
        path_out = '../data/property_tax_06_15/avg_property_tax_' + str(year) + '.csv'
        print('Saving', path_out)
        df.to_csv(path_or_buf=path_out, index=False)
        print('\n')


if __name__ == '__main__':
    main()
| 28.775 | 99 | 0.650738 | 162 | 1,151 | 4.320988 | 0.45679 | 0.04 | 0.031429 | 0.062857 | 0.054286 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017798 | 0.21894 | 1,151 | 39 | 100 | 29.512821 | 0.760845 | 0.077324 | 0 | 0 | 0 | 0 | 0.338694 | 0.088931 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.074074 | null | null | 0.259259 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4252c9d8b3317ae5bd56696743e5b2124dce1942 | 4,040 | py | Python | homeassistant/components/sensor/verisure.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2016-07-14T05:20:54.000Z | 2016-07-14T05:20:54.000Z | homeassistant/components/sensor/verisure.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | null | null | null | homeassistant/components/sensor/verisure.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2018-11-22T13:55:23.000Z | 2018-11-22T13:55:23.000Z | """
Interfaces with Verisure sensors.
For more details about this platform, please refer to the documentation at
documentation at https://home-assistant.io/components/verisure/
"""
import logging
from homeassistant.components.verisure import HUB as hub
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Verisure platform.

    Builds thermometer, hygrometer and mouse-detection sensors from the
    hub's current status, honoring the 'thermometers', 'hygrometers' and
    'mouse' config flags (each defaults to enabled).
    """
    sensors = []

    thermometers = int(hub.config.get('thermometers', '1'))
    hygrometers = int(hub.config.get('hygrometers', '1'))

    if thermometers or hygrometers:
        # Refresh climate data once; the original refreshed twice when both
        # sensor types were enabled.
        hub.update_climate()

    if thermometers:
        sensors.extend([
            VerisureThermometer(value.id)
            for value in hub.climate_status.values()
            if hasattr(value, 'temperature') and value.temperature
        ])

    if hygrometers:
        sensors.extend([
            VerisureHygrometer(value.id)
            for value in hub.climate_status.values()
            if hasattr(value, 'humidity') and value.humidity
        ])

    if int(hub.config.get('mouse', '1')):
        hub.update_mousedetection()
        sensors.extend([
            VerisureMouseDetection(value.deviceLabel)
            for value in hub.mouse_status.values()
            if hasattr(value, 'amountText') and value.amountText
        ])

    add_devices(sensors)
class VerisureThermometer(Entity):
    """Representation of a Verisure thermometer."""

    def __init__(self, device_id):
        """Initialize the sensor."""
        # Key into hub.climate_status for this device.
        self._id = device_id

    @property
    def name(self):
        """Return the name of the device, e.g. "<location> Temperature"."""
        return '{} {}'.format(
            hub.climate_status[self._id].location,
            "Temperature")

    @property
    def state(self):
        """Return the state of the device."""
        # Remove ° character — the hub reports temperature as a string with
        # a trailing degree sign.
        return hub.climate_status[self._id].temperature[:-1]

    @property
    def available(self):
        """Return True if entity is available."""
        return hub.available

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return TEMP_CELSIUS

    def update(self):
        """Update the sensor by refreshing the hub's climate data."""
        hub.update_climate()
class VerisureHygrometer(Entity):
    """Representation of a Verisure hygrometer."""

    def __init__(self, device_id):
        """Initialize the sensor."""
        # Key into hub.climate_status for this device.
        self._id = device_id

    @property
    def name(self):
        """Return the name of the sensor, e.g. "<location> Humidity"."""
        return '{} {}'.format(
            hub.climate_status[self._id].location,
            "Humidity")

    @property
    def state(self):
        """Return the state of the sensor."""
        # remove % character — the hub reports humidity as a string with a
        # trailing percent sign.
        return hub.climate_status[self._id].humidity[:-1]

    @property
    def available(self):
        """Return True if entity is available."""
        return hub.available

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this sensor."""
        return "%"

    def update(self):
        """Update the sensor by refreshing the hub's climate data."""
        hub.update_climate()
class VerisureMouseDetection(Entity):
    """Representation of a Verisure mouse detector."""

    def __init__(self, device_id):
        """Initialize the sensor."""
        # Key into hub.mouse_status for this device (a deviceLabel).
        self._id = device_id

    @property
    def name(self):
        """Return the name of the sensor, e.g. "<location> Mouse"."""
        return '{} {}'.format(
            hub.mouse_status[self._id].location,
            "Mouse")

    @property
    def state(self):
        """Return the state of the sensor (detection count)."""
        return hub.mouse_status[self._id].count

    @property
    def available(self):
        """Return True if entity is available."""
        return hub.available

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this sensor."""
        return "Mice"

    def update(self):
        """Update the sensor by refreshing the hub's mouse-detection data."""
        hub.update_mousedetection()
| 26.933333 | 74 | 0.611881 | 449 | 4,040 | 5.376392 | 0.216036 | 0.054681 | 0.048467 | 0.03314 | 0.61889 | 0.543911 | 0.519056 | 0.488401 | 0.44449 | 0.428335 | 0 | 0.001699 | 0.271535 | 4,040 | 149 | 75 | 27.114094 | 0.818213 | 0.235644 | 0 | 0.639535 | 0 | 0 | 0.03504 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.22093 | false | 0 | 0.046512 | 0 | 0.44186 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
4253d0f64f25024f864712c154a198a0bd7c1158 | 1,135 | py | Python | articles/blogs/tests/factories.py | MahmoudFarid/articles | f0238908b1430c949dace50401fb3ddf268a581b | [
"MIT"
] | null | null | null | articles/blogs/tests/factories.py | MahmoudFarid/articles | f0238908b1430c949dace50401fb3ddf268a581b | [
"MIT"
] | null | null | null | articles/blogs/tests/factories.py | MahmoudFarid/articles | f0238908b1430c949dace50401fb3ddf268a581b | [
"MIT"
] | null | null | null | import factory
from factory.django import DjangoModelFactory as Factory
from django.contrib.auth.models import Permission
from ..models import Blog
from articles.users.tests.factories import UserFactory
class Blogfactory(Factory):
    """Factory producing Blog instances with faked content.

    Fixes a duplicated assignment (``user = user = ...``) and a typo in
    the status comprehension variable (``sttaus``); generated values are
    unchanged.
    """

    user = factory.SubFactory(UserFactory)
    title = factory.Faker('sentence', nb_words=3)
    description = factory.Faker('paragraph', nb_sentences=5)
    content = factory.Faker('paragraph', nb_sentences=10)
    gdoc_link = 'https://docs.google.com/document/d/1NcF8_6ZMraTXp7H7DVzR6pbqzJgNIyg3gYLUUoFoYe8/edit'
    # Pick a random status key from the model's declared choices.
    status = factory.Faker('random_element', elements=[choice[0] for choice in Blog.STATUS_CHOICES])

    class Meta:
        model = Blog
def create_user_writer_with_permission():
    """Create and return a user holding the ``can_write_blogs`` permission."""
    writer = UserFactory()
    permission = Permission.objects.filter(codename='can_write_blogs').first()
    writer.user_permissions.add(permission)
    return writer
def create_editor_user_with_permission():
    """Create and return a user holding the ``can_review_blogs`` permission."""
    editor = UserFactory()
    permission = Permission.objects.filter(codename='can_review_blogs').first()
    editor.user_permissions.add(permission)
    return editor
| 33.382353 | 102 | 0.767401 | 141 | 1,135 | 5.971631 | 0.503546 | 0.057007 | 0.049881 | 0.054632 | 0.254157 | 0.178147 | 0.102138 | 0 | 0 | 0 | 0 | 0.013252 | 0.135683 | 1,135 | 33 | 103 | 34.393939 | 0.845056 | 0 | 0 | 0.166667 | 0 | 0 | 0.136564 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.208333 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
425489e4c1a682c5eeaad70ce3b5e922f8f9536b | 8,847 | py | Python | api_formatter/serializers.py | RockefellerArchiveCenter/argo | c02fec68dbb50382f3f0bdf11c51240ca22a181c | [
"MIT"
] | null | null | null | api_formatter/serializers.py | RockefellerArchiveCenter/argo | c02fec68dbb50382f3f0bdf11c51240ca22a181c | [
"MIT"
] | 115 | 2019-08-19T20:19:06.000Z | 2022-03-04T17:40:50.000Z | api_formatter/serializers.py | RockefellerArchiveCenter/argo | c02fec68dbb50382f3f0bdf11c51240ca22a181c | [
"MIT"
] | null | null | null | from datetime import datetime
from django.urls import reverse
from rest_framework import serializers
from .view_helpers import description_from_notes
class ExternalIdentifierSerializer(serializers.Serializer):
    """Serializes an identifier together with the external system it came from."""
    identifier = serializers.CharField()
    source = serializers.CharField()
class DateSerializer(serializers.Serializer):
    """Serializes a structured date (expression plus begin/end range)."""
    expression = serializers.CharField()
    begin = serializers.DateField()
    end = serializers.CharField(allow_null=True)
    # NOTE(review): `begin` is a DateField while `end` is a CharField, and
    # `label` is declared DateField though a label usually holds a string —
    # confirm these field types against the index mapping.
    label = serializers.DateField()
    type = serializers.CharField()
class ExtentSerializer(serializers.Serializer):
    """Serializes a physical/digital extent as a numeric value and its type."""
    value = serializers.FloatField()
    type = serializers.CharField()
class LanguageSerializer(serializers.Serializer):
    """Serializes a language as a human-readable expression plus an identifier."""
    expression = serializers.CharField()
    identifier = serializers.CharField()
class SubnoteSerializer(serializers.Serializer):
    """Serializes one subnote of a note."""
    type = serializers.CharField()
    content = serializers.SerializerMethodField()

    def get_content(self, obj):
        """Coerce content into a list so it can be serialized as JSON."""
        return list(obj.content)
class NoteSerializer(serializers.Serializer):
    """Serializes a descriptive note and its nested subnotes."""
    type = serializers.CharField()
    title = serializers.CharField()
    source = serializers.CharField()
    subnotes = SubnoteSerializer(many=True)
class RightsGrantedSerializer(serializers.Serializer):
    """Serializes one act granted (or restricted) within a rights statement."""
    act = serializers.CharField()
    begin = serializers.DateField()
    end = serializers.DateField()
    restriction = serializers.CharField()
    notes = NoteSerializer(many=True, allow_null=True)
class RightsStatementSerializer(serializers.Serializer):
    """Serializes a rights statement, including the acts it grants."""
    determination_date = serializers.DateField()
    type = serializers.CharField()
    rights_type = serializers.CharField()
    begin = serializers.DateField()
    end = serializers.DateField()
    copyright_status = serializers.CharField(allow_null=True)
    other_basis = serializers.CharField(allow_null=True)
    jurisdiction = serializers.CharField(allow_null=True)
    notes = NoteSerializer(many=True, allow_null=True)
    rights_granted = RightsGrantedSerializer(many=True)
class GroupSerializer(serializers.Serializer):
    """Serializes a group reference (identifier plus display title)."""
    identifier = serializers.CharField()
    title = serializers.CharField()
class ReferenceSerializer(serializers.Serializer):
    """Serializes a lightweight reference to another record."""
    title = serializers.CharField()
    type = serializers.CharField(allow_null=True)
    online = serializers.SerializerMethodField()
    hit_count = serializers.IntegerField(allow_null=True)
    online_hit_count = serializers.IntegerField(allow_null=True)
    uri = serializers.SerializerMethodField()
    dates = serializers.CharField(allow_null=True)
    description = serializers.CharField(allow_null=True)
    group = GroupSerializer(allow_null=True)

    def get_online(self, obj):
        """Whether the referenced record is available online (default False)."""
        return getattr(obj, "online", False)

    def get_uri(self, obj):
        """Build a detail URI, preferring an explicit ``uri`` on the object.

        Agent and term subtypes are folded into their generic route names.
        """
        explicit = getattr(obj, "uri", None)
        if explicit:
            return explicit
        agent_types = {"person", "organization", "family", "software"}
        term_types = {
            "cultural_context", "function", "geographic", "genre_form",
            "occupation", "style_period", "technique", "temporal", "topical",
        }
        basename = obj.type
        if basename in agent_types:
            basename = "agent"
        elif basename in term_types:
            basename = "term"
        url = reverse('{}-detail'.format(basename), kwargs={"pk": obj.identifier})
        return url.rstrip("/")
class BaseListSerializer(serializers.Serializer):
    """Common fields for list views of indexed records."""
    uri = serializers.SerializerMethodField()
    type = serializers.CharField()
    title = serializers.CharField()
    dates = DateSerializer(many=True, allow_null=True)

    def get_uri(self, obj):
        """Build the detail URI from the view's basename (or the object type).

        NOTE: duplicated verbatim in BaseDetailSerializer.
        """
        basename = self.context.get('view').basename or obj.type
        return reverse('{}-detail'.format(basename), kwargs={"pk": obj.meta.id}).rstrip("/")
class BaseDetailSerializer(serializers.Serializer):
    """Common fields for detail views of indexed records."""
    uri = serializers.SerializerMethodField()
    title = serializers.CharField()
    type = serializers.CharField()
    category = serializers.CharField(allow_null=True)
    offset = serializers.IntegerField(allow_null=True)
    group = GroupSerializer()
    external_identifiers = ExternalIdentifierSerializer(many=True)

    def get_uri(self, obj):
        """Build the detail URI from the view's basename (or the object type).

        NOTE: duplicated verbatim in BaseListSerializer.
        """
        basename = self.context.get('view').basename or obj.type
        return reverse('{}-detail'.format(basename), kwargs={"pk": obj.meta.id}).rstrip("/")
class AgentSerializer(BaseDetailSerializer):
    """Detail representation of an agent (person, organization, etc.)."""
    agent_type = serializers.CharField()
    description = serializers.CharField(allow_null=True)
    dates = DateSerializer(many=True, allow_null=True)
    notes = NoteSerializer(many=True, allow_null=True)
class AgentListSerializer(BaseListSerializer):
    """List representation of agents; inherits everything from BaseListSerializer."""
    pass
class CollectionSerializer(BaseDetailSerializer):
    """Detail representation of an archival collection."""
    level = serializers.CharField()
    parent = serializers.CharField(allow_null=True)
    languages = LanguageSerializer(many=True, allow_null=True)
    description = serializers.SerializerMethodField()
    extents = ExtentSerializer(many=True)
    formats = serializers.ListField()
    online = serializers.BooleanField()
    dates = DateSerializer(many=True, allow_null=True)
    notes = NoteSerializer(many=True, allow_null=True)
    rights_statements = RightsStatementSerializer(many=True, allow_null=True)
    agents = ReferenceSerializer(many=True, allow_null=True)
    creators = ReferenceSerializer(many=True, allow_null=True)
    terms = ReferenceSerializer(many=True, allow_null=True)

    def get_description(self, obj):
        """Derive a description string from the object's notes (may be empty)."""
        return description_from_notes(getattr(obj, "notes", []))
class CollectionListSerializer(BaseListSerializer):
    """List representation of collections; inherits everything from BaseListSerializer."""
    pass
class ObjectSerializer(BaseDetailSerializer):
    """Detail representation of an archival object."""
    languages = LanguageSerializer(many=True, allow_null=True)
    parent = serializers.CharField(allow_null=True)
    description = serializers.SerializerMethodField()
    extents = ExtentSerializer(many=True, allow_null=True)
    formats = serializers.ListField()
    online = serializers.BooleanField()
    dates = DateSerializer(many=True, allow_null=True)
    notes = NoteSerializer(many=True, allow_null=True)
    rights_statements = RightsStatementSerializer(many=True, allow_null=True)
    agents = ReferenceSerializer(many=True, allow_null=True)
    terms = ReferenceSerializer(many=True, allow_null=True)

    def get_description(self, obj):
        """Derive a description string from the object's notes (may be empty)."""
        return description_from_notes(getattr(obj, "notes", []))
class ObjectListSerializer(BaseListSerializer):
    """List representation of objects; inherits everything from BaseListSerializer."""
    pass
class TermSerializer(BaseDetailSerializer):
    """Detail representation of a term, with the records it applies to."""
    term_type = serializers.CharField()
    collections = ReferenceSerializer(many=True, allow_null=True)
    objects = ReferenceSerializer(many=True, allow_null=True)
class TermListSerializer(BaseListSerializer):
    """List representation of terms; inherits everything from BaseListSerializer."""
    pass
class CollectionHitSerializer(serializers.Serializer):
    """Serializes data for collapsed hits."""
    category = serializers.CharField(source="group.category")
    dates = serializers.SerializerMethodField()
    hit_count = serializers.IntegerField()
    online_hit_count = serializers.IntegerField(allow_null=True)
    title = serializers.CharField(source="group.title")
    uri = serializers.SerializerMethodField()
    creators = serializers.SerializerMethodField()

    def get_dates(self, obj):
        """Return the group's dates as plain dicts."""
        return [date.to_dict() for date in obj.group.dates]

    def get_creators(self, obj):
        """Return creator titles, or an empty list when the group has none."""
        creators = getattr(obj.group, "creators", None)
        return [creator.title for creator in creators] if creators else []

    def get_uri(self, obj):
        """Return the group identifier without a trailing slash."""
        identifier = obj.group.identifier
        return identifier.rstrip("/")
class FacetSerializer(serializers.Serializer):
    """Serializes facets."""

    def to_representation(self, instance):
        """Flatten the response's aggregations into a facet dict."""
        formatted = {}
        for key, agg in instance.aggregations.to_dict().items():
            if "buckets" in agg:
                formatted[key] = agg["buckets"]
            elif "name" in agg:
                # nested aggregation: lift the inner buckets up one level
                formatted[key] = agg["name"]["buckets"]
            elif key in ("max_date", "min_date"):
                # epoch milliseconds -> calendar year
                raw = agg["value"]
                year = datetime.fromtimestamp(raw / 1000.0).year if raw else None
                formatted[key] = {"value": year}
            else:
                formatted[key] = agg
        return formatted
class AncestorsSerializer(serializers.Serializer):
    """Provides a nested dictionary representation of ancestors."""

    def serialize_ancestors(self, ancestor_list, tree, idx):
        """Recursively fold ancestors into a ``child``-nested dictionary.

        Each ancestor wraps the tree accumulated so far under a "child" key,
        so the last ancestor in the list ends up at the top of the result.
        """
        serialized = ReferenceSerializer(ancestor_list[idx]).data
        merged = {**serialized, **tree}
        if idx < len(ancestor_list) - 1:
            return self.serialize_ancestors(ancestor_list, {"child": merged}, idx + 1)
        return merged

    def to_representation(self, instance):
        """Return the nested ancestor dict, or {} for an empty list."""
        if not instance:
            return {}
        return self.serialize_ancestors(instance, {}, 0)
| 35.247012 | 98 | 0.706228 | 896 | 8,847 | 6.868304 | 0.207589 | 0.123497 | 0.078161 | 0.058011 | 0.545174 | 0.407377 | 0.316867 | 0.268606 | 0.214982 | 0.214982 | 0 | 0.001114 | 0.188312 | 8,847 | 250 | 99 | 35.388 | 0.85587 | 0.026902 | 0 | 0.443243 | 0 | 0 | 0.034121 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.07027 | false | 0.021622 | 0.021622 | 0.027027 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
42549d1737ce596628e42957af0838f8a820986b | 828 | py | Python | cmz/cms_news/migrations/0004_auto_20160923_1958.py | inmagik/cmz | e183f0c7203bda5efb1cbeb96f4f06a76aa91231 | [
"MIT"
] | 1 | 2016-10-01T18:35:24.000Z | 2016-10-01T18:35:24.000Z | cmz/cms_news/migrations/0004_auto_20160923_1958.py | inmagik/cmz | e183f0c7203bda5efb1cbeb96f4f06a76aa91231 | [
"MIT"
] | 8 | 2016-09-14T21:39:09.000Z | 2016-10-25T20:08:31.000Z | cmz/cms_news/migrations/0004_auto_20160923_1958.py | inmagik/cmz | e183f0c7203bda5efb1cbeb96f4f06a76aa91231 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-09-23 19:58
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated: adds News.date (auto_now_add) and NewsTranslation.title."""

    dependencies = [
        ('cms_news', '0003_auto_20160923_1956'),
    ]

    operations = [
        migrations.AddField(
            model_name='news',
            name='date',
            # One-off default required to backfill existing rows when adding
            # an auto_now_add field; not preserved on the model.
            field=models.DateField(auto_now_add=True, default=datetime.datetime(2016, 9, 23, 19, 58, 10, 395979, tzinfo=utc)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='newstranslation',
            name='title',
            field=models.CharField(default='Hello cmz', max_length=300),
            preserve_default=False,
        ),
    ]
| 27.6 | 126 | 0.621981 | 94 | 828 | 5.308511 | 0.638298 | 0.016032 | 0.024048 | 0.108216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08867 | 0.264493 | 828 | 29 | 127 | 28.551724 | 0.730706 | 0.080918 | 0 | 0.272727 | 1 | 0 | 0.08971 | 0.030343 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
42553eda4ebfb5ccb85d9727626440163f717d34 | 3,252 | py | Python | mopidy/audio/utils.py | grdorin/mopidy | 76db44088c102d7ad92a3fc6a15a938e66b99b0d | [
"Apache-2.0"
] | 6,700 | 2015-01-01T03:57:59.000Z | 2022-03-30T09:31:31.000Z | mopidy/audio/utils.py | pnijhara/mopidy | 7168787ea6c82b66e138fc2b388d78fa1c7661ba | [
"Apache-2.0"
] | 1,141 | 2015-01-02T09:48:59.000Z | 2022-03-28T22:25:30.000Z | mopidy/audio/utils.py | pnijhara/mopidy | 7168787ea6c82b66e138fc2b388d78fa1c7661ba | [
"Apache-2.0"
] | 735 | 2015-01-01T21:15:50.000Z | 2022-03-20T16:13:44.000Z | from mopidy import httpclient
from mopidy.internal.gi import Gst
def calculate_duration(num_samples: int, sample_rate: int) -> int:
    """Determine duration of samples using GStreamer helper for precise
    math.

    :param num_samples: number of audio samples.
    :param sample_rate: samples per second.
    :returns: duration in GStreamer clock time (nanoseconds).
    """
    return Gst.util_uint64_scale(num_samples, Gst.SECOND, sample_rate)
def create_buffer(data, timestamp=None, duration=None):
    """Create a new GStreamer buffer based on provided data.

    Mainly intended to keep gst imports out of non-audio modules.

    .. versionchanged:: 2.0
        ``capabilites`` argument was removed.
    """
    if not data:
        raise ValueError("Cannot create buffer without data")
    gst_buffer = Gst.Buffer.new_wrapped(data)
    if timestamp is not None:
        gst_buffer.pts = timestamp
    if duration is not None:
        gst_buffer.duration = duration
    return gst_buffer
def millisecond_to_clocktime(value: int) -> int:
    """Convert a millisecond time to internal GStreamer time."""
    return value * Gst.MSECOND
def clocktime_to_millisecond(value: int) -> int:
    """Convert an internal GStreamer time to millisecond time."""
    # Floor division: sub-millisecond precision is deliberately dropped.
    return value // Gst.MSECOND
def supported_uri_schemes(uri_schemes):
    """Determine which URIs we can actually support from provided whitelist.

    :param uri_schemes: list/set of URIs to check support for.
    :type uri_schemes: list or set or URI schemes as strings.
    :rtype: set of URI schemes we can support via this GStreamer install.
    """
    registry = Gst.Registry.get()
    return {
        protocol
        for factory in registry.get_feature_list(Gst.ElementFactory)
        for protocol in factory.get_uri_protocols()
        if protocol in uri_schemes
    }
def setup_proxy(element, config):
    """Configure a GStreamer element with proxy settings.

    Does nothing when the element has no ``proxy`` property or no proxy
    hostname is configured.

    :param element: element to setup proxy in.
    :type element: :class:`Gst.GstElement`
    :param config: proxy settings to use.
    :type config: :class:`dict`
    """
    if hasattr(element.props, "proxy") and config.get("hostname"):
        element.set_property("proxy", httpclient.format_proxy(config, auth=False))
        element.set_property("proxy-id", config.get("username"))
        element.set_property("proxy-pw", config.get("password"))
class Signals:
    """Helper for tracking gobject signal registrations"""

    def __init__(self):
        # (element, event) -> handler id returned by element.connect()
        self._ids = {}

    def connect(self, element, event, func, *args):
        """Connect a function + args to signal event on an element.

        Each event may only be handled by one callback in this
        implementation.
        """
        key = (element, event)
        if key in self._ids:
            raise AssertionError
        self._ids[key] = element.connect(event, func, *args)

    def disconnect(self, element, event):
        """Disconnect whatever handler we have for an element+event pair.

        Does nothing if the handler has already been removed.
        """
        handler_id = self._ids.pop((element, event), None)
        if handler_id is not None:
            element.disconnect(handler_id)

    def clear(self):
        """Clear all registered signal handlers."""
        while self._ids:
            (element, _event), handler_id = self._ids.popitem()
            element.disconnect(handler_id)
| 31.882353 | 78 | 0.681119 | 421 | 3,252 | 5.147268 | 0.382423 | 0.044301 | 0.01246 | 0.031841 | 0.046147 | 0.025842 | 0 | 0 | 0 | 0 | 0 | 0.001587 | 0.225092 | 3,252 | 101 | 79 | 32.19802 | 0.858333 | 0.355781 | 0 | 0 | 0 | 0 | 0.042542 | 0 | 0 | 0 | 0 | 0 | 0.022222 | 1 | 0.222222 | false | 0.022222 | 0.044444 | 0 | 0.422222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
42595d917949c306ffaf79514babf64460ba3c69 | 1,869 | py | Python | blog.py | BenTimor/SerializationConceptSystem | 0f85dc32063d270a5564cda3199d84d474e5d83e | [
"MIT"
] | 1 | 2020-11-13T22:21:47.000Z | 2020-11-13T22:21:47.000Z | blog.py | BenTimor/SerializationConceptSystem | 0f85dc32063d270a5564cda3199d84d474e5d83e | [
"MIT"
] | null | null | null | blog.py | BenTimor/SerializationConceptSystem | 0f85dc32063d270a5564cda3199d84d474e5d83e | [
"MIT"
] | null | null | null | from utils import database
class Config:
    """Singleton-style container for all blog data, persisted via the
    concept database.

    The most recently constructed instance is registered as
    ``Config.config``.
    """

    # The active Config instance (set by __init__ / setup()).
    config = None

    def __init__(self, users, posts, comments):
        self.users = users        # list of User
        self.posts = posts        # list of Post
        self.comments = comments  # dict mapping post id -> list of Comment
        Config.config = self

    @staticmethod
    def update():
        """Persist the active config back to the stored row (id=1)."""
        database["concept", True]["config", "WHERE id=1"] = Config.config

    @staticmethod
    def setup():
        """Load the stored config, or seed the database with default data."""
        try:
            Config.config = database["concept", True]["config", "WHERE id=1"][0][0]
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any load failure still falls back to
        # seeding defaults.
        except Exception:
            Config.config = Config(
                [User("Admin", "admin123", True)],
                [Post("Admin", 0, "Hello World!", "Lorem Ipsum")],
                {0: [Comment("Admin", "Lorem Ipsum")]})
            database["concept", True]["config"] = Config.config
class User:
    """A blog user.

    NOTE(review): passwords are stored in plain text — consider hashing
    before this goes anywhere near production.
    """

    def __init__(self, name, password, is_admin=False):
        self.name = name
        self.is_admin = is_admin
        self.password = password

    @staticmethod
    def new_user(name, password, is_admin=False):
        """Create a user, register it in the active config and persist."""
        Config.config.users.append(User(name, password, is_admin))
        Config.update()

    @staticmethod
    def get_user(name):
        """Return the first user with the given name, or None."""
        return next((u for u in Config.config.users if u.name == name), None)
class Post:
    """A blog post identified by a sequential integer id."""

    def __init__(self, user, id, title, content):
        # `id` shadows the builtin but is kept for caller compatibility.
        self.user = user
        self.id = id
        self.title = title
        self.content = content

    @staticmethod
    def new_post(user, title, content):
        """Append a post (id = current post count) and persist the config."""
        next_id = len(Config.config.posts)
        Config.config.posts.append(Post(user, next_id, title, content))
        Config.update()
class Comment:
    """A comment on a blog post."""

    def __init__(self, user, content):
        self.user = user
        self.content = content

    @staticmethod
    def new_comment(post, user, content):
        """Append a comment under ``post`` and persist the config.

        Fixes a KeyError in the original, which indexed
        ``Config.config.comments[post]`` before checking whether the post
        had a comment list at all; ``setdefault`` creates it on demand.
        """
        Config.config.comments.setdefault(post, []).append(Comment(user, content))
        Config.update()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.