hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
95d4bf219897990197feea13feb7cf1258d214c8
6,298
py
Python
yadlt/core/layers.py
Perfect-SoftwareEngineer/Deep-Learning-Tensorflow
b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366
[ "MIT" ]
null
null
null
yadlt/core/layers.py
Perfect-SoftwareEngineer/Deep-Learning-Tensorflow
b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366
[ "MIT" ]
null
null
null
yadlt/core/layers.py
Perfect-SoftwareEngineer/Deep-Learning-Tensorflow
b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366
[ "MIT" ]
null
null
null
"""Layer classes.""" from __future__ import absolute_import import abc import six import tensorflow as tf @six.add_metaclass(abc.ABCMeta) class BaseLayer(object): """Base layer interface.""" @abc.abstractmethod def forward(self): """Layer forward propagation.""" pass @abc.abstractmethod def backward(self): """Layer backward propagation.""" pass @abc.abstractmethod def get_variables(self): """Get layer's tf variables.""" pass @abc.abstractmethod def get_parameters(self): """Get the layer parameters.""" pass class Linear(BaseLayer): """Fully-Connected layer.""" def __init__(self, shape, name="linear", vnames=["W", "b"]): """Create a new linear layer instance.""" self.name = name self.vnames = vnames with tf.name_scope(self.name): self.W = tf.Variable( tf.truncated_normal(shape=shape, stddev=0.1), name=vnames[0]) self.b = tf.Variable( tf.constant(0.1, shape=[shape[1]]), name=vnames[1]) def forward(self, X): """Forward propagate X through the fc layer.""" with tf.name_scope(self.name): return tf.add(tf.matmul(X, self.W), self.b) def backward(self, H): """Backward propagate H through the fc layer.""" pass def get_variables(self): """Get layer's variables.""" return [self.W, self.b] def get_parameters(self): """Return all the parameters of this layer.""" with tf.Session() as sess: return { self.names[0]: sess.run(self.W), self.names[1]: sess.run(self.b) } class Activation(BaseLayer): """Activation function layer.""" def __init__(self, func, name="act_func"): """Create a new Activation layer instance.""" self.name = name if func is not None: self.func = func else: self.func = tf.identity def forward(self, X): """Forward propagate X.""" return self.func(X) def backward(self, H): """Backward propagate H through the fc layer.""" pass def get_variables(self): """Return the layer's variables.""" pass def get_parameters(self): """Return all the parameters of this layer.""" pass class SoftMax(BaseLayer): """SoftMax layer.""" def __init__(self, prev_layer, n_classes, 
name="softmax"): """Create a new SoftMax layer instance.""" self.prev_layer = prev_layer self.shape = (prev_layer.get_shape()[1].value, n_classes) self.n_classes = n_classes self.name = name self.vs = ['softmax_W', 'softmax_b'] with tf.name_scope(self.name): self.W = tf.Variable( tf.truncated_normal(self.shape, stddev=0.1), name=self.vs[0]) self.b = tf.Variable( tf.constant(0.1, shape=[n_classes]), name=self.vs[0]) def forward(self, X): """Forward propagate X.""" with tf.name_scope(self.name): return tf.add(tf.matmul(self.prev_layer, self.W), self.b) def backward(self, H): """Backward propagate H through the fc layer.""" pass def get_variables(self): """Return the layer's variables.""" return (self.W, self.b) def get_parameters(self): """Return all the parameters of this layer.""" with tf.Session() as sess: return { self.vs[0]: sess.run(self.W), self.vs[1]: sess.run(self.b) } class Regularization(BaseLayer): """Regularization function layer.""" def __init__(self, variables, C, regtype="l2", name="act_func"): """Create a new Regularization layer instance.""" assert regtype in ["l1", "l2"] self.variables = variables self.C = C self.regtype = regtype self.name = name def forward(self, X): """Forward propagate X.""" regs = tf.constant(0.0) for v in self.variables: if self.regtype == "l1": regs = tf.add(regs, tf.reduce_sum(tf.abs(v))) elif self.regtype == "l2": regs = tf.add(regs, tf.nn.l2_loss(v)) return tf.mul(self.C, regs) def backward(self, H): """Backward propagate H through the fc layer.""" pass def get_variables(self): """Return the layer's variables.""" pass def get_parameters(self): """Return all the parameters of this layer.""" pass class Loss(BaseLayer): """Loss function layer.""" def __init__(self, mod_y, ref_y, loss_type, regterm=None, summary=True, name="loss_func"): """Create a new Loss layer instance.""" assert loss_type in ["cross_entropy", "softmax_cross_entropy", "mean_squared"] self.mod_y = mod_y self.ref_y = ref_y self.loss_type = loss_type 
self.regterm = regterm self.name = name if loss_type == "cross_entropy": clip_inf = tf.clip_by_value(self.mod_y, 1e-10, float('inf')) clip_sup = tf.clip_by_value(1 - self.mod_y, 1e-10, float('inf')) loss = - tf.reduce_mean(tf.add( tf.mul(self.ref_y, tf.log(clip_inf)), tf.mul(tf.sub(1.0, self.ref_y), tf.log(clip_sup)))) elif loss_type == "softmax_cross_entropy": loss = tf.contrib.losses.softmax_cross_entropy( self.mod_y, self.ref_y) elif loss_type == "mean_squared": loss = tf.sqrt(tf.reduce_mean( tf.square(tf.sub(self.ref_y, self.mod_y)))) self.loss = loss + regterm if regterm is not None else loss if summary: tf.summary.scalar(self.name, self.loss) def forward(self, X): """Forward propagate X.""" pass def backward(self, H): """Backward propagate H through the fc layer.""" pass def get_variables(self): """Return the layer's variables.""" pass def get_parameters(self): """Return all the parameters of this layer.""" pass
28.497738
77
0.563671
805
6,298
4.281988
0.154037
0.020888
0.023209
0.033072
0.531187
0.442994
0.400638
0.333623
0.333623
0.333623
0
0.007996
0.305017
6,298
220
78
28.627273
0.779529
0.18101
0
0.440299
0
0
0.033353
0.008439
0
0
0
0
0.014925
1
0.216418
false
0.119403
0.029851
0
0.350746
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
95dba8c035cf85f4ef8f4bc3e7a7c14c268076f1
1,797
py
Python
engine/src/valet/engine/search/filters/cpu_filter.py
onap/optf-fgps
1494071d0329698297c5d78ee0799dbff0b57e43
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
engine/src/valet/engine/search/filters/cpu_filter.py
onap/optf-fgps
1494071d0329698297c5d78ee0799dbff0b57e43
[ "Apache-2.0", "CC-BY-4.0" ]
null
null
null
engine/src/valet/engine/search/filters/cpu_filter.py
onap/optf-fgps
1494071d0329698297c5d78ee0799dbff0b57e43
[ "Apache-2.0", "CC-BY-4.0" ]
1
2021-10-15T18:54:03.000Z
2021-10-15T18:54:03.000Z
# # ------------------------------------------------------------------------- # Copyright (c) 2019 AT&T Intellectual Property # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ------------------------------------------------------------------------- # class CPUFilter(object): def __init__(self): self.name = "cpu" self.status = None def init_condition(self): self.status = None def check_pre_condition(self, _level, _v, _avail_hosts, _avail_groups): return True def filter_candidates(self, _level, _v, _candidate_list): candidate_list = [] for c in _candidate_list: if self._check_candidate(_level, _v, c): candidate_list.append(c) return candidate_list def _check_candidate(self, _level, _v, _candidate): """Return True if host has sufficient CPU cores.""" avail_vcpus = _candidate.get_vcpus(_level) instance_vcpus = _v.vCPUs # TODO: need to check against original CPUs? # Do not allow an instance to overcommit against itself, # only against other instances. # if instance_vcpus > vCPUs: # return False if avail_vcpus < instance_vcpus: return False return True
30.982759
76
0.606566
214
1,797
4.901869
0.518692
0.057197
0.028599
0.030505
0
0
0
0
0
0
0
0.0059
0.245409
1,797
57
77
31.526316
0.767699
0.530885
0
0.2
0
0
0.003681
0
0
0
0
0.017544
0
1
0.25
false
0
0
0.05
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
2
95eaee2ff327784e0d2a6285027d63a294194fa5
283
py
Python
Programming_Maester/Phoneketmon.py
Mayner0220/Programmers
42e4783a526506fb7d8208841a76201909ed5c5c
[ "Apache-2.0" ]
1
2021-04-01T06:19:02.000Z
2021-04-01T06:19:02.000Z
Programming_Maester/Phoneketmon.py
Mayner0220/Programmers
42e4783a526506fb7d8208841a76201909ed5c5c
[ "Apache-2.0" ]
null
null
null
Programming_Maester/Phoneketmon.py
Mayner0220/Programmers
42e4783a526506fb7d8208841a76201909ed5c5c
[ "Apache-2.0" ]
null
null
null
# https://programmers.co.kr/learn/courses/30/lessons/1845 def solution(nums): if len(set(nums)) <= len(nums)/2: return int(len(set(nums))) else: return int(len(nums)/2) print(solution([3,1,2,3])) print(solution([3,3,3,2,2,4])) print(solution([3,3,3,2,2,2]))
25.727273
57
0.618375
52
283
3.365385
0.442308
0.045714
0.24
0.171429
0.205714
0.205714
0.205714
0
0
0
0
0.099174
0.144876
283
11
58
25.727273
0.623967
0.194346
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.375
0.375
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
95ec274e03ce16625cb08ace26548a81e6d7c252
3,903
py
Python
ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py
Amourspirit/ooo_uno_tmpl
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
[ "Apache-2.0" ]
null
null
null
ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py
Amourspirit/ooo_uno_tmpl
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
[ "Apache-2.0" ]
null
null
null
ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py
Amourspirit/ooo_uno_tmpl
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 # # Copyright 2022 :Barry-Thomas-Paul: Moss # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http: // www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Interface Class # this is a auto generated file generated by Cheetah # Libre Office Version: 7.3 # Namespace: com.sun.star.xml.crypto.sax import typing from abc import abstractmethod from ....uno.x_interface import XInterface as XInterface_8f010a43 if typing.TYPE_CHECKING: from ...sax.x_document_handler import XDocumentHandler as XDocumentHandler_9b90e28 from ...wrapper.xxml_element_wrapper import XXMLElementWrapper as XXMLElementWrapper_66c0107c class XSAXEventKeeper(XInterface_8f010a43): """ Interface of SAX Event Keeper. This interface is used to manipulate element marks in a SAX event stream. There are two kinds of element mark, one is element collector, which is used to collect a particular element from the SAX event stream; the other is blocker, which is used to block the SAX event stream. See Also: `API XSAXEventKeeper <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1xml_1_1crypto_1_1sax_1_1XSAXEventKeeper.html>`_ """ __ooo_ns__: str = 'com.sun.star.xml.crypto.sax' __ooo_full_ns__: str = 'com.sun.star.xml.crypto.sax.XSAXEventKeeper' __ooo_type_name__: str = 'interface' __pyunointerface__: str = 'com.sun.star.xml.crypto.sax.XSAXEventKeeper' @abstractmethod def addBlocker(self) -> int: """ Adds a new blocker on the next element in the SAX event stream. 
No SAX event starting from the next element will be forwarded until this blocker is removed. """ @abstractmethod def addElementCollector(self) -> int: """ Adds a new element collector on the next element in the SAX event stream. """ @abstractmethod def getCurrentBlockingNode(self) -> 'XXMLElementWrapper_66c0107c': """ Gets the element which current blocking happens. This element is the working element of the first blocker in tree order. """ @abstractmethod def getElement(self, id: int) -> 'XXMLElementWrapper_66c0107c': """ Gets the element of an element mark. """ @abstractmethod def isBlocking(self) -> bool: """ Checks whether the SAX event stream is blocking. """ @abstractmethod def printBufferNodeTree(self) -> str: """ Prints information about all buffered elements. """ @abstractmethod def removeBlocker(self, id: int) -> None: """ Removes a blocker. """ @abstractmethod def removeElementCollector(self, id: int) -> None: """ Removes an element collector. """ @abstractmethod def setElement(self, id: int, aElement: 'XXMLElementWrapper_66c0107c') -> None: """ Sets the element of an element mark. When an element is replaced outside of this interface, then uses this method can restore the link between an element mark and its working element. """ @abstractmethod def setNextHandler(self, nextHandler: 'XDocumentHandler_9b90e28') -> 'XDocumentHandler_9b90e28': """ Sets the next document handler in the SAX chain. This handler will receive SAX events forwarded by the SAXEventKeeper. """ __all__ = ['XSAXEventKeeper']
37.171429
206
0.688189
486
3,903
5.417695
0.427984
0.064565
0.031903
0.032283
0.1485
0.095329
0.067983
0.067983
0.026586
0
0
0.024799
0.23546
3,903
104
207
37.528846
0.857574
0.517038
0
0.3125
0
0
0.173743
0.158067
0
0
0
0
0
1
0.3125
false
0
0.15625
0
0.625
0.03125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
95f0995fad2f82fbbbdccca26aa5605a1c0767e1
84
py
Python
libcity/model/traffic_od_prediction/__init__.py
LibCity/Bigscity-LibCity-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
5
2021-09-28T12:32:50.000Z
2022-02-03T09:04:35.000Z
libcity/model/traffic_od_prediction/__init__.py
aptx1231/Bigscity-TrafficDL-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
null
null
null
libcity/model/traffic_od_prediction/__init__.py
aptx1231/Bigscity-TrafficDL-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
1
2021-12-16T05:10:35.000Z
2021-12-16T05:10:35.000Z
from libcity.model.traffic_od_prediction.GEML import GEML __all__ = [ "GEML" ]
14
57
0.738095
11
84
5.090909
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.166667
84
5
58
16.8
0.8
0
0
0
0
0
0.047619
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2510cbb743f1f29e0bb13e1ae7ff3435645c3b4d
184
py
Python
libs/db_check.py
redpeacock78/bach_bot
086efe4d6eef05fbdff6af34534e54e43fd9af88
[ "MIT" ]
null
null
null
libs/db_check.py
redpeacock78/bach_bot
086efe4d6eef05fbdff6af34534e54e43fd9af88
[ "MIT" ]
null
null
null
libs/db_check.py
redpeacock78/bach_bot
086efe4d6eef05fbdff6af34534e54e43fd9af88
[ "MIT" ]
null
null
null
import mysql.connector as mydb conn = mydb.connect( host='mysql_container', port='3306', user='docker', password='docker', database='my_db' ) conn.is_connected()
15.333333
30
0.657609
23
184
5.130435
0.826087
0
0
0
0
0
0
0
0
0
0
0.027027
0.195652
184
11
31
16.727273
0.77027
0
0
0
0
0
0.195652
0
0
0
0
0
0
1
0
false
0.111111
0.111111
0
0.111111
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
252143e0b4bc8782465cc8f472bab67d3793cee0
1,129
py
Python
python/test_2020_04_2.py
wensby/advent-of-code
50cd7fa2d35674d868a79ac8c75be24a43267e2b
[ "MIT" ]
null
null
null
python/test_2020_04_2.py
wensby/advent-of-code
50cd7fa2d35674d868a79ac8c75be24a43267e2b
[ "MIT" ]
null
null
null
python/test_2020_04_2.py
wensby/advent-of-code
50cd7fa2d35674d868a79ac8c75be24a43267e2b
[ "MIT" ]
null
null
null
import importlib import unittest solution = importlib.import_module('2020_04_2') class Test2020Day4Part1(unittest.TestCase): def test_example1(self): input = ( 'eyr:1972 cid:100\n' 'hcl:#18171d ecl:amb hgt:170 pid:186cm iyr:2018 byr:1926\n' '\n' 'iyr:2019\n' 'hcl:#602927 eyr:1967 hgt:170cm\n' 'ecl:grn pid:012533040 byr:1946\n' '\n' 'hcl:dab227 iyr:2012\n' 'ecl:brn hgt:182cm pid:021572410 eyr:2020 byr:1992 cid:277\n' '\n' 'hgt:59cm ecl:zzz\n' 'eyr:2038 hcl:74454a iyr:2023\n' 'pid:3556412378 byr:2007\n' '\n' 'pid:087499704 hgt:74in ecl:grn iyr:2012 eyr:2030 byr:1980\n' 'hcl:#623a2f\n' '\n' 'eyr:2029 ecl:blu cid:129 byr:1989\n' 'iyr:2014 pid:896056539 hcl:#a97842 hgt:165cm\n' '\n' 'hcl:#888785\n' 'hgt:164cm byr:2001 iyr:2015 cid:88\n' 'pid:545766238 ecl:hzl\n' 'eyr:2022\n' '\n' 'iyr:2010 hgt:158cm hcl:#b6652a ecl:blu byr:1944 eyr:2021 pid:093154719\n' ) self.assertEqual(solution.run(input), 4)
29.710526
82
0.578388
172
1,129
3.773256
0.488372
0.021572
0.015408
0
0
0
0
0
0
0
0
0.303659
0.273694
1,129
37
83
30.513514
0.487805
0
0
0.205882
0
0.029412
0.559787
0
0
0
0
0
0.029412
1
0.029412
false
0
0.088235
0
0.147059
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
252c453ec6e9dc3416a26d47c38bcfb973477454
74
py
Python
python/sequences.py
saedyousef/Python-scratch
ba4bf88d1ad86beddc8c7c5e2f43c4e837e2861e
[ "MIT" ]
5
2020-07-20T17:47:08.000Z
2021-08-17T18:26:25.000Z
python/sequences.py
saedyousef/CS-50
ba4bf88d1ad86beddc8c7c5e2f43c4e837e2861e
[ "MIT" ]
null
null
null
python/sequences.py
saedyousef/CS-50
ba4bf88d1ad86beddc8c7c5e2f43c4e837e2861e
[ "MIT" ]
1
2021-06-29T19:49:46.000Z
2021-06-29T19:49:46.000Z
name = "Saeed" cordinates = (10.0, 20.0) names = ["Saeed", "Bob", "Mousa"]
24.666667
33
0.581081
11
74
3.909091
0.818182
0
0
0
0
0
0
0
0
0
0
0.095238
0.148649
74
3
33
24.666667
0.587302
0
0
0
0
0
0.24
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
255d6b11cbe644a12928787786f04d1940067a84
303
py
Python
setup.py
StrykerKKD/dropbox-backup
8ee692ef1de5be1e3257a627dc268b331694b2b8
[ "MIT" ]
null
null
null
setup.py
StrykerKKD/dropbox-backup
8ee692ef1de5be1e3257a627dc268b331694b2b8
[ "MIT" ]
null
null
null
setup.py
StrykerKKD/dropbox-backup
8ee692ef1de5be1e3257a627dc268b331694b2b8
[ "MIT" ]
null
null
null
from setuptools import setup setup( name='dropboxbackup', version='0.1', py_modules=['dropboxbackup'], install_requires=[ 'click', 'dropbox', 'simple-crypt' ], entry_points=''' [console_scripts] dropboxbackup=dropboxbackup:cli ''', )
17.823529
39
0.574257
26
303
6.538462
0.846154
0
0
0
0
0
0
0
0
0
0
0.009259
0.287129
303
16
40
18.9375
0.777778
0
0
0
0
0
0.409241
0.10231
0
0
0
0
0
1
0
true
0
0.066667
0
0.066667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
255eb03b3149f28db58ee09e23382f4784f486dd
362
py
Python
leadmanager/leads/views.py
mydjangoandreactprojects/lead-manager
844c655dcd1010fb0b1cd889ddc94872aa4f15a0
[ "MIT" ]
1
2020-03-26T06:25:47.000Z
2020-03-26T06:25:47.000Z
leadmanager/leads/views.py
mydjangoandreactprojects/lead-manager
844c655dcd1010fb0b1cd889ddc94872aa4f15a0
[ "MIT" ]
null
null
null
leadmanager/leads/views.py
mydjangoandreactprojects/lead-manager
844c655dcd1010fb0b1cd889ddc94872aa4f15a0
[ "MIT" ]
null
null
null
from rest_framework import viewsets, permissions from leads.serializers import LeadSerializer from leads.models import Lead class LeadViewSet(viewsets.ModelViewSet): """Manage CRUD operations for Leads in the database""" queryset = Lead.objects.all() permission_classes = [ permissions.AllowAny ] serializer_class = LeadSerializer
25.857143
58
0.756906
39
362
6.948718
0.717949
0.066421
0
0
0
0
0
0
0
0
0
0
0.179558
362
13
59
27.846154
0.912458
0.132597
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
255edfec817ac332c0a59a30e33ffe4ca99dbfbc
207
py
Python
app/main/errors.py
BABAYAGI/newsapi
6127d51e702983f2928849bef08c5920f7d06a96
[ "MIT" ]
1
2019-10-15T08:16:17.000Z
2019-10-15T08:16:17.000Z
app/main/errors.py
BABAYAGI/newsapi
6127d51e702983f2928849bef08c5920f7d06a96
[ "MIT" ]
null
null
null
app/main/errors.py
BABAYAGI/newsapi
6127d51e702983f2928849bef08c5920f7d06a96
[ "MIT" ]
null
null
null
from flask import render_template from . import main @main.app_errorhandler(404) def fo_O_fo(error): """ Function to render the 404 error page """ return render_template('fo_O_fo.html'), 404
23
47
0.714976
32
207
4.40625
0.59375
0.198582
0.070922
0
0
0
0
0
0
0
0
0.053571
0.188406
207
9
47
23
0.785714
0.178744
0
0
0
0
0.077419
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
255f1d23b8f394dc79d9946c976e6a08c2991d2e
18,476
py
Python
Collage_generator/_insertion.py
alexliyihao/AAPI_code
81c6cc40a9efb4d4fedf6678c27aac83f5057a70
[ "MIT" ]
2
2020-11-29T17:00:52.000Z
2022-01-06T19:24:23.000Z
Collage_generator/_insertion.py
alexliyihao/AAPI_code
81c6cc40a9efb4d4fedf6678c27aac83f5057a70
[ "MIT" ]
null
null
null
Collage_generator/_insertion.py
alexliyihao/AAPI_code
81c6cc40a9efb4d4fedf6678c27aac83f5057a70
[ "MIT" ]
null
null
null
import PIL.Image as Img import numpy as np from tqdm.notebook import tqdm from PIL import ImageFilter import tables import time import gc """ all the insert/append function for collage generator _canvas_append takes the inserting operation, the rest are finding add_point logic """ class _insertion(): def _canvas_append(self, canvas: np.ndarray, add_point: np.ndarray, img: np.ndarray, mask: np.ndarray = None, mask_label: int = None, mode = "label", format = "pixel"): """ the actual working part, add a image to a canvas Args: canvas: np.ndarray, 3-channel canvas add_point: tuple of int, the topleft point of the image to be added img: np.ndarray, 3-channel, the vignette to be added mask: np.ndarray(if it's there), 1-channel/4-channels, the mask with the canvas mask_label: int/2d np.ndarray/4 channels np.ndarray, the value of this label onto the mask mode: str, "label" or "pattern", how the mask be overwritten, if "label", it will use the int mask_label if "pattern", it will copy the np.ndarray passed to mask_label format: str, "pixel" or "COCO", how the mask will be updates by new vignettes in "pixel", each individual mask will be saved on the same dimension if "COCO", each individual mask will be saved by a different color on a 3-channel mask Return: canvas: np.ndarray of 3 channels, the canvas with img added. mask: if format is "pixel" np.ndarray of 1 channel, the mask with img's label added. if format is "COCO", np.ndarray of 4-channels, the mask with img's label added. 
""" assert format in ["pixel", "COCO"] # if there's no mask (preview/background) if type(mask) != np.ndarray: # add img to canvas, if there's any overlap, skip it np.add(canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], img, where = (canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]] == 0), out = canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting = "unsafe") # return canvas return canvas #if there's a mask, from the logic of the functions below, #we are going to direcly add these value to a 0-filled canvas and mask else: if format == "pixel": # add image to canvas np.add(canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], img, out = canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting = "unsafe") # add label to mask if mode == "label": # if in label mode, we are adding this label int value to all nonzero space np.add(mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], mask_label*np.any(img, axis = 2), out = mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting = "unsafe") else: #else we are adding a pattern, copy the while pattern np.add(mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], mask_label, out = mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting = "unsafe") return canvas, mask # if we are building a COCO mode if format == "COCO": if mode == "label": # generate a new color for this object _new_color, self.existed_color = self._generate_new_color(self.existed_color) self.color_dict[str(tuple(_new_color.tolist()))] = mask_label # we have COCO format use as following, first layer will work as the pixel mask, # and the rest will following, the first layer will be removed when converted to COCO if mask.ndim == 2: # if 
the mask only have one layer, it must be the start mask # add 3 new layers as the RGB recording mask = np.stack((mask, np.zeros_like(mask),np.zeros_like(mask),np.zeros_like(mask)), axis = -1) # add image to canvas, add different label to different channel of the mask np.add(canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], img, out = canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting="unsafe") np.add(mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1],0], mask_label*np.any(img, axis = 2), out = mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1],0], casting="unsafe") np.add(mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1],1:4], np.any(img, axis = 2, keepdims = True)*_new_color, mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1],1:4], casting="unsafe") return canvas, mask else: if mask.ndim == 2: mask = np.stack((mask, np.zeros_like(mask),np.zeros_like(mask),np.zeros_like(mask)), axis = -1) np.add(canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], img, out = canvas[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting="unsafe") np.add(mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], mask_label, out = mask[add_point[0]:add_point[0]+img.shape[0], add_point[1]:add_point[1]+img.shape[1]], casting="unsafe") return canvas, mask def _init_insert(self, img: np.ndarray, canvas: np.ndarray, mask: np.ndarray, label: int, mode = "pattern", format = "pixel"): """ find a random legal position in canvas, append img to canvas and mask Args: img: np.ndarray of 3 channels, the vignette to be added canvas: np.ndarray of 3 channels, the canvas mask: 2d/4-channel np.ndarray, the mask label: the label to be added mode: str, "label" or "pattern", see mode in 
_canvas_append() format: str, see format in _canvas_append() Return: canvas: np.ndarray of 3 channels, the canvas with img added. mask: np.ndarray of 1 channel, the mask with img's label added. """ _outer_bound = (canvas.shape[0] - img.shape[0], canvas.shape[1] - img.shape[1]) # select an initial add_point _add_point = np.array((np.random.randint(low = self._scanning_constant, high = _outer_bound[0] - self._scanning_constant), np.random.randint(low = self._scanning_constant, high = _outer_bound[1] - self._scanning_constant))) # create a binary mask of the img _img_mask = np.any(img, axis = 2) # directly use the _add_point canvas, mask = self._canvas_append(canvas = canvas, add_point = _add_point, img = img, mask = mask, mask_label = label, mode = mode, format = format) return canvas, mask def _secondary_insert(self, img: np.ndarray, canvas: np.ndarray, mask: np.ndarray, label: int, patience: int, mode = "label", format = "pixel"): """ find a random non-overlapping position in canvas, append img to canvas and mask Args: img: np.ndarray of 3 channels, the vignette to be added canvas: np.ndarray of 3 channels, the canvas mask: 2d/4-channel np.ndarray, the mask label: the label to be added patience: int, the retry time for finding non-overlapping position mode: str, "label" or "pattern", see mode in _canvas_append() format: str, see format in _canvas_append() Return: canvas: np.ndarray of 3 channels, the canvas with img added, if the tries in {patience} times succssfully added the img onto canvas, otherwise the original canvas is returned mask: np.ndarray of 2d or 4 channels, the mask with img added, if the tries in {patience} times succssfully added the img onto canvas, otherwise the original mask if returned """ _outer_bound = (canvas.shape[0] - img.shape[0], canvas.shape[1] - img.shape[1]) # select an initial add_point _add_point = np.array((np.random.randint( low = self._scanning_constant, high = _outer_bound[0] - self._scanning_constant), np.random.randint( low = 
self._scanning_constant, high = _outer_bound[1] - self._scanning_constant) )) # create a binary mask of the img _img_mask = np.any(img, axis = 2) for retry in range(patience): # for each time make a small move _add_point = _add_point + np.random.randint( low = -1*self._scanning_constant, high = self._scanning_constant, size = 2) # make sure the new value is legal _add_point = np.clip(a = _add_point, a_min = (0,0), a_max = _outer_bound) # check if there's any overlap # in pixel format check the mask directly if format == "pixel": _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], _add_point[1]:_add_point[1]+_img_mask.shape[1]] # in COCO format check the first layer of mask else: _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], _add_point[1]:_add_point[1]+_img_mask.shape[1], 0] # if so if np.any(np.multiply(_check_zone,_img_mask)) == True: #retry for a new point continue # otherwise add the img to canvas and mask and stop retry else: canvas, mask = self._canvas_append(canvas = canvas, add_point = _add_point, img = img, mask = mask, mask_label = label, mode = mode, format = format) break gc.collect() return canvas, mask def _try_insert(self, img: np.ndarray, canvas: np.ndarray, mask: np.ndarray, label: int, patience: int, mode = "label", format = "pixel"): """ try to insert img into canvas and mask using a escape-overlapping algorithm if the initial point is overlapping, try to "escape" the overlapping and append at the first position successfuly escape if the initial point is not overlapping, try to find a overlapping point and append at the last non-overlapping point before this one Args: img: np.ndarray of 3 channels, the vignette to be added canvas: np.ndarray of 3 channels, the canvas mask: 2d/4-channel np.ndarray, the mask label: the label to be added patience: int, the retry time for finding non-overlapping position mode: str, "label" or "pattern", see mode in _canvas_append() format: str, see format in _canvas_append() Return: 
canvas: np.ndarray of 3 channels, the canvas with img added, if the tries in {patience} times succssfully added the img onto canvas, otherwise the original canvas is returned mask: np.ndarray of 2d or 4 channels, the mask with img added, if the tries in {patience} times succssfully added the img onto canvas, otherwise the original mask if returned """ _outer_bound = (canvas.shape[0] - img.shape[0], canvas.shape[1] - img.shape[1]) # select an initial add_point _add_point = np.array((np.random.randint(low = self._scanning_constant, high = _outer_bound[0] - self._scanning_constant), np.random.randint(low = self._scanning_constant, high = _outer_bound[1] - self._scanning_constant))) # create a binary mask of the img _img_mask = np.any(img, axis = 2) # check if there's any overlap if format == "pixel": _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], _add_point[1]:_add_point[1]+_img_mask.shape[1]] # in COCO format check the first layer of mask else: _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], _add_point[1]:_add_point[1]+_img_mask.shape[1], 0] # if we start with an overlap, we need to escape from overlap, otherwise we need to find a overlap _init_overlapped = np.any(np.multiply(_check_zone,_img_mask)) # if we are in a finding mode and need to record the last add point _last_add_point = _add_point # in the patience time for retry in range(patience): # for each time make a small move _add_point = _add_point + np.random.randint(low = -1*self._scanning_constant, high = self._scanning_constant, size = 2) # make sure the new value is legal _add_point = np.clip(a = _add_point, a_min = (0,0), a_max = _outer_bound) # check if there's any overlap if format == "pixel": _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], _add_point[1]:_add_point[1]+_img_mask.shape[1]] # in COCO format check the first layer of mask else: _check_zone = mask[_add_point[0]:_add_point[0]+_img_mask.shape[0], 
_add_point[1]:_add_point[1]+_img_mask.shape[1], 0] # check if there's overlap _overlap = np.any(np.multiply(_check_zone,_img_mask)) # if we had a overlap in "escaping" if (_overlap == True) and (_init_overlapped == True): #retry for a new point continue # if we met the first non-overlap while escaping elif (_overlap == False) and (_init_overlapped == True): #stop the finding canvas, mask = self._canvas_append(canvas = canvas, add_point = _add_point, img = img, mask = mask, mask_label = label, mode = mode, format = format) break # if we are finding but not found elif (_overlap == False) and (_init_overlapped == False): #record last add_point and retry for a new point _last_add_point = _add_point continue # or we are finding a overlap and found it, we need to use the last else: canvas, mask = self._canvas_append(canvas = canvas, add_point = _last_add_point, img = img, mask = mask, mask_label = label, mode = mode, format = format) break gc.collect() return canvas, mask
54.662722
120
0.500758
2,231
18,476
3.983864
0.098162
0.118812
0.05063
0.033753
0.726035
0.697795
0.670905
0.661566
0.658078
0.658078
0
0.02108
0.414592
18,476
337
121
54.824926
0.800666
0.29974
0
0.791045
0
0
0.011417
0
0
0
0
0
0.004975
1
0.019901
false
0
0.034826
0
0.094527
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
256c5471eacba768e9791f30d6ef0762118cc682
181
py
Python
codility/1_3.py
love-adela/algorithm
4ccd02173c96f8369962f1fd4e5166a221690fa2
[ "MIT" ]
3
2019-03-09T05:19:23.000Z
2019-04-06T09:26:36.000Z
codility/1_3.py
love-adela/algorithm
4ccd02173c96f8369962f1fd4e5166a221690fa2
[ "MIT" ]
1
2020-02-23T10:38:04.000Z
2020-02-23T10:38:04.000Z
codility/1_3.py
love-adela/algorithm
4ccd02173c96f8369962f1fd4e5166a221690fa2
[ "MIT" ]
1
2019-05-22T13:47:53.000Z
2019-05-22T13:47:53.000Z
def solution(S): rs = "" for i in S: if i != " ": rs += i else: rs += "%20" return rs S = "Mr John Smith" print(solution(S))
12.066667
23
0.381215
24
181
2.875
0.625
0.26087
0
0
0
0
0
0
0
0
0
0.020833
0.469613
181
14
24
12.928571
0.697917
0
0
0
0
0
0.093923
0
0
0
0
0
0
1
0.1
false
0
0
0
0.2
0.1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c24fdcfaa37586667c8318eb6776d1204e6b7822
6,043
py
Python
vendor/packages/nose/functional_tests/test_importer.py
jgmize/kitsune
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
[ "BSD-3-Clause" ]
2
2019-08-19T17:08:47.000Z
2019-10-05T11:37:02.000Z
vendor/packages/nose/functional_tests/test_importer.py
jgmize/kitsune
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
[ "BSD-3-Clause" ]
null
null
null
vendor/packages/nose/functional_tests/test_importer.py
jgmize/kitsune
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
[ "BSD-3-Clause" ]
1
2019-11-02T23:29:13.000Z
2019-11-02T23:29:13.000Z
import os import sys import unittest from nose.importer import Importer class TestImporter(unittest.TestCase): def setUp(self): self.dir = os.path.normpath(os.path.join(os.path.dirname(__file__), 'support')) self.imp = Importer() self._mods = sys.modules.copy() self._path = sys.path[:] sys.modules.pop('mod', None) sys.modules.pop('pak', None) sys.modules.pop('pak.mod', None) sys.modules.pop('pak.sub', None) def tearDown(self): to_del = [ m for m in sys.modules.keys() if m not in self._mods ] if to_del: for mod in to_del: del sys.modules[mod] sys.modules.update(self._mods) sys.path = self._path[:] def test_import_from_dir(self): imp = self.imp d1 = os.path.join(self.dir, 'dir1') d2 = os.path.join(self.dir, 'dir2') # simple name m1 = imp.importFromDir(d1, 'mod') m2 = imp.importFromDir(d2, 'mod') self.assertNotEqual(m1, m2) self.assertNotEqual(m1.__file__, m2.__file__) # dotted name p1 = imp.importFromDir(d1, 'pak.mod') p2 = imp.importFromDir(d2, 'pak.mod') self.assertNotEqual(p1, p2) self.assertNotEqual(p1.__file__, p2.__file__) def test_import_from_path(self): imp = self.imp jn = os.path.join d1 = jn(self.dir, 'dir1') d2 = jn(self.dir, 'dir2') # simple name m1 = imp.importFromPath(jn(d1, 'mod.py'), 'mod') m2 = imp.importFromPath(jn(d2, 'mod.py'), 'mod') self.assertNotEqual(m1, m2) self.assertNotEqual(m1.__file__, m2.__file__) # dotted name p1 = imp.importFromPath(jn(d1, 'pak', 'mod.py'), 'pak.mod') p2 = imp.importFromPath(jn(d2, 'pak', 'mod.py'), 'pak.mod') self.assertNotEqual(p1, p2) self.assertNotEqual(p1.__file__, p2.__file__) # simple name -- package sp1 = imp.importFromPath(jn(d1, 'pak'), 'pak') sp2 = imp.importFromPath(jn(d2, 'pak'), 'pak') self.assertNotEqual(sp1, sp2) assert sp1.__path__ assert sp2.__path__ self.assertNotEqual(sp1.__path__, sp2.__path__) # dotted name -- package dp1 = imp.importFromPath(jn(d1, 'pak', 'sub'), 'pak.sub') dp2 = imp.importFromPath(jn(d2, 'pak', 'sub'), 'pak.sub') self.assertNotEqual(dp1, dp2) assert dp1.__path__ assert 
dp2.__path__ self.assertNotEqual(dp1.__path__, dp2.__path__) def test_import_sets_intermediate_modules(self): imp = self.imp path = os.path.join(self.dir, 'package2', 'test_pak', 'test_sub', 'test_mod.py') mod = imp.importFromPath(path, 'test_pak.test_sub.test_mod') print mod, dir(mod) assert 'test_pak' in sys.modules, 'test_pak was not imported?' test_pak = sys.modules['test_pak'] assert hasattr(test_pak, 'test_sub'), "test_pak.test_sub was not set" def test_cached_no_reload(self): imp = self.imp d1 = os.path.join(self.dir, 'dir1') m1 = imp.importFromDir(d1, 'mod') m2 = imp.importFromDir(d1, 'mod') assert m1 is m2, "%s is not %s" % (m1, m2) def test_cached_no_reload_dotted(self): imp = self.imp d1 = os.path.join(self.dir, 'dir1') p1 = imp.importFromDir(d1, 'pak.mod') p2 = imp.importFromDir(d1, 'pak.mod') assert p1 is p2, "%s is not %s" % (p1, p2) def test_import_sets_sys_modules(self): imp = self.imp d1 = os.path.join(self.dir, 'dir1') p1 = imp.importFromDir(d1, 'pak.mod') assert sys.modules['pak.mod'] is p1, "pak.mod not in sys.modules" assert sys.modules['pak'], "pak not in sys.modules" assert sys.modules['pak'].mod is p1, \ "sys.modules['pak'].mod is not the module we loaded" def test_failed_import_raises_import_error(self): imp = self.imp def bad_import(): imp.importFromPath(self.dir, 'no.such.module') self.assertRaises(ImportError, bad_import) def test_sys_modules_same_path_no_reload(self): imp = self.imp d1 = os.path.join(self.dir, 'dir1') d2 = os.path.join(self.dir, 'dir2') sys.path.insert(0, d1) mod_sys_imported = __import__('mod') mod_nose_imported = imp.importFromDir(d1, 'mod') assert mod_nose_imported is mod_sys_imported, \ "nose reimported a module in sys.modules from the same path" mod_nose_imported2 = imp.importFromDir(d2, 'mod') assert mod_nose_imported2 != mod_sys_imported, \ "nose failed to reimport same name, different dir" def test_import_pkg_from_path_fpw(self): imp = self.imp imp.config.firstPackageWins = True jn = os.path.join d1 = 
jn(self.dir, 'dir1') d2 = jn(self.dir, 'dir2') # dotted name p1 = imp.importFromPath(jn(d1, 'pak', 'mod.py'), 'pak.mod') p2 = imp.importFromPath(jn(d2, 'pak', 'mod.py'), 'pak.mod') self.assertEqual(p1, p2) self.assertEqual(p1.__file__, p2.__file__) # simple name -- package sp1 = imp.importFromPath(jn(d1, 'pak'), 'pak') sp2 = imp.importFromPath(jn(d2, 'pak'), 'pak') self.assertEqual(sp1, sp2) assert sp1.__path__ assert sp2.__path__ self.assertEqual(sp1.__path__, sp2.__path__) # dotted name -- package dp1 = imp.importFromPath(jn(d1, 'pak', 'sub'), 'pak.sub') dp2 = imp.importFromPath(jn(d2, 'pak', 'sub'), 'pak.sub') self.assertEqual(dp1, dp2) assert dp1.__path__ assert dp2.__path__ self.assertEqual(dp1.__path__, dp2.__path__) if __name__ == '__main__': import logging logging.basicConfig(level=logging.DEBUG) unittest.main()
35.757396
78
0.580837
784
6,043
4.228316
0.133929
0.040121
0.080241
0.038009
0.561689
0.509502
0.478733
0.461237
0.415988
0.355354
0
0.029903
0.286116
6,043
168
79
35.970238
0.738526
0.027635
0
0.416667
0
0
0.117286
0.008183
0
0
0
0
0.272727
0
null
null
0
0.363636
null
null
0.007576
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
c2531eebc4b5c56768575d213a86688eb0c965b8
161
py
Python
rhg_compute_tools/__init__.py
dpa9694/rhg_compute_tools
f111c380e3672983fa62795346be631e62c12611
[ "MIT" ]
null
null
null
rhg_compute_tools/__init__.py
dpa9694/rhg_compute_tools
f111c380e3672983fa62795346be631e62c12611
[ "MIT" ]
2
2020-05-31T20:40:25.000Z
2020-07-15T16:51:55.000Z
rhg_compute_tools/__init__.py
dpa9694/rhg_compute_tools
f111c380e3672983fa62795346be631e62c12611
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Top-level package for RHG Compute Tools.""" __author__ = """Michael Delgado""" __email__ = 'mdelgado@rhg.com' __version__ = '0.2.1'
20.125
46
0.645963
21
161
4.380952
0.952381
0
0
0
0
0
0
0
0
0
0
0.028986
0.142857
161
7
47
23
0.637681
0.391304
0
0
0
0
0.391304
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c2544d4b8163352d260ea54398086333ae611bb9
271
py
Python
apps/core/models/colabore.py
bispojr/observatorio-ufj-covid19
8667fae1367b95a7dfa8558fbac3b1b0b708af8d
[ "MIT" ]
3
2020-04-02T21:59:19.000Z
2020-12-03T12:37:26.000Z
apps/core/models/colabore.py
bispojr/observatorio-ufj-covid19
8667fae1367b95a7dfa8558fbac3b1b0b708af8d
[ "MIT" ]
68
2020-03-28T22:40:08.000Z
2020-07-08T18:04:07.000Z
apps/core/models/colabore.py
bispojr/observatorio-ufj-covid19
8667fae1367b95a7dfa8558fbac3b1b0b708af8d
[ "MIT" ]
5
2020-03-28T21:35:30.000Z
2020-06-10T01:28:14.000Z
class Colabore(): def getContext(self): return self.__contextColabore(self) def __contextColabore(self): context = { "grupo": "geral", "grupo_link": "saiba_mais", "titulo": "Observatório UFJ Covid-19 - Colabore" } return context
20.846154
50
0.630996
27
271
6.111111
0.666667
0.230303
0
0
0
0
0
0
0
0
0
0.009756
0.243542
271
13
51
20.846154
0.795122
0
0
0
0
0
0.265683
0
0
0
0
0
0
1
0.2
false
0
0
0.1
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c25f9adb65d5dc5bc38b8b33443e4276d20956b1
685
py
Python
ATM/atm_function.py
nouranHnouh/FormusWorkshop-
7f69b9d2226209adcc6ecb208ac426eec7e86d2b
[ "MIT" ]
null
null
null
ATM/atm_function.py
nouranHnouh/FormusWorkshop-
7f69b9d2226209adcc6ecb208ac426eec7e86d2b
[ "MIT" ]
null
null
null
ATM/atm_function.py
nouranHnouh/FormusWorkshop-
7f69b9d2226209adcc6ecb208ac426eec7e86d2b
[ "MIT" ]
null
null
null
#this program is atm that withdraw any money amount #allowed papers: 100,50,10,5, and the rest of requests def withdraw(balance,request): if request>balance: print "can`t give you all this money" while request>0: if request>=100: request-=100 print "give 100" elif request>=50: request-=50 print "give 50" elif request>=10: request-=10 print "give 10" elif request>=5: request-=5 print "give 5" elif request<5: request-=3 print "give 2" return balance-request balance = 500 balance = withdraw(balance, 277) balance = withdraw(balance, 30) balance = withdraw(balance, 5) balance = withdraw(balance, 500)
13.7
55
0.659854
99
685
4.565657
0.393939
0.165929
0.19469
0.084071
0
0
0
0
0
0
0
0.091954
0.237956
685
49
56
13.979592
0.773946
0.151825
0
0
0
0
0.109375
0
0
0
0
0
0
0
null
null
0
0
null
null
0.24
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
c264fe174bb79ece1406b41e4cb858d0735178ff
1,140
py
Python
plugins/cuckoo/komand_cuckoo/actions/vpn_status/schema.py
lukaszlaszuk/insightconnect-plugins
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
[ "MIT" ]
46
2019-06-05T20:47:58.000Z
2022-03-29T10:18:01.000Z
plugins/cuckoo/komand_cuckoo/actions/vpn_status/schema.py
lukaszlaszuk/insightconnect-plugins
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
[ "MIT" ]
386
2019-06-07T20:20:39.000Z
2022-03-30T17:35:01.000Z
plugins/cuckoo/komand_cuckoo/actions/vpn_status/schema.py
lukaszlaszuk/insightconnect-plugins
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
[ "MIT" ]
43
2019-07-09T14:13:58.000Z
2022-03-28T12:04:46.000Z
# GENERATED BY KOMAND SDK - DO NOT EDIT import komand import json class Component: DESCRIPTION = "Returns VPN status" class Input: pass class Output: VPNS = "vpns" class VpnStatusInput(komand.Input): schema = json.loads(""" {} """) def __init__(self): super(self.__class__, self).__init__(self.schema) class VpnStatusOutput(komand.Output): schema = json.loads(""" { "type": "object", "title": "Variables", "properties": { "vpns": { "type": "array", "title": "VPN Statuses", "description": "VPN status array", "items": { "$ref": "#/definitions/vpn" }, "order": 1 } }, "required": [ "vpns" ], "definitions": { "vpn": { "type": "object", "title": "vpn", "properties": { "name": { "type": "string", "title": "Name", "order": 1 }, "status": { "type": "string", "title": "Status", "order": 2 } } } } } """) def __init__(self): super(self.__class__, self).__init__(self.schema)
16.764706
57
0.490351
102
1,140
5.245098
0.421569
0.059813
0.056075
0.059813
0.160748
0.160748
0.160748
0.160748
0.160748
0.160748
0
0.003953
0.334211
1,140
67
58
17.014925
0.700922
0.032456
0
0.290909
1
0
0.607629
0
0
0
0
0
0
1
0.036364
false
0.018182
0.036364
0
0.236364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c268a7da87d9bb0526fdc4df929e276c63494567
8,095
py
Python
HW_new_contact.py
AnastasiiaSarkisova/Home-Work-1
9021e9323a08f9dd96323b763aa0b0e689df24f8
[ "Apache-2.0" ]
null
null
null
HW_new_contact.py
AnastasiiaSarkisova/Home-Work-1
9021e9323a08f9dd96323b763aa0b0e689df24f8
[ "Apache-2.0" ]
null
null
null
HW_new_contact.py
AnastasiiaSarkisova/Home-Work-1
9021e9323a08f9dd96323b763aa0b0e689df24f8
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from selenium.webdriver.firefox.webdriver import WebDriver import unittest from group import Group from contact import Contact def is_alert_present(wd): try: wd.switch_to_alert().text return True except: return False class HW_new_contact(unittest.TestCase): def setUp(self): self.wd = WebDriver(capabilities={"marionette": False}) self.wd.implicitly_wait(60) def test_HW_new_contact(self): wd = self.wd self.open_webpage(wd) self.login(wd, username="admin", password="secret") wd.find_element_by_xpath("//form[@id='LoginForm']/input[3]").click() self.link_to_group_page(wd) self.create_a_new_group(wd, Group(name="group.Chupakabra", header="group.Chupakabra Header", footer="group.Chupakabra footer")) self.group_verification(wd) self.new_contac_page(wd) self.contact_information(wd, Contact(firstname="Abra", middlename="Kadabra", lastname="Chupakabra", nickname="Chupakabra", title="My Title", company="Home", address="557 Mayfair Walk Ave, Las Vegas NV 89173", home="7023542185", mobile="6504655622", work="7023336669", fax="1234567890", email="test@test.com", email2="test@gmail.com", email3="test.mail.ru", homepage="yahoo.com", address2="123 new address test", phone2="123 home address", notes="my notes bla bla bla")) self.submit_cotact_information(wd) wd.find_element_by_link_text("Logout").click() def submit_cotact_information(self, wd): wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click() def contact_information(self, wd, contact): wd.find_element_by_name("firstname").click() wd.find_element_by_name("firstname").clear() wd.find_element_by_name("firstname").send_keys(contact.firstname) wd.find_element_by_name("middlename").click() wd.find_element_by_name("middlename").clear() wd.find_element_by_name("middlename").send_keys(contact.middlename) wd.find_element_by_name("lastname").click() wd.find_element_by_name("lastname").clear() wd.find_element_by_name("lastname").send_keys(contact.lastname) wd.find_element_by_name("nickname").click() 
wd.find_element_by_name("nickname").clear() wd.find_element_by_name("nickname").send_keys(contact.nickname) wd.find_element_by_name("title").click() wd.find_element_by_name("title").clear() wd.find_element_by_name("title").send_keys(contact.title) wd.find_element_by_name("company").click() wd.find_element_by_name("company").clear() wd.find_element_by_name("company").send_keys(contact.company) wd.find_element_by_name("address").click() wd.find_element_by_name("address").clear() wd.find_element_by_name("address").send_keys(contact.address) wd.find_element_by_name("home").click() wd.find_element_by_name("home").clear() wd.find_element_by_name("home").send_keys(contact.home) wd.find_element_by_name("mobile").click() wd.find_element_by_name("mobile").clear() wd.find_element_by_name("mobile").send_keys(contact.mobile) wd.find_element_by_name("work").click() wd.find_element_by_name("work").clear() wd.find_element_by_name("work").send_keys(contact.work) wd.find_element_by_name("fax").click() wd.find_element_by_name("fax").clear() wd.find_element_by_name("fax").send_keys(contact.fax) wd.find_element_by_name("email").click() wd.find_element_by_name("email").clear() wd.find_element_by_name("email").send_keys(contact.email) wd.find_element_by_name("email2").click() wd.find_element_by_name("email2").clear() wd.find_element_by_name("email2").send_keys(contact.email2) wd.find_element_by_name("email2").click() wd.find_element_by_name("email2").clear() wd.find_element_by_name("email2").send_keys(contact.email2) wd.find_element_by_name("email3").click() wd.find_element_by_name("email3").clear() wd.find_element_by_name("email3").send_keys(contact.email3) wd.find_element_by_name("homepage").click() wd.find_element_by_name("homepage").clear() wd.find_element_by_name("homepage").send_keys(contact.homepage) if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[17]").is_selected(): wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[17]").click() 
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[12]").is_selected(): wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[12]").click() wd.find_element_by_name("byear").click() wd.find_element_by_name("byear").clear() wd.find_element_by_name("byear").send_keys("1986") if not wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[7]").is_selected(): wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[7]").click() if not wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[3]").is_selected(): wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[3]").click() wd.find_element_by_name("ayear").click() wd.find_element_by_name("ayear").clear() wd.find_element_by_name("ayear").send_keys("2018") if not wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[2]").is_selected(): wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[2]").click() wd.find_element_by_name("address2").click() wd.find_element_by_name("address2").clear() wd.find_element_by_name("address2").send_keys(contact.address2) wd.find_element_by_name("phone2").click() wd.find_element_by_name("phone2").clear() wd.find_element_by_name("phone2").send_keys(contact.phone2) wd.find_element_by_name("notes").click() wd.find_element_by_name("notes").clear() wd.find_element_by_name("notes").send_keys(contact.notes) def new_contac_page(self, wd): wd.find_element_by_link_text("add new").click() def group_verification(self, wd): wd.find_element_by_link_text("group page").click() def create_a_new_group(self, wd, group): wd.find_element_by_name("new").click() wd.find_element_by_name("group_name").click() wd.find_element_by_name("group_name").clear() wd.find_element_by_name("group_name").send_keys(group.name) wd.find_element_by_name("group_header").click() wd.find_element_by_name("group_header").clear() wd.find_element_by_name("group_header").send_keys(group.header) 
wd.find_element_by_name("group_footer").click() wd.find_element_by_name("group_footer").clear() wd.find_element_by_name("group_footer").send_keys(group.footer) wd.find_element_by_name("submit").click() def link_to_group_page(self, wd): wd.find_element_by_link_text("groups").click() def login(self, wd, username="admin", password="secret"): wd.find_element_by_name("user").click() wd.find_element_by_name("user").clear() wd.find_element_by_name("user").send_keys(username) wd.find_element_by_name("pass").click() wd.find_element_by_name("pass").clear() wd.find_element_by_name("pass").send_keys(password) def open_webpage(self, wd): wd.get("http://localhost/addressbook/addressbook/edit.php") def tearDown(self): self.wd.quit() if __name__ == '__main__': unittest.main()
51.891026
112
0.652625
1,104
8,095
4.444746
0.128623
0.117383
0.254331
0.293458
0.648258
0.639495
0.314449
0.225189
0.205828
0.167108
0
0.017813
0.195553
8,095
155
113
52.225806
0.735719
0.002594
0
0.042553
0
0
0.182607
0.065783
0
0
0
0
0
1
0.085106
false
0.035461
0.028369
0
0.134752
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c276131b5adb85398eba0cb67c7e33579e3497af
2,376
py
Python
src/models/SLEuth_model.py
NCBI-Codeathons/SLEuth
c74c05bbc07ce4c813ee46ab66cfb6487fdc6966
[ "MIT" ]
2
2019-11-07T22:24:49.000Z
2019-11-09T02:42:59.000Z
src/models/SLEuth_model.py
NCBI-Codeathons/STRATIFICATION-OF-SLE-PATIENT-COHORT-FOR-PRECISION-MEDICINE
c74c05bbc07ce4c813ee46ab66cfb6487fdc6966
[ "MIT" ]
null
null
null
src/models/SLEuth_model.py
NCBI-Codeathons/STRATIFICATION-OF-SLE-PATIENT-COHORT-FOR-PRECISION-MEDICINE
c74c05bbc07ce4c813ee46ab66cfb6487fdc6966
[ "MIT" ]
1
2020-06-06T18:47:21.000Z
2020-06-06T18:47:21.000Z
import sklearn
from sklearn.cluster import KMeans

from src.features.feature_selection import PCA_Variants2Gene_FeatureSelection


class SLEuth(sklearn.base.TransformerMixin, sklearn.base.ClusterMixin):
    """PCA-based variant/gene feature selection followed by KMeans clustering.

    Intended call order: ``fit_transform()`` once to fit the PCA projection on
    SNP data, ``fit_predict()`` to cluster the projected samples, and
    ``predict()`` to assign new SNP profiles to the learned clusters.
    """

    def __init__(self, cluster_num, variants_genes_path="../../data/interim/variants_top56_genes.csv",
                 variance_threshold=0.9, init='random', n_init=10, max_iter=300, tol=1e-4, random_state=40):
        """
        :param cluster_num: number of clusters (and centroids) for KMeans.
        :param variants_genes_path: path to a CSV with two columns,
            ["Gene(s)", "Variant ID"], mapping variants to genes.
        :param variance_threshold: fraction of variance to retain, e.g. 0.9
            selects the minimum number of PCA components covering 90%.
        :param init: 'random' chooses k observations at random as the
            initial centroids.
        :param n_init: number of KMeans runs with different centroid seeds.
        :param max_iter: maximum number of KMeans iterations per run.
        :param tol: KMeans convergence tolerance.
        :param random_state: seed for reproducible clustering.
        """
        # Clustering backend, fully configured up front.
        self.km = KMeans(n_clusters=cluster_num,
                         init=init,
                         n_init=n_init,
                         max_iter=max_iter,
                         tol=tol,
                         random_state=random_state)
        # PCA-based feature selector over the variant->gene mapping.
        self.pca_variants_fs = PCA_Variants2Gene_FeatureSelection(
            variants_genes_path, variance_threshold)

    def fit_transform(self, X, y=None, **fit_params):
        """Fit the feature selector on X (rows = samples, columns = SNP sites)
        and return the PCA projection."""
        return self.pca_variants_fs.fit_transform(X, y, **fit_params)

    def transform(self, X, y=None):
        """Project X using the already-fitted feature selector."""
        return self.pca_variants_fs.transform(X, y)

    def fit_predict(self, X, y=None):
        """Project X, fit KMeans on the projection, and return cluster labels."""
        projected = self.transform(X)
        return self.km.fit_predict(projected, y)

    def predict(self, X):
        """Project X and assign each sample to its nearest learned cluster."""
        projected = self.transform(X)
        return self.km.predict(projected)
44.830189
147
0.676347
332
2,376
4.674699
0.388554
0.006443
0.032861
0.032861
0.103093
0.048969
0.048969
0.048969
0
0
0
0.010585
0.244529
2,376
53
148
44.830189
0.854039
0.431397
0
0.090909
0
0
0.041631
0.036534
0
0
0
0
0
1
0.227273
false
0
0.136364
0.045455
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
c28e4edca7ae401126ff0a6fbb8cb56b375f7be5
195
py
Python
Regular Expression/Introduction/Matching Whitespace % Non-whitespace Characters/matching_whitespace_and_non-whitespace_characters.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
2
2020-05-28T07:15:00.000Z
2020-07-21T08:34:06.000Z
Regular Expression/Introduction/Matching Whitespace % Non-whitespace Characters/matching_whitespace_and_non-whitespace_characters.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
null
null
null
Regular Expression/Introduction/Matching Whitespace % Non-whitespace Characters/matching_whitespace_and_non-whitespace_characters.py
brianchiang-tw/HackerRank
02a30a0033b881206fa15b8d6b4ef99b2dc420c8
[ "MIT" ]
null
null
null
# Method_#1 #Regex_Pattern = r"\S\S\s\S\S\s\S\S" # Do not delete 'r'. # Method_#2 Regex_Pattern = r"(\S\S\s){2}(\S\S){1}" import re print(str(bool(re.search(Regex_Pattern, input()))).lower())
19.5
59
0.625641
40
195
2.925
0.45
0.17094
0.179487
0.17094
0.316239
0.316239
0.068376
0
0
0
0
0.023121
0.112821
195
10
59
19.5
0.653179
0.369231
0
0
0
0
0.172414
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
c28fad06e8a818ac6ddee1694bd9cf847cd0da24
205
py
Python
Semester 5 (PIP)/assignment1/prog4.py
MartyMiniac/ITER-Assignment
a7b355f40cc52a337ad90bb8328e54c4a9534530
[ "MIT" ]
14
2020-11-11T08:48:58.000Z
2022-02-26T03:59:05.000Z
Semester 5 (PIP)/assignment1/prog4.py
SKSTCODE42/ITER-Assignment
a7b355f40cc52a337ad90bb8328e54c4a9534530
[ "MIT" ]
4
2020-11-12T13:31:14.000Z
2021-06-21T05:41:34.000Z
Semester 5 (PIP)/assignment1/prog4.py
SKSTCODE42/ITER-Assignment
a7b355f40cc52a337ad90bb8328e54c4a9534530
[ "MIT" ]
10
2020-11-07T15:09:20.000Z
2022-02-26T03:56:50.000Z
regno='1941012661' year=2019 # print('My Regd. No is %s and I have taken admission in B. Tech. In %d.' %(regno, year)) print('My Regd. No is', regno,'and I have taken admission in B. Tech. In', year,'.' )
41
89
0.658537
38
205
3.552632
0.5
0.103704
0.162963
0.192593
0.681481
0.459259
0.459259
0.459259
0.459259
0
0
0.082353
0.170732
205
5
90
41
0.711765
0.42439
0
0
0
0
0.564103
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c29405121608c5b0e9800d088104121ec6141017
7,242
py
Python
src/python/weblyzard_api/client/jesaja_ng.py
PhilippKuntschik/weblyzard_api
415df7d2c3e625e96636ad0ab91f3ba669db64ea
[ "Apache-2.0" ]
null
null
null
src/python/weblyzard_api/client/jesaja_ng.py
PhilippKuntschik/weblyzard_api
415df7d2c3e625e96636ad0ab91f3ba669db64ea
[ "Apache-2.0" ]
null
null
null
src/python/weblyzard_api/client/jesaja_ng.py
PhilippKuntschik/weblyzard_api
415df7d2c3e625e96636ad0ab91f3ba669db64ea
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
REST client for the weblyzard Jesaja keyword service.

.. codeauthor: Albert Weichselbraun <albert.weichselbraun@htwchur.ch>
.. codeauthor:: Heinz-Peter Lang <lang@weblyzard.com>
'''
from __future__ import print_function
from __future__ import unicode_literals

from eWRT.ws.rest import MultiRESTClient

from weblyzard_api.client import (
    WEBLYZARD_API_URL, WEBLYZARD_API_USER, WEBLYZARD_API_PASS)


class JesajaNg(MultiRESTClient):
    '''
    Provides access to the Jesaja keyword service which extracts
    associations (i.e. keywords) from text documents.

    All methods delegate to ``self.request`` (inherited from
    ``MultiRESTClient``) with a service-specific URL path.
    '''
    # Path prefix of the Jesaja REST endpoint.
    URL_PATH = 'rest'

    def __init__(self, url=WEBLYZARD_API_URL, usr=WEBLYZARD_API_USER,
                 pwd=WEBLYZARD_API_PASS, default_timeout=None,
                 use_random_server=True):
        '''
        :param url: URL of the jeremia web service
        :param usr: optional user name
        :param pwd: optional password
        :param default_timeout: optional request timeout, forwarded to
            :class:`MultiRESTClient`
        :param use_random_server: whether to pick a random server from the
            configured service URLs
        '''
        MultiRESTClient.__init__(self, service_urls=url, user=usr, password=pwd,
                                 default_timeout=default_timeout,
                                 use_random_server=use_random_server)

    def set_keyword_profile(self, profile_name, keyword_calculation_profile):
        ''' Add a keyword profile to the server

        :param profile_name: the name of the keyword profile
        :param keyword_calculation_profile: the full keyword calculation \
            profile (see below).

        .. note:: Example keyword calculation profile

            ::

                {
                    'valid_pos_tags'                 : ['NN', 'P', 'ADJ'],
                    'required_pos_tags'              : [],
                    'corpus_name'                    : reference_corpus_name,
                    'min_phrase_significance'        : 2.0,
                    'num_keywords'                   : 5,
                    'skip_underrepresented_keywords' : True,
                    'keyword_algorithm'              : 'com.weblyzard.backend.jesaja.algorithm.keywords.YatesKeywordSignificanceAlgorithm',
                    'min_token_count'                : 5,
                    'min_ngram_length'               : 1,
                    'max_ngram_length'               : 3,
                    'stoplists'                      : [],
                    'groundAnnotations'              : False,
                }

        .. note:: ``Available keyword_algorithms``

            * ``com.weblyzard.backend.jesaja.algorithm.keywords.YatesKeywordSignificanceAlgorithm``
            * ``com.weblyzard.backend.jesaja.algorithm.keywords.LogLikelihoodKeywordSignificanceAlgorithm``
        '''
        return self.request('set_keyword_profile/{}'.format(profile_name),
                            keyword_calculation_profile)

    def add_csv(self, matview_id, keyword_count_map):
        '''
        Adds reference documents for Jesaja.

        :param matview_id: matview_id for which the documents are relevant
        :param keyword_count_map: a map of keywords and the corresponding
            counts {'the': 222, 'a': 200, ...}
        :raises ValueError: if no matview_id is given
        '''
        if matview_id is None:
            raise ValueError('Please specify the matview for which the documents are designated.')
        return self.request('add_csv/{}'.format(matview_id), keyword_count_map)

    def add_documents(self, matview_id, xml_documents):
        '''
        Adds reference documents for Jesaja.

        :param matview_id: matview_id for which the documents are relevant
        :param xml_documents: a list of weblyzard_xml documents
            [ xml_content, ... ]
        :raises ValueError: if no matview_id is given
        '''
        if matview_id is None:
            raise ValueError('Please specify the matview for which the documents are designated.')
        return self.request('add_documents/{}'.format(matview_id), xml_documents)

    def get_keyword_annotations(self, matview_id, xml_documents):
        '''
        :param matview_id: the matview id for which the keywords are computed
        :param xml_documents: a list of weblyzard_xml documents
            [ xml_content, ... ]
        :raises Exception: if the matview is unknown to the service
        '''
        # Fail fast with a clear message rather than a server-side error.
        if not self.has_matview(matview_id):
            raise Exception(
                'Cannot compute keywords - unknown matview {}'.format(matview_id))
        return self.request('get_nek_annotations/{}'.format(matview_id), xml_documents)

    def get_keywords(self, matview_id, xml_documents):
        '''
        :param matview_id: the matview id for which the keywords are computed
        :param xml_documents: a list of weblyzard_xml documents
            [ xml_content, ... ]
        :raises Exception: if the matview is unknown to the service
        '''
        if not self.has_matview(matview_id):
            raise Exception(
                'Cannot compute keywords - unknown matview {}'.format(matview_id))
        return self.request('get_keywords/{}'.format(matview_id), xml_documents)

    def has_matview(self, matview_id):
        # True when the server lists a profile for this matview.
        return matview_id in self.list_matviews()

    def has_corpus(self, matview_id):
        # True when at least one completed shard exists for this matview.
        available_completed_shards = self.request(
            'list_shards/complete/{}'.format(matview_id))
        return len(available_completed_shards[matview_id]) > 0

    def remove_matview_profile(self, matview_id):
        # Best-effort removal: print a notice and return None when the
        # profile does not exist, rather than raising.
        if not self.has_matview(matview_id):
            print('No profile {} found'.format(matview_id))
            return
        # NOTE(review): matview_id fills both path segments of
        # 'remove_profile/{}/{}' — presumably profile name == matview id
        # here; confirm against the service API.
        return self.request('remove_profile/{}/{}'.format(matview_id,
                                                          matview_id),
                            return_plain=True)

    def get_corpus_size(self, matview_id):
        # Sum the word counts of all completed shards for this matview.
        available_completed_shards = self.request(
            'list_shards/complete/{}'.format(matview_id))
        total = 0
        for shard in available_completed_shards[matview_id]:
            total = total + shard['wordCount']
        return total

    def list_profiles(self):
        # All keyword profiles known to the server.
        return self.request('list_profiles')

    def list_matviews(self):
        # All matview profiles known to the server.
        return self.request('list_matview_profiles')

    def get_cache_stats(self):
        # Raw (plain-text) cache statistics.
        return self.request('get_cache_stats', return_plain=True)

    def get_cached_corpora(self):
        return self.request('get_cached_corpora')

    def set_stoplist(self, name, stoplist):
        '''
        :param name: name of the stopword list
        :param stoplist: a list of stopwords for the keyword computation
        '''
        return self.request('set_stoplist/{}'.format(name), stoplist)

    def set_matview_profile(self, matview_id, profile_name):
        '''
        Determines which profile to use for the given matview
        '''
        return self.request('set_matview_profile/{}/{}'.format(matview_id,
                                                               profile_name))

    def list_stoplists(self):
        '''
        :returns: a list of all available stopword lists.
        '''
        return self.request('list_stoplists')

    def rotate_shard(self, matview_id=None):
        '''
        :param matview_id: an optional matview_id of the shard to be rotated

        .. note:: All shards are automatically rotated every 24 hours.
            Call this method to speed up the availability of a shard.
        '''
        # Without a matview_id, the server rotates all shards.
        if not matview_id:
            return self.request('rotate_shard')
        else:
            return self.request('rotate_shard/{}'.format(matview_id))
38.727273
140
0.609362
788
7,242
5.352792
0.244924
0.089616
0.060455
0.029872
0.44808
0.336652
0.319583
0.261735
0.261735
0.261735
0
0.003358
0.300884
7,242
186
141
38.935484
0.829745
0.375449
0
0.208333
0
0
0.139529
0.034439
0
0
0
0
0
1
0.25
false
0.041667
0.055556
0.069444
0.583333
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
c29433ef9d2c9ff9b5cb2a9ea24e62b20f20373a
395
py
Python
app/config/__init__.py
mstroud/pykj
2931892b19a697b20c9522dd017e135a1ad396b8
[ "MIT" ]
null
null
null
app/config/__init__.py
mstroud/pykj
2931892b19a697b20c9522dd017e135a1ad396b8
[ "MIT" ]
null
null
null
app/config/__init__.py
mstroud/pykj
2931892b19a697b20c9522dd017e135a1ad396b8
[ "MIT" ]
null
null
null
class Config(object): DEBUG = True DEVELOPMENT = True SECRET_KEY = 'do-i-really-need-this' SQLALCHEMY_DATABASE_URI = 'sqlite:///testing.db' SQLALCHEMY_TRACK_MODIFICATIONS = False KARAOKE_MEDIA_ROOT = "/path/to/app/static/media" class ProductionConfig(Config): DEVELOPMENT = False DEBUG = False SQLALCHEMY_DATABASE_URI = 'sqlite:///production.db'
30.384615
56
0.691139
45
395
5.866667
0.688889
0.136364
0.159091
0.204545
0
0
0
0
0
0
0
0
0.202532
395
12
57
32.916667
0.838095
0
0
0
0
0
0.232376
0.180157
0
0
0
0
0
1
0
false
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
c298ed848bd524b876312d8bab6fc24cd9b74131
28,847
py
Python
python-obj-system.py
AlexPl292/python-obj-system
7d0e743e4785989f54094ea0bac05e3681c308c2
[ "MIT" ]
54
2021-12-06T10:44:17.000Z
2022-02-16T23:35:16.000Z
python-obj-system.py
AlexPl292/python-obj-system
7d0e743e4785989f54094ea0bac05e3681c308c2
[ "MIT" ]
1
2022-01-22T07:09:16.000Z
2022-01-22T07:09:16.000Z
python-obj-system.py
AlexPl292/python-obj-system
7d0e743e4785989f54094ea0bac05e3681c308c2
[ "MIT" ]
4
2021-12-22T08:07:56.000Z
2022-03-10T01:51:40.000Z
#!/usr/bin/env python3 from mdpyformat import * import pprintex header_md("""Python object primer for Python3 / meta classes""" ) header_md("""Introduction""", nesting = 2) print_md(""" Python is good at creating the illusion of being a simple programming language. Sometimes this illusion fails, like when you have to deal with the import/module system [my attempts to get it](https://github.com/MoserMichael/pythonimportplayground). Another area of complexity is the object system, last week I tried to understand [python enums](https://docs.python.org/3/library/enum.html), it turns that they are built on top of [meta classes](https://github.com/python/cpython/blob/2c56c97f015a7ea81719615ddcf3c745fba5b4f3/Lib/enum.py#L511), So now I have come to realize, that I really don't know much about python and its object system. The purpose of this text is to figure out, how the python object system ticks. """) header_md("""The Python object system""", nesting=2) header_md("""How objects are represented""", nesting=3) print_md(""" Lets look at a simple python class Foo with a single base class Base, and see how objects are created and represented in memory """) eval_and_quote(""" # The base class. All Python3 classes have the base class of type object. # The long form is therefore # class Base(object): # However Pylint will tell you, that this long form is redundant class Base: # Class variables are shared between all instances of the class Base, and declared like this: base_class_var = "Base" # The object constructor/init method, Note the first 'self' argument, which refers to the object instance. def __init__(self): print("calling Base.__init__") # Object variables are specific to a given instance of Base # Each object has a builtin hash member: __dict__ this one lists all object members (including those added by the base class __init__ method) self.obj_var_base = 10 # An object method - needs to access the object instance, which is passed as first 'self' argument. 
def show_base(self): print_md("obj_var_base: ", self.obj_var_base) # A class method/static method is called without an object instance. @staticmethod def make_base(): return Base() # class Foo with a base class Base class Foo(Base): # Class variables are shared between all instances of the class Foo, and declared like this: class_var = 42 class_var2 = 43 # The object constructor/init method, Note the first 'self' argument, which is the object instance. def __init__(self): # When not calling the base class __init__ method: the base class object variables are not added to the object !!! # The base class __init__ adds the 'obj_var_base' member to the __dict__ member of this object instance. # By convention: you first init the base classes, before initialising the derived class. super().__init__() print("calling Foo.__init__") # Object variables are specific to a given instance of Foo # Each object has a builtin hash member: __dict__ this one lists all object members (including those added by the base class __init__ method) # Define object variable: obj_var_a self.obj_var_a=42 # Define object variable: obj_var_b self.obj_var_b="name" # An object method - needs to access the object instance, which is passed as first 'self' argument. def show_derived(self): print_md("obj_var_a:", self.obj_var_a, "obj_var_b:", self.obj_var_b) # A class method/static method is called without an object instance. @staticmethod def make_foo(): return Foo() # Make a new object instance of type Foo class. foo_obj=Foo() """) print_md("The memory address of object foo_obj is returned by the [id built-in](https://docs.python.org/3/library/functions.html#id)") eval_and_quote('print("id(foo_obj) : ", id(foo_obj))') print_md("If two variables have the same object id value, then they both refer to the very same object/instance!") print_md(""" Each user defined object has a __dict__ attribute, this is a dictionary that lists all the object instance variables. 
This also includes instance members that were added by the __init__ method of the base class !! """) eval_and_quote("""print("foo_obj.__dict__ : ", foo_obj.__dict__)""") print_md(""" So you see that the following is exactly the same thing: """) eval_and_quote("""assert id(foo_obj.obj_var_a) == id( foo_obj.__dict__['obj_var_a'] ) """) print_md(""" Wait, but where does the __dict__ attribute come from? The [built-in getattr](https://docs.python.org/3/library/functions.html#getattr) function can return this built-in __dict__ attribute! Interesting: the python notation object.member_name can mean different things: 1) for built-in attributes it means a call to getattr 2) for object instances (assigned in the __init__ method of the class) it means a call to retrieve the __dict__ attribute, and then a lookup of the variable name in that dictionary. """) print_md( """foo_obj.__dict__ and getattr(foo_obj,'__dict__',None) is the same thing! """) eval_and_quote("""assert id(foo_obj.__dict__) == id( getattr(foo_obj,'__dict__',None) )""") print_md(""" The getattr builtin function has a good part, its return value can be checked for None. This can be used, in order to check if the argument is an object with a __dict__ attribute. 
""") eval_and_quote("""base_obj = object()""") print_md("An object of built-in type ", type(base_obj), " doesn't have a __dict__ member") eval_and_quote("""assert getattr(base_obj, '__dict__', None) is None""") eval_and_quote("""int_obj = 42""") print_md("An object of built-in type ", type(int_obj), " doesn't have a __dict__ member") eval_and_quote("""assert getattr(int_obj, '__dict__', None) is None""") print_md(""" The [dir builtin](https://docs.python.org/3/library/functions.html#dir) function does different things, depending on the argument, for regular objects it returns a "list that contains the object’s attributes’ names, the names of its class’s attributes, and recursively of the attributes of its class’s base classes.", all this is sorted alphabetically. """) eval_and_quote("""print("dir(foo_obj) : ", dir(foo_obj))""") # doesn't have __slots__, how odd. #print_md("foo_obj.__slots__ : ", foo_obj.__slots__) header_md("""How classes are represented""", nesting=3) print_md("""The built-in function [type](https://docs.python.org/3/library/functions.html#type), is returning the class of an object, when applied to a variable (to be more exact: type is a built-in class, and not a built-in function, more on that later)""") eval_and_quote(""" # Make a new object instance of type Foo class. foo_obj=Foo() print("class of object foo_obj - type(foo_obj): ", type(foo_obj)) # That's the same as showing the __class__ member of the variable (in Python3) print("foo_obj.__class__ :", foo_obj.__class__) """) print_md(""" The class is an object, it's purpose is to hold the static data that is shared between all object instances. Each object has a built-in __class__ attribute, that refers to this class object. Note that the name of the class includes the module name, __main__ if the class is defined in the file given as argument to the python interpreter. 
Also note that the type built-in of type(foo_obj) is really the same as: str(foo_obj.__class__) (for Python3) """) print_md(""" Again, the built in attribute __class__ can also be accessed with the getattr built-in function. """) eval_and_quote( """ print("foo_obj.__class__ and getattr(foo_obj,'__class__',None) is the same thing!") assert id(foo_obj.__class__) == id( getattr(foo_obj,'__class__',None) ) """) print_md("""The __name__ and __qualname__ built-in attributes return the name of the class, without the module name """) eval_and_quote( """ print("foo_boj.__class__.__name__ : ", foo_obj.__class__.__name__) print("foo_boj.__class__.__qualname__ : ", foo_obj.__class__.__qualname__)""" ) print_md(""" To get the immediate base class list as declared in that particular class. """) eval_and_quote( """print("foo_obj.__class__.__bases__ :", foo_obj.__class__.__bases__)""") print_md(""" The __mro__ member is a list of types that stands for 'method resoultion order', when searching for an instance method, this list is searched in order to resolve the method name. The Python runtime creates this lists by enumerating all of its base classes recursively, in depth first traversal order. 
For each class it follows the base classes, from the left ot the right This list is used to resolve a member function 'member_function' of an object, when you call it via: obj_ref.member_function() """) eval_and_quote( """print("foo_obj.__class__.__mro__ :", foo_obj.__class__.__mro__) """ ) print_md("Computing the method resolution order by hand") eval_and_quote(""" # function to a class hierarchy, in depth first search order (like what you get in MRO - method resolution order) def show_type_hierarchy(type_class): def show_type_hierarchy_imp(type_class, nesting): if len(type_class.__bases__) == 0: return prefix = "\t" * nesting print( prefix + "type:", type_class.__name__ , "base types:", ",".join( map( lambda ty : ty.__name__, type_class.__bases__) ) ) #print( prefix + "str(", type_class.__name__ , ").__dict__ : ", type_class.__dict__ ) for base in type_class.__bases__: show_type_hierarchy_imp(base, nesting+1) if not inspect.isclass(type_class): print("object ", str(type_class), " is not class") return print("show type hierarchy of class:") show_type_hierarchy_imp(type_class, 0) class LevelOneFirst: pass class LevelOneSecond: pass class LevelOneThird: pass class LevelTwoFirst(LevelOneFirst, LevelOneSecond): pass class LevelThree(LevelTwoFirst,LevelOneThird): pass show_type_hierarchy(LevelThree) print("LevelThree.__mro__:", LevelThree.__mro__) """) eval_and_quote(""" print("*** mro in detail:") for cls in foo_obj.__class__.__mro__: print_md("\tclass-in-mro: ", str(cls), "id:", id(cls), "cls.__dict__: ", cls.__dict__) print("*** eof mro in detail") """) print_md(""" The class object has a __dict__ too - here you will see all the class variables (for Foo these are class_var and class_var2) and class methods (defined with @staticmethod), but also the object methods (with the self parameter) """) eval_and_quote( """print("foo_obj.__class__.__dict__ : ", foo_obj.__class__.__dict__)""" ) # doen't have slots, how odd. 
#print_md("foo_obj.__class__.__slots__ : ", foo_obj.__class__.__slots__) print_md(""" Again, the [dir](https://docs.python.org/3/library/functions.html#dir) built-in function does different things, depending on the argument type for a class object it returns a "list that contains the names of its attributes, and recursively of the attributes of its bases" That means it displays both the names of static variables, and the names of the static functions, for the class and it's base classes. Note that the names are sorted. """) eval_and_quote("""print("dir(foo_obj.__class__) : ", dir( foo_obj.__class__ ) )""") print_md(""" The class object derives from built-in class type, you can check if an object is a class by checking if it is an instance of class 'type'! """) # check that foo_obj.__class__ is a type - it is derived from built-in class type eval_and_quote(""" assert isinstance(foo_obj.__class__, type) # same thing as assert inspect.isclass(foo_obj.__class__) # an object is not derived from class type. assert not isinstance(foo_obj, type) # same thng as assert not inspect.isclass(foo_obj) """) print_md( """ Now there is much more: there is the inspect module that returns it all, a kind of rosetta stone of the python object model. inspect.getmembers returns everything! You can see the source of inspect.getmembers [here](https://github.com/python/cpython/blob/3.10/Lib/inspect.py) """) eval_and_quote("""print("inspect.getmembers(foo_obj): ", inspect.getmembers(foo_obj))""") print_md(""" Attention! 
the type of the object is the class of the object (remember: the classes is an object, where the __dict__ member holds the class variables) """) eval_and_quote(""" print("type(foo_obj) : ", type(foo_obj)) # same thing in python3 print("str(foo_obj.__class__) : ", str(foo_obj.__class__) )""") print_md(""" Let's look at both the type and identity of all these objects: """) eval_and_quote("""print("id(foo_obj) : ", id(foo_obj), " str(foo_obj) : ", str(foo_obj))""") print_md(""" The following expressions refer to the same thing: the type of the object foo_obj, also known as the class of foo_obj """) eval_and_quote(""" print("type(foo_obj) :", type(foo_obj), " id(type(foo_obj)) :", id(type(foo_obj)), " type(foo_obj).__name__ : ", type(foo_obj).__name__ ) print("str(foo_obj.__class__) :", str(foo_obj.__class__), " id(foo_obj.__class__) :", id(foo_obj.__class__), "foo_obj.__class__.__name__ : ", foo_obj.__class__.__name__) print("str(Foo) :", str(Foo), " id(Foo) :", id( Foo ), "Foo.__name__ :", Foo.__name__) assert id(Foo) == id(type(foo_obj)) assert id(type(foo_obj)) == id(foo_obj.__class__) """) print_md(""" The Foo class members """) eval_and_quote(""" print("foo_obj.__class__.__dict__ :", foo_obj.__class__.__dict__) print("Foo.__dict__ :", Foo.__dict__) # everything accessible form the class print("dir(foo_obj.__class__) :", dir( foo_obj.__class__)) """) print_md(""" The following expressions refer to the same thing: the meta-type of the foo_obj. 
""") eval_and_quote(""" print("type(foo_obj.__class__.__class__):", type(foo_obj.__class__.__class__), " id( foo_obj.__class__.__class__ ) :" , id( foo_obj.__class__.__class__ ) , "foo_obj.__class__.__class__.__name__ : ", foo_obj.__class__.__class__.__name__ ) print("type(Foo) :", type(Foo), " id(type(Foo)) : ", id( type( Foo ) ), " Foo.__class__.__name__ :", Foo.__class__.__name__) print("type(Foo.__class__) :", type(Foo.__class__), " id(type(Foo.__class__)) : ", id( type( Foo.__class__ ) ), " Foo.__class__.__name__ :", Foo.__class__.__name__) print("type(Foo.__class__.__class__) :", type(Foo.__class__.__class__), " id(type(Foo.__class__.__class__)) :", id( type( Foo.__class__.__class__ ) ) ) assert type(Foo) == type(Foo.__class__) assert type(Foo.__class__) == type(Foo.__class__.__class__) """) print_md(""" The type of the type is the metaclass - the metaclass constructs the Class object! (the class of an object is also an object!) """) eval_and_quote(""" print("type( type( foo_obj ) ) :", type( type( foo_obj ) ) ) print("str( foo_obj.__class__.__class__ ) :", str(foo_obj.__class__.__class__) ) """) # result: eval_and_quote(""" print(" metaclass members: foo_obj.__class__.__class__.__dict__ : ", foo_obj.__class__.__class__.__dict__) print(" everything accessible form metaclass: dir( foo_obj.__class__.__class__ ) : ", dir( foo_obj.__class__.__class__) ) """) print_md(""" Wow, any class can tell all of its derived classes! I wonder how that works... """) eval_and_quote("""print("Base.__subclasses__() : ", Base.__subclasses__())""") header_md("""Object creation""", nesting=3) print_md(""" Objects recap: The object instance holds the __dict__ attribute of the object instance, it's value is a dictionary that holds the object instance members. The class is an object that is shared between all object instances, and it holds the static data (class variables, class methods) What happens upon: foo = Foo() ? 
Take the type of Foo - the metaclass of Foo, the metaclass both knows how to create an instance of the class Foo, and the object instances. A metaclass is derived from built-in class 'type', The 'type' constructor with three argument creates a new class object. [see reference](https://docs.python.org/3/library/functions.html#type) class_obj = Foo The metaclass is used as a 'callable' - it has a __call__ method, and can therefore be called as if it were a function (see more about callables in the course on [decorators](https://github.com/MoserMichael/python-obj-system/blob/master/decorator.md)) Now this __call__ method creates and initialises the object instance. The implementation of __call__ now does two steps: - Class creation is done in the [__new__](https://docs.python.org/3/reference/datamodel.html#object.__new__) method of the metaclass. The __new__ method creates the Foo class, it is called exactly once, upon class declaration (you will see this shortly, in the section on custom meta classes) - It uses the Foo class and calls its to create and initialise the object (call the __new__ method of the Foo class, in order to create an instance of Foo, then calls the __init__ instance method of the Foo class, on order to initialise it). This all done by the __call__ method of the metaclass. instance_of_foo = meta_class_obj.__call__() (actually that was a bit of a simplification... 
) """) eval_and_quote(""" # same as: foo_obj = Foo() foo_obj = Foo.__call__() print("foo_obj : ", foo_obj) print("foo_obj.__dict__ : ", foo_obj.__dict__) """) print_md("This is the same as:") eval_and_quote(""" class_obj = Foo instance_of_foo = class_obj() print("instance_of_foo : ", instance_of_foo) print("instance_of_foo.__dict__ : ", instance_of_foo.__dict__) """) header_md("""Custom metaclasses""", nesting = 2) header_md("""Metaclasses for implementing singleton objects""", nesting = 3) print_md(""" An object can define a different way of creating itself, it can define a custom metaclass, which will do exactly the same object creation steps described in the last section. Let's examine a custom metaclass for creating singleton objects. """) eval_and_quote(""" # metaclass are always derived from the type class. # the type class has functions to create class objects # the type class has also a default implementation of the __call__ method, for creating object instances. class Singleton_metaclass(type): # invoked to create the class object instance (for holding static data) # this function is called exactly once, in order to create the class instance! def __new__(meta_class, name, bases, cls_dict, **kwargs): print("Singleton_metaclass: __new__ meta_class:", meta_class, "name:", name, "bases:", bases, "cls_dict:", cls_dict, f'kwargs: {kwargs}') class_instance = super().__new__(meta_class, name, bases, cls_dict) print("Singleton_metaclass: __new__ return value: ", class_instance, "type(class_instance):", type(class_instance)) # the class class variable __singleton_instance__ will hold a reference to the one an only object instance of this class. class_instance.__singleton_instance__ = None return class_instance def __call__(cls, *args, **kwargs): # we get here to create an object instance. the class object has already been created. print("Singleton_metaclass: __call__ args:", *args, f'kwargs: {kwargs}') # check if the singleton has already been created. 
if cls.__singleton_instance__ is None: # create the one an only instance object. instance = cls.__new__(cls) # initialise the one and only instance object instance.__init__(*args, **kwargs) # store the singleton instance object in the class variable __singleton_instance__ cls.__singleton_instance__ = instance # return the singleton instance return cls.__singleton_instance__ import math # the metaclass specifier tells python to use the Singleton_metaclass, for the creation of an instance of type SquareRootOfTwo class SquareRootOfTwo(metaclass=Singleton_metaclass): # the __init__ method is called exactly once, when the first instance of the singleton is created. # the square root of two is computed exactly once. def __init__(self): self.value = math.sqrt(2) print("SquareRootOfTwo.__init__ self:", self) print("creating the objects instances...") sqrt_root_two_a = SquareRootOfTwo() print("sqrt_two_a id(sqrt_root_two_a):", id(sqrt_root_two_a), "type(sqrt_root_two_a):", type(sqrt_root_two_a), "sqrt_root_two_a.value:", sqrt_root_two_a.value) sqrt_root_two_b = SquareRootOfTwo() print("sqrt_two_b id(sqrt_root_two_b)", id(sqrt_root_two_b), "type(sqrt_root_two_b):", type(sqrt_root_two_b), "sqrt_root_two_b.value:", sqrt_root_two_b.value) # all singleton objects of the same class are referring to the same object assert id(sqrt_root_two_a) == id(sqrt_root_two_b) """) header_md("""Passing arguments to metaclasses""", nesting = 3) print_md("""" Lets extend the previous singleton creating metaclass, so that it can pass parameters to the __init__ method of the object, these parameters are defined together with the metaclass specifier. """) eval_and_quote(""" # metaclass are always derived from the type class. # The type class has functions to create class objects # The type class has also a default implementation of the __call__ method, for creating object instances. 
class Singleton_metaclass_with_args(type): # invoked to create the class object instance (for holding static data) # this function is called exactly once, in order to create the class instance! def __new__(meta_class, name, bases, cls_dict, **kwargs): print("Singleton_metaclass_with_args: __new__ meta_class:", meta_class, "name:", name, "bases:", bases, "cls_dict:", cls_dict, f'kwargs: {kwargs}') class_instance = super().__new__(meta_class, name, bases, cls_dict) print("Singleton_metaclass_with_args: __new__ return value: ", class_instance, "type(class_instance):", type(class_instance)) # the class class variable __singleton_instance__ will hold a reference to the one an only object instance of this class. class_instance.__singleton_instance__ = None # the keywords that have been specified, are passed into the class creation method __new__. # save them as a class variable, so as to pass them to the object constructor! class_instance.__kwargs__ = kwargs return class_instance def __call__(cls, *args, **kwargs): # we get here to create an object instance. the class object has already been created. print("Singleton_metaclass_with_args: __call__ args:", *args, f'kwargs: {kwargs}') # check if the singleton has already been created. if cls.__singleton_instance__ is None: # create the one an only instance object. instance = cls.__new__(cls) # initialise the one and only instance object # pass it the keyword parameters specified for the class! 
instance.__init__(*args, **cls.__kwargs__) # store the singleton instance object in the class variable __singleton_instance__ cls.__singleton_instance__ = instance # return the singleton instance return cls.__singleton_instance__ import math class AnySquareRoot: def __init__(self, arg_val): self.value = math.sqrt(arg_val) # the metaclass specifier tells python to use the Singleton_metaclass, for the creation of an instance of type SquareRootOfTwo class SquareRootOfTwo(AnySquareRoot, metaclass=Singleton_metaclass_with_args, arg_num=2): # the init method is called with arg_num specified in the class definition (value of 2) def __init__(self, arg_num): super().__init__(arg_num) class SquareRootOfThree(AnySquareRoot, metaclass=Singleton_metaclass_with_args, arg_num=3): # the init method is called with arg_num specified in the class definition (value of 3) def __init__(self, arg_num): super().__init__(arg_num) print("creating the objects instances...") sqrt_root_two_a = SquareRootOfTwo() print("sqrt_two_a id(sqrt_root_two_a):", id(sqrt_root_two_a), "type(sqrt_root_two_a):", type(sqrt_root_two_a), "sqrt_root_two_a.value:", sqrt_root_two_a.value) sqrt_root_two_b = SquareRootOfTwo() print("sqrt_two_b id(sqrt_root_two_b)", id(sqrt_root_two_b), "type(sqrt_root_two_b):", type(sqrt_root_two_b), "sqrt_root_two_b.value:", sqrt_root_two_b.value) # all singleton objects of the same class are referring to the same object assert id(sqrt_root_two_a) == id(sqrt_root_two_b) sqrt_root_three_a = SquareRootOfThree() print("sqrt_three_a id(sqrt_root_three_a):", id(sqrt_root_three_a), "type(sqrt_root_three_a):", type(sqrt_root_three_a), "sqrt_root_three_a.value:", sqrt_root_three_a.value) sqrt_root_three_b = SquareRootOfThree() print("sqrt_three_b id(sqrt_root_three_b)", id(sqrt_root_three_b), "type(sqrt_root_three_b):", type(sqrt_root_three_b), "sqrt_root_three_b.value:", sqrt_root_three_b.value) # all singleton objects of the same class are referring to the same object assert 
id(sqrt_root_three_a) == id(sqrt_root_three_b) """) header_md("""Metaclasses in the Python3 standard library""", nesting=2) print_md(""" This section lists examples of meta-classes in the python standard library. Looking at the standard library of a language is often quite useful, when learning about the intricacies of a programming language. """) header_md("""ABCMeta class""", nesting=3) print_md("""The purpose of this metaclass is to define abstract base classes (also known as ABC's), as defined in [PEP 3119](https://www.python.org/dev/peps/pep-3119/), the documentation for the metaclass [ABCMeta class](https://docs.python.org/3/library/abc.html#abc.ABCMeta). A python metaclass imposes a different behavior for builtin function [isinstance](https://docs.python.org/3/library/functions.html#isinstance) and [issubclass](https://docs.python.org/3/library/functions.html#issubclass) Only classes that are [registered](https://docs.python.org/3/library/abc.html#abc.ABCMeta.register) with the metaclass, are reported as being subclasses of the given metaclass. The referenced PEP explains, why this is needed, i didn't quite understand the explanation. Would be helpful if the reader can clarify this issue. """) header_md("""Enum classes""", nesting=3) print_md("""Python has support for [enum classes](https://docs.python.org/3/library/enum.html). An enum class lists a set of integer class variables, these variables can then be accessed both by their name, and by their integer value. 
An example usage: Note that the class doesn't have a constructor, everything is being taken care of by the baseclass [enum.Enum](https://docs.python.org/3/library/enum.html#enum.Enum) which is making use of a meta-class in he definition of the Enum class [here](https://docs.python.org/3/library/enum.html), this metaclass [EnumMeta source code](https://github.com/python/cpython/blob/f6648e229edf07a1e4897244d7d34989dd9ea647/Lib/enum.py#L161) then creates a behind the scene dictionary, that maps the integer values to their constant names. The advantage is, that you get an exception, when accessing an undefined constant, or name. There are also more things there, please refer to the linked [documentation](https://docs.python.org/3/library/enum.html) """) eval_and_quote(""" import enum class Rainbow(enum.Enum): RED=1 ORANGE=2 YELLOW=3 GREEN=4 BLUE=5 INDIGO=6 VIOLET=7 color=Rainbow.GREEN print("type(Rainbow.GREEN):", type(Rainbow.GREEN)) print("The string rep Rainbow.Green.name:", Rainbow.GREEN.name, "type(Rainbow.GREEN.name):", type(Rainbow.GREEN.name)) print("The integer rep Rainbow.GREEN.value: ", Rainbow.GREEN.value, "type(Rainbow.GREEN.value):", type(Rainbow.GREEN.value)) print("Access by name: Rainbow['GREEN']:", Rainbow['GREEN']) print("Access by value: Rainbow(4):", Rainbow(4)) # which is the same thing assert id(Rainbow['GREEN']) == id(Rainbow(4)) """) header_md("""Conclusion""", nesting=2) print_md(""" Python meta-classes and decorators are very similar in their capabilities. Both are tools for [metaprogramming](https://en.wikipedia.org/wiki/Metaprogramming), tools for modifying the program text, and treating and modifying code, as if it were data. I would argue, that decorators are most often the easiest way of achieving the same goal. 
However some things, like hooking the classification of classes and objects (implementing class methods [__instancecheck__ and __subclasscheck__](https://docs.python.org/3/reference/datamodel.html#customizing-instance-and-subclass-checks), can only be done with meta-classes. I hope, that this course has given you a better understanding, of what is happening under the hood, which would be a good thing. """) print_md("*** eof tutorial ***")
43.707576
720
0.733456
4,388
28,847
4.488605
0.121696
0.029854
0.026807
0.016399
0.462074
0.409271
0.385561
0.35134
0.309454
0.270309
0
0.00563
0.162582
28,847
659
721
43.7739
0.809703
0.010122
0
0.398169
0
0.176201
0.930223
0.157734
0
0
0
0
0.038902
1
0
true
0.02746
0.01373
0
0.032037
0.244851
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
c2a329ec349312cdbee649e9a5eb68a195101366
2,374
py
Python
libs/units/tests/test_string_reps.py
mscansian/drpexe-uploader
de17baf9085155a046b8e5f68b0b3191a2ce1847
[ "MIT" ]
null
null
null
libs/units/tests/test_string_reps.py
mscansian/drpexe-uploader
de17baf9085155a046b8e5f68b0b3191a2ce1847
[ "MIT" ]
null
null
null
libs/units/tests/test_string_reps.py
mscansian/drpexe-uploader
de17baf9085155a046b8e5f68b0b3191a2ce1847
[ "MIT" ]
null
null
null
"""Tests for string representations of Quantities and Units, i.e. __repr__ and __str__""" from units import unit from units.predefined import define_units from units.quantity import Quantity from units.registry import REGISTRY def test_quantity_repr(): """Developer-friendly string representation of quantities.""" assert repr(Quantity(1, unit('m'))) == "Quantity(1, LeafUnit('m', True))" def test_quantity_str(): """User-friendly string representation of quantities.""" assert str(Quantity(1, unit('m'))) == "1.00 m" def test_leaf_unit_repr(): """Developer-friendly string representation of leaf units.""" assert repr(unit('m')) == "LeafUnit('m', True)" def test_leaf_unit_str(): """User-friendly string representation of leaf units""" assert str(unit('s')) == "s" def test_composed_unit_repr(): """Developer-friendly string representation of composed units.""" test_repr = (repr(unit('m') * unit('g') / unit('s'))) # non-deterministic assert test_repr in ["ComposedUnit([LeafUnit('g', True), " + "LeafUnit('m', True)], " + "[LeafUnit('s', True)], 1)", "ComposedUnit([LeafUnit('m', True), " + "LeafUnit('g', True)], " + "[LeafUnit('s', True)], 1)"] def test_composed_unit_str(): """User-friendly string representation of composed units.""" test_str = (str(unit('m') * unit('g') / unit('s'))) assert test_str in ["g * m / s", "m * g / s"] # non-deterministic. def test_named_composed_unit_repr(): """Developer-friendly string representation of named units.""" assert (repr(unit('km')) == "NamedComposedUnit('km', " + "ComposedUnit([LeafUnit('m', True)], " + "[], 1000), False)") def test_named_composed_unit_str(): """User-friendly string representation of named units.""" assert str(unit('mi')) == 'mi' def setup_module(module): # Disable warning about not using module. # pylint: disable=W0613 """Called by py.test before running any of the tests here.""" define_units() def teardown_module(module): # Disable warning about not using module. 
# pylint: disable=W0613 """Called after running all of the tests here.""" REGISTRY.clear()
36.523077
77
0.607835
283
2,374
4.957597
0.233216
0.039914
0.159658
0.171062
0.550249
0.478974
0.377762
0.246614
0.098361
0.098361
0
0.011074
0.239259
2,374
64
78
37.09375
0.765781
0.329402
0
0
0
0
0.217505
0.067929
0
0
0
0
0.242424
1
0.30303
false
0
0.121212
0
0.424242
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
c2a90d53c2c37e84ce26a4aecf160f999dff6816
969
py
Python
fishpi/vehicle/test_quick.py
FishPi/FishPi-POCV---Command---Control
6df8e9db29c1b4769ddedb3a89a21fadae260709
[ "BSD-2-Clause" ]
18
2015-01-17T17:03:07.000Z
2020-10-17T06:38:26.000Z
fishpi/vehicle/test_quick.py
FishPi/FishPi-POCV---Command---Control
6df8e9db29c1b4769ddedb3a89a21fadae260709
[ "BSD-2-Clause" ]
null
null
null
fishpi/vehicle/test_quick.py
FishPi/FishPi-POCV---Command---Control
6df8e9db29c1b4769ddedb3a89a21fadae260709
[ "BSD-2-Clause" ]
9
2015-02-14T01:42:46.000Z
2019-08-26T20:24:36.000Z
#!/usr/bin/python # # FishPi - An autonomous drop in the ocean # # Simple test of PWM motor and servo drive # import logging import raspberrypi from time import sleep from drive_controller import AdafruitDriveController if __name__ == "__main__": logger = logging.getLogger() logger.setLevel(logging.DEBUG) console = logging.StreamHandler() logger.addHandler(console) print "testing drive controller..." drive = AdafruitDriveController(debug=True, i2c_bus=raspberrypi.i2c_bus()) print "run ahead..." drive.set_throttle(0.5) sleep(0.5) drive.set_throttle(1.0) sleep(0.5) drive.set_throttle(0.5) sleep(2) print "run 0%..." drive.set_throttle(-1.0) sleep(2) drive.set_throttle(0.0) sleep(2) print "run reverse for 2 sec" drive.set_throttle(-0.5) sleep(0.5) drive.set_throttle(-1.0) sleep(2) print "and back to neutral..." drive.set_throttle(0.0) sleep(5)
20.617021
78
0.668731
136
969
4.625
0.404412
0.101749
0.203498
0.135135
0.305246
0.305246
0.192369
0.152623
0.152623
0.152623
0
0.040523
0.210526
969
46
79
21.065217
0.781699
0.101135
0
0.419355
0
0
0.114583
0
0
0
0
0
0
0
null
null
0
0.129032
null
null
0.16129
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
c2bf82786883e88ae221354f9ad562aa51a42fc8
23,127
py
Python
my_version/craw_page_parse_2.py
xuerenlv/PaperWork
f096b57a80e8d771f080a02b925a22edbbee722a
[ "Apache-2.0" ]
1
2015-10-15T12:26:07.000Z
2015-10-15T12:26:07.000Z
my_version/craw_page_parse_2.py
xuerenlv/PaperWork
f096b57a80e8d771f080a02b925a22edbbee722a
[ "Apache-2.0" ]
null
null
null
my_version/craw_page_parse_2.py
xuerenlv/PaperWork
f096b57a80e8d771f080a02b925a22edbbee722a
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- ''' Created on 2015-08-21 @author: xhj ''' import requests import StringIO import gzip import threading from loginer import Loginer import time from my_log import WeiboSearchLog import os import traceback from bs4 import BeautifulSoup import re from Queue import Queue import datetime from store_model import Single_weibo_store, UserInfo, UserInfo_store, \ UserInfo_loc, UserInfo_loc_store, Bie_Ming_store, \ UserInfo_for_regester_time_store, UserInfo_for_regester_time from mongoengine.errors import NotUniqueError import random from craw_page_parse import Crawler_with_proxy, crawl_set_time_with_keyword import sys from urllib import quote, quote_plus from mongoengine.queryset.visitor import Q import json reload(sys) sys.setdefaultencoding('utf8') # # 通过 nickname 抓取 uid class crawl_uid_from_nickname(threading.Thread): file_write_lock = threading.Lock() def __init__(self, nicknam_list, thread_name='crawl_uid_from_nickname'): threading.Thread.__init__(self) self.nickname_list = nicknam_list self.url_queue = Queue() self.second_url_queue = Queue() pass # http://weibo.cn/search/user/?keyword=孔庆东&page=1 def init_url_queue(self): for nickname in self.nickname_list: url = "http://weibo.cn/search/user/?keyword=" + nickname + "&page=1" self.url_queue.put(url) pass # 抓取并解析页面 def crawl(self, url, is_again=True): loginer = Loginer() cookie = loginer.get_cookie() proxy = loginer.get_proxy() craw_object = Crawler_with_proxy(url, cookie, proxy) WeiboSearchLog().get_scheduler_logger().info(self.name + " start to crawl ! 
" + url) uid_or_uname = "" try: page = craw_object.get_page() uid_or_uname = page_parser_from_search_for_uid(page) except: print traceback.format_exc() crawl_set_time_with_keyword.del_proxy_lock.acquire() if proxy == loginer.get_proxy(): loginer.del_proxy() WeiboSearchLog().get_scheduler_logger().warning(self.name + " proxy exception , change proxy !") crawl_set_time_with_keyword.del_proxy_lock.release() if is_again: return self.crawl(url, is_again=False) else: self.second_url_queue.put(url) return uid_or_uname return uid_or_uname def run(self): self.init_url_queue() while not self.url_queue.empty() or not self.second_url_queue.empty(): url = "" if not self.url_queue.empty(): url = self.url_queue.get() else: url = self.second_url_queue.get() uid_or_uname = self.crawl(url) op_url = url[url.find("keyword="):] nickname = op_url[op_url.find('=') + 1:op_url.find('&')] crawl_uid_from_nickname.file_write_lock.acquire() file_w = open("at_nickname_to_(uid_or_uname).txt", 'a') file_w.write("[uid_or_uname:" + uid_or_uname + "][nickname:" + nickname + "]" + '\n') file_w.flush() file_w.close() crawl_uid_from_nickname.file_write_lock.release() pass # # 通过 uid_or_uname 抓取 用户信息 class crawl_userinfo_from_uname_or_uid(threading.Thread): def __init__(self, uid_or_uname_list, thread_name='crawl_userinfo_from_uname_or_uid'): threading.Thread.__init__(self, name=thread_name) self.uid_or_uname_list = uid_or_uname_list self.url_queue = Queue() self.second_url_queue = Queue() pass # http://weibo.cn/breakingnews?f=search_0 def init_url_queue(self): global UserInfo_store for uid_or_nickname in self.uid_or_uname_list: if len(UserInfo_store.objects(Q(uid_or_uname=str(uid_or_nickname)) | Q(nickname=str(uid_or_nickname)))) != 0 or\ len(Bie_Ming_store.objects(Q(uid_or_uname=str(uid_or_nickname)) | Q(bie_ming=str(uid_or_nickname)))) != 0: continue self.url_queue.put(uid_or_nickname) print "crawl size ::::::::: ", self.url_queue.qsize() pass # 抓取并解析页面 def crawl(self, uid_or_nickname, 
is_again=False): # $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ url = '' if len(UserInfo_store.objects(Q(uid_or_uname=str(uid_or_nickname)) | Q(nickname=str(uid_or_nickname)))) != 0 or\ len(Bie_Ming_store.objects(Q(uid_or_uname=str(uid_or_nickname)) | Q(bie_ming=str(uid_or_nickname)))) != 0: WeiboSearchLog().get_scheduler_logger().info("already in the database : " + uid_or_nickname) return "nothing" quote_uid_or_nickname = "" try: quote_uid_or_nickname = quote_plus(str(uid_or_nickname.strip())) except: print traceback.format_exc() print uid_or_nickname # url = "http://weibo.cn/" + uid_or_nickname + "?f=search_0" if quote_uid_or_nickname == uid_or_nickname: url = "http://weibo.cn/" + uid_or_nickname + "?f=search_0" else: url = "http://weibo.cn/n/" + quote_uid_or_nickname # $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ loginer = Loginer() cookie = loginer.get_cookie() proxy = loginer.get_proxy() craw_object = Crawler_with_proxy(url, cookie, proxy) WeiboSearchLog().get_scheduler_logger().info(self.name + " start to crawl ! " + url) user_info = "" try: page = craw_object.get_page() user_info = page_parser_from_search_for_UserInfo(page, url) except: if is_again: return self.crawl(url, is_again=False) else: return user_info return user_info # uid_or_uname = StringField(unique=True) # nickname = StringField() # is_persion = StringField() # check_or_not = StringField() # fensi = StringField() def store_userinfo_to_db(self, uid_or_nickname, user_info): if type(user_info) is str: WeiboSearchLog().get_scheduler_logger().info(self.name + " nothing ! 
:" + user_info) return unique_user_info = UserInfo_store(uid_or_uname=user_info.uid_or_uname, nickname=user_info.nickname, is_persion=user_info.is_persion, check_or_not=user_info.check_or_not, fensi=user_info.fensi, sex=user_info.sex, location=user_info.location, check_info=user_info.check_info, weibo_all_nums=user_info.weibo_all_nums, guan_zhu_nums=user_info.guan_zhu_nums ) # Bie_Ming_store if unique_user_info['uid_or_uname'] != uid_or_nickname: bie_ming = Bie_Ming_store(uid_or_uname=unique_user_info['uid_or_uname'] , bie_ming=uid_or_nickname) sign = 0 try: unique_user_info.save() except NotUniqueError: sign = 1 WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, not unique ! " + unique_user_info['uid_or_uname'] + " crawl: " + uid_or_nickname) except: sign = 2 WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, something wrong !") if sign == 0: WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, success success success success!") try: bie_ming.save() except NotUniqueError: WeiboSearchLog().get_scheduler_logger().info(self.name + " bieming already in database" + unique_user_info['uid_or_uname'] + " crawl: " + uid_or_nickname) return except: WeiboSearchLog().get_scheduler_logger().info(self.name + " bieming insert to database, something wrong !") return pass def run(self): self.init_url_queue() while not self.url_queue.empty() or not self.second_url_queue.empty(): uid_or_nickname = "" if not self.url_queue.empty(): uid_or_nickname = self.url_queue.get() else: uid_or_nickname = self.second_url_queue.get() user_info = self.crawl(uid_or_nickname) # print user_info.to_string() if not user_info == "nothing" : self.store_userinfo_to_db(uid_or_nickname, user_info) pass # # 通过 uid_or_uname 抓取 用户信息 (位置信息) # 这里主要是uid,可以抓取到生日信息 class crawl_userinfo_2_from_uid(threading.Thread): def __init__(self, uid_or_uname_list, thread_name='crawl_userinfo_from_uname_or_uid'): threading.Thread.__init__(self) 
self.uid_or_uname_list = uid_or_uname_list self.url_queue = Queue() self.second_url_queue = Queue() pass # http://weibo.cn/1806760610/info def init_url_queue(self): for uid_or_nickname in self.uid_or_uname_list: url = "http://weibo.cn/" + uid_or_nickname + "/info" self.url_queue.put(url) pass # 抓取并解析页面 def crawl(self, url, is_again=True): loginer = Loginer() cookie = loginer.get_cookie() proxy = loginer.get_proxy() craw_object = Crawler_with_proxy(url, cookie, proxy) WeiboSearchLog().get_scheduler_logger().info(self.name + " start to crawl ! " + url) user_info_loc = "" try: page = craw_object.get_page() user_info_loc = page_parser_from_search_for_UserInfoLoc(page, url) except: print traceback.format_exc() crawl_set_time_with_keyword.del_proxy_lock.acquire() if proxy == loginer.get_proxy(): loginer.del_proxy() WeiboSearchLog().get_scheduler_logger().warning(self.name + " proxy exception , change proxy !") crawl_set_time_with_keyword.del_proxy_lock.release() if is_again: return self.crawl(url, is_again=False) else: self.second_url_queue.put(url) return user_info_loc return user_info_loc # uid_or_uname = StringField(unique=True) # nickname = StringField() # is_persion = StringField() # check_or_not = StringField() # fensi = StringField() def store_userinfo_loc_to_db(self, user_info_loc): unique_user_info_loc = UserInfo_loc_store(uid=user_info_loc.uid, nickname=user_info_loc.nickname, location=user_info_loc.location, sex=user_info_loc.sex, birth=user_info_loc.birth, intro=user_info_loc.intro, check_or_not=user_info_loc.check_or_not, check_info=user_info_loc.check_info) try: unique_user_info_loc.save() except NotUniqueError: pass except: WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, something wrong !") pass WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, success !") pass def run(self): self.init_url_queue() while not self.url_queue.empty() or not self.second_url_queue.empty(): url = "" if not 
self.url_queue.empty(): url = self.url_queue.get() else: url = self.second_url_queue.get() user_info_loc = self.crawl(url) # print user_info.to_string() self.store_userinfo_loc_to_db(user_info_loc) pass # 要从 网页端 进行抓取,为了提取用户的注册时间 class crawl_userinfo_3_for_regester_time(threading.Thread): def __init__(self, uid_or_uname_list, thread_name='crawl_userinfo_for_regester_times'): threading.Thread.__init__(self) self.uid_or_uname_list = uid_or_uname_list self.url_queue = Queue() self.second_url_queue = Queue() pass # http://weibo.cn/1806760610/info def init_url_queue(self): for uid_or_nickname in self.uid_or_uname_list: url = "http://weibo.com/" + uid_or_nickname + "/info" self.url_queue.put(url) pass # 抓取并解析页面 def crawl(self, url, is_again=True): loginer = Loginer() cookie = loginer.get_cookie() proxy = loginer.get_proxy() craw_object = Crawler_with_proxy(url, cookie, proxy) WeiboSearchLog().get_scheduler_logger().info(self.name + " start to crawl ! " + url) userInfo_for_regester_time = "" try: page = craw_object.get_page() userInfo_for_regester_time = page_parser_from_search_for_UserInfo_for_regester_time(page, url) except: print traceback.format_exc() crawl_set_time_with_keyword.del_proxy_lock.acquire() if proxy == loginer.get_proxy(): loginer.del_proxy() WeiboSearchLog().get_scheduler_logger().warning(self.name + " proxy exception , change proxy !") crawl_set_time_with_keyword.del_proxy_lock.release() if is_again: return self.crawl(url, is_again=False) else: self.second_url_queue.put(url) return userInfo_for_regester_time return userInfo_for_regester_time # uid_or_uname = StringField(unique=True) # nickname = StringField() # is_persion = StringField() # check_or_not = StringField() # fensi = StringField() def store_userinfo_loc_to_db(self, userInfo_for_regester_time): unique_user_info = UserInfo_for_regester_time_store(uid=userInfo_for_regester_time.uid, nickname=userInfo_for_regester_time.nickname, \ location=userInfo_for_regester_time.location, 
sex=userInfo_for_regester_time.sex, \ birth=userInfo_for_regester_time.birth, regester_time=userInfo_for_regester_time.regester_time) try: unique_user_info.save() except NotUniqueError: pass except: WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, something wrong !") pass WeiboSearchLog().get_scheduler_logger().info(self.name + " insert to database, success !") pass def run(self): self.init_url_queue() while not self.url_queue.empty() or not self.second_url_queue.empty(): url = "" if not self.url_queue.empty(): url = self.url_queue.get() else: url = self.second_url_queue.get() userInfo_for_regester_time = self.crawl(url) # print user_info.to_string() self.store_userinfo_loc_to_db(userInfo_for_regester_time) pass ############################################ 页面解析 ########################################################### # http://weibo.cn/1806760610/info def page_parser_from_search_for_UserInfoLoc(page, url): bs_all = BeautifulSoup(page) div_all = bs_all.findAll('div', attrs={'class':'c'}) nickname = "" location = "" sex = "" birth = "" intro = "" check_or_not = u'否' check_info = "" op_uid = url[url.find('.cn'):] uid = op_uid[op_uid.find('/') + 1:op_uid.rfind('/')] for div in div_all: for str_in in str(div.getText(u'\n')).split(u'\n'): en_str = str_in.encode('utf-8') if(en_str.startswith(u"昵称")): nickname = en_str[en_str.find(':') + 1:] elif(en_str.startswith(u"地区")): location = en_str[en_str.find(':') + 1:] elif(en_str.startswith(u"性别")): sex = en_str[en_str.find(':') + 1:] elif(en_str.startswith(u"生日")): birth = en_str[en_str.find(':') + 1:] elif(en_str.startswith(u"简介")): intro = en_str[en_str.find(':') + 1:] elif(en_str.startswith(u"认证信息")): check_or_not = u'是' check_info = en_str return UserInfo_loc(uid, nickname, location, sex, birth, intro, check_or_not, check_info) pass # http://weibo.cn/1730330447?f=search_0 # http://weibo.cn/breakingnews?f=search_0 # 解析获取 UserInfo def page_parser_from_search_for_UserInfo(page, url): out_soup = 
BeautifulSoup(page) div_u_first = "" for div_u_one in out_soup.findAll('div', attrs={'class':'u'}): if u"资料" in div_u_one.getText() and u"私信" in div_u_one.getText(): div_u_first = div_u_one break # 获取 uid_or_uname, uid_or_uname = "" for a_one in div_u_first.findAll("a"): if u"送Ta会员" in a_one.getText() and u"uid=" in a_one.attrs["href"]: a_one_href = a_one.attrs["href"] uid_or_uname = a_one_href[a_one_href.find("uid=") + 4:] break # op_url = url[url.find(".cn"):] # uid_or_uname = op_url[op_url.find('/')+1:op_url.find('?')] # 新添加----------------------------------start sex = "" location = "" check_info = "" weibo_all_nums = "" guan_zhu_nums = "" # 新添加----------------------------------end # is_persion,check_or_not is_persion = "" check_or_not = "" div_class_ut = div_u_first.find('div', attrs={'class':'ut'}) # nickname nickname = "" span_class_ctt = div_class_ut.findAll('span', attrs={'class':'ctt'}) for span_class_ctt_one in span_class_ctt: span_class_ctt_one_text = span_class_ctt_one.getText() if u"关注" in span_class_ctt_one_text: if str(span_class_ctt_one_text).find("男") != -1: nickname = span_class_ctt_one_text[:span_class_ctt_one_text.find(u'男') - 1] sex = "男" location = span_class_ctt_one_text[span_class_ctt_one_text.find(u'男') + 2:span_class_ctt_one_text.find(u'关注') - 1] if str(span_class_ctt_one_text).find("女") != -1: nickname = span_class_ctt_one_text[:span_class_ctt_one_text.find(u'女') - 1] sex = "女" location = span_class_ctt_one_text[span_class_ctt_one_text.find(u'女') + 2:span_class_ctt_one_text.find(u'关注') - 1 ] if u"认证" in span_class_ctt_one_text: check_info = span_class_ctt_one_text[span_class_ctt_one_text.find(u"认证:") + 1:] pass # op_span_class_ctt_one_html = span_class_ctt_one_html[2:] # nickname_candidate = op_span_class_ctt_one_html[op_span_class_ctt_one_html.find('>')+1:op_span_class_ctt_one_html.find('<')] # # if str(nickname_candidate).find("男") != -1: # # nickname_candidate = nickname_candidate[:nickname_candidate.find(u'男')-1] # # if 
str(nickname_candidate).find("女") != -1: # # nickname_candidate = nickname_candidate[:nickname_candidate.find(u'女')-1] # nickname = nickname_candidate imag_alt_V = div_class_ut.find('img', attrs={'alt':'V'}) if imag_alt_V is not None: if u"5337" in str(imag_alt_V.attrs['src']): is_persion = "no" else: is_persion = "yes" check_or_not = "yes" else: is_persion = "yes" check_or_not = "no" # ,fensi fensi = "" div_tip2_second_leval = div_u_first.find('div', attrs={'class':'tip2'}) a_all = div_tip2_second_leval.findAll('a') for a_one in a_all: a_text = a_one.getText() if u"粉丝" in a_text: fensi = a_text[a_text.find('[') + 1:a_text.find(']')] if u"关注" in a_text: guan_zhu_nums = a_text[a_text.find('[') + 1:a_text.find(']')] for span_class_tc in div_tip2_second_leval.findAll('span'): span_class_tc_text = span_class_tc.getText() if u"微博" in span_class_tc_text: weibo_all_nums = span_class_tc_text[span_class_tc_text.find('[') + 1:span_class_tc_text.find(']')] user_info = UserInfo(uid_or_uname, nickname, is_persion, check_or_not, fensi, sex, location, check_info, weibo_all_nums, guan_zhu_nums) return user_info pass # # 解析页面,获取搜索的第一个,uid def page_parser_from_search_for_uid(page): out_soup = BeautifulSoup(page) table_first = out_soup.find('table') td_first = table_first.find('td', attrs={'valign':'top'}) a_href = td_first.find('a').attrs['href'] uid_or_uname = a_href[a_href.rfind('/') + 1:a_href.find('?')] return uid_or_uname # # 通过 http://weibo.com/1802646764/info 来抓取用户信息,主要是为了抓取用户的注册时间 def page_parser_from_search_for_UserInfo_for_regester_time(page, url): uid = url[url.find('com/') + 4:url.rfind('/')] nickname = "" location = "" sex = "" birth = "" regester_time = "" soup = BeautifulSoup(page) for script in soup.findAll('script'): text = script.text if 'FM.view(' in text: text = text[8:] if text.endswith(')'): text = text[:-1] if text.endswith(');'): text = text[:-2] data = json.loads(text) inner_html = data.get('html') if inner_html is None: continue inner_soup = 
BeautifulSoup(inner_html) # pf_items = inner_soup.findAll('div', attrs={'class': 'pf_item clearfix'}) li_1_clearfix_all = inner_soup.findAll('li', attrs={'class':'li_1 clearfix'}) for one_li in li_1_clearfix_all: this_text = one_li.getText() # print this_text if u'昵称' in this_text: nickname = this_text[this_text.find(u'昵称') + 3:].strip() continue if u'所在地' in this_text: location = this_text[this_text.find(u'所在地') + 4:].strip() continue if u'性别' in this_text: sex = this_text[this_text.find(u'性别') + 3:].strip() continue if u'生日' in this_text: birth = this_text[this_text.find(u'生日') + 3:].strip() continue if u'注册时间' in this_text: regester_time = this_text[this_text.find(u'注册时间') + 5:].strip() continue userInfo_for_regester_time = UserInfo_for_regester_time(uid,nickname,location,sex,birth,regester_time) # userInfo_for_regester_time.print_self() return userInfo_for_regester_time
37.727569
293
0.589657
2,921
23,127
4.305033
0.092776
0.031014
0.034195
0.028628
0.676024
0.598807
0.537336
0.483181
0.445089
0.425686
0
0.007777
0.293899
23,127
612
294
37.789216
0.762278
0.099667
0
0.505855
0
0
0.06028
0.007426
0
0
0
0
0
0
null
null
0.051522
0.04918
null
null
0.014052
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
c2c2bc98c89407c449beca19dbbedcbb96369738
246
py
Python
sureflap/resources/request_models.py
fabieu/sureflap-api
711bb32a7add64367fa3e15b25d52468f8aa7904
[ "Apache-2.0" ]
1
2020-12-03T16:43:55.000Z
2020-12-03T16:43:55.000Z
sureflap/resources/request_models.py
fabieu/sureflap-api
711bb32a7add64367fa3e15b25d52468f8aa7904
[ "Apache-2.0" ]
3
2021-07-14T21:41:53.000Z
2022-01-29T16:56:21.000Z
sureflap/resources/request_models.py
fabieu/sureflap-api
711bb32a7add64367fa3e15b25d52468f8aa7904
[ "Apache-2.0" ]
2
2021-02-13T12:11:22.000Z
2021-02-14T09:58:40.000Z
from datetime import datetime, time from enum import Enum from typing import Optional, Sequence, Union from pydantic import BaseModel class Direction(Enum): enum_1 = 1 enum_2 = 2 class PetLocationSet(BaseModel): where: Direction
16.4
44
0.756098
33
246
5.575758
0.515152
0
0
0
0
0
0
0
0
0
0
0.020202
0.195122
246
14
45
17.571429
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.444444
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
c2d85ba1664d0e7d0a642dbaf8af0b812fb9a534
320
py
Python
forums/__init__.py
sharebears/pulsar-forums
6c1152a181c30bb82c49556fd072f47c2eeaf1cb
[ "MIT" ]
null
null
null
forums/__init__.py
sharebears/pulsar-forums
6c1152a181c30bb82c49556fd072f47c2eeaf1cb
[ "MIT" ]
null
null
null
forums/__init__.py
sharebears/pulsar-forums
6c1152a181c30bb82c49556fd072f47c2eeaf1cb
[ "MIT" ]
null
null
null
from werkzeug import find_modules, import_string

from forums import routes
from forums.modifications import modify_core


def init_app(app):
    """Wire the forums package into *app*: import all submodules, register
    the routes blueprint, and apply the core modifications."""
    with app.app_context():
        # Importing every submodule lets module-level registration run.
        for module_name in find_modules('forums', recursive=True):
            import_string(module_name)
        app.register_blueprint(routes.bp)
        modify_core()
21.333333
59
0.73125
43
320
5.232558
0.55814
0.097778
0
0
0
0
0
0
0
0
0
0
0.19375
320
14
60
22.857143
0.872093
0
0
0
0
0
0.01875
0
0
0
0
0
0
1
0.111111
false
0
0.444444
0
0.555556
0.111111
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
c2e0b6b1770d351e8357e3bd5c3075735bda47ee
695
py
Python
django_monitor/price_monitor/spider/enterprise/enterprise/items.py
jasonljc/enterprise-price-monitor
616396243e909d3584f4cfcc53d4e156510da4bb
[ "MIT" ]
null
null
null
django_monitor/price_monitor/spider/enterprise/enterprise/items.py
jasonljc/enterprise-price-monitor
616396243e909d3584f4cfcc53d4e156510da4bb
[ "MIT" ]
null
null
null
django_monitor/price_monitor/spider/enterprise/enterprise/items.py
jasonljc/enterprise-price-monitor
616396243e909d3584f4cfcc53d4e156510da4bb
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html import scrapy class EnterpriseItem(scrapy.Item): # define the fields for your item here like: searchTime = scrapy.Field() searchCriteria = scrapy.Field() startDateMonth = scrapy.Field() startDateInput = scrapy.Field() startDateTime = scrapy.Field() endDateMonth = scrapy.Field() endDateInput = scrapy.Field() endDateTime = scrapy.Field() optionalCode = scrapy.Field() location = scrapy.Field() car_class = scrapy.Field() car_price = scrapy.Field() car_total_price = scrapy.Field()
26.730769
51
0.684892
80
695
5.9
0.525
0.302966
0.088983
0
0
0
0
0
0
0
0
0.001792
0.197122
695
26
52
26.730769
0.844086
0.261871
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.066667
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
6c0dd11197119baf2f7c1d5775874b54734c6eff
554
py
Python
assets/tuned/daemon/tuned/profiles/functions/function_regex_search_ternary.py
sjug/cluster-node-tuning-operator
8654d1c9558d0d5ef03d14373c877ebc737f9736
[ "Apache-2.0" ]
53
2018-11-13T07:02:03.000Z
2022-03-25T00:00:04.000Z
assets/tuned/daemon/tuned/profiles/functions/function_regex_search_ternary.py
sjug/cluster-node-tuning-operator
8654d1c9558d0d5ef03d14373c877ebc737f9736
[ "Apache-2.0" ]
324
2018-10-02T14:18:54.000Z
2022-03-31T23:47:33.000Z
assets/tuned/daemon/tuned/profiles/functions/function_regex_search_ternary.py
sjug/cluster-node-tuning-operator
8654d1c9558d0d5ef03d14373c877ebc737f9736
[ "Apache-2.0" ]
54
2018-10-01T16:55:09.000Z
2022-03-28T13:56:53.000Z
import re
from . import base


class regex_search_ternary(base.Function):
    """Ternary regex operator.

    Takes arguments of the form STR1, REGEX, STR2, STR3 and returns STR2
    when REGEX matches STR1 (using re.search), otherwise STR3.
    """

    def __init__(self):
        # Exactly 4 arguments are required (min == max == 4).
        super(regex_search_ternary, self).__init__("regex_search_ternary", 4, 4)

    def execute(self, args):
        # Let the base class validate the argument list first.
        if not super(regex_search_ternary, self).execute(args):
            return None
        subject, pattern, on_match, on_miss = args[0], args[1], args[2], args[3]
        return on_match if re.search(pattern, subject) else on_miss
25.181818
74
0.725632
85
554
4.541176
0.482353
0.11399
0.186529
0.119171
0.139896
0
0
0
0
0
0
0.0282
0.16787
554
21
75
26.380952
0.809111
0.341155
0
0
0
0
0.056818
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.666667
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
6c0f4bbb43f54fa43e4df577a49de96ebd810921
969
py
Python
bitshares/aio/block.py
silverchen0402/python-bitshares
aafbcf5cd09e7bca99dd156fd60b9df8ba508630
[ "MIT" ]
102
2018-04-08T23:05:00.000Z
2022-03-31T10:10:03.000Z
bitshares/aio/block.py
silverchen0402/python-bitshares
aafbcf5cd09e7bca99dd156fd60b9df8ba508630
[ "MIT" ]
246
2018-04-03T12:35:49.000Z
2022-02-28T10:44:28.000Z
bitshares/aio/block.py
silverchen0402/python-bitshares
aafbcf5cd09e7bca99dd156fd60b9df8ba508630
[ "MIT" ]
128
2018-04-14T01:39:12.000Z
2022-03-25T08:56:51.000Z
# -*- coding: utf-8 -*- from .instance import BlockchainInstance from ..block import Block as SyncBlock, BlockHeader as SyncBlockHeader from graphenecommon.aio.block import ( Block as GrapheneBlock, BlockHeader as GrapheneBlockHeader, ) @BlockchainInstance.inject class Block(GrapheneBlock, SyncBlock): """ Read a single block from the chain. :param int block: block number :param bitshares.aio.bitshares.BitShares blockchain_instance: BitShares instance :param bool lazy: Use lazy loading :param loop: async event loop Instances of this class are dictionaries that come with additional methods (see below) that allow dealing with a block and it's corresponding functions. .. code-block:: python from bitshares.aio.block import Block block = await Block(1) print(block) """ pass @BlockchainInstance.inject class BlockHeader(GrapheneBlockHeader, SyncBlockHeader): pass
25.5
75
0.721362
112
969
6.232143
0.535714
0.047278
0.068768
0.051576
0
0
0
0
0
0
0
0.002618
0.211558
969
37
76
26.189189
0.910995
0.521156
0
0.333333
0
0
0
0
0
0
0
0
0
1
0
true
0.166667
0.25
0
0.416667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
2
6c2a16cd533af1320b7486ce971ab489987fbb0b
806
py
Python
CelestialMechanics/mu.py
Camiloasc1/AstronomyUNAL
0d533c1737e5328605c70f614024e1759e8d0962
[ "MIT" ]
null
null
null
CelestialMechanics/mu.py
Camiloasc1/AstronomyUNAL
0d533c1737e5328605c70f614024e1759e8d0962
[ "MIT" ]
null
null
null
CelestialMechanics/mu.py
Camiloasc1/AstronomyUNAL
0d533c1737e5328605c70f614024e1759e8d0962
[ "MIT" ]
null
null
null
import numpy as np

from CelestialMechanics.kepler.constants import K


def mu_sun(m2_over_m1: float) -> float:
    """
    mu = (k * sqrt(1 + m2/m1))^2 = k^2 * (1 + m2/m1)

    Note: the returned value is the *square* of k * sqrt(1 + m2/m1).

    :param m2_over_m1: mass ratio m2 / m1
    :type m2_over_m1: float
    :return: mu
    :rtype: float
    """
    mu = K * np.sqrt(1. + m2_over_m1)
    return mu * mu


def mu_na(n: float, a: float) -> float:
    """
    mu = n^2 * a^3 (Kepler's third law)

    Note: an earlier docstring said n^2 / a^3, but the code multiplies.

    :param n: mean motion in degrees
    :type n: float
    :param a: semi-major axis
    :type a: float
    :return: mu
    :rtype: float
    """
    return n * n * a * a * a


def mu_gm1m2(m1: float, m2: float) -> float:
    """
    mu = G (m1 + m2)

    :param m1: mass 1
    :type m1: float
    :param m2: mass 2
    :type m2: float
    :return: mu
    :rtype: float
    """
    # Local import keeps astropy optional for callers of the other helpers.
    from astropy.constants import G
    return G * (m1 + m2)
16.791667
49
0.545906
126
806
3.404762
0.301587
0.055944
0.074592
0.125874
0.181818
0
0
0
0
0
0
0.051282
0.322581
806
47
50
17.148936
0.734432
0.41067
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0.3
0
0.9
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
6c36540f75ff0aa4e3d1fa481631b799e5a9132c
1,041
py
Python
portfolio_pj/portfolio_app/views.py
duynb92/portfolio_site
f6898e8d1c3a67aa8dc6eafc7e4804e81dc46063
[ "MIT" ]
null
null
null
portfolio_pj/portfolio_app/views.py
duynb92/portfolio_site
f6898e8d1c3a67aa8dc6eafc7e4804e81dc46063
[ "MIT" ]
null
null
null
portfolio_pj/portfolio_app/views.py
duynb92/portfolio_site
f6898e8d1c3a67aa8dc6eafc7e4804e81dc46063
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.shortcuts import render
from models import *


# Create your views here.
def index(req):
    """Render the home page with skills and hobbies."""
    return render(
        req, 'index.html',
        context=vars(HomeContext("Home", Facade.getSkills(), Facade.getHobbies())))


def profile(req):
    """Render the profile page."""
    return render(
        req, 'profile.html',
        context=vars(ProfileContext("Profile", Facade.getProfiles())))


def portfolio(req):
    """Render the portfolio grid."""
    return render(
        req, 'portfolio-gird-3.html',
        context=vars(PortfolioContext("Portfolio", Facade.getProjects())))


def service(req):
    """Render the services page."""
    return render(
        req, 'services.html',
        context=vars(ServiceContext("Services", Facade.getServices())))


def contact(req):
    """Render the contact page."""
    return render(
        req, 'contact-3.html',
        context=vars(BaseContext("Contact")))


def blog(req):
    """Render the blog page (currently with no posts)."""
    return render(
        req, 'blog.html',
        context=vars(BlogContext("Blog", [])))
33.580645
80
0.727185
125
1,041
5.952
0.352
0.096774
0.120968
0.05914
0.067204
0
0
0
0
0
0
0.003319
0.131604
1,041
31
81
33.580645
0.81969
0.043228
0
0
0
0
0.118712
0.021127
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
6c3eef3ce318f9f2ea78b8b3df0a26bfa302ee81
106
py
Python
src/pythonFEA/defaults.py
honzatomek/pythonFEA
c851c20800a06cc2084ef53dfd2ab67e7dfbc3b7
[ "MIT" ]
null
null
null
src/pythonFEA/defaults.py
honzatomek/pythonFEA
c851c20800a06cc2084ef53dfd2ab67e7dfbc3b7
[ "MIT" ]
null
null
null
src/pythonFEA/defaults.py
honzatomek/pythonFEA
c851c20800a06cc2084ef53dfd2ab67e7dfbc3b7
[ "MIT" ]
null
null
null
# DEFUALT SETUP FOR NUMBERS DEFAULT_FLOAT = float # DEFAULT SETUP FOR STRINGS DEFAULT_LABEL_LENGTH = 120
17.666667
27
0.801887
15
106
5.466667
0.666667
0.195122
0
0
0
0
0
0
0
0
0
0.033708
0.160377
106
5
28
21.2
0.88764
0.481132
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
6c5b5d2beb7892b3713dc1291924921532e74885
1,795
py
Python
encommon/tests/test_times.py
enasisnetwork/encommon-py
c2bb1412171c84fe2917a23b535a6db1b5f523c1
[ "MIT" ]
null
null
null
encommon/tests/test_times.py
enasisnetwork/encommon-py
c2bb1412171c84fe2917a23b535a6db1b5f523c1
[ "MIT" ]
null
null
null
encommon/tests/test_times.py
enasisnetwork/encommon-py
c2bb1412171c84fe2917a23b535a6db1b5f523c1
[ "MIT" ]
null
null
null
#==============================================================================# # Enasis Network Common Libraries # # Python Functions Time Processing # #==============================================================================# # Primary Functions for Time Processing # # : - - - - - - - - - - - - - - - - - - -- - - - - - - - - - - - - - - - - - - # # : Standard Time Converting timeformat # #==============================================================================# #------------------------------------------------------------------------------# # Primary Functions for Time Processing # #------------------------------------------------------------------------------# # #~~ Standard Time Converting ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Conditionally perform the conversions to and from epoch and timestamp string #----------------------------------------------------------------------------- def test_timeformat(): # # Import the module and functions relevant to this particular set of tests from encommon.times import timeformat # # Initial section for instantizing variables expected by remaining routine epoch = 1558763424 stamp = "2019-05-25T05:50:24" # # Assert the relevant conditions indicating either test success or failure assert timeformat(epoch, "%Y-%m-%dT%H:%M:%S")[1] == stamp assert timeformat(stamp, "%Y-%m-%dT%H:%M:%S")[0] == epoch #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # #------------------------------------------------------------------------------#
54.393939
80
0.325905
105
1,795
5.561905
0.609524
0.071918
0.065068
0.078767
0.212329
0.212329
0.188356
0.188356
0
0
0
0.018786
0.228969
1,795
32
81
56.09375
0.403179
0.815042
0
0
0
0
0.179054
0
0
0
0
0
0.333333
1
0.166667
false
0
0.166667
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
6c6a82e95bf8ebf0eb518403b616adac59f096b0
505
py
Python
autograd/tests/test_z_playground.py
pmaederyork/Dragrongrad
32794d561f8d0273592ed55d315013eab2c24b8b
[ "MIT" ]
3
2018-12-17T16:24:11.000Z
2020-06-03T22:40:50.000Z
autograd/tests/test_z_playground.py
cs207-project-group4/project-repo
d5ee88d2a7d16477d816d830ba90d241a05e3b48
[ "MIT" ]
2
2018-10-18T17:59:26.000Z
2018-12-08T16:06:34.000Z
autograd/tests/test_z_playground.py
cs207-project-group4/project-repo
d5ee88d2a7d16477d816d830ba90d241a05e3b48
[ "MIT" ]
1
2019-08-19T06:06:13.000Z
2019-08-19T06:06:13.000Z
# -*- coding: utf-8 -*- from autograd.blocks.trigo import sin, cos from autograd.variable import Variable import numpy as np import autograd as ad from autograd import config class test(): def __init__(self, x): self.x=x def parent(self): print('parent') class sub(test): def __init__(self, x): super().__init__(x) def parent(self): print('child') super().parent() t=sub(2) t.parent()
15.78125
42
0.552475
63
505
4.238095
0.460317
0.134831
0.082397
0.11236
0.262172
0
0
0
0
0
0
0.005917
0.330693
505
31
43
16.290323
0.784024
0.041584
0
0.222222
0
0
0.022822
0
0
0
0
0
0
1
0.222222
false
0
0.277778
0
0.611111
0.111111
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
6c6ff29fbade9a404f47dd54164a91e8e0704f4b
664
py
Python
opfu/stock.py
XavierDingRotman/OptionsFutures
bab0de0d66efe39f05e9ddf59460ec76547d9ada
[ "Apache-2.0" ]
1
2020-07-05T20:54:15.000Z
2020-07-05T20:54:15.000Z
opfu/stock.py
XavierDingRotman/OptionsFutures
bab0de0d66efe39f05e9ddf59460ec76547d9ada
[ "Apache-2.0" ]
null
null
null
opfu/stock.py
XavierDingRotman/OptionsFutures
bab0de0d66efe39f05e9ddf59460ec76547d9ada
[ "Apache-2.0" ]
null
null
null
from opfu.security import Security


class Stock(Security):
    """A plain stock position modelled as a Security.

    The strike K is set equal to the spot price S, and the underlying
    Security is initialized with price 0.
    """

    def __init__(self, S, T, is_short=False):
        self.S = S
        self.K = self.S
        self.T = T
        Security.__init__(self, is_short, price=0)

    def payoff_long(self, P):
        """Payoff of a long stock position at price P: P - S."""
        return P - self.S

    def graph_payoff(self, start=0, end=None, num=100):
        """Graph the payoff from `start` to `end` over `num` points.

        When `end` is not given it defaults to twice the spot price.
        """
        # Fix: compare to the None singleton with `is`, not `==` (PEP 8).
        if end is None:
            end = self.S * 2
        Security.graph_payoff(self, start, end, num)

    def get_bsm_price(self):
        """A stock's model price is simply its spot price S."""
        return self.S

    def greek_letter(self, greek, dd=0, method="BSM"):
        """Return the requested greek: 1 for delta, 0 for everything else."""
        if greek == "delta":
            return 1
        if greek == "gamma":
            return 0
        return 0
23.714286
55
0.554217
95
664
3.705263
0.4
0.085227
0.045455
0.113636
0
0
0
0
0
0
0
0.022573
0.332831
664
27
56
24.592593
0.772009
0
0
0.095238
0
0
0.019578
0
0
0
0
0
0
1
0.238095
false
0
0.047619
0.095238
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
6c74c309dcd00dafc4c1aae00a0c378fd733102d
1,105
py
Python
src/user/models.py
fga-gpp-mds/2017.2-Grupo12
a90f94d0d497f625ab82ef44a907561f3bfa835f
[ "MIT" ]
6
2017-10-02T12:07:40.000Z
2017-12-14T11:40:07.000Z
src/user/models.py
fga-gpp-mds/2017.2-Grupo12
a90f94d0d497f625ab82ef44a907561f3bfa835f
[ "MIT" ]
92
2017-09-30T19:14:21.000Z
2017-12-14T04:41:16.000Z
src/user/models.py
fga-gpp-mds/2017.2-Grupo12
a90f94d0d497f625ab82ef44a907561f3bfa835f
[ "MIT" ]
3
2017-09-06T00:49:38.000Z
2018-07-13T00:32:37.000Z
from django.db import models
from django.contrib.auth.models import User


class Person(models.Model):
    """Base profile, linked one-to-one with Django's auth User."""

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=64, null=False)
    email = models.EmailField(max_length=100, null=False)


class Advisor(Person):
    """Person with CAE membership and address data.

    NOTE(review): field names are Portuguese (cpf, cep, bairro, municipio,
    uf) -- semantics inferred from the names; confirm with project docs.
    """

    cpf = models.CharField(max_length=14, null=False)
    tipo_cae = models.CharField(default='Municipal', max_length=9, null=False)
    nome_cae = models.CharField(default='CAE', max_length=50, null=False)
    cep = models.CharField(max_length=10, null=False)
    bairro = models.CharField(max_length=30, null=False)
    municipio = models.CharField(max_length=30, null=False)
    uf = models.CharField(max_length=2, null=False)

    class Meta:
        # Custom permission checked by views to gate advisor features.
        permissions = (
            ('advisor', 'Advisor permissions'),
        )


class President(Advisor):
    """Advisor with the additional president permission."""

    class Meta:
        permissions = (
            ('president', 'President permissions'),
        )


class Administrator(Person):
    """Person with the administrator permission."""

    class Meta:
        permissions = (
            ('administrator', 'Administrator permissions'),
        )
29.078947
78
0.673303
129
1,105
5.674419
0.356589
0.110656
0.147541
0.196721
0.095628
0.095628
0.095628
0
0
0
0
0.019429
0.208145
1,105
37
79
29.864865
0.817143
0
0
0.214286
0
0
0.095928
0
0
0
0
0
0
1
0
false
0
0.071429
0
0.678571
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
6c86a24e42a439643a1c92f29bdfc4a1de454d48
964
py
Python
tests/conftest.py
jwizzle/nerdchess
045726326abc3ff94af30bda0c66beff1ca52978
[ "WTFPL" ]
null
null
null
tests/conftest.py
jwizzle/nerdchess
045726326abc3ff94af30bda0c66beff1ca52978
[ "WTFPL" ]
null
null
null
tests/conftest.py
jwizzle/nerdchess
045726326abc3ff94af30bda0c66beff1ca52978
[ "WTFPL" ]
null
null
null
"""Fixtures for pytest.""" import pytest from nerdchess.board import Board from nerdchess import pieces @pytest.fixture def board_fixt(): """Wrap the boardfixt class as a pytest fixture.""" return BoardFixt(Board()) class BoardFixt(): """Helper functions to manipulate a board passed as fixture.""" def __init__(self, board): """Init.""" self.board = board def place_piece(self, piece, position): """Place a piece or pawn on the board.""" letter = position[0] number = int(position[1]) self.board.squares[letter][number].occupant = piece piece.position = position def default_setup(self): """Set the board in default game start position. Returns: board: The new board object """ boardpieces = pieces.create_pieces() pawns = pieces.create_pawns() self.board.setup_board(boardpieces, pawns) return self.board
25.368421
67
0.631743
115
964
5.208696
0.434783
0.075125
0.043406
0
0
0
0
0
0
0
0
0.002809
0.261411
964
37
68
26.054054
0.838483
0.263485
0
0
0
0
0
0
0
0
0
0
0
1
0.210526
false
0
0.157895
0
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
6c87ac082f2ea2bf7c87cad18eaf0cdd7451709c
869
py
Python
opennem/api/schema.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
22
2020-06-30T05:27:21.000Z
2022-02-21T12:13:51.000Z
opennem/api/schema.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
71
2020-08-07T13:06:30.000Z
2022-03-15T06:44:49.000Z
opennem/api/schema.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
13
2020-06-30T03:28:32.000Z
2021-12-30T08:17:16.000Z
from typing import List, Optional

from pydantic import BaseModel, Field


class ApiBase(BaseModel):
    """Shared pydantic configuration for all API schemas."""

    class Config:
        # Allow construction directly from ORM objects.
        orm_mode = True
        # Strip surrounding whitespace on all string fields.
        anystr_strip_whitespace = True
        # Serialize enums by value rather than by member name.
        use_enum_values = True
        arbitrary_types_allowed = True
        # Re-validate fields when they are assigned, not only on creation.
        validate_assignment = True


class UpdateResponse(BaseModel):
    """Generic response envelope for update operations."""

    success: bool = True
    records: List = []


class FueltechResponse(ApiBase):
    success: bool = True

    # @TODO fix circular references
    # records: List[FueltechSchema]


class APINetworkRegion(ApiBase):
    """A region within a network."""

    code: str
    timezone: Optional[str]


class APINetworkSchema(ApiBase):
    """An electricity network, its regions and interval settings."""

    code: str
    country: str
    label: str

    regions: Optional[List[APINetworkRegion]]

    timezone: Optional[str] = Field(None, description="Network timezone")

    interval_size: int = Field(..., description="Size of network interval in minutes")
21.725
86
0.696203
94
869
6.340426
0.553191
0.036913
0.050336
0
0
0
0
0
0
0
0
0
0.223245
869
39
87
22.282051
0.882963
0.067894
0
0.166667
0
0
0.063197
0
0
0
0
0.025641
0
1
0
false
0
0.083333
0
0.791667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
6c88a8da20ae18c022b5a983db40aed8a4ffb346
304
py
Python
test-examples/issue_678_reproduce.py
tlambert03/image-demos
a2974bcc7f040fd4d14e659c4cbfeabcf726c707
[ "BSD-3-Clause" ]
null
null
null
test-examples/issue_678_reproduce.py
tlambert03/image-demos
a2974bcc7f040fd4d14e659c4cbfeabcf726c707
[ "BSD-3-Clause" ]
null
null
null
test-examples/issue_678_reproduce.py
tlambert03/image-demos
a2974bcc7f040fd4d14e659c4cbfeabcf726c707
[ "BSD-3-Clause" ]
null
null
null
""" Test adding 4D followed by 5D image layers to the viewer Intially only 2 sliders should be present, then a third slider should be created. """ import numpy as np from skimage import data import napari with napari.gui_qt(): viewer = napari.view_image(np.random.random((2, 10, 50, 100, 100)))
19
72
0.733553
51
304
4.333333
0.764706
0.072398
0
0
0
0
0
0
0
0
0
0.056225
0.180921
304
15
73
20.266667
0.831325
0.457237
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
6664d9c361d76731e630fab7db18a3314ba27f7a
699
py
Python
ex022.py
nascimentobrenda24/PythonExercises
2055f42a0454ae25cba6a6457c85822eaad2df01
[ "MIT" ]
1
2021-11-23T21:41:25.000Z
2021-11-23T21:41:25.000Z
ex022.py
nascimentobrenda24/PythonExercises
2055f42a0454ae25cba6a6457c85822eaad2df01
[ "MIT" ]
null
null
null
ex022.py
nascimentobrenda24/PythonExercises
2055f42a0454ae25cba6a6457c85822eaad2df01
[ "MIT" ]
null
null
null
# Analisador de textos # Crie um programa que leia o nome completo de uma pessoa e mostre: # - O nome com todas as letras maiúsculas e minúsculas. # - Quantas letras ao todo (sem considerar espaços). print('=*'*20, 'CADASTRO', '=*'*20) nome = str(input('Nome Completo:')).strip()#Para ler com letras maiúsculas print('Analisando seu nome...') print('Seu nome em minúsculo é {}'.format(nome.lower())) print('Seu nome em MAIÚSCULO é {}'.format(nome.upper())) print('Seu nome tem ano todo {} letras'.format(len(nome)-nome.count(' ')))#menos o contador de espaços primeiro_nome = nome.split() #Vai quebrar os caracteres print('Seu primeiro nome tem {} letras'.format(len(primeiro_nome[0])))
34.95
102
0.703863
105
699
4.666667
0.542857
0.057143
0.073469
0.057143
0
0
0
0
0
0
0
0.008361
0.144492
699
19
103
36.789474
0.811037
0.391989
0
0
0
0
0.40241
0
0
0
0
0.052632
0
1
0
false
0
0
0
0
0.75
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
1
0
2
66698e346f68c9e447122b0d937db33190f58a61
4,443
py
Python
tests/test_metrohash.py
thihara/pyfasthash
20a53f9bb7bf15f98e3e549f523b49e1e0f62e15
[ "Apache-2.0" ]
234
2015-02-05T13:41:58.000Z
2022-03-30T08:55:23.000Z
tests/test_metrohash.py
thihara/pyfasthash
20a53f9bb7bf15f98e3e549f523b49e1e0f62e15
[ "Apache-2.0" ]
50
2015-03-19T05:53:34.000Z
2022-03-30T16:20:17.000Z
tests/test_metrohash.py
thihara/pyfasthash
20a53f9bb7bf15f98e3e549f523b49e1e0f62e15
[ "Apache-2.0" ]
44
2015-04-23T18:51:43.000Z
2022-03-30T21:07:57.000Z
import pytest

import pyhash

# Known-answer tests for the MetroHash family exposed by pyhash.  Each test
# delegates to the shared `hash_tester` fixture with the expected digests for
# a bytes input, a seeded input and a unicode input.  The 128-bit variants
# are skipped unless the extension was built with int128 support.


def test_metro_64_1(hash_tester):
    hash_tester(hasher_type=pyhash.metro_64_1,
                bytes_hash=7555593383206836236,
                seed_hash=9613011798576657330,
                unicode_hash=5634638029758084150)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
def test_metro_128_1(hash_tester):
    hash_tester(hasher_type=pyhash.metro_128_1,
                bytes_hash=310240039238111093048322555259813357218,
                seed_hash=330324289553816260191102680044286377986,
                unicode_hash=160639312567243412360084738183177128736)


def test_metro_64_2(hash_tester):
    hash_tester(hasher_type=pyhash.metro_64_2,
                bytes_hash=13328239478646503906,
                seed_hash=16521803336796657060,
                unicode_hash=5992985172783395072)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
def test_metro_128_2(hash_tester):
    hash_tester(hasher_type=pyhash.metro_128_2,
                bytes_hash=308979041176504703647272401075625691044,
                seed_hash=156408679042779357342816971045969684594,
                unicode_hash=169904568621124891123383613748925830588)


def test_metro_Crc64_1(hash_tester):
    hash_tester(hasher_type=pyhash.metro_crc_64_1,
                bytes_hash=6872506084457499713,
                seed_hash=14064239385324957326,
                unicode_hash=5634638029758084150)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
def test_metro_Crc128_1(hash_tester):
    hash_tester(hasher_type=pyhash.metro_crc_128_1,
                bytes_hash=44856800307026421677415827141042094245,
                seed_hash=199990471895323666720887863107514038076,
                unicode_hash=53052528140813423722778028047086277728)


def test_metro_Crc64_2(hash_tester):
    hash_tester(hasher_type=pyhash.metro_crc_64_2,
                bytes_hash=9168163846307153532,
                seed_hash=11235719994915751828,
                unicode_hash=15697829093445668111)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
def test_metro_Crc128_2(hash_tester):
    hash_tester(hasher_type=pyhash.metro_crc_128_2,
                bytes_hash=29039398407115405218669555123781288008,
                seed_hash=26197404070933777589488526163359489061,
                unicode_hash=136212167639765185451107230087801381416)


# Benchmark variants: each runs one hasher through the shared `hash_bencher`
# fixture with an expected digest argument.


@pytest.mark.benchmark(group='hash64', disable_gc=True)
def test_metro_hash64_1_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_64_1, 6897098198286496634)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
@pytest.mark.benchmark(group='hash128', disable_gc=True)
def test_metro_hash128_1_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_128_1, 284089860902754045805586152203438670446)


@pytest.mark.benchmark(group='hash64', disable_gc=True)
def test_metro_hash64_2_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_64_2, 9928248983045338067)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
@pytest.mark.benchmark(group='hash128', disable_gc=True)
def test_metro_hash128_2_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_128_2, 298961466275459716490100873977629041349)


@pytest.mark.benchmark(group='hash64', disable_gc=True)
def test_metro_hash_crc64_1_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_crc_64_1, 15625740387403976237)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
@pytest.mark.benchmark(group='hash128', disable_gc=True)
def test_metro_hash_crc128_1_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_crc_128_1, 221795002586229010982769362009963170208)


@pytest.mark.benchmark(group='hash64', disable_gc=True)
def test_metro_hash_crc64_2_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_crc_64_2, 9313388757605283934)


@pytest.mark.skipif(not pyhash.build_with_int128, reason="requires int128 support")
@pytest.mark.benchmark(group='hash128', disable_gc=True)
def test_metro_hash_crc128_2_perf(benchmark, hash_bencher):
    hash_bencher(benchmark, pyhash.metro_crc_128_2, 319940271611864595969873671463832146628)
39.669643
83
0.765699
504
4,443
6.376984
0.136905
0.034848
0.059739
0.049782
0.630367
0.627878
0.627878
0.627878
0.627878
0.572495
0
0.29695
0.158677
4,443
111
84
40.027027
0.562868
0
0
0.230769
0
0
0.053117
0
0
0
0
0
0
1
0.205128
false
0
0.025641
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
666a08a2699afb54d288c230c2b9f22bf4716df5
1,375
py
Python
scaner/controllers/communities.py
dearbornlavern/scaner
401de0ec7caef5c5a23aedec106db136bd4e4658
[ "Apache-2.0" ]
12
2016-09-30T12:43:44.000Z
2022-02-17T17:17:02.000Z
scaner/controllers/communities.py
dearbornlavern/scaner
401de0ec7caef5c5a23aedec106db136bd4e4658
[ "Apache-2.0" ]
null
null
null
scaner/controllers/communities.py
dearbornlavern/scaner
401de0ec7caef5c5a23aedec106db136bd4e4658
[ "Apache-2.0" ]
7
2016-09-28T09:48:48.000Z
2020-05-15T04:56:11.000Z
from flask import current_app
from scaner.utils import add_metadata
import json

# USER-EXTRACTION TEST (commented out; kept for reference)
# @add_metadata()
# def get(userId, fields=None, *args, **kwargs):
#     #get_task = current_app.tasks.get_users_from_twitter.delay()
#     get_task = current_app.tasks.execute_metrics.delay()
#     return {'result': "In progress"}, 200


@add_metadata('communities')
def get(communityId, *args, **kwargs):
    """Return one community by id (blocks up to 100s on the task result)."""
    get_task = current_app.tasks.get_community.delay(communityId)
    return {'communities': get_task.get(timeout = 100)}, 200


@add_metadata('users')
def get_network(communityId, *args, **kwargs):
    """Return the users forming a community's network."""
    community_network_task = current_app.tasks.get_community_network.delay(communityId)
    return {'users': community_network_task.get(timeout = 100)}, 200


@add_metadata('communities')
def search(*args, **kwargs):
    """Return the list of all communities."""
    search_task = current_app.tasks.get_communities_list.delay()
    return {'communities': search_task.get(timeout = 100)}, 200


@add_metadata()
def get_emotion(communityId, *args, **kwargs):
    """Return the emotion analysis for a community."""
    emotion_task = current_app.tasks.get_community_emotion.delay(communityId)
    return {'result': emotion_task.get(timeout = 100)}, 200


@add_metadata()
def get_sentiment(communityId, *args, **kwargs):
    """Return the sentiment analysis for a community."""
    sentiment_task = current_app.tasks.get_community_sentiment.delay(communityId)
    return {'communities': sentiment_task.get(timeout = 100)}, 200
39.285714
87
0.749818
178
1,375
5.539326
0.235955
0.081136
0.099391
0.134888
0.419878
0.312373
0.208925
0.146045
0.075051
0
0
0.027228
0.118545
1,375
35
88
39.285714
0.786304
0.182545
0
0.173913
0
0
0.06362
0
0
0
0
0
0
1
0.217391
false
0
0.130435
0
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
666ce6df66f28481199af4b25376a59418b9191f
395
py
Python
cct/cases/create_snapshot.py
LmangoLemon/mind
1b269acca41f840c5c71cb6c92ec92ecfb977ad4
[ "Apache-2.0" ]
null
null
null
cct/cases/create_snapshot.py
LmangoLemon/mind
1b269acca41f840c5c71cb6c92ec92ecfb977ad4
[ "Apache-2.0" ]
null
null
null
cct/cases/create_snapshot.py
LmangoLemon/mind
1b269acca41f840c5c71cb6c92ec92ecfb977ad4
[ "Apache-2.0" ]
null
null
null
import logging
from time import sleep

from cct.case import Case

logger = logging.getLogger(__file__)


class create_snapshot(Case):
    """Test case simulating the phases of creating a snapshot."""

    def pre_test(self):
        """Preparation phase before the snapshot is created."""
        logger.info('doing something before create snapshot')
        sleep(3)

    def process(self):
        """Main phase: create the snapshot."""
        logger.info('create snapshot')
        sleep(5)

    def post_test(self):
        """Wrap-up phase after the snapshot is created."""
        logger.info('create snapshot finished')
17.173913
62
0.668354
50
395
5.14
0.52
0.217899
0.163424
0.140078
0.217899
0
0
0
0
0
0
0.006667
0.240506
395
22
63
17.954545
0.85
0
0
0
0
0
0.194937
0
0
0
0
0
0
1
0.230769
false
0
0.230769
0
0.538462
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
66b70f0759d9cb9c2433981c7b3e962dee37c367
4,032
py
Python
basic/19-brownie/brownie_test/tests/exchange/test_eth_to_token.py
xiangzhengfeng/Dapp-Learning
813fe6e52898206046842d10ecf9eb68b7f336a1
[ "MIT" ]
987
2021-12-19T09:57:18.000Z
2022-03-31T15:39:45.000Z
basic/19-brownie/brownie_test/tests/exchange/test_eth_to_token.py
xiangzhengfeng/Dapp-Learning
813fe6e52898206046842d10ecf9eb68b7f336a1
[ "MIT" ]
30
2021-12-20T03:13:29.000Z
2022-03-31T15:00:23.000Z
basic/19-brownie/brownie_test/tests/exchange/test_eth_to_token.py
xiangzhengfeng/Dapp-Learning
813fe6e52898206046842d10ecf9eb68b7f336a1
[ "MIT" ]
207
2021-12-19T08:40:38.000Z
2022-03-31T13:10:02.000Z
from brownie import (accounts, web3) def test_eth_to_token_swap(HAY_token, hay_token_exchange): HAY_token.approve(hay_token_exchange, 10 * 10**18, {"from": accounts[0]}) # step 1: initialize exchange hay_token_exchange.initializeExchange(10 * 10**18, {"from": accounts[0], "amount": 5 * 10**18}) # the swap function needs a timeout parameter timeout = web3.eth.getBlock(web3.eth.blockNumber).timestamp + 300 assert HAY_token.balanceOf(accounts[2]) == 0 hay_token_exchange.ethToTokenSwap(1, timeout, {"from": accounts[2], "amount": 1 * 10**18}) # step 2: calculate the entries in transforming ETH to Token # a) 注入ETH,直接先收取0.2%的手续费,最后注入到pool中,以input token的形式收取,这里是ETH,上例中收取0.002 ether # fee = 0.2% * 1 * 10**18 = 2000000000000000 # b) 计算池子中剩余的token数量: Token pool = (last invariant) / ( ETH pool - fee ) # 注意在计算时,分子分母都要取整数,int(a) // int(b) # e.g. Token pool = 10 * 10**18 * 5 * 10**18 / (5.998 * 10**18) = 8336112037345781927 # c) 计算返回的token的数量: Token received = original Token amount - Token pool # = 10 * 10**18 - 8336112037345781927 # = 1663887962654218073 # d) 更新ETH-TOKEN池子的所有状态量: # invariant = Token pool * ETH pool = 8336112037345781927 * 6 * 10**18 = 50016672224074691562000000000000000000 # Token Pool = 8336112037345781927 # ETH pool = 6 * 10**18 assert hay_token_exchange.ethPool() == 6 * 10**18 assert web3.eth.getBalance(hay_token_exchange.address) == 6 * 10**18 assert hay_token_exchange.tokenPool() == 8336112037345781927 assert HAY_token.balanceOf(hay_token_exchange) == 8336112037345781927 assert hay_token_exchange.invariant() == 50016672224074691562000000000000000000 assert HAY_token.balanceOf(accounts[2]) == 1663887962654218073 def test_fallback_eth_to_token_swap(HAY_token, hay_token_exchange): # 测试uniswap exchange合约的默认fallback函数,即直接往这个地址转入eth,则默认是用ETH换取TOKEN的操作 HAY_token.approve(hay_token_exchange, 10 * 10**18, {"from": accounts[0]}) # step 1: initialize exchange hay_token_exchange.initializeExchange(10 * 10**18, {"from": accounts[0], "amount": 5 * 10**18}) timeout = 
web3.eth.getBlock(web3.eth.blockNumber).timestamp + 300 # step 2: use accounts[2] to do the test assert HAY_token.balanceOf(accounts[2]) == 0 accounts[2].transfer(hay_token_exchange, 1 * 10**18) assert hay_token_exchange.ethPool() == 6 * 10 ** 18 assert web3.eth.getBalance(hay_token_exchange.address) == 6 * 10 ** 18 assert hay_token_exchange.tokenPool() == 8336112037345781927 assert HAY_token.balanceOf(hay_token_exchange) == 8336112037345781927 assert hay_token_exchange.invariant() == 50016672224074691562000000000000000000 assert HAY_token.balanceOf(accounts[2]) == 1663887962654218073 def test_eth_to_token_payment(HAY_token, hay_token_exchange): # 测试eth2token payment函数,与swap函数不同的点是receipt是另一个地址 # 用accounts[2]的ETH取exchange中交易,交易所得TOken发往accounts[3] HAY_token.approve(hay_token_exchange, 10 * 10 ** 18, {"from": accounts[0]}) # step 1: initialize exchange hay_token_exchange.initializeExchange(10 * 10 ** 18, {"from": accounts[0], "amount": 5 * 10 ** 18}) timeout = web3.eth.getBlock(web3.eth.blockNumber).timestamp + 300 # 开始的两个地址的TOken数量都为0 assert HAY_token.balanceOf(accounts[2]) == 0 assert HAY_token.balanceOf(accounts[3]) == 0 hay_token_exchange.ethToTokenPayment(1, timeout, accounts[3], {"from": accounts[2], "amount": 1 * 10**18}) assert hay_token_exchange.ethPool() == 6 * 10 ** 18 assert web3.eth.getBalance(hay_token_exchange.address) == 6 * 10 ** 18 assert hay_token_exchange.tokenPool() == 8336112037345781927 assert HAY_token.balanceOf(hay_token_exchange) == 8336112037345781927 assert hay_token_exchange.invariant() == 50016672224074691562000000000000000000 assert HAY_token.balanceOf(accounts[3]) == 1663887962654218073 assert HAY_token.balanceOf(accounts[2]) == 0
51.692308
117
0.705109
489
4,032
5.642127
0.198364
0.127582
0.156578
0.0917
0.677782
0.652048
0.640087
0.576296
0.576296
0.529902
0
0.190764
0.178323
4,032
77
118
52.363636
0.642016
0.282738
0
0.769231
0
0
0.02161
0
0
0
0
0
0.589744
1
0.076923
false
0
0.025641
0
0.102564
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
2
66b88bc537b297b0b6ea48d2a39575fd0626f252
232
py
Python
setup.py
h-rub/manzip
875e4ed75e08bd06b0d50698ecf1744ab3723e4c
[ "MIT" ]
null
null
null
setup.py
h-rub/manzip
875e4ed75e08bd06b0d50698ecf1744ab3723e4c
[ "MIT" ]
null
null
null
setup.py
h-rub/manzip
875e4ed75e08bd06b0d50698ecf1744ab3723e4c
[ "MIT" ]
null
null
null
from setuptools import setup setup( name="manzip", version='1.0.0', py_modules=['manzip'], install_requires=[ 'Click', ], entry_points=''' [console_scripts] manzip=app:main ''', )
16.571429
28
0.547414
24
232
5.125
0.833333
0
0
0
0
0
0
0
0
0
0
0.018293
0.293103
232
14
29
16.571429
0.731707
0
0
0
0
0
0.330472
0
0
0
0
0
0
1
0
true
0
0.076923
0
0.076923
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
66bccd1b00412b945cbbdb0f6a0be3ab3a3ef37f
158
py
Python
tests/cli.py
joesitton/Ciphey
862555f13e3915428a2f4ada5538fdf0be77ffcd
[ "MIT" ]
9,908
2020-06-06T01:06:50.000Z
2022-03-31T21:22:57.000Z
tests/cli.py
joesitton/Ciphey
862555f13e3915428a2f4ada5538fdf0be77ffcd
[ "MIT" ]
423
2020-05-30T11:44:37.000Z
2022-03-18T03:15:30.000Z
tests/cli.py
joesitton/Ciphey
862555f13e3915428a2f4ada5538fdf0be77ffcd
[ "MIT" ]
714
2020-06-09T20:24:41.000Z
2022-03-29T15:28:53.000Z
import subprocess from sys import exit result = subprocess.check_output(["ciphey", "-q", "-t 'hello'"]) if "hello" in result: exit(0) else: exit(1)
15.8
64
0.651899
23
158
4.434783
0.73913
0
0
0
0
0
0
0
0
0
0
0.015504
0.183544
158
9
65
17.555556
0.775194
0
0
0
0
0
0.14557
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
66cc342e6fa18c2dd06d530c8ed54f8e34f04274
1,853
py
Python
scripts/bulkLoadUrls.py
conveyal/gtfs-data-manager
e7269fc1660f1816da269b1c116b43bdf758900b
[ "MIT" ]
25
2015-02-11T19:20:07.000Z
2021-03-10T07:53:29.000Z
scripts/bulkLoadUrls.py
conveyal/gtfs-data-manager
e7269fc1660f1816da269b1c116b43bdf758900b
[ "MIT" ]
53
2015-01-07T20:30:56.000Z
2016-10-10T12:47:22.000Z
scripts/bulkLoadUrls.py
conveyal/gtfs-data-manager
e7269fc1660f1816da269b1c116b43bdf758900b
[ "MIT" ]
3
2015-01-03T10:17:34.000Z
2015-11-10T10:44:27.000Z
#!/usr/bin/python # load many feeds to the GTFS data manager, from a csv with fields name and url # usage: bulkLoadFeeds.py file.csv http://server.example.com/ import csv from getpass import getpass from sys import argv import json from cookielib import CookieJar import urllib2 from urllib import urlencode if len(argv) != 3: print 'usage: %s file.csv http://gtfs-data-manager.example.com' % argv[0] server = argv[2] with open(argv[1]) as f: reader = csv.DictReader(f) # log in to the server print 'Please authenticate' uname = raw_input('username: ') pw = getpass('password: ') # strip trailing slash to normalize url server = server if not server.endswith('/') else server[:-1] # cookie handling # http://www.techchorus.net/using-cookie-jar-urllib2 cj = CookieJar() opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) # authenticate opener.open(server + '/authenticate', urlencode(dict(username=uname, password=pw))) # choose feed collection colls = json.load(opener.open(server + '/api/feedcollections')) print 'choose a feed collection: ' for i in xrange(len(colls)): print '%s. %s' % (i + 1, colls[i]['name']) while True: try: coll = colls[int(raw_input('> ')) - 1] except ValueError: continue else: break # load each feed for feed in reader: data = dict( name = feed['name'], url = feed['url'], isPublic = True, autofetch = True, # every day feedCollection = coll ) # http://stackoverflow.com/questions/3290522 req = urllib2.Request(server + '/api/feedsources/', json.dumps(data), {'Content-Type': 'application/json'}) opener.open(req)
25.736111
115
0.611981
228
1,853
4.960526
0.495614
0.026525
0.026525
0
0
0
0
0
0
0
0
0.014012
0.268214
1,853
71
116
26.098592
0.820059
0.207232
0
0
0
0
0.149623
0
0
0
0
0
0
0
null
null
0.075
0.175
null
null
0.1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
66d0333de9cb88854cae7ea5468d3e9e83ace47c
953
py
Python
quokka/ext/weasyprint.py
yencchen/quokka_epus
d64aeb9c5ca59ee4bdcd84381f9bb0504680f5f5
[ "MIT" ]
1
2020-10-31T03:57:07.000Z
2020-10-31T03:57:07.000Z
quokka/ext/weasyprint.py
yencchen/quokka_epus
d64aeb9c5ca59ee4bdcd84381f9bb0504680f5f5
[ "MIT" ]
null
null
null
quokka/ext/weasyprint.py
yencchen/quokka_epus
d64aeb9c5ca59ee4bdcd84381f9bb0504680f5f5
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import print_function import logging from flask import url_for logger = logging.getLogger() try: from flask_weasyprint import render_pdf import_error = False except (ImportError, OSError) as e: # print(""" # Error importing flask-weasyprint! # PDF support is temporarily disabled. # Manual dependencies may need to be installed. # See, # `http://weasyprint.org/docs/install/#by-platform`_ # `https://github.com/Kozea/WeasyPrint/issues/79`_ # """ + str(e)) import_error = True def configure(app): # only configure .pdf extension if it's enabled # and configured correctly in the environment. if app.config.get('ENABLE_TO_PDF', False) and not import_error: def render_to_pdf(long_slug): return render_pdf(url_for('detail', long_slug=long_slug)) app.add_url_rule('/<path:long_slug>.pdf', view_func=render_to_pdf)
25.756757
74
0.684155
128
953
4.875
0.625
0.051282
0.035256
0
0
0
0
0
0
0
0
0.003968
0.206716
953
36
75
26.472222
0.821429
0.383001
0
0
0
0
0.069565
0.036522
0
0
0
0
0
1
0.142857
false
0
0.571429
0.071429
0.785714
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
66d0fa4f73c90e59d6dc87d8a6c39b035c3b58f1
392
py
Python
jupyter_server_terminals/__init__.py
blink1073/jupyter_server_terminals
cc0363421ab50fded26c8519ea4694bf1a391fce
[ "BSD-3-Clause-Clear" ]
3
2021-12-30T23:55:47.000Z
2022-02-18T01:14:54.000Z
jupyter_server_terminals/__init__.py
blink1073/jupyter_server_terminals
cc0363421ab50fded26c8519ea4694bf1a391fce
[ "BSD-3-Clause-Clear" ]
5
2021-12-26T21:27:11.000Z
2022-03-03T11:37:04.000Z
jupyter_server_terminals/__init__.py
blink1073/jupyter_server_terminals
cc0363421ab50fded26c8519ea4694bf1a391fce
[ "BSD-3-Clause-Clear" ]
4
2021-12-26T21:25:45.000Z
2022-01-27T02:47:10.000Z
from ._version import __version__ # noqa:F401 try: from .app import TerminalsExtensionApp except ModuleNotFoundError: import warnings warnings.warn("Could not import submodules") def _jupyter_server_extension_points(): # pragma: no cover return [ { "module": "jupyter_server_terminals.app", "app": TerminalsExtensionApp, }, ]
21.777778
59
0.663265
37
392
6.72973
0.702703
0.104418
0
0
0
0
0
0
0
0
0
0.010274
0.255102
392
17
60
23.058824
0.842466
0.066327
0
0
0
0
0.176309
0.077135
0
0
0
0
0
1
0.076923
true
0
0.307692
0.076923
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
dd083d9565ab68711f5686f12e467a2276370bf5
188
py
Python
python_module/numpy_module/numpy_test.py
panc-test/python-study
fb172ed4a4f7fb521de9a005cd55115ad63a5b6d
[ "MIT" ]
1
2021-09-17T09:32:56.000Z
2021-09-17T09:32:56.000Z
python_module/numpy_module/numpy_test.py
panc-test/python-study
fb172ed4a4f7fb521de9a005cd55115ad63a5b6d
[ "MIT" ]
2
2021-05-11T05:47:13.000Z
2021-05-11T05:48:10.000Z
python_module/numpy_module/numpy_test.py
panc-test/python-study
fb172ed4a4f7fb521de9a005cd55115ad63a5b6d
[ "MIT" ]
null
null
null
""" numpy模块 —— 数据分析 """ import numpy #创建矩阵 array = numpy.array([[1,2,3],[4,5,6]]) print(array,'\n',type(array)) #矩阵维度 print(array.ndim) #行数和列数 print(array.shape) #元素个数 print(array.size)
11.75
38
0.654255
32
188
3.90625
0.6875
0.32
0
0
0
0
0
0
0
0
0
0.035714
0.106383
188
16
39
11.75
0.696429
0.175532
0
0
0
0
0.013889
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.666667
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
dd093842c9dc2bff582de0e411627f80f0d5bed5
305
py
Python
flask_config.py
khanshifaul/Flask_Template_Skeleton
951ccb9fc73ff5a2d501f2bc6a742553b8177fc5
[ "Apache-2.0" ]
null
null
null
flask_config.py
khanshifaul/Flask_Template_Skeleton
951ccb9fc73ff5a2d501f2bc6a742553b8177fc5
[ "Apache-2.0" ]
null
null
null
flask_config.py
khanshifaul/Flask_Template_Skeleton
951ccb9fc73ff5a2d501f2bc6a742553b8177fc5
[ "Apache-2.0" ]
null
null
null
class Base(object): DEBUG = False TESTING = False class Production(Base): DEBUG = False TESTING = False class Staging(Base): DEBUG = True TESTING = False class Development(Base): DEBUG = True TESTING = True class Testing(Base): DEBUG = False TESTING = True
12.708333
24
0.629508
35
305
5.485714
0.285714
0.1875
0.265625
0.229167
0.28125
0
0
0
0
0
0
0
0.288525
305
23
25
13.26087
0.884793
0
0
0.666667
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dd0c981b401b14bcc898b39cf9bb3a7382b0f82e
1,299
py
Python
bcs-ui/backend/helm/authtoken/views.py
kayinli/bk-bcs
93a0856175f7b066ef835921572c1cac590dbd8e
[ "Apache-2.0" ]
1
2021-11-16T08:15:13.000Z
2021-11-16T08:15:13.000Z
bcs-ui/backend/helm/authtoken/views.py
kayinli/bk-bcs
93a0856175f7b066ef835921572c1cac590dbd8e
[ "Apache-2.0" ]
null
null
null
bcs-ui/backend/helm/authtoken/views.py
kayinli/bk-bcs
93a0856175f7b066ef835921572c1cac590dbd8e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging from rest_framework import viewsets from backend.utils.views import ActionSerializerMixin, with_code_wrapper from .models import Token from .serializers import TokenSLZ, TokenUpdateSLZ logger = logging.getLogger(__name__) @with_code_wrapper class TokenView(ActionSerializerMixin, viewsets.ModelViewSet): serializer_class = TokenSLZ lookup_url_kwarg = "token_id" action_serializers = { 'update': TokenUpdateSLZ, } def get_queryset(self): return Token.objects.filter(username=self.request.user.username)
34.184211
115
0.779831
175
1,299
5.702857
0.697143
0.06012
0.026052
0.032064
0
0
0
0
0
0
0
0.010055
0.157814
1,299
37
116
35.108108
0.902194
0.560431
0
0
0
0
0.024867
0
0
0
0
0
0
1
0.066667
false
0
0.333333
0.066667
0.733333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
dd13e1b360546b453646ec337688f0743f83b569
3,374
py
Python
pyrentals/test_pyrentals.py
asm128/pyrentals
862a0f78d93b18499555dd3c8c1effb7cae9f99b
[ "MIT" ]
null
null
null
pyrentals/test_pyrentals.py
asm128/pyrentals
862a0f78d93b18499555dd3c8c1effb7cae9f99b
[ "MIT" ]
null
null
null
pyrentals/test_pyrentals.py
asm128/pyrentals
862a0f78d93b18499555dd3c8c1effb7cae9f99b
[ "MIT" ]
null
null
null
from pyrentals import Cart import unittest class Test_test_pyrentals(unittest.TestCase): def test_method_empty(self): test_cart_instance = Cart() test_cart_instance.Rentals = {} return self.assertTrue(test_cart_instance.empty(), "Cart should be empty when it's just created.") def test_method_not_empty(self): test_cart_instance = Cart() test_rental = {"Type": "Hour", "Time": 2} test_cart_instance.Rentals = [test_rental, ] return self.assertFalse(test_cart_instance.empty(), "Cart shouldn't be empty if we just added test_rental.") def test_calculate_price_empty(self): test_cart_instance = Cart() return self.assertEqual(0, test_cart_instance.calculate_price()) def test_price_list(self): test_cart_instance = Cart() price_list = {"Hour": 5, "Day": 15, "Month": 60} test_rental = {"Type": "Day", "Time": 2} test_cart_instance.Rentals = [test_rental] * 2 output_prices = [] final_price = test_cart_instance.calculate_price(price_list, output_prices) for price in output_prices: self.assertEqual(30, price) self.assertEqual(60, final_price) return def test_family_discount(self): test_cart_instance = Cart() price_list = {"Hour": 5, "Day": 15, "Month": 60} test_rental = {"Type": "Day", "Time": 2} test_cart_instance.Rentals = [test_rental] * 3 output_prices = [] final_price = test_cart_instance.calculate_price(price_list, output_prices) raw_price = sum([x["Time"] * price_list[x["Type"]] for x in test_cart_instance.Rentals]) self.assertLess(final_price, raw_price) return self.assertTrue(final_price == raw_price - raw_price * .3) def test_family_discount_limit(self): test_cart_instance = Cart() price_list = {"Hour": 5, "Day": 15, "Month": 60} test_rental = {"Type": "Day", "Time": 2} test_cart_instance.Rentals = [test_rental] * 6 output_prices = [] final_price = test_cart_instance.calculate_price(price_list, output_prices) raw_price = sum([x["Time"] * price_list[x["Type"]] for x in test_cart_instance.Rentals]) return self.assertEqual(final_price, raw_price) def 
test_output_prices(self): test_cart_instance = Cart() price_list = {"Hour": 5, "Day": 15, "Month": 60} test_rental = {"Type": "Day", "Time": 2} test_cart_instance.Rentals = [test_rental] * 3 output_prices = [] final_price = test_cart_instance.calculate_price(price_list, output_prices) raw_price = sum([x["Time"] * price_list[x["Type"]] for x in test_cart_instance.Rentals]) return self.assertEqual(raw_price, sum(output_prices)) def test_add_rental(self): test_cart_instance = Cart() price_list = {"Hour": 5, "Day": 15, "Month": 60} test_rental = {"Type": "Day", "Time": 2} for x in range(2): test_cart_instance.add_rental(test_rental["Type"], test_rental["Time"]) output_prices = [] final_price = test_cart_instance.calculate_price(price_list, output_prices) for price in output_prices: self.assertEqual(30, price) self.assertEqual(60, final_price) return if __name__ == '__main__': unittest.main()
43.25641
116
0.648488
439
3,374
4.651481
0.145786
0.101861
0.203722
0.101371
0.721841
0.682664
0.654261
0.621939
0.60333
0.60333
0
0.017733
0.23118
3,374
77
117
43.818182
0.769468
0
0
0.602941
0
0
0.078245
0
0
0
0
0
0.161765
1
0.117647
false
0
0.029412
0
0.279412
0
0
0
0
null
0
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dd1a3f013274c36a04a9e56b4c6a20cb322ded06
287
py
Python
python-safety/predict.py
alexeyinkin/geonames-to-json
6055d4f3484f859a784aa8b9bfd96682a74fcd1b
[ "MIT" ]
null
null
null
python-safety/predict.py
alexeyinkin/geonames-to-json
6055d4f3484f859a784aa8b9bfd96682a74fcd1b
[ "MIT" ]
null
null
null
python-safety/predict.py
alexeyinkin/geonames-to-json
6055d4f3484f859a784aa8b9bfd96682a74fcd1b
[ "MIT" ]
null
null
null
import numpy as np from model import get_model def get_trained_model(): model = get_model() model.load_weights('weights.ckpt') return model #model = get_fit_model('random.tsv') #inputs_raw = [[0,0,0,0,0]] #inputs_np = np.array(inputs_raw) #print(model.predict(inputs_np))
20.5
38
0.71777
47
287
4.148936
0.468085
0.041026
0.046154
0.041026
0
0
0
0
0
0
0
0.020325
0.142857
287
13
39
22.076923
0.772358
0.432056
0
0
0
0
0.075472
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
dd1d14cad8d7b51f2d3997d8681795f2ed9d4e1a
2,863
py
Python
server/blog/models.py
rafay826/django-react
60bd7dcea8bb5c921f80d064e4d16fa34381ae09
[ "MIT" ]
null
null
null
server/blog/models.py
rafay826/django-react
60bd7dcea8bb5c921f80d064e4d16fa34381ae09
[ "MIT" ]
12
2020-06-05T21:17:35.000Z
2022-03-11T23:49:11.000Z
server/blog/models.py
midasdev711/djudo
5717ad63b0ef5dddddfd1d3839fa5231ac21972f
[ "MIT" ]
null
null
null
from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from comments.models import Comment # Create your models here. class PostManager(models.Manager): def active(self, *args, **kwargs): return super(PostManager, self) def upload_location(instance, filename): #filebase, extension = filename.split(".") #return "%s/%s.%s" %(instance.id, instance.id, extension) PostModel = instance.__class__ """ instance.__class__ gets the model Post. We must use this method because the model is defined below. Then create a queryset ordered by the "id"s of each object, Then we get the last object in the queryset with `.last()` Which will give us the most recently created Model instance We add 1 to it, so we get what should be the same id as the the post we are creating. """ return "static/images/posts/%s" % filename class Post(models.Model): id = models.AutoField(primary_key=True) title = models.CharField(max_length=100, unique=True) slug = models.SlugField(max_length=100, unique=True) image = models.ImageField(upload_to=upload_location, null=True, blank=True, width_field="width_field", height_field="height_field") image_url = models.CharField(max_length=1000, null=True, blank=True) height_field = models.IntegerField(default=0) width_field = models.IntegerField(default=0) description = models.CharField(max_length=255, blank=True) body = models.TextField() published = models.BooleanField(default=True) created = models.DateTimeField(db_index=True, auto_now_add=True) category = models.ForeignKey('blog.Category', related_name='posts', on_delete=models.CASCADE) objects = PostManager() def __unicode__(self): return '%s' % self.title def __str__(self): return self.title def get_absolute_url(self): return reverse("blog.views.post", kwargs={"slug": self.slug}) def get_api_url(self): return reverse("posts-api:detail", kwargs={"slug": self.slug}) @property def comments(self): instance = self qs = 
Comment.objects.filter_by_instance(instance) return qs class CategoryManager(models.Manager): def active(self, *args, **kwargs): return super(CategoryManager, self) class Category(models.Model): id = models.AutoField(primary_key=True) title = models.CharField(max_length=100, db_index=True) slug = models.SlugField(max_length=100, db_index=True) class Meta: verbose_name = ("Category") verbose_name_plural = ("Categories") objects = CategoryManager() def __unicode__(self): return '%s' % self.title def get_absolute_url(self): return reverse('blog.views.post', args=[self.slug])
35.7875
103
0.691233
373
2,863
5.155496
0.369973
0.028081
0.037442
0.049922
0.298492
0.25793
0.24857
0.212168
0.184087
0.135205
0
0.00962
0.201188
2,863
80
104
35.7875
0.83122
0.042263
0
0.178571
0
0
0.059351
0.009394
0
0
0
0
0
1
0.178571
false
0
0.071429
0.142857
0.821429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
dd3452c9bed80f5adb210957aa5ef5db051c0f6c
301
py
Python
ml_progress/percent_metric.py
gregrolwes/ml_progress
e352f73847e163993ff9e642973512f070555805
[ "MIT" ]
null
null
null
ml_progress/percent_metric.py
gregrolwes/ml_progress
e352f73847e163993ff9e642973512f070555805
[ "MIT" ]
null
null
null
ml_progress/percent_metric.py
gregrolwes/ml_progress
e352f73847e163993ff9e642973512f070555805
[ "MIT" ]
null
null
null
import sys from .display import Display class PercentMetric(Display): def __init__(self, metric: str): super().__init__() self.metric = metric def update(self, metrics: dict, width=25, height=1): sys.stdout.write("%s: %3d%% \n" % (self.metric, metrics[self.metric]))
27.363636
78
0.644518
39
301
4.769231
0.615385
0.215054
0.150538
0
0
0
0
0
0
0
0
0.016736
0.20598
301
11
78
27.363636
0.761506
0
0
0
0
0
0.039735
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.625
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
dd4db1c506d212f1e3f132f918bc8f782cebb18b
673
py
Python
SourceCode/Bayes/bayeslearn.py
xuanyuansen/PyMachineLearning
c5b88c4d9aa2f5d59160d0824f7cee8377e7e16e
[ "Apache-2.0" ]
1
2017-01-17T06:19:33.000Z
2017-01-17T06:19:33.000Z
SourceCode/Bayes/bayeslearn.py
xuanyuansen/PyMachineLearning
c5b88c4d9aa2f5d59160d0824f7cee8377e7e16e
[ "Apache-2.0" ]
null
null
null
SourceCode/Bayes/bayeslearn.py
xuanyuansen/PyMachineLearning
c5b88c4d9aa2f5d59160d0824f7cee8377e7e16e
[ "Apache-2.0" ]
null
null
null
#coding=utf-8 ''' Created on 2013年9月20日 @author: Wangliaofan ''' import bayes import feedparser from time import * if __name__== '__main__': listOPosts,listClasses = bayes.loadDataSet() print listOPosts,listClasses myVocabList = bayes.createVocabList(listOPosts) print myVocabList trainMat=[] for postinDoc in listOPosts: trainMat.append(bayes.setOfWords2Vec(myVocabList, postinDoc)) print trainMat p0V,p1V,pAb=bayes.trainNB0(trainMat, listClasses) print p0V print p1V print pAb #ny=feedparser.parse('http://newyork.craigslist.org/stp/index.rss') #sleep(5) #print ny['entries'] bayes.spamTest() pass
24.035714
71
0.708767
75
673
6.253333
0.613333
0.089552
0
0
0
0
0
0
0
0
0
0.027372
0.185736
673
28
72
24.035714
0.828467
0.156018
0
0
0
0
0.015564
0
0
0
0
0
0
0
null
null
0.055556
0.166667
null
null
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
dd5e767c2f0f3137c08350fa015010541f736e41
290
py
Python
migrations/824-auto-exclude-regions.py
muffinresearch/zamboni
045a6f07c775b99672af6d9857d295ed02fe5dd9
[ "BSD-3-Clause" ]
null
null
null
migrations/824-auto-exclude-regions.py
muffinresearch/zamboni
045a6f07c775b99672af6d9857d295ed02fe5dd9
[ "BSD-3-Clause" ]
null
null
null
migrations/824-auto-exclude-regions.py
muffinresearch/zamboni
045a6f07c775b99672af6d9857d295ed02fe5dd9
[ "BSD-3-Clause" ]
null
null
null
from mkt.constants import regions from mkt.developers.cron import exclude_new_region def run(): exclude_new_region([ regions.CR, regions.EC, regions.FR, regions.GT, regions.IT, regions.NI, regions.PA, regions.SV, ])
18.125
50
0.586207
34
290
4.882353
0.588235
0.084337
0.192771
0
0
0
0
0
0
0
0
0
0.324138
290
15
51
19.333333
0.846939
0
0
0
0
0
0
0
0
0
0
0
0
1
0.076923
true
0
0.153846
0
0.230769
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
dd63b219f3cb356db869fd14f9d34da5dd98361a
463
py
Python
Constants.py
HKN-UCSD/HIIT
63f726398ca63c92e82a8052a05608a8537e6e87
[ "MIT" ]
null
null
null
Constants.py
HKN-UCSD/HIIT
63f726398ca63c92e82a8052a05608a8537e6e87
[ "MIT" ]
null
null
null
Constants.py
HKN-UCSD/HIIT
63f726398ca63c92e82a8052a05608a8537e6e87
[ "MIT" ]
1
2019-08-18T19:23:44.000Z
2019-08-18T19:23:44.000Z
COLUMNS = ['First Name', 'Last Name', 'Class Standing', 'Cum GPA', 'Major Code', 'Dept', 'Email'] DEPTS = ['CSE', 'ECE', 'MATH', 'BENG'] CLASS_STANDINGS = ['SO', 'JR', 'SR'] DEPTS_MAJORS = dict() # bit of a faux-pas... DEPTS_MAJORS['CSE'] = ['CS25', 'CS26', 'CS27', 'CS28'] DEPTS_MAJORS['ECE'] = ['EC26', 'EC27', 'EC28'] DEPTS_MAJORS['MATH'] = ['MA30'] DEPTS_MAJORS['BENG'] = ['BE25', 'BE26', 'BE27', 'BE28'] CLASS_QUANTILE = {'SO': 0.8, 'JR': 0.75, 'SR': 0.667}
51.444444
97
0.580994
66
463
3.969697
0.681818
0.209924
0
0
0
0
0
0
0
0
0
0.08209
0.131749
463
9
98
51.444444
0.569652
0.043197
0
0
0
0
0.332579
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dd6453c36feaa6225a2781434ed2cbc21f1477e2
258
py
Python
app/admin.py
Syilun/aiot-django-dashboard-postgresql
3aa607bd623006a4c99a97da304985eb908741c8
[ "MIT" ]
null
null
null
app/admin.py
Syilun/aiot-django-dashboard-postgresql
3aa607bd623006a4c99a97da304985eb908741c8
[ "MIT" ]
null
null
null
app/admin.py
Syilun/aiot-django-dashboard-postgresql
3aa607bd623006a4c99a97da304985eb908741c8
[ "MIT" ]
null
null
null
# -*- encoding: utf-8 -*- """ License: MIT Copyright (c) 2019 - present AppSeed.us """ from django.contrib import admin from .models import PERSON from .models import FACE # Register your models here. admin.site.register(PERSON) admin.site.register(FACE)
17.2
39
0.728682
36
258
5.222222
0.638889
0.106383
0.170213
0
0
0
0
0
0
0
0
0.022624
0.143411
258
14
40
18.428571
0.828054
0.403101
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
dd7ea8658ced22af8cde597e98ea9827577d3995
343
py
Python
strings.py
aemreb/telegram_paybot
f973ddd9029f2844901508fa983aa37b6ca93089
[ "MIT" ]
2
2021-09-17T10:55:14.000Z
2021-09-17T10:55:38.000Z
strings.py
aemreb/telegram_paybot
f973ddd9029f2844901508fa983aa37b6ca93089
[ "MIT" ]
null
null
null
strings.py
aemreb/telegram_paybot
f973ddd9029f2844901508fa983aa37b6ca93089
[ "MIT" ]
1
2021-05-31T18:23:02.000Z
2021-05-31T18:23:02.000Z
help = '''Hey 👋 \n\n /signup nick: enter your nick and sign up \n\n /atm: see your balance \n\n /send nick amount: send this nick that amount of buxx 💰 \n ''' signup = '''Hi! Type /signup to sign up.''' user_created = '''Created user. Welcome to Paybot 🤑''' user_exists = '''User already exists ☹️''' not_enough_buxx = '''Not enough Buxx 🙄'''
57.166667
158
0.655977
61
343
3.721311
0.57377
0.026432
0.114537
0
0
0
0
0
0
0
0
0
0.174927
343
5
159
68.6
0.780919
0
0
0
0
0.2
0.714286
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dd83de2fe7b5a8905e65a790566a924385cc7b19
297
py
Python
dados/outside/6-JEL_code.py
correia-marcos/Innovation-on-brazilian-economic-research
98bc677618ec2dff062db42cda7279a5e7065a32
[ "MIT" ]
null
null
null
dados/outside/6-JEL_code.py
correia-marcos/Innovation-on-brazilian-economic-research
98bc677618ec2dff062db42cda7279a5e7065a32
[ "MIT" ]
null
null
null
dados/outside/6-JEL_code.py
correia-marcos/Innovation-on-brazilian-economic-research
98bc677618ec2dff062db42cda7279a5e7065a32
[ "MIT" ]
null
null
null
""" This script was made to anaylse the relation between JEL and areas in ANPEC. The idea is that checking the JEL code of each paper, it can be vizualized whether some papers were published in area (from ANPEC meeting) not expected by their JEL code. """ import os import pandas as pd import
22.846154
76
0.767677
52
297
4.384615
0.826923
0.061404
0
0
0
0
0
0
0
0
0
0
0.195286
297
12
77
24.75
0.953975
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
1
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
dd90daf3764122311fdf88342d48e04ecb1b9e7d
3,946
py
Python
editquality/feature_lists/wikitext.py
paulkernfeld/editquality
029f21278d89d6e50b0eac7b39d8355f8e4686f4
[ "MIT" ]
18
2015-09-13T10:47:31.000Z
2018-08-20T15:00:35.000Z
editquality/feature_lists/wikitext.py
paulkernfeld/editquality
029f21278d89d6e50b0eac7b39d8355f8e4686f4
[ "MIT" ]
98
2015-12-13T12:18:24.000Z
2018-08-07T21:10:46.000Z
editquality/feature_lists/wikitext.py
paulkernfeld/editquality
029f21278d89d6e50b0eac7b39d8355f8e4686f4
[ "MIT" ]
17
2015-09-29T20:52:12.000Z
2018-08-20T11:33:30.000Z
from revscoring.features import Feature, wikitext from revscoring.features.modifiers import div, log, max, sub def _process_new_longest(p_longest, r_longest): if r_longest > p_longest: return r_longest else: return 1 parent = [ log(wikitext.revision.parent.chars + 1), log(wikitext.revision.parent.tokens + 1), log(wikitext.revision.parent.words + 1), log(wikitext.revision.parent.uppercase_words + 1), log(wikitext.revision.parent.headings + 1), log(wikitext.revision.parent.wikilinks + 1), log(wikitext.revision.parent.external_links + 1), log(wikitext.revision.parent.templates + 1), log(wikitext.revision.parent.ref_tags + 1), div(wikitext.revision.parent.chars, max(wikitext.revision.parent.words, 1), name="revision.parent.chars_per_word"), div(wikitext.revision.parent.words, max(wikitext.revision.parent.tokens, 1), name="revision.parent.words_per_token"), div(wikitext.revision.parent.uppercase_words, max(wikitext.revision.parent.words, 1), name="revision.parent.uppercase_words_per_word"), div(wikitext.revision.parent.markups, max(wikitext.revision.parent.tokens, 1), name="revision.parent.markups_per_token"), ] diff = [ wikitext.revision.diff.markup_delta_sum, wikitext.revision.diff.markup_delta_increase, wikitext.revision.diff.markup_delta_decrease, wikitext.revision.diff.markup_prop_delta_sum, wikitext.revision.diff.markup_prop_delta_increase, wikitext.revision.diff.markup_prop_delta_decrease, wikitext.revision.diff.number_delta_sum, wikitext.revision.diff.number_delta_increase, wikitext.revision.diff.number_delta_decrease, wikitext.revision.diff.number_prop_delta_sum, wikitext.revision.diff.number_prop_delta_increase, wikitext.revision.diff.number_prop_delta_decrease, wikitext.revision.diff.uppercase_word_delta_sum, wikitext.revision.diff.uppercase_word_delta_increase, wikitext.revision.diff.uppercase_word_delta_decrease, wikitext.revision.diff.uppercase_word_prop_delta_sum, wikitext.revision.diff.uppercase_word_prop_delta_increase, 
wikitext.revision.diff.uppercase_word_prop_delta_decrease, sub(wikitext.revision.chars, wikitext.revision.parent.chars, name="revision.diff.chars_change"), sub(wikitext.revision.tokens, wikitext.revision.parent.tokens, name="revision.diff.tokens_change"), sub(wikitext.revision.words, wikitext.revision.parent.words, name="revision.diff.words_change"), sub(wikitext.revision.markups, wikitext.revision.parent.markups, name="revision.diff.markups_change"), sub(wikitext.revision.headings, wikitext.revision.parent.headings, name="revision.diff.headings_change"), sub(wikitext.revision.external_links, wikitext.revision.parent.external_links, name="revision.diff.external_links_change"), sub(wikitext.revision.wikilinks, wikitext.revision.parent.wikilinks, name="revision.diff.wikilinks_change"), sub(wikitext.revision.templates, wikitext.revision.parent.templates, name="revision.diff.templates_change"), sub(wikitext.revision.tags, wikitext.revision.parent.tags, name="revision.diff.tags_change"), sub(wikitext.revision.ref_tags, wikitext.revision.parent.ref_tags, name="revision.diff.ref_tags_change"), Feature("revision.diff.longest_new_token", _process_new_longest, returns=int, depends_on=[wikitext.revision.parent.longest_token, wikitext.revision.longest_token]), Feature("revision.diff.longest_new_repeated_char", _process_new_longest, returns=int, depends_on=[wikitext.revision.parent.longest_repeated_char, wikitext.revision.longest_repeated_char]) ]
41.978723
71
0.727826
469
3,946
5.895522
0.115139
0.34141
0.230741
0.081374
0.560579
0.418445
0.220976
0.118626
0.118626
0.047016
0
0.004253
0.165737
3,946
93
72
42.430108
0.835662
0
0
0.068182
0
0
0.123923
0.123923
0
0
0
0
0
1
0.011364
false
0
0.022727
0
0.056818
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06b3cc12c35c6a81cb3cf69f89310f5caed75723
141
py
Python
config_default.py
wecassidy/pishow
310f935688e0b3ceeb8fe11bd0fda902c041dd45
[ "MIT" ]
null
null
null
config_default.py
wecassidy/pishow
310f935688e0b3ceeb8fe11bd0fda902c041dd45
[ "MIT" ]
null
null
null
config_default.py
wecassidy/pishow
310f935688e0b3ceeb8fe11bd0fda902c041dd45
[ "MIT" ]
null
null
null
IMG_DIR = "/path/to/pictures" IMG_POLL_RATE = 1 # minute DWELL_TIME = 5 # seconds FADE_TIME = 1 # seconds REFRESH_RATE = 10 # milliseconds
17.625
32
0.723404
22
141
4.363636
0.772727
0
0
0
0
0
0
0
0
0
0
0.043103
0.177305
141
7
33
20.142857
0.784483
0.248227
0
0
0
0
0.168317
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06be9a64516dc7d92276fa93579a7f01f96fc979
360
py
Python
QASMToQuEST/errors.py
oerc0122/QASMParser
701b6f25f498ea67670f2d85ae0f2e6920aea267
[ "MIT" ]
5
2019-05-10T08:17:57.000Z
2021-12-19T05:06:18.000Z
QASMToQuEST/errors.py
oerc0122/QASMParser
701b6f25f498ea67670f2d85ae0f2e6920aea267
[ "MIT" ]
14
2019-04-11T11:28:08.000Z
2020-02-13T15:18:56.000Z
QASMToQuEST/errors.py
oerc0122/QASMParser
701b6f25f498ea67670f2d85ae0f2e6920aea267
[ "MIT" ]
2
2019-05-10T08:17:23.000Z
2021-12-18T16:37:02.000Z
""" Define the errors which may occur """ langMismatchWarning = "Classical language {} does not match output language {}" langNotDefWarning = "Language {0} translation not found, check QASMToQuEST/langs/{0}.py exists" noLangSpecWarning = "No language specified for screen print" noSpecWarning = "Neither language nor output with recognised language specified"
45
95
0.786111
42
360
6.738095
0.785714
0.120141
0
0
0
0
0
0
0
0
0
0.00639
0.130556
360
7
96
51.428571
0.897764
0.091667
0
0
0
0
0.714734
0.075235
0
0
0
0
0
1
0
false
0
0
0
0
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06cdfe64d4ce6044067a25d0bf7f0ef2fa35cf39
240
py
Python
Home_Work_2_B_Naychuk_Anastasiya/Task2.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
Home_Work_2_B_Naychuk_Anastasiya/Task2.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
Home_Work_2_B_Naychuk_Anastasiya/Task2.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
# y = f(X) # y = 2*x -10 # x>0 # y = 0# x=0 # y = 2 * abs(x) -1 # x<0 print("Введіть число") x = float(input()) y = 0 if x > 0: y = 2*x -10 elif x == 0: y = 0 else: #x<0 y = 2 * abs(x) -1 print ("y: ",y)
13.333333
26
0.375
51
240
1.764706
0.313725
0.133333
0.166667
0.133333
0.2
0.2
0.2
0
0
0
0
0.131034
0.395833
240
17
27
14.117647
0.489655
0.254167
0
0.2
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06d6a4324acde5c358d0bd6522e052a2ede943db
423
py
Python
cookbook/c05/p03_print_sepend.py
itpubs/python3-cookbook
140f5e4cc0416b9674edca7f4c901b1f58fc1415
[ "Apache-2.0" ]
3
2018-09-19T06:44:13.000Z
2019-03-24T10:07:07.000Z
cookbook/c05/p03_print_sepend.py
itpubs/python3-cookbook
140f5e4cc0416b9674edca7f4c901b1f58fc1415
[ "Apache-2.0" ]
2
2020-09-19T17:10:23.000Z
2020-10-17T16:43:52.000Z
cookbook/c05/p03_print_sepend.py
itpubs/python3-cookbook
140f5e4cc0416b9674edca7f4c901b1f58fc1415
[ "Apache-2.0" ]
1
2020-12-22T06:33:18.000Z
2020-12-22T06:33:18.000Z
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Topic: print分隔符和结尾符 Desc : """ def print_sepend(): print('ACME', 50, 91.5) print('ACME', 50, 91.5, sep=',') print('ACME', 50, 91.5, sep=',', end='!!\n') for i in range(5): print(i) for i in range(5): print(i, end=' ') print() row = ['ACME', 50, 91.5] print(*row, sep=',') if __name__ == '__main__': print_sepend()
17.625
48
0.501182
61
423
3.311475
0.459016
0.118812
0.158416
0.178218
0.485149
0.346535
0.178218
0
0
0
0
0.073248
0.257683
423
23
49
18.391304
0.570064
0.170213
0
0.153846
0
0
0.093842
0
0
0
0
0
0
1
0.076923
false
0
0
0
0.076923
0.692308
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
06db5fed30a40e13ffe36a7460de36bf7c61b325
655
py
Python
monthlysal.py
aja512/Python-Lab
8f9c57d6d7f835e31a595223cdddf9c52ebe1cc9
[ "Apache-2.0" ]
null
null
null
monthlysal.py
aja512/Python-Lab
8f9c57d6d7f835e31a595223cdddf9c52ebe1cc9
[ "Apache-2.0" ]
null
null
null
monthlysal.py
aja512/Python-Lab
8f9c57d6d7f835e31a595223cdddf9c52ebe1cc9
[ "Apache-2.0" ]
null
null
null
bs=input("Enter basic salary:") days=input("Enter no. of working days:") ovrti=input("Enter no. of overtime working hrs:") deduct=0 if days<6: deduct=3500 salary=calci(bs,days,ovrti,deduct) elif days>=6 and days<=12: deduct=1000 salary= calci(bs,days,ovrti,deduct) elif days>=13 and days<=18: deduct=800 salary=calci(bs,days,ovrti,deduct) else: deduct=0 salary=calci(bs,days,ovrti,deduct) def calci(bs,days,ovrti,deduct): sal=0 emp=str(input('Enter type of Employee:')) if emp=='Permanent': bonus=22500 sal=bs+ovrti*500+bonus-deduct print(sal) else: bonus=2500 sal=bs+ovrti*500+bonus-deduct print(sal)
22.586207
49
0.690076
109
655
4.146789
0.348624
0.119469
0.121681
0.176991
0.473451
0.424779
0.300885
0.300885
0
0
0
0.067151
0.158779
655
28
50
23.392857
0.753176
0
0
0.444444
0
0
0.169466
0
0
0
0
0
0
0
null
null
0
0
null
null
0.074074
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
06e980c4f9e4c64a9ff9ed3aae6e787174a3969c
193
py
Python
College grade 2/Python 3/Lab_2/Ansel/exercise 1.py
SimonH19009/Lzu_Data-science
bd35c5e156b0db21c3585c11dce15fba0b7003e2
[ "MIT" ]
1
2022-03-06T05:30:44.000Z
2022-03-06T05:30:44.000Z
College grade 2/Python 3/Lab_2/Ansel/exercise 1.py
SimonH19009/Lzu_Data-science
bd35c5e156b0db21c3585c11dce15fba0b7003e2
[ "MIT" ]
null
null
null
College grade 2/Python 3/Lab_2/Ansel/exercise 1.py
SimonH19009/Lzu_Data-science
bd35c5e156b0db21c3585c11dce15fba0b7003e2
[ "MIT" ]
1
2022-03-06T06:07:40.000Z
2022-03-06T06:07:40.000Z
try: a=int(input("Please enter a number:")) if 9<a<100: a = str(a) print(a[0]) print(a[1]) else: print("no correct") except: print("no correct")
17.545455
42
0.487047
29
193
3.241379
0.62069
0.12766
0.297872
0
0
0
0
0
0
0
0
0.047244
0.341969
193
10
43
19.3
0.692913
0
0
0.2
0
0
0.217617
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06f25df0cac2fd1a3b4a6fa7c42cbde635e84014
1,131
py
Python
hypothesis/nn/neuromodulation/base.py
boyali/hypothesis-sre
f44d25eb281d49663d49d134ee73ad542849714b
[ "BSD-3-Clause" ]
45
2019-02-13T14:16:35.000Z
2022-02-23T21:30:02.000Z
hypothesis/nn/neuromodulation/base.py
boyali/hypothesis-sre
f44d25eb281d49663d49d134ee73ad542849714b
[ "BSD-3-Clause" ]
1
2020-01-13T08:29:50.000Z
2020-01-22T10:28:02.000Z
hypothesis/nn/neuromodulation/base.py
boyali/hypothesis-sre
f44d25eb281d49663d49d134ee73ad542849714b
[ "BSD-3-Clause" ]
8
2019-04-23T14:25:08.000Z
2021-07-28T15:05:31.000Z
import hypothesis import torch from hypothesis.nn.util import list_modules_with_type def allocate_neuromodulated_activation(activation, allocator): class LambdaNeuromodulatedActivation(BaseNeuromodulatedModule): def __init__(self): super(LambdaNeuromodulatedActivation, self).__init__( controller=allocator(), activation=activation) return LambdaNeuromodulatedActivation def list_neuromodulated_modules(module): desired_type = BaseNeuromodulatedModule return list_modules_with_type(module, desired_type) class BaseNeuromodulatedModule(torch.nn.Module): def __init__(self, controller, activation=hypothesis.default.activation, **kwargs): super(BaseNeuromodulatedModule, self).__init__() self.activation = activation(**kwargs) self.bias = torch.randn(1, 1) self.controller = controller def forward(self, x, context=None): if context is not None: self.update(context) return self.activation(x + self.bias) def update(self, context): self.bias = self.controller(context)
26.928571
87
0.715296
111
1,131
7.036036
0.342342
0.076825
0.038412
0.048656
0
0
0
0
0
0
0
0.002225
0.205128
1,131
41
88
27.585366
0.866518
0
0
0
0
0
0
0
0
0
0
0
0
1
0.24
false
0
0.12
0
0.56
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
06fe538ead84c59a5f996694a885042a2b6cbe93
822
py
Python
rocketchat_API/APISections/subscriptions.py
dudanogueira/rocketchat_API
190952f07ce04a356c79ad29e9d1e01dea620ba6
[ "MIT" ]
210
2017-03-20T13:36:24.000Z
2022-03-30T17:37:02.000Z
rocketchat_API/APISections/subscriptions.py
dudanogueira/rocketchat_API
190952f07ce04a356c79ad29e9d1e01dea620ba6
[ "MIT" ]
144
2017-03-21T13:50:22.000Z
2022-03-28T09:43:26.000Z
rocketchat_API/APISections/subscriptions.py
dudanogueira/rocketchat_API
190952f07ce04a356c79ad29e9d1e01dea620ba6
[ "MIT" ]
103
2017-03-20T13:54:54.000Z
2022-03-22T05:00:18.000Z
from rocketchat_API.APISections.base import RocketChatBase class RocketChatSubscriptions(RocketChatBase): def subscriptions_get(self, **kwargs): """Get all subscriptions.""" return self.call_api_get("subscriptions.get", kwargs=kwargs) def subscriptions_get_one(self, room_id, **kwargs): """Get the subscription by room id.""" return self.call_api_get("subscriptions.getOne", roomId=room_id, kwargs=kwargs) def subscriptions_unread(self, room_id, **kwargs): """Mark messages as unread by roomId or from a message""" return self.call_api_post("subscriptions.unread", roomId=room_id, kwargs=kwargs) def subscriptions_read(self, rid, **kwargs): """Mark room as read""" return self.call_api_post("subscriptions.read", rid=rid, kwargs=kwargs)
41.1
88
0.708029
103
822
5.475728
0.339806
0.053191
0.099291
0.120567
0.379433
0.379433
0.141844
0
0
0
0
0
0.175182
822
19
89
43.263158
0.831858
0.152068
0
0
0
0
0.110947
0
0
0
0
0
0
1
0.4
false
0
0.1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
06fefeedf2e0b5eb6fdb9935285ddf733447d30c
1,081
py
Python
accounts/models.py
Phist0ne/webvirtcloud
d94ca38e5c8b5bb1323d33067ee6b991775cc390
[ "Apache-2.0" ]
2
2018-03-14T09:46:49.000Z
2019-05-14T11:45:14.000Z
accounts/models.py
JamesLinus/webvirtcloud
d94ca38e5c8b5bb1323d33067ee6b991775cc390
[ "Apache-2.0" ]
1
2018-03-01T04:05:25.000Z
2018-10-01T08:30:00.000Z
accounts/models.py
caicloud/webvirtcloud
d94ca38e5c8b5bb1323d33067ee6b991775cc390
[ "Apache-2.0" ]
1
2019-06-11T19:54:08.000Z
2019-06-11T19:54:08.000Z
from django.db import models from django.contrib.auth.models import User from instances.models import Instance class UserInstance(models.Model): user = models.ForeignKey(User) instance = models.ForeignKey(Instance) is_change = models.BooleanField(default=False) is_delete = models.BooleanField(default=False) is_vnc = models.BooleanField(default=False) def __unicode__(self): return self.instance.name class UserSSHKey(models.Model): user = models.ForeignKey(User) keyname = models.CharField(max_length=25) keypublic = models.CharField(max_length=500) def __unicode__(self): return self.keyname class UserAttributes(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE) can_clone_instances = models.BooleanField(default=False) max_instances = models.IntegerField(default=1) max_cpus = models.IntegerField(default=1) max_memory = models.IntegerField(default=2048) max_disk_size = models.IntegerField(default=20) def __unicode__(self): return self.user.username
30.885714
63
0.747456
131
1,081
5.977099
0.374046
0.091954
0.127714
0.153257
0.337165
0.0894
0
0
0
0
0
0.014317
0.160037
1,081
34
64
31.794118
0.848018
0
0
0.192308
0
0
0
0
0
0
0
0
0
1
0.115385
false
0
0.115385
0.115385
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
6603d02ce7ae8ff687520b5fd3b5f402880ef876
1,616
py
Python
tginviter/storage/base_storage.py
cuamckuu/tg-inviter
80b8d4664d1e2628b46ac1a6d58f8495c408d4b4
[ "MIT" ]
20
2020-08-24T19:11:38.000Z
2022-03-17T19:24:50.000Z
tginviter/storage/base_storage.py
bequirky12/tg-inviter
5cad1bc1afce101be03a2cee805931e77b7f6842
[ "MIT" ]
null
null
null
tginviter/storage/base_storage.py
bequirky12/tg-inviter
5cad1bc1afce101be03a2cee805931e77b7f6842
[ "MIT" ]
8
2021-02-05T11:51:21.000Z
2022-03-22T08:48:44.000Z
import abc from typing import AbstractSet class BaseStorage(abc.ABC): """Abstract base class for storing invite link tokens""" def insert(self, token: str, *, payload: dict, max_uses=1): """Insert token to storage. Shoud call super().insert(...)""" if type(payload) != dict: raise TypeError("Only dict payloads supported") if any([x not in payload for x in ["joinchat_key", "channel_id"]]): raise ValueError("Payload requires channel_id and joinchat_key") if type(payload["channel_id"]) != int: raise TypeError("Value of 'channel_id' should be int") if type(payload["joinchat_key"]) != str: raise TypeError("Value of 'joinchat_key' should be str") @abc.abstractmethod def uses_left(self, token: str) -> int: """Return amount of unused invitations for token""" pass @abc.abstractmethod def count_new_use(self, token: str): """Increase token usages count""" pass @abc.abstractmethod def get_payload(self, token: str) -> dict: """Return payload assosciated with given token""" pass @abc.abstractmethod def get_channel_ids(self) -> AbstractSet[int]: """Return set of all inserted channel_ids""" pass @abc.abstractmethod def is_subscribed(self, channel_id: int, user_id: int) -> bool: """Check if user is in channel's whitelist""" pass @abc.abstractmethod def add_subscription(self, channel_id: int, user_id: int): """Subscribe user to be in channel whitelist""" pass
31.076923
76
0.631188
204
1,616
4.892157
0.401961
0.054108
0.12024
0.12024
0.138277
0.0501
0.0501
0
0
0
0
0.000832
0.256188
1,616
51
77
31.686275
0.829451
0.21349
0
0.4
0
0
0.152846
0
0
0
0
0
0
1
0.233333
false
0.2
0.066667
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
6604fb1cbe01eb3c82eaa8f6e34b1bd3c2c37677
359
py
Python
app/api/v1/__init__.py
Ethan-Ceng/Flask-CMS
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
[ "MIT" ]
null
null
null
app/api/v1/__init__.py
Ethan-Ceng/Flask-CMS
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
[ "MIT" ]
null
null
null
app/api/v1/__init__.py
Ethan-Ceng/Flask-CMS
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
[ "MIT" ]
null
null
null
from flask import Blueprint from app.api.v1 import user, book, client, token def create_blueprint(): bp_v1 = Blueprint('v1', __name__) user.api.register(bp_v1, url_prefix='/client') user.api.register(bp_v1, url_prefix='/user') book.api.register(bp_v1, url_prefix='/book') book.api.register(bp_v1, url_prefix='/token') return bp_v1
25.642857
50
0.707521
56
359
4.267857
0.339286
0.100418
0.217573
0.251046
0.468619
0.468619
0.468619
0
0
0
0
0.026316
0.153203
359
13
51
27.615385
0.759868
0
0
0
0
0
0.069638
0
0
0
0
0
0
1
0.111111
false
0
0.222222
0
0.444444
0.333333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
660ac2d8937432f3e9a4b2a9a74404def921da17
132
py
Python
lab2_typyOperacje/2.15.py
Damian9449/Python
dc9091e15356733821bbb6a768b7d5e428640340
[ "MIT" ]
1
2017-11-15T13:03:40.000Z
2017-11-15T13:03:40.000Z
lab2_typyOperacje/2.15.py
Damian9449/Python
dc9091e15356733821bbb6a768b7d5e428640340
[ "MIT" ]
null
null
null
lab2_typyOperacje/2.15.py
Damian9449/Python
dc9091e15356733821bbb6a768b7d5e428640340
[ "MIT" ]
null
null
null
#!/usr/bin/python L = [1, 3, 6, 77, 34, 45, 26, 81] result = "" for number in L: result = result + str(number) print(result)
13.2
33
0.575758
23
132
3.304348
0.782609
0
0
0
0
0
0
0
0
0
0
0.128713
0.234848
132
9
34
14.666667
0.623762
0.121212
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
66160d3954e8f1108b126460356e2b0740e00e9e
2,055
py
Python
create_db.py
vladstorm98/TaskBoard
ebe4244cc5cbdfda2fdaac84c0158692f440517c
[ "MIT" ]
null
null
null
create_db.py
vladstorm98/TaskBoard
ebe4244cc5cbdfda2fdaac84c0158692f440517c
[ "MIT" ]
null
null
null
create_db.py
vladstorm98/TaskBoard
ebe4244cc5cbdfda2fdaac84c0158692f440517c
[ "MIT" ]
null
null
null
from flask import Flask from flask_sqlalchemy import SQLAlchemy from config import Config app = Flask(__name__) app.config.from_object(Config) db = SQLAlchemy(app) class User(db.Model): __tablename__ = 'user' id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String(128), index=True, unique=True) password_hash = db.Column(db.String(128)) last_seen = db.Column(db.DateTime) profile = db.relationship('Profile', backref='user', lazy='dynamic') class Profile(db.Model): __tablename__ = 'profile' id = db.Column(db.Integer, primary_key=True) first_name = db.Column(db.String(64), index=True) last_name = db.Column(db.String(64), index=True) number = db.Column(db.Integer) price = db.Column(db.String(32)) address = db.Column(db.String(128), index=True) about_client = db.Column(db.String(256)) last_order = db.Column(db.String(32)) user_id = db.Column(db.Integer, db.ForeignKey('user.id')) tasks = db.relationship('Task', backref='client', lazy='dynamic') class Task(db.Model): __tablename__ = 'task' __searchable__ = ['title', 'address', 'note', 'price'] id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String(32)) name = db.Column(db.String(128)) address = db.Column(db.String(128)) note = db.Column(db.String(512)) price = db.Column(db.String(32)) date = db.Column(db.String(32)) in_progress = db.Column(db.Boolean) client_id = db.Column(db.Integer, db.ForeignKey('profile.id')) db.create_all() db.session.commit() # from flask import Flask # from flask_babel import Babel # from flask_babel import format_datetime # from datetime import datetime # from flask import render_template # # ap = Flask(__name__) # babel = Babel(ap) # # @ap.route('/', methods=['GET']) # def index(): # f = format_datetime(datetime(1987, 3, 5, 17, 12), 'EEEE, d. MMMM yyyy H:mm') # # print(f) # return render_template('1') # # # ap.run()
29.357143
83
0.654988
288
2,055
4.517361
0.291667
0.135281
0.169101
0.172175
0.399693
0.325903
0.21445
0.123751
0
0
0
0.027744
0.193187
2,055
69
84
29.782609
0.756936
0.182968
0
0.128205
0
0
0.055311
0
0
0
0
0
0
1
0
false
0.025641
0.076923
0
0.871795
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
661950464dde57e4e6f43aa2be8e46d0304ef9f0
797
py
Python
vid 18 dream logo.py
agneay/turtle-projects
88f7de0c8eb1bb0f37255746c3d11b9c272d1c10
[ "MIT" ]
null
null
null
vid 18 dream logo.py
agneay/turtle-projects
88f7de0c8eb1bb0f37255746c3d11b9c272d1c10
[ "MIT" ]
null
null
null
vid 18 dream logo.py
agneay/turtle-projects
88f7de0c8eb1bb0f37255746c3d11b9c272d1c10
[ "MIT" ]
null
null
null
from turtle import * def l(y,x): lt(y) fd(x) def r(y,x): rt(y) fd(x) def gt(x,y): pu() goto(x,y) pd() bgcolor("lime") color("black","white") shape("circle") ht() speed(1) width(15) gt(-365,-380) begin_fill() seth(90) fd(20) r(5,20) r(4,60) r(5,100) r(5,120) l(85,50) r(10,25) r(15,10) r(15,40) r(15,50) r(10,90) r(20,50) r(20,25) r(10,75) r(30,20) r(10,10) l(30,50) r(15,10) r(10,20) r(10,80) r(10,40) for _ in range(7): r(10,30) r(10,80) r(20,35) l(30,150) for _ in range(3): r(5,150) end_fill() gt(-290,-70) l(90,30) l(10,120) l(15,60) l(10,30) l(5,40) l(10,40) gt(-320,50) r(70,30) l(10,30) l(15,30) l(5,60) l(5,60) l(15,40) r(10,20) l(20,35) color("black") gt(-50,170) stamp() gt(-340,135) stamp() gt(40,-115) write("dream", font = ('Arial', 85, 'italic','bold')) done()
9.16092
53
0.567127
203
797
2.206897
0.344828
0.066964
0.017857
0.03125
0
0
0
0
0
0
0
0.286131
0.140527
797
87
54
9.16092
0.367883
0
0
0.191781
0
0
0.056391
0
0
0
0
0
0
1
0.041096
false
0
0.013699
0
0.054795
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
662106942718d48504f32ad0e27db934931029d3
19,428
py
Python
tests/adapters/test_s3_adapter.py
MonolithAILtd/monolith-filemanager
2369e244e4d8a48890f55d00419a83001a5c6c40
[ "Apache-2.0" ]
3
2021-06-02T09:45:00.000Z
2022-02-01T14:30:01.000Z
tests/adapters/test_s3_adapter.py
MonolithAILtd/monolith-filemanager
2369e244e4d8a48890f55d00419a83001a5c6c40
[ "Apache-2.0" ]
3
2021-05-26T11:46:28.000Z
2021-11-04T10:14:42.000Z
tests/adapters/test_s3_adapter.py
MonolithAILtd/monolith-filemanager
2369e244e4d8a48890f55d00419a83001a5c6c40
[ "Apache-2.0" ]
2
2021-06-04T15:02:14.000Z
2021-09-03T09:26:45.000Z
from unittest import TestCase, main from unittest.mock import patch, MagicMock, call from monolith_filemanager.adapters.s3_processes import S3ProcessesAdapter, S3ProcessesAdapterError class TestS3ProcessesAdapter(TestCase): @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def setUp(self, mock_init) -> None: mock_init.return_value = None self.test_file = S3ProcessesAdapter(file_path="mock/folder/test.xlsx") self.test_file.path = "mock/folder/test.xlsx" self.test_folder = S3ProcessesAdapter(file_path="mock/folder/path") self.test_folder.path = "mock/folder/path" @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter._strip_path_slash") @patch("monolith_filemanager.adapters.s3_processes.V1Engine") @patch("monolith_filemanager.adapters.s3_processes.Base.__init__") def test___init__(self, mock_init, mock_engine, mock_strip_path_slash): mock_init.return_value = None mock_strip_path_slash.return_value = None test = S3ProcessesAdapter(file_path="test") mock_init.assert_called_once_with(file_path="test") mock_engine.assert_called_once_with() self.assertEqual(mock_engine.return_value, test._engine) mock_strip_path_slash.assert_called_once_with() @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_local_file_object(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test path") test.file_types = MagicMock() test.path = MagicMock() out_come = test.local_file_object() test.file_types.get_file.assert_called_once_with(file_path=test.path) test.file_types.get_file.return_value.assert_called_once_with(path=test.path) self.assertEqual(test.file_types.get_file.return_value.return_value, out_come) @patch("monolith_filemanager.adapters.s3_processes.FilePath") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.local_file_object") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_read_file(self, mock_init, 
mock_local_file_object, mock_file_path): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test.path = MagicMock() test._cache = MagicMock() test._pickle_factory = MagicMock() test._engine = MagicMock() test.file_types = MagicMock() test.path.file_type = "pickle" with self.assertRaises(S3ProcessesAdapterError): test.read_file() test.path.file_type = "not pickle" second_cached_path = test.path.to_string.return_value second_out_come = test.read_file() test._cache.create_cache.assert_called_once_with() test._engine.download_data_file.assert_called_once_with(storage_path=second_cached_path, file_path=test._cache.cache_path) mock_local_file_object.return_value.read.assert_called_once_with() mock_file_path.assert_called_once_with(test._engine.download_data_file.return_value) self.assertEqual(mock_file_path.return_value, test.path) self.assertEqual(mock_local_file_object.return_value.read.return_value, second_out_come) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_read_raw_file(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test.path = MagicMock() test._engine = MagicMock() out_come = test.read_raw_file() test._engine.download_raw_data_file.assert_called_once_with(storage_path=test.path.to_string.return_value) self.assertEqual(out_come, test._engine.download_raw_data_file.return_value) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_custom_read_file(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test path") test.path = MagicMock() test._cache = MagicMock() test._engine = MagicMock() test.file_types = MagicMock() test.path.file_type = "any" source_path = test.path custom_read_function = MagicMock(name='read function') data_output = MagicMock(name='data') custom_read_function.return_value = data_output out_come = test.custom_read_file(custom_read_function) 
test._cache.create_cache.assert_called_once_with() test._engine.download_data_file.assert_called_once_with(storage_path=source_path, file_path=test._cache.cache_path) self.assertEqual(test._engine.download_data_file.return_value, test.path) custom_read_function.assert_called_once_with(test.path) self.assertEqual(out_come, data_output) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.local_file_object") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_write_file_supports_s3(self, mock_init, mock_local_file_object): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test._engine = MagicMock() test.path = MagicMock() mock_local_file_object.return_value.supports_s3.return_value = True mock_data = MagicMock() test.write_file(data=mock_data) mock_local_file_object.return_value.write.assert_called_once_with(mock_data) @patch("monolith_filemanager.adapters.s3_processes.FilePath") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.local_file_object") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_write_file_not_supports_s3(self, mock_init, mock_local_file_object, mock_file_path): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test._engine = MagicMock() test.path = MagicMock() test._cache = MagicMock() mock_local_file_object.return_value.supports_s3.return_value = False mock_local_file_object.return_value.path = test.path mock_data = MagicMock() test.write_file(data=mock_data) test._engine.upload_data_from_file.assert_called_once_with(file_path=mock_file_path.return_value.to_string.return_value, storage_path=test.path.to_string.return_value) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_write_raw_file(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test._engine = MagicMock() test.path = MagicMock() 
mock_data = MagicMock() test.write_raw_file(data=mock_data) test._engine.upload_data.assert_called_once_with(storage_path=test.path.to_string.return_value, data=mock_data) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_delete_file(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test path") test._engine = MagicMock() test.path = MagicMock() test.delete_file() test._engine.delete.assert_called_once_with(storage_path=test.path) @patch("monolith_filemanager.adapters.s3_processes.V1Engine._split_s3_path") def test_delete_folder(self, mock_split_path): mock_engine = MagicMock() mock_engine.delete_file.return_value = None self.test_folder._engine = mock_engine mock_split_path.return_value = ("mock-bucket", "mock/folder", "folder") self.test_folder.delete_folder() mock_engine.delete_file.assert_called_once_with(bucket_name="mock-bucket", file_name="mock/folder/") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.increment_files") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.check_name_taken") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.ls") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_write_stream(self, mock_init, mock_ls, mock_name_taken, mock_exists, mock_increment_files): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test._engine = MagicMock() test.path = "mock/folder/file.txt" test._cache = MagicMock() mock_ls.return_value = ([], []) mock_stream = MagicMock() # test name already taken mock_name_taken.return_value = True with self.assertRaises(S3ProcessesAdapterError): test.write_stream(mock_stream) mock_stream.save.assert_called_once() # test already exists mock_stream.reset_mock() mock_name_taken.return_value = False mock_exists.return_value = True 
mock_increment_files.return_value = None self.assertEqual("file.txt", test.write_stream(mock_stream)) cache_path = mock_stream.save.call_args_list[0][0][0] test._engine.upload_data_from_file.assert_called_once_with(storage_path=test.path, file_path=cache_path) mock_increment_files.assert_called_once_with() mock_stream.save.assert_called_once() # test doesn't already exist mock_stream.reset_mock() test._engine.reset_mock() mock_increment_files.reset_mock() mock_exists.return_value = False self.assertEqual("file.txt", test.write_stream(mock_stream)) mock_stream.save.assert_called_once() cache_path = mock_stream.save.call_args_list[0][0][0] test._engine.upload_data_from_file.assert_called_once_with(storage_path=test.path, file_path=cache_path) mock_increment_files.assert_has_calls = [] @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_increment_files(self, mock_init, mock_exists): mock_init.return_value = None test = S3ProcessesAdapter(file_path=MagicMock()) test.path = "mock/path/folder/file.txt" mock_exists.return_value = False test.increment_files() self.assertEqual("mock/path/folder/file 2.txt", test.path) test.path = "mock/path/folder/file.txt" mock_exists.side_effect = [True, False] test.increment_files() self.assertEqual("mock/path/folder/file 3.txt", test.path) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_create_directory_if_not_exists(self, mock_init): mock_init.return_value = None mock_path = MagicMock() test = S3ProcessesAdapter(file_path=mock_path) test._engine = MagicMock() test.path = mock_path test.create_directory_if_not_exists() test._engine.create_folder.assert_called_once_with(storage_path=mock_path) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_exists(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test 
path") test._engine = MagicMock() test.path = MagicMock() test.exists() test._engine.exists.assert_called_once_with(storage_path=test.path) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_ls(self, mock_init): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test path") test._engine = MagicMock() test.path = MagicMock() test.ls() test._engine.ls.assert_called_once_with(storage_path=test.path.to_string.return_value) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.delete_file") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test_batch_delete(self, mock_init, mock_delete_file): mock_init.return_value = None test = S3ProcessesAdapter(file_path="test/path") test.path = "test/path" test.path = "mock/folder/path" mock_paths = ["mock_folder", "mock_file"] mock_delete_file.side_effect = [None, None] test.batch_delete(paths=mock_paths) mock_delete_file.assert_has_calls = [call(path=test.path + mock_paths[0]), call(path=test.path + mock_paths[1])] def test_copy_file(self): mock_new_path = "mock/new/path" mock_engine = MagicMock() mock_engine._split_s3_path.side_effect = [("mock-bucket", "old/file.txt", None), ("mock-bucket", "new/file.txt", None)] mockObject = MagicMock() mockObject.copy_from.return_value = None mock_engine.resource.Object.return_value = mockObject self.test_file._engine = mock_engine self.test_file.copy_file(mock_new_path) mock_engine._split_s3_path.assert_has_calls = [call(self.test_file.path), call(mock_new_path)] mockObject.copy_from.assert_called_once_with(CopySource="mock-bucket/old/file.txt") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.delete_file") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.copy_file") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.check_name_taken") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") 
@patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.ls") @patch("monolith_filemanager.adapters.s3_processes.FilePath") def test_rename_file(self, mock_filepath, mock_ls, mock_exists, mock_check_name_taken, mock_copy_file, mock_delete_file): new_name = "new_name" mock_ext = ".xlsx" mock_filepath.side_effect = ["/".join(self.test_file.path.split("/")[:-1]) + f"/{new_name}" + mock_ext, "/".join(self.test_file.path.split("/")[:-1]) + "/"] mock_ls.return_value = ({}, []) mock_exists.return_value = True mock_check_name_taken.return_value = False mock_copy_file.return_value = None mock_delete_file.return_value = None self.test_file.rename_file(new_name=new_name) mock_ls.assert_called_once_with(path="mock/folder/") mock_copy_file.assert_called_once_with(new_path='mock/folder/new_name.xlsx') mock_delete_file.assert_called_once_with() @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.delete_folder") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.copy_folder") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") @patch("monolith_filemanager.adapters.s3_processes.FilePath") def test_rename_folder(self, mock_filepath, mock_exists, mock_copy_folder, mock_delete_folder): new_name = "new_folder" mock_filepath.return_value = "/".join(self.test_folder.path.split("/")[:-1]) + f"/{new_name}" mock_exists.return_value = False mock_copy_folder.return_value = None mock_delete_folder.return_value = None self.test_folder.rename_folder(new_name=new_name) mock_copy_folder.assert_called_once_with(new_folder=f"mock/folder/{new_name}") mock_delete_folder.assert_called_once_with() @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.delete_file") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.copy_file") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") @patch("monolith_filemanager.adapters.s3_processes.FilePath") def test_move_file(self, 
mock_filepath, mock_exists, mock_copy_file, mock_delete_file): mock_destination_folder = "mock/new/path" mock_filepath.return_value = "mock/new/path/file.txt" mock_exists.return_value = False self.test_file.move_file(destination_folder=mock_destination_folder) mock_copy_file.assert_called_once_with(new_path=mock_filepath.return_value) mock_delete_file.assert_called_once_with() @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.delete_folder") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.copy_folder") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.exists") @patch("monolith_filemanager.adapters.s3_processes.FilePath") def test_move_folder(self, mock_filepath, mock_exists, mock_copy_folder, mock_delete_folder): mock_destination_folder = "mock/new/path" mock_filepath.return_value = "mock/new/path/folder" mock_exists.return_value = False self.test_folder.move_folder(destination_folder=mock_destination_folder) mock_copy_folder.assert_called_once_with(new_folder=mock_filepath.return_value) mock_delete_folder.assert_called_once_with() @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.move_folder") @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.move_file") @patch("monolith_filemanager.adapters.s3_processes.FilePath") def test_batch_move(self, mock_filepath, mock_move_file, mock_move_folder): mock_paths = ["test.xlsx", "test_folder"] mock_destination_folder = "mock/destination/folder" mock_filepath1 = MagicMock() mock_filepath1.to_string.return_value = f"{mock_destination_folder}/{mock_paths[0]}" mock_filepath1.get_file_type.return_value = "xlsx" mock_filepath2 = MagicMock() mock_filepath2.to_string.return_value = f"{mock_destination_folder}/{mock_paths[1]}" mock_filepath2.get_file_type.return_value = None mock_filepath.side_effect = [mock_filepath1, None, mock_filepath2, None] mock_move_file.return_value = None mock_move_folder.return_value = None 
self.test_folder.batch_move(paths=mock_paths, destination_folder=mock_destination_folder) mock_filepath.assert_has_calls([call(f"{mock_destination_folder}/{mock_paths[0]}"), call(f"{self.test_folder.path}/{mock_paths[0]}"), call(f"{mock_destination_folder}/{mock_paths[1]}"), call(f"{self.test_folder.path}/{mock_paths[1]}")]) mock_move_file.assert_called_once_with(destination_folder=mock_destination_folder) mock_move_folder.assert_called_once_with(destination_folder=mock_destination_folder) @patch("monolith_filemanager.adapters.s3_processes.S3ProcessesAdapter.__init__") def test__strip_path_slash(self, mock_init): mock_init.return_value = None mock_path = "mock/folder/path/" test_folder = S3ProcessesAdapter(file_path=mock_path) test_folder.path = mock_path test_folder._strip_path_slash() self.assertEqual(mock_path.rstrip("/"), test_folder.path) if __name__ == "__main__": main()
51.261214
175
0.726992
2,389
19,428
5.484303
0.05023
0.062967
0.10922
0.11731
0.805221
0.731644
0.656312
0.57808
0.543123
0.496336
0
0.00962
0.176035
19,428
378
176
51.396825
0.808846
0.003603
0
0.446875
0
0
0.233595
0.208122
0
0
0
0
0.178125
1
0.075
false
0
0.009375
0
0.0875
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
6621f5308d9319ae5e2c09eb61c97a4f6a12ea20
459
py
Python
maleorfemale.py
harshals13/Jigglypuffpuff
469ea64c71a123cafadc4ae40a1da5171182647b
[ "MIT" ]
null
null
null
maleorfemale.py
harshals13/Jigglypuffpuff
469ea64c71a123cafadc4ae40a1da5171182647b
[ "MIT" ]
null
null
null
maleorfemale.py
harshals13/Jigglypuffpuff
469ea64c71a123cafadc4ae40a1da5171182647b
[ "MIT" ]
null
null
null
from sklearn import tree

# Training data: one row per person as [height, weight, shoe size].
X = [[181, 80, 44], [177, 70, 43], [160, 60, 38], [154, 54, 37],
     [166, 65, 40], [190, 90, 47], [175, 64, 39], [177, 70, 40],
     [159, 55, 45], [171, 75, 42], [181, 85, 43]]

# Gender label for each row of X, in the same order.
Y = ['male', 'female', 'female', 'female', 'male', 'male',
     'male', 'female', 'male', 'female', 'male']

# Fit a decision tree on the labelled samples, then classify one
# new, unseen measurement.
clf = tree.DecisionTreeClassifier().fit(X, Y)

prediction = clf.predict([[180, 80, 33]])
print(prediction)
27
160
0.586057
73
459
3.684932
0.69863
0.111524
0.104089
0
0
0
0
0
0
0
0
0.216495
0.154684
459
17
161
27
0.476804
0.091503
0
0
0
0
0.129808
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
66240c3ee8769bc3f49297e677406217e8beaf2e
1,505
py
Python
TSTP_20210522_CH4_Functions.py
adjectiveJ/tstp_challenges
e0fcc174594387e0c6142980bf2706d7f5cbc5bd
[ "CC0-1.0" ]
null
null
null
TSTP_20210522_CH4_Functions.py
adjectiveJ/tstp_challenges
e0fcc174594387e0c6142980bf2706d7f5cbc5bd
[ "CC0-1.0" ]
null
null
null
TSTP_20210522_CH4_Functions.py
adjectiveJ/tstp_challenges
e0fcc174594387e0c6142980bf2706d7f5cbc5bd
[ "CC0-1.0" ]
null
null
null
""" The Self-Taught Programmer - Chapter 4 Challenges Author: Dante Valentine Date: 22 May, 2021 """ """ CHALLENGE 1 """ to_square = 12 def square_func(x): # Returns the square of x. return x*x resp = square_func(to_square) #print("The square of", to_square, "is", str(resp) + ".") """ CHALLENGE 2 """ to_print = 1.0056 def print_string(x): # Prints a string. print(str(x)) #print("Next I will print a string...") #string_var = print_string(to_print) """ CHALLENGE 3 """ param1 = "horse" param2 = "owl" param3 = "pig" def mixed_params(x, y, z, a="cat", b="dog"): # Prints 3 required parameters and two optional parameters. print(x ,y, z, a, b) #mixed_params(param1, param2, param3) #mixed_params(param1, param2, param3, "mouse", "frog") """ CHALLENGE 4 """ int_var = 3 def int_div_two(x): # Returns the input value divided by 2. return x/2 def int_mult_four(y): # Returns the input value multiplied by 4. return y*4 var1 = int_div_two(int_var) var2 = int_mult_four(var1) #print("Input: " + str(int_var) + " | Div2: " + str(var1) + " | Mult4: " + str(var2)) """ CHALLENGE 5""" string_var = "ghost" #string_var = "7.5" def stringToFloat(x): # Tries to return the input value as a float, else returns error message. try: return float(x) except ValueError: print("Error: Value must be a number.") print_var = stringToFloat(string_var) print(print_var)
21.5
87
0.621927
221
1,505
4.104072
0.39819
0.039691
0.042999
0.00882
0.063947
0
0
0
0
0
0
0.037555
0.239203
1,505
69
88
21.811594
0.754585
0.44186
0
0
0
0
0.079389
0
0
0
0
0
0
1
0.222222
false
0
0
0.111111
0.37037
0.259259
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
662cd92db90d9d0b17d2a45ad47818909732dc56
1,253
py
Python
src/algorithms/utils.py
MSwenne/QSB-Suite
7860e4634bdce10fa79d7f850b6bc2099357d7b0
[ "MIT" ]
null
null
null
src/algorithms/utils.py
MSwenne/QSB-Suite
7860e4634bdce10fa79d7f850b6bc2099357d7b0
[ "MIT" ]
null
null
null
src/algorithms/utils.py
MSwenne/QSB-Suite
7860e4634bdce10fa79d7f850b6bc2099357d7b0
[ "MIT" ]
null
null
null
import random


def QFT(first: int, last: int) -> str:
    """Return QASM applying the quantum Fourier transform to qubits first..last."""
    lines = ["// Begin QFT\n"]
    for ctrl in range(first, last + 1):
        lines.append(f"h q[{ctrl}];\n")
        for tgt in range(ctrl + 1, last + 1):
            angle = 1 / (2 ** (tgt - ctrl + 1))
            lines.append(f"crz({angle}) q[{ctrl}], q[{tgt}];\n")
    return "".join(lines)


def QFT_inv(first: int, last: int) -> str:
    """Return QASM undoing QFT on qubits first..last (gates reversed, angles negated)."""
    lines = ["// Begin QFT inverse\n"]
    for ctrl in range(last, first - 1, -1):
        for tgt in range(last, ctrl, -1):
            angle = -1 / (2 ** (tgt - ctrl + 1))
            lines.append(f"crz({angle}) q[{ctrl}], q[{tgt}];\n")
        lines.append(f"h q[{ctrl}];\n")
    return "".join(lines)


def QASM_prefix(qubits: int, bits: int) -> str:
    """Return the standard OPENQASM 2.0 header with the given register sizes."""
    header = [
        "OPENQASM 2.0;\n",
        "include \"qelib1.inc\";\n\n",
        f"qreg q[{qubits}];\n",
        f"creg c[{bits}];\n\n",
    ]
    return "".join(header)


def random_pauli() -> str:
    """Pick a uniformly random Pauli gate name."""
    return random.choice(["x", "y", "z"])


def random_cliff3() -> str:
    """Pick a uniformly random generator of the Clifford group (3-gate set)."""
    return random.choice(["x", "h", "s"])


def random_cliff7() -> str:
    """Pick a uniformly random Clifford gate from a 7-gate set."""
    return random.choice(["x", "y", "z", "h", "sx", "sy", "s"])


def random_univeral() -> str:
    """Pick a uniformly random gate from a universal set (name kept as-is for callers)."""
    return random.choice(["x", "y", "z", "h", "t"])


def random_cgate(qubits):
    """Return a cz instruction between two distinct random qubits in [0, qubits)."""
    ctrl = random.randint(0, qubits - 1)
    tgt = random.randint(0, qubits - 1)
    # Re-draw the target until it differs from the control.
    while ctrl == tgt:
        tgt = random.randint(0, qubits - 1)
    return f"cz q[{ctrl}], q[{tgt}];\n"
27.23913
63
0.51237
214
1,253
2.96729
0.257009
0.037795
0.094488
0.132283
0.464567
0.359055
0.264567
0.226772
0.056693
0.056693
0
0.024313
0.245012
1,253
45
64
27.844444
0.646934
0.014366
0
0.2
0
0.057143
0.197891
0.034874
0
0
0
0
0
1
0.228571
false
0
0.028571
0.114286
0.485714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
b077615a398497bbf160df8bf076bda9f756c99d
337
py
Python
vendor/views/sign_in_views.py
hossainchisty/Multi-Vendor-eCommerce
42c5f62b8b098255cc9ea57858d3cc7de94bd76a
[ "MIT" ]
16
2021-09-22T19:08:28.000Z
2022-03-18T18:57:02.000Z
vendor/views/sign_in_views.py
hossainchisty/Multi-Vendor-eCommerce
42c5f62b8b098255cc9ea57858d3cc7de94bd76a
[ "MIT" ]
6
2021-09-30T12:36:02.000Z
2022-03-18T22:18:00.000Z
vendor/views/sign_in_views.py
hossainchisty/Multi-Vendor-eCommerce
42c5f62b8b098255cc9ea57858d3cc7de94bd76a
[ "MIT" ]
6
2021-12-06T02:04:51.000Z
2022-03-13T14:38:14.000Z
from django.contrib.auth.views import LoginView
from django.contrib.auth.forms import AuthenticationForm


class SignInView(LoginView):
    """Vendor sign-in page: a thin configuration of Django's stock LoginView."""
    # Standard username/password form from django.contrib.auth.
    form_class = AuthenticationForm
    # Template rendered on GET and on failed login attempts.
    template_name = 'vendor/sign_in.html'
    # NOTE(review): redirect_field_name is the *query-parameter name* LoginView
    # reads the post-login URL from (default "next"); a URL-pattern name here
    # looks suspicious — confirm against how the template builds the form.
    redirect_field_name = 'vendor:root_path'
    # NOTE(review): LoginView resolves success_url as a URL, not a URL name;
    # a 'namespace:name' string usually needs reverse_lazy() — confirm against
    # the project's URL config.
    success_url = 'vendor:root_path'
30.636364
57
0.735905
41
337
5.853659
0.609756
0.083333
0.141667
0.175
0
0
0
0
0
0
0
0
0.178042
337
10
58
33.7
0.866426
0.053412
0
0
0
0
0.169435
0
0
0
0
0
0
1
0
false
0
0.285714
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
b078190b27e28c76e2f6312fda29c3c87dbe3e12
692
py
Python
django_excel_to_model/tests/test_openpyxl.py
weijia/django-excel-to-model
2bab354835e31133f1344bee2cb12cb3627eef3d
[ "BSD-3-Clause" ]
2
2021-03-14T14:29:19.000Z
2021-05-02T10:36:47.000Z
django_excel_to_model/tests/test_openpyxl.py
weijia/django-excel-to-model
2bab354835e31133f1344bee2cb12cb3627eef3d
[ "BSD-3-Clause" ]
1
2020-03-11T06:20:59.000Z
2020-04-22T02:17:35.000Z
django_excel_to_model/tests/test_openpyxl.py
weijia/django-excel-to-model
2bab354835e31133f1344bee2cb12cb3627eef3d
[ "BSD-3-Clause" ]
4
2018-04-28T02:39:05.000Z
2021-07-27T02:04:27.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_django-excel-to-model ------------ Tests for `django-excel-to-model` models module. """ from django.test import TestCase # from unittest import TestCase from django_excel_to_model.openpyxl_reader import OpenpyxlExcelFile from sap_asset_master_data20191224.models import mapping class TestOpenpyxl(TestCase): def setUp(self): pass def test_something(self): x = OpenpyxlExcelFile(r"C:\N-PC0WN7R6-Data\q19420\Downloads\sapItems20191223-1.XLSx") s = x.get_sheet(0) s.set_header_row(0) for i in s.enumerate_mapped(mapping, 2): print(i) def tearDown(self): pass
21.625
93
0.679191
93
692
4.913978
0.645161
0.07221
0.085339
0.118162
0
0
0
0
0
0
0
0.052252
0.197977
692
31
94
22.322581
0.771171
0.234104
0
0.142857
0
0
0.113462
0.113462
0
0
0
0
0
1
0.214286
false
0.142857
0.214286
0
0.5
0.071429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
b08769fd169d1bb3f57d9800c62bb6c63602cc3c
3,685
py
Python
kappmax_prediction_scripts/media.py
coltonlloyd/keff_mapping
eea8450561232d32c049455948d88917bf56ddd8
[ "MIT" ]
null
null
null
kappmax_prediction_scripts/media.py
coltonlloyd/keff_mapping
eea8450561232d32c049455948d88917bf56ddd8
[ "MIT" ]
null
null
null
kappmax_prediction_scripts/media.py
coltonlloyd/keff_mapping
eea8450561232d32c049455948d88917bf56ddd8
[ "MIT" ]
2
2019-02-05T15:24:29.000Z
2020-04-16T11:06:06.000Z
# Exchange-reaction lower bounds describing rich LB medium.
# -1000 = effectively unconstrained uptake; protons and water are
# capped at -100 as in the original model setup.
LB_media = {
    "EX_ni2_e": -1000, "EX_dcyt_e": -1000, "EX_hg2_e": -1000,
    "EX_ins_e": -1000, "EX_cd2_e": -1000, "EX_so4_e": -1000,
    "EX_uri_e": -1000, "EX_tungs_e": -1000, "EX_glu__L_e": -1000,
    "EX_slnt_e": -1000, "EX_trp__L_e": -1000, "EX_dad__2_e": -1000,
    "EX_mobd_e": -1000, "EX_val__L_e": -1000, "EX_cobalt2_e": -1000,
    "EX_gln__L_e": -1000, "EX_co2_e": -1000, "EX_k_e": -1000,
    "EX_cu2_e": -1000, "EX_sel_e": -1000, "EX_na1_e": -1000,
    "EX_cl_e": -1000, "EX_fe3_e": -1000, "EX_arg__L_e": -1000,
    "EX_pnto__R_e": -1000, "EX_lys__L_e": -1000, "EX_ala__L_e": -1000,
    "EX_gal_e": -1000, "EX_cbl1_e": -1000, "EX_ser__L_e": -1000,
    "EX_adn_e": -1000, "EX_thr__L_e": -1000, "EX_pi_e": -1000,
    "EX_thymd_e": -1000, "EX_mn2_e": -1000, "EX_phe__L_e": -1000,
    "EX_leu__L_e": -1000, "EX_ura_e": -1000,
    "EX_h_e": -100, "EX_h2o_e": -100,
    "EX_aso3_e": -1000, "EX_hxan_e": -1000, "EX_glc__D_e": -1000,
    "EX_nac_e": -1000, "EX_his__L_e": -1000, "EX_o2_e": -1000,
    "EX_pro__L_e": -1000, "EX_mg2_e": -1000, "EX_asp__L_e": -1000,
    "EX_gly_e": -1000, "EX_cys__L_e": -1000, "EX_fe2_e": -1000,
    "EX_ca2_e": -1000, "EX_tyr__L_e": -1000, "EX_zn2_e": -1000,
    "EX_fru_e": -1000, "EX_met__L_e": -1000, "EX_ile__L_e": -1000
}

# Glycerol plus all amino acids: exchange bounds for the 'Glycerol + AA'
# condition.
aas = {
    "EX_glyc_e": -1000, "EX_asp__L_e": -1000, "EX_gly_e": -1000,
    "EX_cys__L_e": -1000, "EX_met__L_e": -1000, "EX_ile__L_e": -1000,
    "EX_tyr__L_e": -1000, "EX_pro__L_e": -1000, "EX_his__L_e": -1000,
    "EX_phe__L_e": -1000, "EX_leu__L_e": -1000, "EX_ser__L_e": -1000,
    "EX_arg__L_e": -1000, "EX_lys__L_e": -1000, "EX_ala__L_e": -1000,
    "EX_gln__L_e": -1000, "EX_glu__L_e": -1000, "EX_trp__L_e": -1000,
    "EX_val__L_e": -1000, "EX_thr__L_e": -1000, "EX_asn__L_e": -1000
}

# Mapping of Aerbersold media conditions to exchange reaction ids.
# 'LB' and 'Glycerol + AA' map to '' because they are multi-reaction
# conditions handled specially in set_media().
media_dict = {'Glucose': 'EX_glc__D_e',
              'Acetate': 'EX_ac_e',
              'Pyruvate': 'EX_pyr_e',
              'Glycerol': 'EX_glyc_e',
              'Fumarate': 'EX_fum_e',
              'Succinate': 'EX_succ_e',
              'LB': '',
              'Glucosamine': 'EX_gam_e',
              'Mannose': 'EX_man_e',
              'Xylose': 'EX_xyl__D_e',
              'Fructose': 'EX_fru_e',
              'Glycerol + AA': '',
              'Galactose': 'EX_gal_e',
              'Gluconate': 'EX_glcn_e'}

# Media-condition names as used by an older ME-model results dataframe.
map_media_to_old_me_df = {
    'Glucose': 'base',
    'Acetate': 'Acetate',
    'Fumarate': 'Fumarate',
    'Glycerol': 'Glycerol',
    'Pyruvate': 'Pyruvate',
    'Succinate': 'Succinate'
}


def set_media(model, name, value=-1000):
    """Switch *model* onto the named growth condition.

    Zeroes the default glucose uptake, then opens the exchange
    reaction(s) for *name*, which may be a raw exchange-reaction id,
    'Glycerol + AA', 'LB', or a key of ``media_dict``.

    Parameters
    ----------
    model : COBRA-style model exposing a ``reactions`` collection
    name : str
        Media condition or exchange-reaction id.
    value : numeric, optional
        Lower bound (maximum uptake) to apply; default -1000.

    Returns
    -------
    list of str
        Ids of the exchange reactions whose lower bounds were changed.

    Raises
    ------
    UserWarning
        If *name* matches none of the recognized conditions.
    """
    model.reactions.EX_glc__D_e.lower_bound = 0
    reactions_changed = []
    if name in model.reactions:
        model.reactions.get_by_id(name).lower_bound = value
        reactions_changed.append(name)
    elif name == 'Glycerol + AA':
        for r, v in aas.items():
            model_rxn = model.reactions.get_by_id(r)
            # Only open bounds the model has not already set.
            if model_rxn.lower_bound == 0:
                model_rxn.lower_bound = v
                reactions_changed.append(r)
    elif name == 'LB':
        for r, v in LB_media.items():
            model_rxn = model.reactions.get_by_id(r)
            if model_rxn.lower_bound == 0:
                model_rxn.lower_bound = v
                reactions_changed.append(r)
    elif name in media_dict:
        model.reactions.get_by_id(media_dict[name]).lower_bound = value
        reactions_changed.append(media_dict[name])
    else:
        # Bug fix: the original format string 'Media (s) not valid' had no
        # %s placeholder, so applying % raised TypeError instead of this
        # warning. The placeholder is restored here.
        raise UserWarning('Media (%s) not valid' % name)
    return reactions_changed
29.246032
76
0.566079
575
3,685
3.090435
0.231304
0.216657
0.295442
0.157569
0.481148
0.457513
0.457513
0.411367
0.254924
0.254924
0
0.1256
0.2654
3,685
125
77
29.48
0.530846
0.016011
0
0.376068
0
0
0.304084
0
0
0
0
0
0
1
0.008547
false
0
0
0
0.017094
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b08a24bef63250d06b5557da066625b5924bd251
178
py
Python
17. Chapter_/xmlrpc_client.py
Mikma03/Python_Bill_Lubanovic_BookCodes
8b5b228bb500a08af645a1db6f7c5f33ef5f0512
[ "MIT" ]
null
null
null
17. Chapter_/xmlrpc_client.py
Mikma03/Python_Bill_Lubanovic_BookCodes
8b5b228bb500a08af645a1db6f7c5f33ef5f0512
[ "MIT" ]
null
null
null
17. Chapter_/xmlrpc_client.py
Mikma03/Python_Bill_Lubanovic_BookCodes
8b5b228bb500a08af645a1db6f7c5f33ef5f0512
[ "MIT" ]
null
null
null
import xmlrpc.client

# Address of the local XML-RPC demo server started separately.
SERVER_URL = "http://localhost:6789/"

# Attribute access on the proxy becomes a remote procedure call.
proxy = xmlrpc.client.ServerProxy(SERVER_URL)

num = 7
result = proxy.double(num)  # server-side: returns num * 2

# (Polish) "Twice the number %s equals %s"
print("Dwukrotność liczby %s jest równa %s" % (num, result))
25.428571
60
0.724719
25
178
5.16
0.72
0.186047
0
0
0
0
0
0
0
0
0
0.031847
0.117978
178
6
61
29.666667
0.789809
0
0
0
0
0
0.320225
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b08bef0d9eac560231bad41dab1d173a448b34ce
843
py
Python
Chapter03/function_enforcement.py
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
805d00c7a54927ba94c9077e9a580508ee3c5e56
[ "MIT" ]
13
2018-06-21T01:44:49.000Z
2021-12-01T10:49:53.000Z
Chapter03/function_enforcement.py
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
805d00c7a54927ba94c9077e9a580508ee3c5e56
[ "MIT" ]
null
null
null
Chapter03/function_enforcement.py
PacktPublishing/Secret-Recipes-of-the-Python-Ninja
805d00c7a54927ba94c9077e9a580508ee3c5e56
[ "MIT" ]
6
2018-10-05T08:29:24.000Z
2022-01-11T14:49:50.000Z
import functools


def accepts(*types):
    """Decorator factory: assert positional-argument types at call time.

    ``@accepts(int, (int, float))`` checks, on every call, that each
    positional argument is an instance of the corresponding entry in
    *types* (tuples work as in isinstance()).

    NOTE: assert-based checks vanish under ``python -O``; this is the
    book's teaching aid, not production validation.
    """
    def check_accepts(f):
        # Bug fix: the original read f.func_code / f.func_name (Python 2
        # only attributes), which raises AttributeError on Python 3 the
        # moment the decorator is applied. We also unwrap any inner
        # decorator layer (functools.wraps sets __wrapped__) so stacking
        # @accepts above @returns compares against the real function's
        # argument count, not the wrapper's (*args, **kwds) => 0.
        target = getattr(f, "__wrapped__", f)
        assert len(types) == target.__code__.co_argcount

        @functools.wraps(f)
        def new_f(*args, **kwds):
            for (a, t) in zip(args, types):
                assert isinstance(a, t), \
                    "arg %r does not match %s" % (a, t)
            return f(*args, **kwds)
        return new_f
    return check_accepts


def returns(rtype):
    """Decorator factory: assert the return value's type at call time."""
    def check_returns(f):
        @functools.wraps(f)
        def new_f(*args, **kwds):
            result = f(*args, **kwds)
            assert isinstance(result, rtype), \
                "return value %r does not match %s" % (result, rtype)
            return result
        return new_f
    return check_returns


@accepts(int, (int, float))
@returns((int, float))
def func(arg1, arg2):
    """Return the product of an int and an int-or-float."""
    return arg1 * arg2
30.107143
71
0.548043
114
843
3.912281
0.315789
0.053812
0.080717
0.049327
0.318386
0.188341
0.188341
0.188341
0.188341
0.188341
0
0.00708
0.329775
843
27
72
31.222222
0.782301
0
0
0.24
0
0
0.067616
0
0
0
0
0
0.12
1
0.28
false
0
0
0.04
0.56
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
b091329df5d41c9910c06cb7d5946e735531a835
345
py
Python
python/1920.py
zheedong/BaekJoon
7f9e00085276a337d18ee3bb90c98126f7af4d3a
[ "MIT" ]
null
null
null
python/1920.py
zheedong/BaekJoon
7f9e00085276a337d18ee3bb90c98126f7af4d3a
[ "MIT" ]
null
null
null
python/1920.py
zheedong/BaekJoon
7f9e00085276a337d18ee3bb90c98126f7af4d3a
[ "MIT" ]
null
null
null
test_case_1 = """ 5 # N 4 1 5 2 3 5 # M 1 3 7 9 5 """ ''' result 1 1 0 0 1 ''' N = int(input()) A = list(map(int, input().split())) A_set = set(A) # Set in으로 문제 한 번에 해결... M = int(input()) B = list(map(int, input().split())) for i in range(0, M): if B[i] in A_set: print(1) else: print(0)
12.321429
46
0.46087
68
345
2.279412
0.485294
0.206452
0.129032
0.193548
0.258065
0
0
0
0
0
0
0.093333
0.347826
345
28
47
12.321429
0.595556
0.063768
0
0
0
0
0.184564
0
0
0
0
0
0
1
0
false
0
0
0
0
0.125
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b09631a3441037754162b7e57cc1cff16fb106f6
1,452
py
Python
src/api/access_module.py
GuilhermeVieira/mac0350-database
1b3b344cda09187cc5f7186d86bea385f77509dc
[ "MIT" ]
null
null
null
src/api/access_module.py
GuilhermeVieira/mac0350-database
1b3b344cda09187cc5f7186d86bea385f77509dc
[ "MIT" ]
1
2019-06-02T22:40:24.000Z
2019-06-02T22:40:24.000Z
src/api/access_module.py
GuilhermeVieira/mac0350-database
1b3b344cda09187cc5f7186d86bea385f77509dc
[ "MIT" ]
null
null
null
import databases
from database_handler import load_session, func
from flask_login import LoginManager, UserMixin


class AccessModule:
    # Shared ORM session and declarative base, bound to the access database.
    # NOTE(review): created once at class-definition time, so all instances
    # share one session — confirm that is intended.
    session, Base = load_session(databases.urls['DATABASE_ACCESS_URL'])

    def __init__(self):
        """No per-instance setup; all state lives on the class attributes."""
        return

    def create_user(self, email, password):
        # Delegates to the DB function cria_usuario ("create user" in
        # Portuguese) and commits.
        # NOTE(review): on failure this returns str(e), which is truthy —
        # a caller testing the return for success cannot distinguish the
        # two outcomes; confirm callers check `is True`.
        try:
            self.session.execute(func.cria_usuario(email, password))
            self.session.commit()
            return True
        except Exception as e:
            return str(e)

    def authenticate_user(self, email, password):
        # First column of the verifica_senha ("check password") result,
        # or None if the call fails / returns no row.
        try:
            return self.session.execute(func.verifica_senha(email, password)).first()[0]
        except Exception as e:
            print('Erro: ' + str(e))
            return None

    def get_user_by_id(self, us_id):
        # Primary-key lookup of a User row.
        return self.session.query(User).get(us_id)

    def get_user_by_email(self, us_email):
        # First User row matching the given email (stringified defensively).
        return self.session.query(User).filter_by(us_email=str(us_email)).first()

    def is_allowed(self, us_id, service):
        # First column of tem_acesso ("has access"); False on any error.
        try:
            return self.session.execute(func.tem_acesso(us_id, service)).first()[0]
        except Exception as e:
            print('Error: ' + str(e))
            return False


class User(UserMixin, AccessModule.Base):
    # ORM model for the 'users' table; columns are reflected from the
    # database at import time via autoload.
    __tablename__ = 'users'
    __table_args__ = {
        'autoload': True
    }

    def get_id(self):
        # flask_login hook: unique identifier for the logged-in user.
        return self.us_id
29.632653
88
0.623967
183
1,452
4.721311
0.360656
0.08912
0.078704
0.069444
0.251157
0.138889
0.06713
0
0
0
0
0.001885
0.269284
1,452
48
89
30.25
0.812441
0.046143
0
0.171429
0
0
0.033259
0
0
0
0
0
0
1
0.2
false
0.114286
0.085714
0.085714
0.685714
0.057143
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
b09a66e328205a7eed665f95fa47dd121d3d6481
844
py
Python
test/test_contact.py
hawkins/Shawk
c5be1165d8f6c0471544d06f7da07156df1dcc10
[ "MIT" ]
13
2016-05-05T11:16:31.000Z
2021-02-25T11:23:14.000Z
test/test_contact.py
hawkins/Shawk
c5be1165d8f6c0471544d06f7da07156df1dcc10
[ "MIT" ]
18
2016-05-05T20:23:51.000Z
2020-12-25T16:34:05.000Z
test/test_contact.py
hawkins/Shawk
c5be1165d8f6c0471544d06f7da07156df1dcc10
[ "MIT" ]
5
2017-07-25T23:50:43.000Z
2021-12-04T11:05:13.000Z
from shawk import Contact

# Shared fixtures: one contact without a display name, one with.
mini_contact = Contact(12345678, 'Verizon')
name_contact = Contact(12345678, 'Verizon', 'Somebody')


def test_repr_minimal():
    """repr() of a nameless contact falls back to '<No name>'."""
    expected = "<shawk.Contact('12345678', 'Verizon', '<No name>')>"
    assert repr(mini_contact) == expected


def test_repr_with_name():
    """repr() includes the display name when one was given."""
    expected = "<shawk.Contact('12345678', 'Verizon', 'Somebody')>"
    assert repr(name_contact) == expected


def test_string_minimal():
    """str() of a nameless contact uses the '<No name>' placeholder."""
    assert str(mini_contact) == '<No name>: 12345678 (Verizon)'


def test_string_with_name():
    """str() leads with the display name when present."""
    assert str(name_contact) == 'Somebody: 12345678 (Verizon)'


def test_get_address_verizon():
    """Verizon contacts get a number@vtext.com SMS gateway address."""
    assert name_contact.get_address() == '12345678@vtext.com'


def test_get_number():
    """get_number() returns the phone number as a string."""
    assert name_contact.get_number() == '12345678'


def test_get_name():
    """get_name() returns the display name."""
    assert name_contact.get_name() == 'Somebody'
29.103448
87
0.716825
107
844
5.383178
0.261682
0.085069
0.152778
0.104167
0.208333
0.128472
0
0
0
0
0
0.086486
0.123223
844
28
88
30.142857
0.691892
0.060427
0
0
0
0
0.271229
0.065906
0
0
0
0
0.411765
1
0.411765
false
0
0.058824
0
0.470588
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
2
b0c2a699dca314b9665d88e6e73956104ae6f215
2,852
py
Python
st_library/utils/databases/postgres.py
shortesttrack/dataprovider-py
3ecbcb5e09da09614a708e4ef990acdd3443c6ed
[ "Apache-2.0" ]
null
null
null
st_library/utils/databases/postgres.py
shortesttrack/dataprovider-py
3ecbcb5e09da09614a708e4ef990acdd3443c6ed
[ "Apache-2.0" ]
2
2018-03-27T11:06:46.000Z
2020-10-27T20:48:51.000Z
st_library/utils/databases/postgres.py
shortesttrack/dataprovider-py
3ecbcb5e09da09614a708e4ef990acdd3443c6ed
[ "Apache-2.0" ]
4
2018-02-26T08:12:39.000Z
2018-05-18T06:01:01.000Z
import backoff
import psycopg2
from psycopg2.extras import DictCursor, NamedTupleCursor

from st_library.utils.generics.connectors import ConnectorContainer

# Errors that indicate the server connection was lost; operations wrapped
# with `_backoff` are retried (exponential backoff, capped at 30 tries /
# 30 seconds total) when one of these is raised.
_disconnect_errors = (psycopg2.InterfaceError, psycopg2.OperationalError,)
_backoff = backoff.on_exception(backoff.expo, _disconnect_errors, max_time=30, max_tries=30)


class Postgres(object):
    """Thin lazy-connecting wrapper around a single psycopg2 connection.

    The connection and cursor are created on first use and reused; methods
    touching the connection retry on disconnect errors via `_backoff`.
    """

    # Re-exported so callers can pass e.g. `Postgres.DictCursor` to
    # `set_cursor_type` without importing psycopg2.extras themselves.
    DictCursor = DictCursor
    NamedTupleCursor = NamedTupleCursor

    def __init__(self, name, host, port, username, password):
        # Connection parameters; the actual connection is deferred until
        # `_get_connection` is first called.
        self._name = name
        self._host = host
        self._port = port
        self._username = username
        self._password = password
        self._conn = None
        self._cursor = None
        self._cursor_type = None

    def __repr__(self):
        return '<Postgres db "{}">'.format(self._name)

    def set_cursor_type(self, cursor_type):
        # Takes effect the next time a cursor is (re)created in `execute`;
        # an already-open cursor keeps its old factory.
        self._cursor_type = cursor_type

    @property
    def name(self):
        """Database name (also used as the container lookup key)."""
        return self._name

    @_backoff
    def _get_connection(self):
        # Reuse the live connection if we have one; otherwise (re)connect.
        if self._conn and not self._conn.closed:
            return self._conn
        db_connection = self._do_get_connection()
        self._conn = db_connection
        return self._conn

    def _do_get_connection(self):
        # Separated out so subclasses/tests can override connection creation.
        return psycopg2.connect(database=self._name, user=self._username, password=self._password, host=self._host, port=self._port)

    @_backoff
    def execute(self, *args, **kwargs):
        """Execute a statement, lazily (re)creating the shared cursor."""
        if self._cursor is None or self._cursor.closed:
            self._cursor = self._get_connection().cursor(cursor_factory=self._cursor_type)
        return self._cursor.execute(*args, **kwargs)

    def fetchall(self):
        # NOTE: assumes `execute` was called first; otherwise `_cursor` is
        # None and this raises AttributeError. Same for fetchmany/fetchone.
        return self._cursor.fetchall()

    def fetchmany(self, *args, **kwargs):
        return self._cursor.fetchmany(*args, **kwargs)

    def fetchone(self):
        return self._cursor.fetchone()

    @_backoff
    def commit(self):
        self._get_connection().commit()

    @_backoff
    def cancel(self):
        self._get_connection().cancel()

    @_backoff
    def close(self):
        # NOTE(review): if no connection exists yet, `_get_connection`
        # opens one just to close it — presumably harmless, but verify.
        self._get_connection().close()

    @_backoff
    def rollback(self):
        self._get_connection().rollback()


class PostgresContainer(ConnectorContainer):
    """Container that builds `Postgres` connectors from fetched parameters."""

    def _do_initialize_data(self):
        # Must only run once per container.
        assert not len(self._list)
        params = self._fetch_param_dict([
            'psql_host',
            'psql_name',
            'psql_username',
            'psql_password',
            'psql_port'
        ])
        # Currently a single server; the loop shape anticipates several.
        list_of_servers_params = [params]
        for server in list_of_servers_params:
            obj = Postgres(server['psql_name'], server['psql_host'], int(server['psql_port']), server['psql_username'], server['psql_password'])
            self._list.append(obj)
            # Also index by database name for keyed lookup.
            self._dict[obj.name] = obj
29.102041
92
0.630435
314
2,852
5.39172
0.27707
0.07088
0.050207
0.049616
0.022445
0
0
0
0
0
0
0.004321
0.269635
2,852
97
93
29.402062
0.808449
0
0
0.109589
0
0
0.043478
0
0
0
0
0
0.013699
1
0.205479
false
0.068493
0.054795
0.082192
0.438356
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
b0cef7efc960a0a20a9e808c895f9ff5b72321b6
1,903
py
Python
src/zojax/portlet/browser/portlets.py
Zojax/zojax.portlet
f442ae53c400cd39e0c593138b83eeea0d13787e
[ "ZPL-2.1" ]
null
null
null
src/zojax/portlet/browser/portlets.py
Zojax/zojax.portlet
f442ae53c400cd39e0c593138b83eeea0d13787e
[ "ZPL-2.1" ]
null
null
null
src/zojax/portlet/browser/portlets.py
Zojax/zojax.portlet
f442ae53c400cd39e0c593138b83eeea0d13787e
[ "ZPL-2.1" ]
null
null
null
##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" $Id$
"""
from zope import interface
from zope.location import LocationProxy
from zope.publisher.interfaces import NotFound
from zope.publisher.interfaces import IPublishTraverse
from zope.security.proxy import removeSecurityProxy
from zope.component import getAdapters, queryMultiAdapter

from zojax.statusmessage.interfaces import IStatusMessage
from zojax.portlet.interfaces import IPortletManager, IPortletsExtension
from zojax.portlet.browser.interfaces import IPortletManagerPublicMarker


class Portlets(object):
    """Publish-traversal hook exposing portlet managers under `.../portlets/<name>`.

    Traversing to a name looks up an `IPortletManager` multi-adapter for
    (context, request); the manager is updated, marked public, and wrapped
    in a LocationProxy so its URL/security location is this view's context.
    Calling the view directly (no further path segment) is a 404.
    """
    interface.implements(IPublishTraverse)

    __name__ = 'portlets'
    __parent__ = None

    def __init__(self, context, request):
        # Act as a location: our parent is the traversed context.
        self.__parent__ = self.context = context
        self.request = request

    def publishTraverse(self, request, name):
        """Resolve `name` to a located, updated portlet manager or raise NotFound."""
        context = self.context

        manager = queryMultiAdapter(
            (context, request, None), IPortletManager, name)
        if manager is not None:
            manager.update()
            # Mark the manager as publicly traversed so downstream code can
            # distinguish it from internally obtained managers.
            interface.alsoProvides(manager, IPortletManagerPublicMarker)
            return LocationProxy(manager, self.context, name)

        raise NotFound(self.context, self.__name__, request)

    def __call__(self):
        # Rendering `.../portlets` itself is not supported.
        # BUG FIX: the original referenced the undefined global `request`,
        # which raised NameError instead of the intended NotFound.
        raise NotFound(self.context, self.__name__, self.request)
33.982143
78
0.683132
196
1,903
6.489796
0.479592
0.037736
0.022013
0.042453
0.113208
0.061321
0.061321
0
0
0
0
0.003807
0.171834
1,903
55
79
34.6
0.803299
0.239096
0
0.074074
0
0
0.006314
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
b0daa63a5af9ec548e408e77f43dd85ab4843906
462
py
Python
__findModuleLocations.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
__findModuleLocations.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
__findModuleLocations.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
'''
Python program to find the location of Python module sources.

Prints where the `os` and `datetime` modules are loaded from, then shows
the module search path.

NOTE: the `imp` module is deprecated since Python 3.4 and removed in 3.12;
`importlib.util.find_spec` is the modern replacement.
'''

# Location of Python module sources:
import imp
print("Location of Python os module sources:")
print(imp.find_module('os'))

# BUG FIX: the original label said "sys module" but the code queries the
# `datetime` module — the label now matches what is actually looked up.
print("\nLocation of Python datetime module sources:")
print(imp.find_module('datetime'))

# List of directories of specific module:
import os
print("\nList of directories in os module:")
print(os.path)

print("\nList of directories in sys module:")
import sys
print(sys.path)
24.315789
59
0.75974
70
462
4.985714
0.328571
0.091691
0.137536
0.120344
0.320917
0.17765
0
0
0
0
0
0
0.125541
462
18
60
25.666667
0.863861
0.287879
0
0
0
0
0.49375
0
0
0
0
0
0
1
0
true
0
0.272727
0
0.272727
0.727273
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
2
b0df7d5ad0499addaf0ae1570bc27728fffba61b
778
py
Python
configs/_base_/det_datasets/pool_icdar2013_icdar2015.py
andrgje/mmocr
26963dcf56b6cc842d097617a9dc1688b01fcaed
[ "Apache-2.0" ]
null
null
null
configs/_base_/det_datasets/pool_icdar2013_icdar2015.py
andrgje/mmocr
26963dcf56b6cc842d097617a9dc1688b01fcaed
[ "Apache-2.0" ]
null
null
null
configs/_base_/det_datasets/pool_icdar2013_icdar2015.py
andrgje/mmocr
26963dcf56b6cc842d097617a9dc1688b01fcaed
[ "Apache-2.0" ]
null
null
null
# Pooled detection datasets: ICDAR 2015 + ICDAR 2013.
# Each dataset contributes a training split and a validation split; the
# pipeline is left as None so the consuming config can inject its own.

dataset_type1 = 'IcdarDataset'
data_root1 = 'data/icdar15'

train1 = {
    'type': dataset_type1,
    'ann_file': f'{data_root1}/instances_training.json',
    'img_prefix': f'{data_root1}/imgs',
    'pipeline': None,
}

test1 = {
    'type': dataset_type1,
    'ann_file': f'{data_root1}/instances_validation.json',
    'img_prefix': f'{data_root1}/imgs',
    'pipeline': None,
}

dataset_type2 = 'IcdarDataset'
data_root2 = 'data/icdar13'

train2 = {
    'type': dataset_type2,
    'ann_file': f'{data_root2}/instances_training.json',
    'img_prefix': f'{data_root2}/imgs',
    'pipeline': None,
}

test2 = {
    'type': dataset_type2,
    'ann_file': f'{data_root2}/instances_validation.json',
    'img_prefix': f'{data_root2}/imgs',
    'pipeline': None,
}

# Lists consumed by the training/evaluation entry points.
train_list = [train1, train2]
test_list = [test1, test2]
22.882353
55
0.713368
106
778
4.95283
0.283019
0.07619
0.114286
0.091429
0.71619
0.71619
0.71619
0.647619
0.647619
0.350476
0
0.04236
0.150386
778
33
56
23.575758
0.751891
0
0
0.461538
0
0
0.339332
0.190231
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b0fda5ff40de8e081dd602a278f08974ff524613
11,355
py
Python
adventures/bucketlist/tests/test_api.py
lakivisi-zz/adventures
f094ac3fa1d5c85d65650c9cdc2ff2f60f9189a5
[ "MIT" ]
null
null
null
adventures/bucketlist/tests/test_api.py
lakivisi-zz/adventures
f094ac3fa1d5c85d65650c9cdc2ff2f60f9189a5
[ "MIT" ]
null
null
null
adventures/bucketlist/tests/test_api.py
lakivisi-zz/adventures
f094ac3fa1d5c85d65650c9cdc2ff2f60f9189a5
[ "MIT" ]
1
2021-01-14T21:27:32.000Z
2021-01-14T21:27:32.000Z
"""API tests for registration, login, bucketlists, and bucketlist items.

Assertions on `str(response.content)` match exact JSON substrings, so they
are coupled to the serializer's error wording and key order.
"""
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase

from bucketlist.models import Bucketlist, Item
from bucketlist.tests.factories import (BucketlistFactory, UserFactory,
                                        ItemFactory)

# Create your tests here.


class RegisterApiTestSuite(APITestCase):
    """Tests for the `register` endpoint."""

    def setUp(self):
        # `build()` creates an unsaved user so registration can persist it.
        self.user = UserFactory.build()

    def test_user_can_register_with_correct_credentials(self):
        url = reverse('register')
        data = {'username': self.user.username,
                'email': self.user.email, 'password': self.user.password}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertIn(
            '"username":"{}","email":"{}"'.format(
                self.user.username, self.user.email), str(response.content))
        # test_user_cant_register_twice: a second registration with the
        # same username must fail with a uniqueness error.
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, 400)
        self.assertIn(
            '"username":["A user with that username already exists."]',
            str(response.content))

    def test_user_cant_register_with_wrong_credentials(self):
        url = reverse('register')
        # "wq" is an intentionally malformed email address.
        data = {'username': self.user.username,
                'email': "wq", 'password': self.user.password}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, 400)
        self.assertIn(
            '"email":["Enter a valid email address."]',
            str(response.content))

    def test_user_cant_register_with_blank_credentials(self):
        url = reverse('register')
        # Whitespace-only fields must be rejected as blank.
        data = {'username': ' ', 'email': ' ', 'password': ' '}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, 400)
        self.assertIn(
            '"username":["This field may not be blank."]',
            str(response.content))


class LoginAPITestSuite(APITestCase):
    """Tests for the `login` (token-issuing) endpoint."""

    def setUp(self):
        # Register a user first so login has an account to authenticate.
        self.user = UserFactory.build()
        url = reverse('register')
        data = {'username': self.user.username,
                'email': self.user.email, 'password': self.user.password}
        self.client.post(url, data, format='json')
        self.url = reverse('login')
        self.data = {'username': self.user.username,
                     'password': self.user.password}

    def test_registered_user_can_login(self):
        response = self.client.post(self.url, self.data, format='json')
        self.assertEqual(response.status_code, 200)
        # A successful login returns a JWT token in the body.
        self.assertIn('token', str(response.content))

    def test_login_with_blank_credentials(self):
        response = self.client.post(
            self.url, {'username': '', 'password': ''}, format='json')
        self.assertEqual(response.status_code, 400)
        self.assertIn(
            '"username":["This field may not be blank."]',
            str(response.content))
        self.assertIn(
            '"password":["This field may not be blank."]',
            str(response.content))

    def test_login_with_wrong_credentials(self):
        response = self.client.post(self.url,
                                    {'username': 'loice',
                                     'password': 'loice'},
                                    format='json')
        self.assertEqual(response.status_code, 400)
        self.assertIn('Unable to login with provided credentials',
                      str(response.content))


class BucketlistAPITestSuite(APITestCase):
    """CRUD tests for bucketlists, run as an authenticated user."""

    def setUp(self):
        self.user = UserFactory.build()
        bucketlists = BucketlistFactory.build_batch(2)
        self.bucketlist1 = bucketlists[0]
        self.bucketlist2 = bucketlists[1]
        # register a user
        url = reverse('register')
        data = {'username': self.user.username,
                'email': self.user.email, 'password': self.user.password}
        self.client.post(url, data, format='json')
        # login user
        response = self.client.post(reverse('login'),
                                    {'username': self.user.username,
                                     'password': self.user.password},
                                    format='json')
        # Authenticate all subsequent client requests with the JWT token.
        self.token = response.data['token']
        self.client.credentials(HTTP_AUTHORIZATION='JWT ' + self.token)
        # add one bucketlist
        self.data = {'name': self.bucketlist2.name,
                     'description': self.bucketlist2.description}
        response = self.client.post(
            reverse('bucketlists'), self.data, format='json')
        self.bucketlist = Bucketlist.objects.get(name=self.bucketlist2.name)

    def test_user_can_create_bucketlist(self):
        url = reverse('bucketlists')
        data = {'name': self.bucketlist1.name,
                'description': self.bucketlist1.description}
        response = self.client.post(url, data, format='json')
        data = response.data
        self.assertIsNotNone(data['id'])
        self.assertEqual(data['name'], self.bucketlist1.name)

    def test_user_cant_create_bucketlist_with_same_name(self):
        # Re-posting the bucketlist created in setUp must be rejected.
        response = self.client.post(reverse('bucketlists'),
                                    self.data, format='json')
        data = response.data
        self.assertEqual(data['name'], ["bucketlist already exists"])

    def test_can_list_bucketlists(self):
        response = self.client.get(reverse('bucketlists'))
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.bucketlist2.name, data[0]['name'])

    def test_can_list_one_bucketlist(self):
        response = self.client.get(
            reverse('one_bucketlist', kwargs={'pk': self.bucketlist.id}))
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.bucketlist2.name, data['name'])

    def test_can_edit_one_bucketlist(self):
        response = self.client.put(
            reverse('one_bucketlist', kwargs={'pk': self.bucketlist.id}),
            {'name': 'holiday',
             'description': self.bucketlist2.description},
            format='json')
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual('holiday', data['name'])

    def test_can_delete_one_bucketlist(self):
        # NOTE(review): this assumes the bucketlist created in setUp gets
        # pk=1 — confirm the test DB resets primary keys per test.
        response = self.client.delete(
            reverse('one_bucketlist', kwargs={'pk': 1}))
        self.assertEqual(response.status_code, 204)
        # NOTE(review): `headers=` here is not a standard kwarg of this
        # APIClient.get signature; the client is already authenticated via
        # `credentials()` in setUp, so this kwarg is presumably redundant —
        # verify against the DRF version in use.
        response = self.client.get(
            reverse('one_bucketlist', kwargs={'pk': 1}),
            headers={'Authorization': 'JWT ' + self.token})
        data = response.data
        self.assertEqual(response.status_code, 404)
        self.assertEqual("Not found.", data["detail"])


class ItemAPITestSuite(APITestCase):
    """CRUD tests for items nested under a bucketlist."""

    def setUp(self):
        self.user = UserFactory.build()
        bucketlist = BucketlistFactory.build()
        items = ItemFactory.build_batch(2)
        self.item1 = items[0]
        self.item2 = items[1]
        # register a user
        url = reverse('register')
        data = {'username': self.user.username,
                'email': self.user.email, 'password': self.user.password}
        self.client.post(url, data, format='json')
        # login user
        response = self.client.post(reverse('login'),
                                    {'username': self.user.username,
                                     'password': self.user.password},
                                    format='json')
        self.token = response.data['token']
        self.client.credentials(HTTP_AUTHORIZATION='JWT ' + self.token)
        # add one bucketlist
        data = {'name': bucketlist.name,
                'description': bucketlist.description}
        self.client.post(
            reverse('bucketlists'), data, format='json')
        self.bucketlist = Bucketlist.objects.get(name=bucketlist.name)
        # Seed one item inside that bucketlist for the list/detail tests.
        self.data = {'name': self.item1.name,
                     'description': self.item1.description,
                     'completed': self.item1.completed}
        self.client.post(reverse('items',
                                 kwargs={'bucketlist_id': self.bucketlist.id}),
                         self.data, format='json')
        self.item = Item.objects.get(
            name=self.item1.name, bucketlist=self.bucketlist)

    def test_user_can_create_item(self):
        url = reverse('items', kwargs={'bucketlist_id': self.bucketlist.id})
        data = {'name': self.item2.name,
                'description': self.item2.description,
                'completed': self.item2.completed}
        response = self.client.post(url, data, format='json')
        data = response.data
        self.assertIsNotNone(data['id'])
        self.assertEqual(data['name'], self.item2.name)

    def test_user_cant_create_item_with_same_name_in_one_bucketlist(self):
        url = reverse('items', kwargs={'bucketlist_id': self.bucketlist.id})
        # Re-posting the item seeded in setUp must be rejected.
        response = self.client.post(url, self.data, format='json')
        data = response.data
        self.assertEqual(data['name'], ["item already exists in bucketlist"])

    def test_can_list_bucketlist_items(self):
        url = reverse('items', kwargs={'bucketlist_id': self.bucketlist.id})
        response = self.client.get(url)
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.item1.name, data[0]['name'])

    def test_can_list_one_bucketlist_item(self):
        url = reverse('one_item',
                      kwargs={'bucketlist_id': self.bucketlist.id,
                              'pk': self.item.id})
        response = self.client.get(url)
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.item1.name, data['name'])

    def test_can_edit_one_bucketlist_item(self):
        url = reverse('one_item',
                      kwargs={'bucketlist_id': self.bucketlist.id,
                              'pk': self.item.id})
        response = self.client.put(url,
                                   {'name': 'israel trip',
                                    'description': self.item1.description,
                                    'completed': self.item1.completed},
                                   format='json')
        data = response.data
        self.assertEqual(response.status_code, 200)
        self.assertEqual('israel trip', data['name'])

    def test_can_delete_one_bucketlist_item(self):
        url = reverse('one_item',
                      kwargs={'bucketlist_id': self.bucketlist.id,
                              'pk': self.item.id})
        response = self.client.delete(url)
        self.assertEqual(response.status_code, 204)
        # The deleted item must no longer be retrievable.
        response = self.client.get(url)
        data = response.data
        self.assertEqual(response.status_code, 404)
        self.assertEqual("Not found.", data["detail"])
41.593407
88
0.574813
1,155
11,355
5.536797
0.101299
0.048475
0.067553
0.077091
0.753401
0.722283
0.697263
0.676153
0.587021
0.516654
0
0.010865
0.30295
11,355
272
89
41.746324
0.797094
0.01277
0
0.541126
0
0
0.112847
0.0025
0
0
0
0
0.168831
1
0.095238
false
0.051948
0.021645
0
0.134199
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
b0fdbad819a58bb24f68fffc89eae7b13da8ac0a
514
py
Python
src/model/match.py
HarborYuan/Student-Information-Management-System
7226bdea9a422cc88876ba58f1e36e4f7087342d
[ "Apache-2.0" ]
null
null
null
src/model/match.py
HarborYuan/Student-Information-Management-System
7226bdea9a422cc88876ba58f1e36e4f7087342d
[ "Apache-2.0" ]
null
null
null
src/model/match.py
HarborYuan/Student-Information-Management-System
7226bdea9a422cc88876ba58f1e36e4f7087342d
[ "Apache-2.0" ]
1
2018-12-03T11:43:37.000Z
2018-12-03T11:43:37.000Z
import re


class IsCellphone():
    """Regex-based check for cellphone-number strings."""

    def __init__(self):
        # Pattern: leading '1', one character outside {1, 2, 6, 9}, then
        # nine digits. `match` anchors only at the start, so strings with
        # extra trailing characters still pass.
        self.p = re.compile(r'[1][^1269]\d{9}')

    def iscellphone(self, number):
        """Return True when *number* begins with the cellphone pattern."""
        return self.p.match(number) is not None


class IsMail():
    """Regex-based check for e-mail address strings."""

    def __init__(self):
        # local part must not start with '.' or '_'; domain is one or more
        # dot-separated alphanumeric labels ending in letters ('$'-anchored).
        self.p = re.compile(r'[^\._][\w\._-]+@(?:[A-Za-z0-9]+\.)+[A-Za-z]+$')

    def ismail(self, str):
        """Return True when *str* matches the e-mail pattern."""
        return self.p.match(str) is not None
19.769231
77
0.486381
66
514
3.636364
0.439394
0.083333
0.091667
0.125
0.466667
0.466667
0.466667
0.216667
0
0
0
0.023669
0.342412
514
25
78
20.56
0.686391
0
0
0.526316
0
0.052632
0.116732
0.087549
0
0
0
0
0
1
0.210526
false
0
0.052632
0
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
7c007a5d23b8a2015cedebc0918ddd73086265d1
398
py
Python
secrets_example.py
fuzzysearch404/SpotifyPlaylistScripts
17915742ca666edd8376f04d0e2687b207c32471
[ "MIT" ]
null
null
null
secrets_example.py
fuzzysearch404/SpotifyPlaylistScripts
17915742ca666edd8376f04d0e2687b207c32471
[ "MIT" ]
1
2021-11-03T13:40:55.000Z
2021-11-03T13:40:55.000Z
secrets_example.py
fuzzysearch404/SpotifyPlaylistScripts
17915742ca666edd8376f04d0e2687b207c32471
[ "MIT" ]
null
null
null
# Spotify web application client ID CLIENT_ID = 'your_client_id' # Spotify web application client secret CLIENT_SECRET = 'your_client_secret' # Redirect URI. This can be any uri. # Howerver, you MUST add this uri to your Spotify web app's settings. # Application settings -> Edit Settings -> Redirect URIs. # Add it there and save the settings. REDIRECT_URI = 'https://localhost:2020/done'
44.222222
70
0.753769
59
398
4.966102
0.542373
0.102389
0.143345
0.1843
0
0
0
0
0
0
0
0.012085
0.168342
398
9
71
44.222222
0.873112
0.670854
0
0
0
0
0.504274
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2