hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aa36eeeeb4a90c0aa0bc0cab9ddd3271c23ecf9a | 3,206 | py | Python | hseling-api-iceform/hseling_api_iceform/models.py | dkbrz/hseling-repo-iceform | 581926a0dca3ec463eb59bada8eefa047f24b256 | [
"MIT"
] | null | null | null | hseling-api-iceform/hseling_api_iceform/models.py | dkbrz/hseling-repo-iceform | 581926a0dca3ec463eb59bada8eefa047f24b256 | [
"MIT"
] | null | null | null | hseling-api-iceform/hseling_api_iceform/models.py | dkbrz/hseling-repo-iceform | 581926a0dca3ec463eb59bada8eefa047f24b256 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
DB Models:
Text
Token
TextContent - full corpus
ClusterFilters - search
NgramEntries - ngram entry coordinates
"""
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import ForeignKey
db = SQLAlchemy()
class Text(db.Model):
"""
Table with text names (names of sagas)
"""
id = db.Column(db.Integer, primary_key=True)
text_name = db.Column(db.Text)
class Token(db.Model):
"""
Token
word_form: form in text
lemma: lemma
"""
id = db.Column(db.Integer, primary_key=True)
word_form = db.Column(db.Text)
lemma = db.Column(db.Text)
pos = db.Column(db.Text)
class TextContent(db.Model):
"""
TextContent
text: foreign key (Text)
chapter: chapter index
paragraph: index of paragraph in chapter
sentence: index of sentence in paragraph
sentence_unique: unique sentence ID (in all texts)
token: token object (with lemma and word form)
text_obj: text with id and name
"""
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.Integer, ForeignKey("text.id"))
chapter = db.Column(db.Integer)
paragraph = db.Column(db.Integer)
sentence = db.Column(db.Integer)
sentence_unique = db.Column(db.Integer)
idx = db.Column(db.Integer)
token_id = db.Column(db.Integer, ForeignKey("token.id"))
# relationships
token = db.relationship("Token", uselist=False, primaryjoin="Token.id==TextContent.token_id")
text_obj = db.relationship("Text", uselist=False, primaryjoin="Text.id==TextContent.text")
class ClusterFilters(db.Model):
"""
ClusterFilters
cluster_id: unique cluster identifier
short_ngram_id: POS model
cluster: cluster id within short_ngram group
unique_text: number of texts where this cluster occurs
n_entries: number of entries (in all texts)
text: textual representation
"""
id = db.Column(db.Integer, primary_key=True)
cluster_id = db.Column(db.Integer)
short_ngram_id = db.Column(db.Integer)
cluster = db.Column(db.Integer)
n_entries = db.Column(db.Integer)
unique_text = db.Column(db.Integer)
text = db.Column(db.Text)
verb_text = db.Column(db.Text)
class FinalClusters(db.Model):
id = db.Column(db.Integer, primary_key=True)
cluster_id = db.Column(db.Integer)
short_ngram_id = db.Column(db.Integer)
cluster = db.Column(db.Integer)
n_entries = db.Column(db.Integer)
unique_text = db.Column(db.Integer)
text = db.Column(db.Text)
verb_text = db.Column(db.Text)
class NgramEntries(db.Model):
"""
NgramEntries - coordinates of ngram entries
start, end: word indices of the first and last words
"""
id = db.Column(db.Integer, primary_key=True)
short_ngram_id = db.Column(db.Integer)
cluster_id = db.Column(db.Integer)
text = db.Column(db.Integer, ForeignKey("text.id"))
chapter = db.Column(db.Integer)
paragraph = db.Column(db.Integer)
sentence = db.Column(db.Integer)
sentence_unique = db.Column(db.Integer)
start = db.Column(db.Integer)
end = db.Column(db.Integer)
text_obj = db.relationship("Text", uselist=False, primaryjoin="Text.id==NgramEntries.text")
| 28.371681 | 97 | 0.685901 | 446 | 3,206 | 4.843049 | 0.17713 | 0.148148 | 0.185185 | 0.251852 | 0.503241 | 0.464352 | 0.463426 | 0.463426 | 0.408333 | 0.375926 | 0 | 0.000387 | 0.194635 | 3,206 | 112 | 98 | 28.625 | 0.836174 | 0.266064 | 0 | 0.615385 | 0 | 0 | 0.052608 | 0.036735 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.980769 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aa43035bffecfb1cef00f4af24ebad0b85e2dbea | 1,274 | py | Python | stepik_selenium_final/pages/product_page.py | vshagur/stepik-selenium-final | 86842dc5c23f87c846a02a3d6f18c89c94935f23 | [
"MIT"
] | null | null | null | stepik_selenium_final/pages/product_page.py | vshagur/stepik-selenium-final | 86842dc5c23f87c846a02a3d6f18c89c94935f23 | [
"MIT"
] | null | null | null | stepik_selenium_final/pages/product_page.py | vshagur/stepik-selenium-final | 86842dc5c23f87c846a02a3d6f18c89c94935f23 | [
"MIT"
] | 1 | 2020-07-12T11:57:17.000Z | 2020-07-12T11:57:17.000Z | from stepik_selenium_final.locators import ProductPageLocators
from stepik_selenium_final.pages import BasePage
class ProductPage(BasePage):
def __init__(self, browser, url):
super().__init__(browser, url)
self.title = None
self.price = None
def add_product_to_cart(self):
self.title = self.get_element(*ProductPageLocators.PRODUCT_TITLE).text
self.price = self.get_element(*ProductPageLocators.PRODUCT_PRICE).text
self.get_element(*ProductPageLocators.ADD_TO_CART_BUTTON).click()
def check_message_product_added_to_cart(self):
text = self.get_element(*ProductPageLocators.MESSAGE_PRODUCT_ADDED).text
assert self.title == text
def check_not_message_product_added_to_cart(self):
assert self.is_not_element_present(*ProductPageLocators.MESSAGE_PRODUCT_ADDED), \
"Success message is presented, but should not be"
def check_message_product_added_to_cart_is_disappearing(self):
assert self.is_disappearing_element(*ProductPageLocators.MESSAGE_PRODUCT_ADDED), \
"Success message should disappear"
def check_message_cart_total(self):
text = self.get_element(*ProductPageLocators.MESSAGE_CART_TOTAL).text
assert self.price == text
| 41.096774 | 90 | 0.748823 | 155 | 1,274 | 5.774194 | 0.283871 | 0.174302 | 0.127374 | 0.184358 | 0.481564 | 0.329609 | 0.181006 | 0 | 0 | 0 | 0 | 0 | 0.174254 | 1,274 | 30 | 91 | 42.466667 | 0.85076 | 0 | 0 | 0 | 0 | 0 | 0.062009 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 1 | 0.26087 | false | 0 | 0.086957 | 0 | 0.391304 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
a4ae1483214c461caccbaa8423f72cab68bed529 | 600 | py | Python | ACME/geometry/poly_corner_cosines.py | mauriziokovacic/ACME | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | 3 | 2019-10-23T23:10:55.000Z | 2021-09-01T07:30:14.000Z | ACME/geometry/poly_corner_cosines.py | mauriziokovacic/ACME-Python | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | null | null | null | ACME/geometry/poly_corner_cosines.py | mauriziokovacic/ACME-Python | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | 1 | 2020-07-11T11:35:43.000Z | 2020-07-11T11:35:43.000Z | from ..math.acos import *
from ..math.dot import *
from ..math.normvec import *
from .poly_edges import *
def poly_corner_cosines(P, T):
"""
Returns the per polygon corner cosines
Parameters
----------
P : Tensor
a (N, D,) points set tensor
T : LongTensor
a (M, T,) topology tensor
Returns
-------
Tensor
the (T, M,) corner cosines tensor
"""
E = torch.cat([normr(e).unsqueeze(0) for e in poly_edges(P, T)], dim=0)
E = torch.cat((E[-1].unsqueeze(0), E), dim=0)
return dot(E[1:], -E[:-1], dim=2).squeeze().t()
| 22.222222 | 75 | 0.551667 | 88 | 600 | 3.715909 | 0.454545 | 0.073395 | 0.085627 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018349 | 0.273333 | 600 | 26 | 76 | 23.076923 | 0.731651 | 0.351667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.5 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
a4b88904224a7e9d2272f52544cf3713982a6999 | 1,121 | py | Python | mydemoapp/models.py | Kartik-CR7/MyDemoProject | 6415b70ca8279964fc96fa4df2ee320a915f6e25 | [
"bzip2-1.0.6"
] | null | null | null | mydemoapp/models.py | Kartik-CR7/MyDemoProject | 6415b70ca8279964fc96fa4df2ee320a915f6e25 | [
"bzip2-1.0.6"
] | 8 | 2021-03-19T04:46:59.000Z | 2022-03-12T00:10:00.000Z | mydemoapp/models.py | Kartik-CR7/MyDemoProject | 6415b70ca8279964fc96fa4df2ee320a915f6e25 | [
"bzip2-1.0.6"
] | null | null | null | from django.db import models
from django.core.validators import MaxLengthValidator
from django_resized import ResizedImageField
from PIL import Image
# Create your models here.
class Like(models.Model):
id = models.AutoField(primary_key = True)
sess_response = models.CharField(max_length=100)
button_id = models.CharField(max_length=50,default=0)
# (validators=[MaxLengthValidator(50)])#we can give MaxLengthValidators to validate the length of integer field.
class BT_Contact(models.Model):
Contact_id = models.AutoField(primary_key = True)
First_Name = models.CharField(max_length = 50,default='Default_name')
Last_Name = models.CharField(max_length = 50,default= 'Default_lastname')
Emailid = models.CharField(max_length= 100 ,default='Default@email.com')
Message = models.CharField(max_length= 800 ,default= 'No_Message')
class BT_Imageupload(models.Model):
Image_id = models.AutoField(primary_key= True)
Image_name = models.CharField(max_length=200,default='Unnamed_Image')
Image = ResizedImageField(size=[800,800],upload_to='Media/',blank=True,quality=100) | 50.954545 | 120 | 0.767172 | 149 | 1,121 | 5.604027 | 0.422819 | 0.125749 | 0.150898 | 0.201198 | 0.354491 | 0.256287 | 0.105389 | 0.105389 | 0 | 0 | 0 | 0.030706 | 0.128457 | 1,121 | 22 | 121 | 50.954545 | 0.823951 | 0.119536 | 0 | 0 | 0 | 0 | 0.075203 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
a4bef0301771c959e6bdd061b645fed4cecf3b2b | 1,409 | py | Python | scripts/nodes/filter/sensor.py | AutonomousFieldRoboticsLab/jetyak_uav_utils | 7926df2cf34b0be2647b62896c98af82ca6f1e53 | [
"MIT"
] | null | null | null | scripts/nodes/filter/sensor.py | AutonomousFieldRoboticsLab/jetyak_uav_utils | 7926df2cf34b0be2647b62896c98af82ca6f1e53 | [
"MIT"
] | null | null | null | scripts/nodes/filter/sensor.py | AutonomousFieldRoboticsLab/jetyak_uav_utils | 7926df2cf34b0be2647b62896c98af82ca6f1e53 | [
"MIT"
] | 1 | 2021-12-09T01:30:58.000Z | 2021-12-09T01:30:58.000Z | import numpy as np
class Sensor:
"""
Sensor class
Handles the sensor data and measurements
Updates R matrix using the residuals
"""
def __init__(self, ID, Rnom, H, N, chiCritical):
self.ID = ID
self.R = Rnom
self.H = H
self.N = N
self.Rnom = Rnom
self.chiC = chiCritical
self.residuals = None
self.Z = None
def setZ(self, measurement):
self.Z = measurement
def getID(self):
return self.ID
def getZ(self):
return self.Z
def getChi(self):
return self.chiC
def getR(self):
return self.R
def getH(self):
return self.H
def updateR(self, r, P):
if (r is not None) and (P is not None):
if self.residuals is None:
self.residuals = np.transpose(r)
elif self.residuals.shape[0] < self.N:
self.residuals = np.concatenate((self.residuals, np.transpose(r)), axis = 0)
else:
self.residuals = np.concatenate((self.residuals[1:], np.transpose(r)), axis = 0)
if self.residuals.shape[0] == self.N:
res = np.asmatrix(self.residuals)
hatRdiag = np.array(np.diagonal(((res.T * res) / self.N) - P))
for i in range(hatRdiag.shape[0]):
if hatRdiag[i] < self.Rnom[i,i]:
hatRdiag[i] = self.Rnom[i,i]
# if self.ID == 'jgps' or self.ID == 'tag':
# print self.ID
# print "Residuals: ", r
# print "P: ", P
# print "New R: ", np.diag(hatRdiag)
self.R = np.diag(hatRdiag)
| 22.015625 | 84 | 0.612491 | 216 | 1,409 | 3.976852 | 0.296296 | 0.151339 | 0.08149 | 0.055879 | 0.259604 | 0.19092 | 0 | 0 | 0 | 0 | 0 | 0.005644 | 0.245564 | 1,409 | 63 | 85 | 22.365079 | 0.802446 | 0.159688 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0.026316 | 0.131579 | 0.394737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
a4cfb8f9c948786a719f38a876ae84e9ed8b699e | 559 | py | Python | src/framestructure/__init__.py | FongAnthonyM/python-framestructure | 64a12f4c9e401445dc58263ec9f49621d27127eb | [
"MIT"
] | null | null | null | src/framestructure/__init__.py | FongAnthonyM/python-framestructure | 64a12f4c9e401445dc58263ec9f49621d27127eb | [
"MIT"
] | 18 | 2021-12-16T14:26:49.000Z | 2022-03-28T14:32:28.000Z | src/framestructure/__init__.py | FongAnthonyM/python-framestructure | 64a12f4c9e401445dc58263ec9f49621d27127eb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" __init__.py
framestructure provides several classes for creating structures of dataframes.
"""
# Package Header #
from .__header__ import *
# Header #
__author__ = __author__
__credits__ = __credits__
__maintainer__ = __maintainer__
__email__ = __email__
# Imports #
# Local Packages #
from .dataframeinterface import DataFrameInterface
from .blankdataframe import BlankDataFrame
from .dataframe import DataFrame
from .timeseriesframe import *
from .directorytimeframe import *
from .filetimeframe import *
| 24.304348 | 78 | 0.785331 | 56 | 559 | 7.125 | 0.625 | 0.050125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002066 | 0.134168 | 559 | 22 | 79 | 25.409091 | 0.822314 | 0.323792 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.636364 | 0 | 0.636364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
a4cfcd7aca9677a1f8cb4d104d8e6941f5eefb30 | 277 | py | Python | pyamazon/Helpers/utility.py | Essam3152/test | 9b8a16d35ad4cb4538500d29c7598247ac8992b9 | [
"MIT"
] | 12 | 2021-12-01T15:29:45.000Z | 2022-03-23T01:12:01.000Z | pyamazon/Helpers/utility.py | Essam3152/test | 9b8a16d35ad4cb4538500d29c7598247ac8992b9 | [
"MIT"
] | null | null | null | pyamazon/Helpers/utility.py | Essam3152/test | 9b8a16d35ad4cb4538500d29c7598247ac8992b9 | [
"MIT"
] | 11 | 2021-11-20T17:25:46.000Z | 2021-11-22T10:14:15.000Z | import re
def sort_urls(url_list,reverse=True):
return sorted(url_list, key=lambda k: k['bandwidth'], reverse=reverse)
def name_checker(name):
name = name.replace("'", "")
name = re.findall(r"([\w\d-]+)", name)
return ' '.join([x for x in name])
| 23.083333 | 75 | 0.606498 | 41 | 277 | 4 | 0.634146 | 0.085366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.209386 | 277 | 11 | 76 | 25.181818 | 0.748858 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.142857 | 0.142857 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
a4d7b44cdbc954b024600fd31d36d74375304da1 | 3,811 | py | Python | src/icalendar/caselessdict.py | devlitz/icalendar | f7154d55280c33cc4cf08dbbb78414dd00b5506b | [
"BSD-2-Clause"
] | 3 | 2015-12-30T05:32:55.000Z | 2021-06-15T15:04:49.000Z | src/icalendar/caselessdict.py | devlitz/icalendar | f7154d55280c33cc4cf08dbbb78414dd00b5506b | [
"BSD-2-Clause"
] | null | null | null | src/icalendar/caselessdict.py | devlitz/icalendar | f7154d55280c33cc4cf08dbbb78414dd00b5506b | [
"BSD-2-Clause"
] | 1 | 2017-05-05T02:48:37.000Z | 2017-05-05T02:48:37.000Z | # -*- coding: utf-8 -*-
from icalendar.compat import iteritems
from icalendar.parser_tools import to_unicode
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
def canonsort_keys(keys, canonical_order=None):
"""Sorts leading keys according to canonical_order. Keys not specified in
canonical_order will appear alphabetically at the end.
"""
canonical_map = dict((k, i) for i, k in enumerate(canonical_order or []))
head = [k for k in keys if k in canonical_map]
tail = [k for k in keys if k not in canonical_map]
return sorted(head, key=lambda k: canonical_map[k]) + sorted(tail)
def canonsort_items(dict1, canonical_order=None):
"""Returns a list of items from dict1, sorted by canonical_order.
"""
return [(k, dict1[k]) for \
k in canonsort_keys(dict1.keys(), canonical_order)]
class CaselessDict(OrderedDict):
"""A dictionary that isn't case sensitive, and only uses strings as keys.
Values retain their case.
"""
def __init__(self, *args, **kwargs):
"""Set keys to upper for initial dict.
"""
super(CaselessDict, self).__init__(*args, **kwargs)
for key, value in self.items():
key_upper = to_unicode(key).upper()
if key != key_upper:
super(CaselessDict, self).__delitem__(key)
self[key_upper] = value
def __getitem__(self, key):
key = to_unicode(key)
return super(CaselessDict, self).__getitem__(key.upper())
def __setitem__(self, key, value):
key = to_unicode(key)
super(CaselessDict, self).__setitem__(key.upper(), value)
def __delitem__(self, key):
key = to_unicode(key)
super(CaselessDict, self).__delitem__(key.upper())
def __contains__(self, key):
key = to_unicode(key)
return super(CaselessDict, self).__contains__(key.upper())
def get(self, key, default=None):
key = to_unicode(key)
return super(CaselessDict, self).get(key.upper(), default)
def setdefault(self, key, value=None):
key = to_unicode(key)
return super(CaselessDict, self).setdefault(key.upper(), value)
def pop(self, key, default=None):
key = to_unicode(key)
return super(CaselessDict, self).pop(key.upper(), default)
def popitem(self):
return super(CaselessDict, self).popitem()
def has_key(self, key):
key = to_unicode(key)
return super(CaselessDict, self).__contains__(key.upper())
def update(self, *args, **kwargs):
# Multiple keys where key1.upper() == key2.upper() will be lost.
mappings = list(args) + [kwargs]
for mapping in mappings:
if hasattr(mapping, 'items'):
mapping = iteritems(mapping)
for key, value in mapping:
self[key] = value
def copy(self):
return type(self)(super(CaselessDict, self).copy())
def __repr__(self):
return '%s(%s)' % (type(self).__name__, dict(self))
def __eq__(self, other):
return self is other or dict(self.items()) == dict(other.items())
# A list of keys that must appear first in sorted_keys and sorted_items;
# must be uppercase.
canonical_order = None
def sorted_keys(self):
"""Sorts keys according to the canonical_order for the derived class.
Keys not specified in canonical_order will appear at the end.
"""
return canonsort_keys(self.keys(), self.canonical_order)
def sorted_items(self):
"""Sorts items according to the canonical_order for the derived class.
Items not specified in canonical_order will appear at the end.
"""
return canonsort_items(self, self.canonical_order)
| 34.333333 | 78 | 0.644975 | 492 | 3,811 | 4.788618 | 0.229675 | 0.083192 | 0.106961 | 0.050934 | 0.322156 | 0.304754 | 0.301783 | 0.259338 | 0.239813 | 0.181239 | 0 | 0.002438 | 0.246654 | 3,811 | 110 | 79 | 34.645455 | 0.818182 | 0.202572 | 0 | 0.151515 | 0 | 0 | 0.003725 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.075758 | 0.060606 | 0.590909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
a4faddb7db9d31dc2fe74d8d9ade291833862364 | 1,326 | py | Python | fhq-server-tests/run_tests.py | Sardcaster/fhq-server | 9154363c0e303afdb1489919d844b837767dd879 | [
"MIT"
] | null | null | null | fhq-server-tests/run_tests.py | Sardcaster/fhq-server | 9154363c0e303afdb1489919d844b837767dd879 | [
"MIT"
] | null | null | null | fhq-server-tests/run_tests.py | Sardcaster/fhq-server | 9154363c0e303afdb1489919d844b837767dd879 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import fhqtest
import signal
import time
import traceback
import libtestfhq
import libtestwebserver
import libtestusers
import libtestscoreboard
import libtestpublicevents
import libteststats
import libtestquests
import libtestleaks
import libtestgames
import libtestknowledgebase
import libtestserversettings
tests_passed = False
tests = [
libtestwebserver,
libtestusers,
libtestgames,
libtestquests,
libtestscoreboard,
libtestpublicevents,
libtestleaks,
libtestknowledgebase,
libtestserversettings,
libteststats
]
try:
fhqtest.print_header(" > > > TESTS: begin ")
libtestfhq.start_server()
fhqtest.init_enviroment()
for t in tests:
fhqtest.print_header(" > > > " + t.test_name + ": begin ")
try:
t.run_tests()
except Exception as e:
fhqtest.deinit_enviroment()
fhqtest.throw_err("Some tests wrong")
exit(-1)
fhqtest.print_header(" > > > " + t.test_name + ": end ")
tests_passed = True
finally:
fhqtest.print_header(" > > > TESTS: end ")
if tests_passed:
fhqtest.print_success("All tests passed")
else:
fhqtest.log_err("Some tests failed")
libtestfhq.stop_server() | 23.263158 | 66 | 0.686275 | 134 | 1,326 | 6.664179 | 0.477612 | 0.067189 | 0.080627 | 0.051512 | 0.06047 | 0.06047 | 0 | 0 | 0 | 0 | 0 | 0.00293 | 0.227753 | 1,326 | 57 | 67 | 23.263158 | 0.869141 | 0.032428 | 0 | 0.038462 | 0 | 0 | 0.089704 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.076923 | 0.346154 | 0 | 0.346154 | 0.096154 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
3504302dd6ef49bfee5a7219fb474010306ae444 | 609 | py | Python | Module02/guess_primer.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-Buisse | e8661fa3d1658cc67001551f21937900c7631b57 | [
"MIT"
] | null | null | null | Module02/guess_primer.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-Buisse | e8661fa3d1658cc67001551f21937900c7631b57 | [
"MIT"
] | null | null | null | Module02/guess_primer.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-Buisse | e8661fa3d1658cc67001551f21937900c7631b57 | [
"MIT"
] | null | null | null | import random
print('--------------------------')
print('----------GUESS THAT PRIMER GAME')
print('--------------------------')
goal = random.choice('ACGT')
goal += random.choice('ACGT')
goal += random.choice('ACGT')
goal += random.choice('ACGT')
goal += random.choice('ACGT')
print(goal)
guess = 'NNNN'
name = input('Player what is your name?')
while guess != goal:
guess = input('Guess a 5 bp primer')
misses = 0
for i in range(len(guess)):
if guess [i] != goal[i]:
misses += 1
if misses > 0:
print('Sorry, you guess %i bases wrong. Play again?' %misses)
| 21 | 69 | 0.543514 | 78 | 609 | 4.24359 | 0.461538 | 0.151057 | 0.241692 | 0.302115 | 0.302115 | 0.302115 | 0.302115 | 0.302115 | 0.302115 | 0.302115 | 0 | 0.008264 | 0.205255 | 609 | 28 | 70 | 21.75 | 0.67562 | 0 | 0 | 0.3 | 0 | 0 | 0.321839 | 0.085386 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.05 | 0 | 0.05 | 0.25 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
3505337c3a5e1ae3c2fd1b436d2e77323402910d | 1,008 | py | Python | Python_OOP/Inheritance/demo.py | antonarnaudov/SoftUniProjects | 01cbdce2b350b57240045d1bc3e21d34f9d0351d | [
"MIT"
] | null | null | null | Python_OOP/Inheritance/demo.py | antonarnaudov/SoftUniProjects | 01cbdce2b350b57240045d1bc3e21d34f9d0351d | [
"MIT"
] | null | null | null | Python_OOP/Inheritance/demo.py | antonarnaudov/SoftUniProjects | 01cbdce2b350b57240045d1bc3e21d34f9d0351d | [
"MIT"
] | null | null | null | class Person:
def __init__(self, name, age):
self.name = name
self.age = age
def __repr__(self):
return f'Hello, my name is {self.name} and im {self.age}'
class Student(Person):
def __init__(self, name, age, grade):
super().__init__(name, age)
# super(Student, self).__init__(name, age)
# Person.__init__(self, name, age)
self.grade = grade
def __repr__(self):
return f'{super().__repr__()} Im a {self.__class__.__name__} and my grade is {self.grade}'
class Worker(Person):
def __init__(self, name, age, salary):
super().__init__(name, age)
self.salary = salary
def __repr__(self):
return f'{super().__repr__()} Im a {self.__class__.__name__} and my salary is {self.salary}'
person = Person('Bebe Ivan', 12)
student = Student('Bebe Ivan', 12, 6)
worker = Worker('Tati Ivan', 24, 600)
print(person)
print(student)
print(worker)
print(Worker.mro())
# MRO -> в какъв ред се изпълнява кода
| 25.846154 | 100 | 0.62996 | 140 | 1,008 | 4.078571 | 0.257143 | 0.085814 | 0.084063 | 0.105079 | 0.366025 | 0.294221 | 0.168126 | 0.168126 | 0.168126 | 0.168126 | 0 | 0.01287 | 0.229167 | 1,008 | 38 | 101 | 26.526316 | 0.722008 | 0.109127 | 0 | 0.2 | 0 | 0.08 | 0.263982 | 0.055928 | 0 | 0 | 0 | 0 | 0 | 1 | 0.24 | false | 0 | 0 | 0.12 | 0.48 | 0.16 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
350c45beb746b5466c38b3247bfad2ac874e7157 | 174 | py | Python | AtCoder/ABC/000-159/ABC127_C.py | sireline/PyCode | 8578467710c3c1faa89499f5d732507f5d9a584c | [
"MIT"
] | null | null | null | AtCoder/ABC/000-159/ABC127_C.py | sireline/PyCode | 8578467710c3c1faa89499f5d732507f5d9a584c | [
"MIT"
] | null | null | null | AtCoder/ABC/000-159/ABC127_C.py | sireline/PyCode | 8578467710c3c1faa89499f5d732507f5d9a584c | [
"MIT"
] | null | null | null | N, M = [int(n) for n in input().split()]
L, R = (1, N)
for i in range(M):
l, r = [int(n) for n in input().split()]
L, R = (max(l, L), min(r, R))
print(max(0, R-L+1))
| 24.857143 | 44 | 0.482759 | 41 | 174 | 2.04878 | 0.390244 | 0.142857 | 0.166667 | 0.190476 | 0.52381 | 0.52381 | 0.52381 | 0.52381 | 0.52381 | 0 | 0 | 0.022727 | 0.241379 | 174 | 6 | 45 | 29 | 0.613636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
350d79ec4bea5beff627e5acda4dd571cea02065 | 415 | tac | Python | vmcache/service.tac | belm0/vmprof-server | c26f93b5bfec977da33bc62841251dbb956b7f5a | [
"MIT"
] | 35 | 2015-06-14T06:50:32.000Z | 2022-02-24T22:26:56.000Z | vmcache/service.tac | belm0/vmprof-server | c26f93b5bfec977da33bc62841251dbb956b7f5a | [
"MIT"
] | 31 | 2015-06-25T12:12:35.000Z | 2022-03-17T12:55:30.000Z | vmcache/service.tac | belm0/vmprof-server | c26f93b5bfec977da33bc62841251dbb956b7f5a | [
"MIT"
] | 23 | 2015-08-23T17:17:17.000Z | 2022-03-17T12:56:28.000Z | import sys
from twisted.internet.protocol import Factory
from twisted.internet import endpoints
from twisted.internet import reactor
from vmcache.cache import CacheFactory
from twisted.application import internet, service
# Twisted .tac file: `twistd` imports this module and looks for the
# module-level name `application` to run the service.
factory = CacheFactory()
application = service.Application("cached application data (LRU scheme)")
# Serve the cache protocol over a unix domain socket in the working directory.
server = endpoints.serverFromString(reactor, "unix:./cache.socket")
server.listen(factory)
| 29.642857 | 73 | 0.824096 | 49 | 415 | 6.979592 | 0.469388 | 0.128655 | 0.166667 | 0.146199 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098795 | 415 | 13 | 74 | 31.923077 | 0.914439 | 0 | 0 | 0 | 0 | 0 | 0.13253 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.6 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
350eafe1f55f87030928447123886f1cd0feef9d | 3,349 | py | Python | tests/test_relex_snowball.py | cjcourt/cdesnowball | ed08637a06d72d3ef83296bfb686a682df0d4443 | [
"MIT"
] | 13 | 2018-06-25T14:07:07.000Z | 2022-02-16T17:00:11.000Z | tests/test_relex_snowball.py | cjcourt/cdesnowball | ed08637a06d72d3ef83296bfb686a682df0d4443 | [
"MIT"
] | null | null | null | tests/test_relex_snowball.py | cjcourt/cdesnowball | ed08637a06d72d3ef83296bfb686a682df0d4443 | [
"MIT"
] | 4 | 2020-02-26T08:34:26.000Z | 2021-03-18T17:33:36.000Z | # -*- coding: utf-8 -*-
"""
Test relex snowball
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import sys
import logging
import os
import unittest
from chemdataextractor.relex import Snowball, ChemicalRelationship, Relation, Entity
from chemdataextractor.model import BaseModel, StringType, ListType, ModelType, Compound
from chemdataextractor.parse.elements import I, R, Any, OneOrMore, Optional
from chemdataextractor.parse.common import lrb, rrb, delim
from chemdataextractor.parse.actions import join, merge
from chemdataextractor.parse.cem import chemical_name
from chemdataextractor.doc import Sentence
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
# Create a new model
class CurieTemperature(BaseModel):
    """Model holding one extracted Curie-temperature mention."""
    specifier = StringType()  # phrase that signalled the property, e.g. 'curie temperature'
    value = StringType()  # numeric value as matched text (see `value` parse element below)
    units = StringType()  # temperature unit symbol; parser accepts C, F or K
# Add to the available models
Compound.curie_temperatures = ListType(ModelType(CurieTemperature))
# Define a very basic entity tagger
specifier = (I('curie') + I('temperature') + Optional(lrb | delim) + Optional(R('^T(C|c)(urie)?')) + Optional(rrb) | R('^T(C|c)(urie)?'))('specifier').add_action(join)
units = (R('^[CFK]\.?$'))('units').add_action(merge)
value = (R('^\d+(\.\,\d+)?$'))('value')
# Let the entities be any combination of chemical names, specifier values and units
entities = (chemical_name | specifier | value + units)
# Now create a very generic parse phrase that will match any combination of these entities
curie_temperature_phrase = (entities + OneOrMore(entities | Any()))('curie_temperature')
curie_temp_entities = [chemical_name, specifier, value, units]
# Define the relationship and give it a name
curie_temp_relationship = ChemicalRelationship(curie_temp_entities, curie_temperature_phrase, name='curie_temperatures')
# class TestSnowball(unittest.TestCase):
# maxDiff = None
# training_corpus = 'tests/data/relex/curie_training/'
# snowball_pkl = 'tests/data/relex/curie_temperatures.pkl'
# snowball_pkl_py2 = 'tests/data/relex/curie_temperatures_py2.pkl'
# def test_load_snowball(self):
# if sys.version_info[0] == 2:
# sb = Snowball.load(self.snowball_pkl_py2)
# else:
# sb = Snowball.load(self.snowball_pkl)
# self.assertIsInstance(sb, Snowball)
# def test_extract(self):
# if sys.version_info[0] == 2:
# curie_temp_snowball = Snowball.load(self.snowball_pkl_py2)
# else:
# curie_temp_snowball = Snowball.load(self.snowball_pkl)
# curie_temp_snowball.save_file_name = 'curie_test_output'
# test_sentence = Sentence('BiFeO3 is ferromagnetic with a curie temperature of 1103 K and this is very interesting')
# result = curie_temp_snowball.extract(test_sentence)
# self.assertEqual(len(result), 1)
# expected_entities = [Entity('BiFeO3', chemical_name, 0, 1), Entity('curie temperature', specifier, 0,0), Entity('1103', value, 0,0), Entity('K', units, 0,0)]
# expected_relation = Relation(expected_entities, confidence=1.0)
# self.assertEqual(result[0], expected_relation)
# self.assertEqual(result[0].confidence, expected_relation.confidence)
if __name__ == '__main__':
unittest.main()
| 38.056818 | 167 | 0.727381 | 415 | 3,349 | 5.657831 | 0.33494 | 0.062606 | 0.027257 | 0.040886 | 0.153322 | 0.120102 | 0.07368 | 0.037479 | 0 | 0 | 0 | 0.011368 | 0.159451 | 3,349 | 87 | 168 | 38.494253 | 0.822735 | 0.505225 | 0 | 0 | 0 | 0 | 0.081014 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.625 | 0.03125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
3513991b0e2d3d74dd80f852d3355113d00be1be | 450 | py | Python | src/hgbot_infra/fake_MSC.py | huegli/hgbot_infra | 1a3e388c92745f60b3b97fcfed57070f02b7b4a1 | [
"BSD-3-Clause"
] | null | null | null | src/hgbot_infra/fake_MSC.py | huegli/hgbot_infra | 1a3e388c92745f60b3b97fcfed57070f02b7b4a1 | [
"BSD-3-Clause"
] | null | null | null | src/hgbot_infra/fake_MSC.py | huegli/hgbot_infra | 1a3e388c92745f60b3b97fcfed57070f02b7b4a1 | [
"BSD-3-Clause"
] | null | null | null | """Fake Motor Speed Control for use in CI environments w/o real hardware"""
class MotorSpeedControl(object):
    """Stand-in for the real motor speed controller.

    Mirrors the hardware driver's interface but performs no I/O, so code
    running in CI (no board attached) can exercise the same call sites.
    """

    def __init__(self):
        # Pretend the chip was detected successfully, failsafe disabled.
        self._fail_safe = False
        self.foundChip = True

    def Init(self):
        """No-op hardware initialisation."""
        pass

    def SetCommsFailsafe(self, state):
        """Record the requested comms-failsafe state; no hardware is touched."""
        self._fail_safe = state

    def MotorsOff(self):
        """No-op: the real driver would stop both motors here."""
        pass

    def SetMotor1(self, speed):
        """No-op: the real driver would drive motor 1 at *speed*."""
        pass

    def SetMotor2(self, speed):
        """No-op: the real driver would drive motor 2 at *speed*."""
        pass
| 16.666667 | 75 | 0.604444 | 53 | 450 | 4.981132 | 0.566038 | 0.106061 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006472 | 0.313333 | 450 | 26 | 76 | 17.307692 | 0.847896 | 0.153333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0.333333 | 0 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
351f1d44429b277ad279e110c88ef06f8d8ba37e | 721 | py | Python | ocd_backend/secrets.default.py | fletcher91/open-raadsinformatie | a1626453e43ffb6c2aef17587f611ad622a8d9a9 | [
"CC-BY-4.0"
] | 1 | 2021-03-16T19:17:30.000Z | 2021-03-16T19:17:30.000Z | ocd_backend/secrets.default.py | fletcher91/open-raadsinformatie | a1626453e43ffb6c2aef17587f611ad622a8d9a9 | [
"CC-BY-4.0"
] | null | null | null | ocd_backend/secrets.default.py | fletcher91/open-raadsinformatie | a1626453e43ffb6c2aef17587f611ad622a8d9a9 | [
"CC-BY-4.0"
] | null | null | null | # This file is used to store password secrets which are not to be committed
# to git. The key has to match one of the sources in the ocd_backend/sources
# directory. If there a secret is required but not supplied in this file, the
# program will fail. A secret like gegevensmagazijn will also match postfixes
# like gegevensmagazijn-moties. The most specific secret is used, so secrets
# like gegevensmagazijn-a will match gegevensmagazijn-a-moties.
#
# Example:
# SECRETS = {
# 'gegevensmagazijn': (
# "some_user",
# "some_password",
# ),
# 'gegevensmagazijn-a': (
# "some_user",
# "some_password",
# ),
# }
# Placeholder entry: replace <ID>/<USERNAME>/<PASSWORD> with real values in a
# local, untracked copy of this file (see the explanation above).
SECRETS = {
    '<ID>': (
        "<USERNAME>",
        "<PASSWORD>",
    ),
}
| 27.730769 | 77 | 0.65742 | 91 | 721 | 5.153846 | 0.527473 | 0.127932 | 0.051173 | 0.085288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.231623 | 721 | 25 | 78 | 28.84 | 0.84657 | 0.837725 | 0 | 0 | 0 | 0 | 0.244898 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
3534920018d2e437be9d99c660408b8891b56367 | 520 | py | Python | tests/r/test_pneumon.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 199 | 2017-07-24T01:34:27.000Z | 2022-01-29T00:50:55.000Z | tests/r/test_pneumon.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 46 | 2017-09-05T19:27:20.000Z | 2019-01-07T09:47:26.000Z | tests/r/test_pneumon.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 45 | 2017-07-26T00:10:44.000Z | 2022-03-16T20:44:59.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.pneumon import pneumon
def test_pneumon():
  """Test module pneumon.py by downloading
   pneumon.csv and testing shape of
   extracted data has 3470 rows and 15 columns
  """
  test_path = tempfile.mkdtemp()
  x_train, metadata = pneumon(test_path)
  try:
    assert x_train.shape == (3470, 15)
  except AssertionError:
    # Clean up the temp dir, then re-raise with a bare `raise`.  The original
    # `raise()` raised an empty tuple (a TypeError in Python 3), masking the
    # actual assertion failure; the bare `except:` also swallowed unrelated
    # errors such as KeyboardInterrupt.
    shutil.rmtree(test_path)
    raise
| 21.666667 | 46 | 0.755769 | 72 | 520 | 5.180556 | 0.569444 | 0.080429 | 0.128686 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028103 | 0.178846 | 520 | 23 | 47 | 22.608696 | 0.845433 | 0.219231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 1 | 0.066667 | false | 0 | 0.466667 | 0 | 0.533333 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
35380d78049f0487bb0c82853fb6090d2f10b02f | 495 | py | Python | source/jwk.py | janmojzis/acmeshell | a972706906263ebbc0cdf3ad205bbe2645937836 | [
"CC0-1.0"
] | 5 | 2018-07-25T14:10:52.000Z | 2021-12-04T21:36:15.000Z | source/jwk.py | janmojzis/acmeshell | a972706906263ebbc0cdf3ad205bbe2645937836 | [
"CC0-1.0"
] | 1 | 2018-06-21T10:21:48.000Z | 2018-06-21T10:21:48.000Z | source/jwk.py | janmojzis/acmeshell | a972706906263ebbc0cdf3ad205bbe2645937836 | [
"CC0-1.0"
] | 1 | 2019-11-25T17:19:09.000Z | 2019-11-25T17:19:09.000Z | from tobase64 import tobase64
from tobytes import tobytes
import hashlib
def jwk(e, n):
    """Build a JSON Web Key mapping from an RSA exponent and modulus."""
    encoded_e = tobase64(e)
    encoded_n = tobase64(n)
    return {"e": encoded_e, "kty": "RSA", "n": encoded_n}
def jwkthumb(e, n):
    """Return the SHA-256 JSON Web Key thumbprint for an RSA exponent and modulus."""
    # Canonical JWK JSON (lexicographic member order, no whitespace) per RFC 7638.
    canonical = '{"e":"%s","kty":"RSA","n":"%s"}' % (tobase64(e), tobase64(n))
    digest = hashlib.sha256(tobytes(canonical)).digest()
    return tobase64(digest)
| 23.571429 | 75 | 0.565657 | 64 | 495 | 4.375 | 0.390625 | 0.092857 | 0.071429 | 0.128571 | 0.178571 | 0 | 0 | 0 | 0 | 0 | 0 | 0.055402 | 0.270707 | 495 | 20 | 76 | 24.75 | 0.720222 | 0.222222 | 0 | 0 | 0 | 0 | 0.115385 | 0.091716 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.375 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
353e14d9658722579711ef0ef0ac7ca3c4704db5 | 270 | py | Python | intents/helpers/logging.py | dario-chiappetta/dialogflow_agents | ecb03bdce491a3c9d6769816507f3027fd5a60d1 | [
"Apache-2.0"
] | 6 | 2021-06-24T12:22:21.000Z | 2021-07-21T21:06:19.000Z | intents/helpers/logging.py | dario-chiappetta/dialogflow_agents | ecb03bdce491a3c9d6769816507f3027fd5a60d1 | [
"Apache-2.0"
] | 27 | 2021-06-05T10:41:08.000Z | 2021-11-01T17:29:38.000Z | intents/helpers/logging.py | dariowho/intents | ecb03bdce491a3c9d6769816507f3027fd5a60d1 | [
"Apache-2.0"
] | null | null | null | import json
class jsondict(dict):
    """dict subclass whose string form is pretty-printed JSON.

    Handy in logs: interpolating an instance into a message yields an
    indented JSON document instead of the default dict rendering.
    """

    def __str__(self):
        # Two-space indentation keeps multi-line log output readable.
        return json.dumps(self, indent=2)

    def __repr__(self):
        # Delegate so subclasses overriding __str__ stay consistent.
        return self.__str__()
| 19.285714 | 73 | 0.625926 | 36 | 270 | 4.361111 | 0.694444 | 0.127389 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005102 | 0.274074 | 270 | 13 | 74 | 20.769231 | 0.795918 | 0.32963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.166667 | 0.333333 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
101e8f19fec5996b4135ab21dd8ee960a387292b | 829 | py | Python | FUNDASTORE/APPS/PRODUCTOS/models.py | GabrielB-07/FundaStore-cgb | b509a9743a651344b32dd7a40ab789f1db48e54b | [
"CC0-1.0"
] | null | null | null | FUNDASTORE/APPS/PRODUCTOS/models.py | GabrielB-07/FundaStore-cgb | b509a9743a651344b32dd7a40ab789f1db48e54b | [
"CC0-1.0"
] | null | null | null | FUNDASTORE/APPS/PRODUCTOS/models.py | GabrielB-07/FundaStore-cgb | b509a9743a651344b32dd7a40ab789f1db48e54b | [
"CC0-1.0"
] | null | null | null | from django.db import models
# Create your models here.
class Categoria(models.Model):
    """Product category, carrying a per-category decimal tax/rate figure."""
    cat_id = models.AutoField(primary_key=True)  # explicit surrogate primary key
    cat_nombre = models.CharField(max_length=128)  # category display name
    # presumably the ITBMS (sales tax) rate or amount for this category -- TODO confirm
    cat_itbms = models.DecimalField(max_digits=8,decimal_places=2)
    def __str__(self):
        # Shown in the Django admin and anywhere the object is rendered.
        return self.cat_nombre
class Producto(models.Model):
    """Store product: name, price, stock and a link to its Categoria."""
    pro_id = models.AutoField(primary_key=True)  # explicit surrogate primary key
    pro_nombre = models.CharField(max_length=128)  # product display name
    pro_precio = models.DecimalField(max_digits=8,decimal_places=2)  # unit price
    pro_stock = models.IntegerField()  # units currently available
    pro_descripcion = models.TextField(null=True)  # optional long description
    # Deleting a Categoria cascades and deletes its Productos; FK is nullable.
    pro_categoria = models.ForeignKey(to=Categoria,on_delete=models.CASCADE,null=True)
    # Uploads land under MEDIA_ROOT/PRODUCTOS; 'default.png' is used when none given.
    pro_imagen = models.ImageField(upload_to='PRODUCTOS',max_length=128,default='default.png')
    def __str__(self):
        # Shown in the Django admin and anywhere the object is rendered.
        return self.pro_nombre
1020444fe5040aee5d11e10ed482da8714a70dd0 | 84 | py | Python | openbox/utils/fanova/__version__.py | Dee-Why/lite-bo | 804e93b950148fb98b7e52bd56c713edacdb9b6c | [
"BSD-3-Clause"
] | 184 | 2021-06-02T06:35:25.000Z | 2022-03-31T10:33:11.000Z | openbox/utils/fanova/__version__.py | ZongWei-HUST/open-box | 011791aba4e44b20a6544020c73601638886d143 | [
"MIT"
] | 16 | 2021-11-15T11:13:57.000Z | 2022-03-24T12:51:17.000Z | openbox/utils/fanova/__version__.py | ZongWei-HUST/open-box | 011791aba4e44b20a6544020c73601638886d143 | [
"MIT"
] | 24 | 2021-06-18T04:52:57.000Z | 2022-03-30T11:14:03.000Z | # License: MIT
"""
https://github.com/automl/fanova
"""
__version__ = "2.0.20.dev"
| 12 | 32 | 0.642857 | 12 | 84 | 4.166667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054054 | 0.119048 | 84 | 6 | 33 | 14 | 0.621622 | 0.547619 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
102e0e33927ce030021968e9e39203b71da74267 | 169 | py | Python | Draft/Find all the opposites in the array.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | 6 | 2020-09-03T09:32:25.000Z | 2020-12-07T04:10:01.000Z | Draft/Find all the opposites in the array.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | 1 | 2021-12-13T15:30:21.000Z | 2021-12-13T15:30:21.000Z | Draft/Find all the opposites in the array.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | null | null | null | def find_opposites(seq):
check=set()
res=set()
for i in seq:
if -i in check:
res.add(abs(i))
check.add(i)
return sorted(res) | 21.125 | 27 | 0.514793 | 26 | 169 | 3.307692 | 0.576923 | 0.069767 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.35503 | 169 | 8 | 28 | 21.125 | 0.788991 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
102efa23f602a5c990941d58b8b9fa8d80682c83 | 1,354 | py | Python | utils/import_envs.py | mcx-lab/rl-baselines3-zoo | f89938be3b4d9769d9562e7b3d6d1342461adc5c | [
"MIT"
] | null | null | null | utils/import_envs.py | mcx-lab/rl-baselines3-zoo | f89938be3b4d9769d9562e7b3d6d1342461adc5c | [
"MIT"
] | 19 | 2021-08-23T03:21:10.000Z | 2021-11-26T07:36:42.000Z | utils/import_envs.py | mcx-lab/rl-baselines3-zoo | f89938be3b4d9769d9562e7b3d6d1342461adc5c | [
"MIT"
] | null | null | null | try:
import pybullet_envs # pytype: disable=import-error
except ImportError:
pybullet_envs = None
try:
import highway_env # pytype: disable=import-error
except ImportError:
highway_env = None
try:
import neck_rl # pytype: disable=import-error
except ImportError:
neck_rl = None
try:
import mocca_envs # pytype: disable=import-error
except ImportError:
mocca_envs = None
try:
import custom_envs # pytype: disable=import-error
except ImportError:
custom_envs = None
try:
import gym_donkeycar # pytype: disable=import-error
except ImportError:
gym_donkeycar = None
try:
import panda_gym # pytype: disable=import-error
except ImportError:
panda_gym = None
"""Set up gym interface for simulation environments."""
import gym
from gym.envs.registration import registry, make, spec
def register(env_id, *args, **kvargs):
    """Register a gym environment, silently skipping already-registered ids."""
    already_registered = env_id in registry.env_specs
    if already_registered:
        # Registering the same id twice would make gym raise an error.
        return None
    return gym.envs.registration.register(env_id, *args, **kvargs)
# Register the custom blind-walking environments so that, once this module
# is imported, gym.make("A1GymEnv-v0") / gym.make("A1BlindWalkingBulletEnv-v0")
# resolve to the entry points below.
register(
    env_id="A1GymEnv-v0",
    entry_point="blind_walking.envs.gym_envs:A1GymEnv",
    max_episode_steps=1000,
    reward_threshold=1000.0,
)
register(
    env_id="A1BlindWalkingBulletEnv-v0",
    entry_point="blind_walking.envs.gym_envs:A1BlindWalkingBulletEnv",
    max_episode_steps=2000,
    reward_threshold=2000.0,
)
| 21.83871 | 70 | 0.727474 | 174 | 1,354 | 5.477011 | 0.316092 | 0.066107 | 0.139559 | 0.176285 | 0.435467 | 0.387198 | 0.21511 | 0.073452 | 0 | 0 | 0 | 0.021739 | 0.184638 | 1,354 | 61 | 71 | 22.196721 | 0.841486 | 0.149188 | 0 | 0.340426 | 0 | 0 | 0.113866 | 0.103765 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021277 | false | 0 | 0.340426 | 0 | 0.404255 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
1033e36427a4a13f830410dceac3133444553fab | 230 | py | Python | Python/Numpy/np-min-and-max.py | ekant1999/HackerRank | 084d4550b4eaf130837ab26a4efdbcaf8b667cdc | [
"MIT"
] | 9 | 2017-03-19T16:27:31.000Z | 2022-02-17T11:42:21.000Z | Python/Numpy/np-min-and-max.py | ekant1999/HackerRank | 084d4550b4eaf130837ab26a4efdbcaf8b667cdc | [
"MIT"
] | null | null | null | Python/Numpy/np-min-and-max.py | ekant1999/HackerRank | 084d4550b4eaf130837ab26a4efdbcaf8b667cdc | [
"MIT"
] | 6 | 2019-02-18T11:26:24.000Z | 2022-03-21T14:13:15.000Z | import numpy
# NOTE: Python 2 script (raw_input, print statement).
# Read matrix dimensions: n rows, m columns.
n, m = map(int, raw_input().split())
# Collect all n*m values, one input line per row, into a flat list.
a = []
for i in range(n):
    a += map(int, raw_input().split())
a = numpy.array(a)
a = numpy.reshape(a, (n,m))
# Minimum of each row (axis=1), then print the maximum of those minima.
a_min = numpy.min(a, axis = 1)
print numpy.max(a_min)
1061ed7d8a04fbf2ca86dbbc43f9362ac4bebc82 | 1,022 | py | Python | blog/routers/authentication.py | thoeunsopheara/fastapi_crash_course | 5442dcbc552b72d6588db73eb62290d607a641ba | [
"MIT"
] | null | null | null | blog/routers/authentication.py | thoeunsopheara/fastapi_crash_course | 5442dcbc552b72d6588db73eb62290d607a641ba | [
"MIT"
] | null | null | null | blog/routers/authentication.py | thoeunsopheara/fastapi_crash_course | 5442dcbc552b72d6588db73eb62290d607a641ba | [
"MIT"
] | null | null | null |
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from .. import schemas, database, models
from ..hashing import Hash
from ..token import create_access_token
get_db = database.get_db
routers = APIRouter(tags=['authentication'])
@routers.post('/login')
def login(request: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db)):
    """Authenticate a user and return a bearer access token.

    The OAuth2 form's ``username`` field is matched against ``User.email``.
    Raises 404 when no such user exists and 403 when the password does not
    verify against the stored hash; on success the token's ``sub`` claim is
    the user's email.
    """
    user = db.query(models.User).filter(models.User.email == request.username).first()
    if not user:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='invalid credential')
    if not Hash.verify(request.password, user.password):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
    # access_token_expire = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(data={'sub': user.email})
    return {'access_token': access_token, 'token_type': 'bearer'}
| 39.307692 | 95 | 0.772016 | 128 | 1,022 | 5.984375 | 0.453125 | 0.100522 | 0.044386 | 0.073107 | 0.099217 | 0.099217 | 0 | 0 | 0 | 0 | 0 | 0.008949 | 0.125245 | 1,022 | 25 | 96 | 40.88 | 0.847875 | 0.066536 | 0 | 0 | 0 | 0 | 0.072555 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.166667 | 0.388889 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
106a831d0a359c893ad604042389b6a80aad99fd | 2,202 | py | Python | cleanup.py | timfox456/question_generator | 5f8052c99c1d9e7ccca145b554135c382edd14fc | [
"MIT"
] | null | null | null | cleanup.py | timfox456/question_generator | 5f8052c99c1d9e7ccca145b554135c382edd14fc | [
"MIT"
] | null | null | null | cleanup.py | timfox456/question_generator | 5f8052c99c1d9e7ccca145b554135c382edd14fc | [
"MIT"
] | null | null | null | import csv
import re
def _strip_markup(text):
    """Remove anchor tags, </LI><LI> separators and any remaining HTML tags.

    Reproduces the row-sanitisation the original code attempted: anchors
    (and the trailing space after a closing anchor) are deleted, LI
    boundaries become the literal two characters ``\\.``, and every other
    tag is dropped.
    """
    text = re.sub(r'<a.*?>|</a> ', '', text)
    text = re.sub(r'<\/LI><LI>', r'\.', text)
    text = re.sub(r'<.*?>', '', text)
    return text


def cleanup(input_row):
    """Render one product data row as a human-readable, HTML-free description.

    :param input_row: sequence of at least 22 string fields describing a product
    :return: multi-line description string, one attribute per line
    """
    # Sanitise every field once, up front.  The original loop rebound the
    # loop variable (``for i in input_row: i = re.sub(...)``), which never
    # modified the list, so raw HTML leaked into most output lines.  Working
    # on a fresh list also stops the function mutating the caller's row.
    row = [_strip_markup(field) for field in input_row]

    return_str = ""
    return_str += "The product Name is " + row[1] + ".\n"
    return_str += "The product category is" + row[2] + ".\n"
    return_str += "The price is " + row[3] + ".\n"
    return_str += "The average customer rating is " + row[4] + ".\n"
    for item in row[5].split(','):
        return_str += item + ".\n"
    return_str += "The color is " + row[6] + ".\n"
    return_str += "The color group is " + row[7] + ".\n"
    return_str += "The size is " + row[8] + ".\n"
    return_str += "The gender is " + row[9] + ".\n"
    return_str += "The item type is " + row[11] + ".\n"
    for item in row[12].split(','):
        return_str += "It has " + item + ".\n"
    return_str += "The category is " + row[13] + ".\n"
    return_str += "The condition is " + row[14] + ".\n"
    for item in row[15].split(','):
        return_str += "It is " + item + ".\n"
    return_str += "The item is designed for " + row[16] + ".\n"
    return_str += "The item type is " + row[18] + ".\n"
    return_str += "The item type is " + row[19] + ".\n"
    return_str += "The fit is " + row[20] + ".\n"
    for item in row[21].split('.'):
        return_str += "It is " + item + ".\n"
    return return_str
| 38.631579 | 75 | 0.485922 | 328 | 2,202 | 3.115854 | 0.17378 | 0.203523 | 0.093933 | 0.178082 | 0.548924 | 0.434442 | 0.330724 | 0.330724 | 0.330724 | 0.239726 | 0 | 0.024149 | 0.266576 | 2,202 | 56 | 76 | 39.321429 | 0.608669 | 0 | 0 | 0.26087 | 0 | 0 | 0.228286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021739 | false | 0 | 0.043478 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
10a26a3cfd70fe2c9c23fe0cb12510c2faa0329c | 1,160 | py | Python | apis_core/apis_vis/api_views.py | sviatoplok/apis-core | c23718af2a51598e32684b9b954b594ceef1f0f7 | [
"MIT"
] | 1 | 2019-09-02T09:14:06.000Z | 2019-09-02T09:14:06.000Z | apis_core/apis_vis/api_views.py | sviatoplok/apis-core | c23718af2a51598e32684b9b954b594ceef1f0f7 | [
"MIT"
] | null | null | null | apis_core/apis_vis/api_views.py | sviatoplok/apis-core | c23718af2a51598e32684b9b954b594ceef1f0f7 | [
"MIT"
] | null | null | null | from django.contrib.contenttypes.models import ContentType
from rest_framework.generics import ListAPIView
from .serializers import *
from apis_core.apis_relations.models import PersonInstitution
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
class GetVisJson(ListAPIView):
    """Serve visualisation data for the relation type named in the URL.

    The ``vis`` query parameter selects how the queryset is serialised:
    ``av-age`` uses VisAgeSerializer, ``avg-relations`` uses AvRelations,
    anything else yields no serializer.
    """
    filter_backends = (DjangoFilterBackend, filters.SearchFilter)
    depth = 2

    # TODO: add a generic filter thing
    def get_serializer(self, instance=None, data=None, many=False, partial=False):
        """Return the serializer implied by the ``vis`` query parameter, or None."""
        vis = self.request.query_params.get('vis', None)
        if vis == 'av-age':
            return VisAgeSerializer(self.get_queryset(), many=False)
        elif vis == 'avg-relations':
            return AvRelations(self.get_queryset(), many=False)
        else:
            return None

    def get_queryset(self, **kwargs):
        """Resolve the apis_relations model named by the ``relation`` URL kwarg
        and return all of its rows."""
        relation = self.kwargs['relation'].lower()
        relation_model = ContentType.objects.get(
            app_label='apis_relations', model=relation).model_class()
        # Debug print removed: it wrote to stdout on every request.
        queryset = relation_model.objects.all()
        return queryset
| 37.419355 | 82 | 0.706897 | 133 | 1,160 | 6.030075 | 0.481203 | 0.054863 | 0.042394 | 0.047382 | 0.05985 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001076 | 0.199138 | 1,160 | 30 | 83 | 38.666667 | 0.862217 | 0.027586 | 0 | 0 | 0 | 0 | 0.056838 | 0 | 0 | 0 | 0 | 0.033333 | 0 | 1 | 0.083333 | false | 0 | 0.25 | 0 | 0.625 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
10b3c9f6423784a65ff34c0db0d900171ca89cc9 | 321 | py | Python | depricated/update_weights.py | uglyboxer/learn_nums | 8f8f4595b75e0978df54bf7b21f273560de0e119 | [
"Unlicense"
] | null | null | null | depricated/update_weights.py | uglyboxer/learn_nums | 8f8f4595b75e0978df54bf7b21f273560de0e119 | [
"Unlicense"
] | null | null | null | depricated/update_weights.py | uglyboxer/learn_nums | 8f8f4595b75e0978df54bf7b21f273560de0e119 | [
"Unlicense"
] | null | null | null | def update_weights(weights, error, l_rate, vector):
"""Takes in a given weight, error, learning rate, and vector element
Retuns updated weight
"""
return [weight + (elem * l_rate * error) for elem, weight in zip(vector, weights)]
if __name__ == '__main__':
print(update_weights((.1, .1, .2), -6, .1, (.5, .5, .5)))
| 35.666667 | 83 | 0.669782 | 49 | 321 | 4.142857 | 0.591837 | 0.128079 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02974 | 0.161994 | 321 | 8 | 84 | 40.125 | 0.724907 | 0.271028 | 0 | 0 | 0 | 0 | 0.035556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0 | 0.5 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
10beb94e98eade865dc1a805ab56c4818b645bbf | 621 | py | Python | tests/test_compat.py | sarah-vanderlaan/pyjwt | b65e1ac6dc4d11801f3642eaab34ae6a54162c18 | [
"MIT"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | tests/test_compat.py | sarah-vanderlaan/pyjwt | b65e1ac6dc4d11801f3642eaab34ae6a54162c18 | [
"MIT"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | tests/test_compat.py | sarah-vanderlaan/pyjwt | b65e1ac6dc4d11801f3642eaab34ae6a54162c18 | [
"MIT"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | from jwt.compat import constant_time_compare
from jwt.utils import force_bytes
class TestCompat:
def test_constant_time_compare_returns_true_if_same(self):
assert constant_time_compare(
force_bytes('abc'), force_bytes('abc')
)
def test_constant_time_compare_returns_false_if_diff_lengths(self):
assert not constant_time_compare(
force_bytes('abc'), force_bytes('abcd')
)
def test_constant_time_compare_returns_false_if_totally_different(self):
assert not constant_time_compare(
force_bytes('abcd'), force_bytes('efgh')
)
| 31.05 | 76 | 0.713366 | 79 | 621 | 5.126582 | 0.367089 | 0.207407 | 0.328395 | 0.140741 | 0.622222 | 0.622222 | 0.540741 | 0.540741 | 0 | 0 | 0 | 0 | 0.214171 | 621 | 19 | 77 | 32.684211 | 0.829918 | 0 | 0 | 0.133333 | 0 | 0 | 0.033816 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | false | 0 | 0.133333 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
52bb79d07a96b79b2e4b1cc343749e3e44d8504a | 205 | py | Python | PYTHON_VERSION/CC_PostProc.py | Cocytee/CamoGen | 750e92e5e9d1467dbd2e5c9fe0ea16d6d4861a92 | [
"Unlicense"
] | 1 | 2021-04-22T05:40:16.000Z | 2021-04-22T05:40:16.000Z | PYTHON_VERSION/CC_Settings.py | Cocytee/CamoGen | 750e92e5e9d1467dbd2e5c9fe0ea16d6d4861a92 | [
"Unlicense"
] | null | null | null | PYTHON_VERSION/CC_Settings.py | Cocytee/CamoGen | 750e92e5e9d1467dbd2e5c9fe0ea16d6d4861a92 | [
"Unlicense"
] | null | null | null | # Camo setting and processign settings
#processing setting
#resolution
#smoothing
#saturation
#noise
#etc..
#camo setting
#Xshift
#Yshift
#etc..
#save settings
#saveloc
#savetype
| 12.8125 | 39 | 0.678049 | 21 | 205 | 6.619048 | 0.761905 | 0.158273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.234146 | 205 | 15 | 40 | 13.666667 | 0.88535 | 0.731707 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
52c88de14e72b0248fe2d1834a044b1a2d3d96c2 | 5,657 | py | Python | CODE/corpus-prep/manage_name_entity.py | pegahani/REUS | 3de7505fcf971333e294c490371d7b429d8fc9d0 | [
"MIT"
] | null | null | null | CODE/corpus-prep/manage_name_entity.py | pegahani/REUS | 3de7505fcf971333e294c490371d7b429d8fc9d0 | [
"MIT"
] | null | null | null | CODE/corpus-prep/manage_name_entity.py | pegahani/REUS | 3de7505fcf971333e294c490371d7b429d8fc9d0 | [
"MIT"
] | null | null | null | import xml.etree.ElementTree as ET
from meeting import meeting
class manage_name_enity(meeting):
    """Extract named-entity annotations from a meeting's manual ('MAN') files."""

    def get_entities_type(self):
        """Parse the named-entity type ontology and cache its XML root
        on ``self.root_entity_types`` (manual corpus only)."""
        if self.corpus_type == 'MAN':
            schema = "../../AMI/ami_public_manual/ontologies/ne-types.xml"
            tree = ET.parse(schema)
            # a root for entity types
            self.root_entity_types = tree.getroot()
        return

    def get_entities_roots(self):
        """Return {participant id: XML root of that participant's .ne.xml file}.

        Participants without a parsed file (non-'MAN' corpus) map to None.
        """
        file_name = self.get_file_name()
        participants = list(self.get_participants().keys())
        entities_roots = dict.fromkeys(participants)
        if self.corpus_type == 'MAN':
            for key in participants:
                path = self.file + "/namedEntities/" + file_name + '.' + key + ".ne.xml"
                entities_roots[key] = ET.parse(path).getroot()
        return entities_roots

    def find_entity_type(self, entity_id):
        """
        this function get the entity id and finds its name in ne-types.xml file
        :param entity_id: nite id of the entity type to look up
        :return: the 'name' attribute of the matching ne-type, or None
        """
        # TODO we can get other information from entity type files. it depends what we need exactly
        # iter() walks <ne-type> elements at every depth in document order,
        # replacing the original four hand-unrolled nesting levels and
        # generalising to arbitrarily deep ontologies.
        for node in self.root_entity_types.iter('ne-type'):
            if node.attrib['{http://nite.sourceforge.net/}id'] == entity_id:
                return node.attrib['name']
        return

    def get_entity_list(self, speaker):
        """Write '<phrase> : <entity type>' lines for *speaker* to a text file."""
        out_path = './manuel_corpus_name_entities/' + self.meeting + '.' + speaker + '.txt'
        root_entities_speaker = self.get_entities_roots()[speaker]
        root_words_speaker = self.get_roots_words()[speaker]
        # `with` guarantees the file is closed even if a lookup below raises
        # (the original `open`/`close` pair leaked the handle on error).
        with open(out_path, 'w') as out:
            for child in root_entities_speaker:
                href = child[0].attrib['href']
                # Entity-type id sits between '(' and the trailing ')'.
                entity_type = href[href.find('(') + 1:-1]
                words_included = child[1].attrib['href']
                (start, stop) = self.get_words_interval_for_speaker(words_included)
                if stop != None:
                    out.write(self.get_word_interval(start, stop, root_words_speaker) + ' : ' + self.find_entity_type(entity_type) + '\n')
                else:
                    out.write(self.get_Word(start, root_words_speaker, speaker) + ' : ' + self.find_entity_type(entity_type) + '\n')
        return
# ex = manage_name_enity('MAN', 'meet_2')
# ex.initializations()
# ex.get_meeting_list()
#
meeting_list = ['meet_1', 'meet_2', 'meet_3', 'meet_4', 'meet_5', 'meet_6', 'meet_7', 'meet_8', 'meet_10', 'meet_11', 'meet_12', 'meet_13', 'meet_14', 'meet_15', 'meet_16', 'meet_17', 'meet_18', 'meet_19', 'meet_20', 'meet_21', 'meet_22', 'meet_23', 'meet_25', 'meet_26', 'meet_27', 'meet_28', 'meet_29', 'meet_30', 'meet_31', 'meet_32', 'meet_33', 'meet_34', 'meet_35', 'meet_36', 'meet_37', 'meet_38', 'meet_39', 'meet_40', 'meet_41', 'meet_42', 'meet_43', 'meet_44', 'meet_45', 'meet_46', 'meet_47', 'meet_48', 'meet_49', 'meet_50', 'meet_51', 'meet_51b', 'meet_52', 'meet_53', 'meet_54', 'meet_55', 'meet_56', 'meet_57', 'meet_58', 'meet_59', 'meet_60', 'meet_61', 'meet_62', 'meet_63', 'meet_64', 'meet_65', 'meet_66', 'meet_67', 'meet_68', 'meet_69', 'meet_70', 'meet_71', 'meet_72', 'meet_73', 'meet_74', 'meet_75', 'meet_76', 'meet_77', 'meet_78', 'meet_79', 'meet_80', 'meet_81', 'meet_82', 'meet_83', 'meet_84', 'meet_85', 'meet_86', 'meet_87', 'meet_88', 'meet_89', 'meet_90', 'meet_91', 'meet_92', 'meet_93', 'meet_94', 'meet_95', 'meet_96', 'meet_97', 'meet_98', 'meet_99', 'meet_112', 'meet_113', 'meet_114', 'meet_115', 'meet_116', 'meet_117', 'meet_118', 'meet_119', 'meet_120', 'meet_121', 'meet_122', 'meet_123', 'meet_124', 'meet_125', 'meet_126', 'meet_127', 'meet_128', 'meet_129', 'meet_130', 'meet_131', 'meet_132', 'meet_133', 'meet_134', 'meet_135', 'meet_136', 'meet_137', 'meet_138', 'meet_139', 'meet_140', 'meet_141', 'meet_142', 'meet_143', 'meet_144', 'meet_145', 'meet_146', 'meet_147', 'meet_148', 'meet_149', 'meet_150', 'meet_151', 'meet_156', 'meet_157', 'meet_158', 'meet_159', 'meet_160', 'meet_161', 'meet_162', 'meet_163', 'meet_164', 'meet_165', 'meet_166', 'meet_167', 'meet_168', 'meet_169', 'meet_170', 'meet_171', 'meet_172', 'meet_188', 'meet_173', 'meet_174', 'meet_175', 'meet_176', 'meet_177', 'meet_178', 'meet_179', 'meet_180', 'meet_181', 'meet_182', 'meet_183', 'meet_184', 'meet_185', 'meet_186', 'meet_187']
t = meeting_list.index('meet_187')
# print meeting_list[0]
for meeting in meeting_list[t+1:]:
print meeting
ex = manage_name_enity("MAN", meeting)
ex.get_entities_type()
ex.initializations()
ex.get_meeting_list()
ex.get_participants()
ex.get_entities_roots()
for speaker in ex.get_participants().keys():
ex.get_entity_list(speaker) | 52.869159 | 1,962 | 0.610571 | 785 | 5,657 | 4.049682 | 0.351592 | 0.025165 | 0.02202 | 0.018874 | 0.195974 | 0.158855 | 0.089022 | 0.089022 | 0.066373 | 0.052532 | 0 | 0.094881 | 0.223086 | 5,657 | 107 | 1,963 | 52.869159 | 0.628441 | 0.03836 | 0 | 0.136364 | 0 | 0 | 0.298527 | 0.015295 | 0 | 0 | 0 | 0.009346 | 0 | 0 | null | null | 0 | 0.030303 | null | null | 0.015152 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
52cb75a6fb0baab6ecea231a0130accf41e80359 | 768 | py | Python | test/test_odt_brf_synopsis.py | peter88213/yWrestler | 32a152e8b814983ecaef6e9861df1d6b7008568d | [
"MIT"
] | null | null | null | test/test_odt_brf_synopsis.py | peter88213/yWrestler | 32a152e8b814983ecaef6e9861df1d6b7008568d | [
"MIT"
] | null | null | null | test/test_odt_brf_synopsis.py | peter88213/yWrestler | 32a152e8b814983ecaef6e9861df1d6b7008568d | [
"MIT"
] | null | null | null | """Integration tests for the pyWriter project.
Test the odt brief synopsis.
For further information see https://github.com/peter88213/PyWriter
Published under the MIT License (https://opensource.org/licenses/mit-license.php)
"""
from pywriter.odt.odt_brief_synopsis import OdtBriefSynopsis
from pywriter.test.export_test import ExportTest
import unittest
class NrmOpr(ExportTest, unittest.TestCase):
    """Normal-operation test case for the ODT brief-synopsis export."""

    # Export class under test, consumed by the ExportTest mixin.
    _exportClass = OdtBriefSynopsis

    # The test methods must be defined here to identify the source of failure.
    def test_yw7_to_exp(self):
        # Delegates to the shared yw7 -> export scenario in ExportTest.
        super().test_yw7_to_exp()

    def test_yw7_to_exp_ui(self):
        # Same scenario, exercised through the UI code path.
        super().test_yw7_to_exp_ui()
def main():
    """Run the unittest test runner for this module."""
    unittest.main()


if __name__ == '__main__':
    main()
| 25.6 | 82 | 0.714844 | 101 | 768 | 5.178218 | 0.534653 | 0.053537 | 0.068834 | 0.091778 | 0.145315 | 0.080306 | 0 | 0 | 0 | 0 | 0 | 0.014658 | 0.200521 | 768 | 29 | 83 | 26.482759 | 0.837134 | 0.386719 | 0 | 0 | 0 | 0 | 0.018391 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.230769 | 0 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
52dd6a557d3d16e969524b0a507af135081d6741 | 1,447 | py | Python | var/spack/repos/builtin/packages/r-fit-models/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/r-fit-models/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6 | 2022-01-08T08:41:11.000Z | 2022-03-14T19:28:07.000Z | var/spack/repos/builtin/packages/r-fit-models/package.py | foeroyingur/spack | 5300cbbb2e569190015c72d0970d25425ea38647 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RFitModels(RPackage):
    """Compare Fitted Models

    The fit.models function and its associated methods (coefficients, print,
    summary, plot, etc.) were originally provided in the robust package to
    compare robustly and classically fitted model objects. See chapters 2, 3,
    and 5 in Insightful (2002) 'Robust Library User's Guide'
    <https://robust.r-forge.r-project.org/Robust.pdf>). The aim of the
    fit.models package is to separate this fitted model object comparison
    functionality from the robust package and to extend it to support fitting
    methods (e.g., classical, robust, Bayesian, regularized, etc.) more
    generally."""

    # CRAN landing page, source tarball and archive of older releases.
    homepage = "https://cloud.r-project.org/package=fit.models"
    url      = "https://cloud.r-project.org/src/contrib/fit.models_0.5-14.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/fit.models"

    # Known releases, pinned by the sha256 of the source tarball.
    version('0.64', sha256='f70806bfa85a05337fa5a665264d640e307584714a07a329fbe96c86b0e864da')
    version('0.5-14', sha256='93b9d119e97b36c648a19c891fc5e69f5306eb5b9bac16bf377555057afd4b6e')
    version('0.5-13', sha256='7df545fce135159e9abf0a19076628d3ec2999e89f018e142a7a970428823d48')

    # Needed both to build and to run the package.
    depends_on('r-lattice', type=('build', 'run'))
| 46.677419 | 96 | 0.748445 | 186 | 1,447 | 5.806452 | 0.607527 | 0.041667 | 0.040741 | 0.05 | 0.082407 | 0.062963 | 0.062963 | 0.062963 | 0 | 0 | 0 | 0.137429 | 0.145128 | 1,447 | 30 | 97 | 48.233333 | 0.735651 | 0.536973 | 0 | 0 | 0 | 0.111111 | 0.6288 | 0.3072 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
52eddd7473758b1d8a62f5b79b517a892bcf0980 | 7,797 | py | Python | tests/utils/test_el_utils.py | adolnik/oozie-to-airflow | e4eed9098fb91c234a2c9c6505ca84a54e6a9e32 | [
"Apache-2.0"
] | 61 | 2019-05-23T14:41:53.000Z | 2022-03-03T11:33:38.000Z | tests/utils/test_el_utils.py | adolnik/oozie-to-airflow | e4eed9098fb91c234a2c9c6505ca84a54e6a9e32 | [
"Apache-2.0"
] | 505 | 2019-05-20T15:21:09.000Z | 2022-03-01T23:10:31.000Z | tests/utils/test_el_utils.py | adolnik/oozie-to-airflow | e4eed9098fb91c234a2c9c6505ca84a54e6a9e32 | [
"Apache-2.0"
] | 33 | 2019-05-23T01:30:47.000Z | 2022-03-28T10:25:09.000Z | # -*- coding: utf-8 -*-
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests EL utils"""
import tempfile
import unittest
import unittest.mock
from parameterized import parameterized
from o2a.converter.exceptions import ParseException
from o2a.utils import el_utils
from o2a.utils.el_utils import normalize_path, escape_string_with_python_escapes, replace_url_el
# pylint: disable=too-many-public-methods
from o2a.o2a_libs.property_utils import PropertySet
class TestELUtils(unittest.TestCase):
    """Unit tests for o2a.utils.el_utils: EL property extraction, HDFS path
    normalization, URL EL replacement and Python string escaping."""

    def test_parse_els_no_file(self):
        # No property file at all -> an empty mapping is returned.
        expected_properties = {}
        props = PropertySet(job_properties={"key": "value"}, config={}, action_node_properties={})
        self.assertEqual(expected_properties, el_utils.extract_evaluate_properties(None, props=props))

    def test_parse_els_file(self):
        # '#' comment lines are skipped; plain key=value pairs survive as-is.
        prop_file = tempfile.NamedTemporaryFile("w", delete=False)
        prop_file.write("#comment\n" "key=value")
        prop_file.close()
        job_properties = {"test": "answer"}
        props = PropertySet(job_properties=job_properties, config={}, action_node_properties={})
        expected = {"key": "value"}
        self.assertEqual(expected, el_utils.extract_evaluate_properties(prop_file.name, props=props))

    def test_parse_els_file_list(self):
        # Should remain unchanged, as the conversion from a comma-separated string to a List will
        # occur before writing to file.
        prop_file = tempfile.NamedTemporaryFile("w", delete=False)
        prop_file.write("#comment\n" "key=value,value2,${test}")
        prop_file.close()
        job_properties = {"test": "answer"}
        props = PropertySet(config={}, job_properties=job_properties, action_node_properties={})
        expected = {"key": "value,value2,answer"}
        self.assertEqual(expected, el_utils.extract_evaluate_properties(prop_file.name, props=props))

    def test_parse_els_multiple_line_with_back_references(self):
        # Should remain unchanged, as the conversion from a comma-separated string to a List will
        # occur before writing to file.
        prop_file = tempfile.NamedTemporaryFile("w", delete=False)
        prop_file.write(
            """
            #comment
            key=value,value2,${test}
            key2=value
            key3=refer${key2}
            key4=refer${key5}
            key5=test
            """
        )
        prop_file.close()
        job_properties = {"test": "answer"}
        props = PropertySet(config={}, job_properties=job_properties, action_node_properties={})
        # Back-references to *earlier* keys are resolved; forward references stay literal.
        expected = {
            "key": "value,value2,answer",
            "key2": "value",
            "key3": "refervalue",
            "key4": "refer${key5}",  # no forward-references
            "key5": "test",
        }
        self.assertEqual(expected, el_utils.extract_evaluate_properties(prop_file.name, props=props))

    @parameterized.expand(
        [
            ("${nameNode}/examples/output-data/demo/pig-node", "/examples/output-data/demo/pig-node"),
            ("${nameNode}/examples/output-data/demo/pig-node2", "/examples/output-data/demo/pig-node2"),
            ("hdfs:///examples/output-data/demo/pig-node2", "/examples/output-data/demo/pig-node2"),
        ]
    )
    def test_normalize_path_green_path(self, oozie_path, expected_result):
        # EL-prefixed and hdfs:// paths normalize to the bare HDFS path.
        cluster = "my-cluster"
        region = "europe-west3"
        job_properties = {"nameNode": "hdfs://localhost:8020"}
        config = {"dataproc_cluster": cluster, "gcp_region": region}
        result = normalize_path(oozie_path, props=PropertySet(config=config, job_properties=job_properties))
        self.assertEqual(expected_result, result)

    @parameterized.expand(
        [
            ("${nameNode}/examples/output-data/demo/pig-node", "/examples/output-data/demo/pig-node"),
            ("${nameNode}/examples/output-data/demo/pig-node2", "/examples/output-data/demo/pig-node2"),
            ("hdfs:///examples/output-data/demo/pig-node2", "/examples/output-data/demo/pig-node2"),
            ("/examples/output-data/demo/pig-node", "/examples/output-data/demo/pig-node"),
        ]
    )
    def test_normalize_path_with_allow_no_schema(self, oozie_path, expected_result):
        # With allow_no_schema=True a schema-less absolute path is also accepted.
        cluster = "my-cluster"
        region = "europe-west3"
        job_properties = {"nameNode": "hdfs://localhost:8020"}
        config = {"dataproc_cluster": cluster, "gcp_region": region}
        result = normalize_path(
            oozie_path, props=PropertySet(config=config, job_properties=job_properties), allow_no_schema=True
        )
        self.assertEqual(expected_result, result)

    @parameterized.expand(
        [
            ("${nameNode_1}/examples/output-data/demo/pig-node",),
            ("/examples/output-data/demo/pig-node",),
            ("http:///examples/output-data/demo/pig-node2",),
        ]
    )
    def test_normalize_path_red_path(self, oozie_path):
        # Unknown EL variables, schema-less paths and non-hdfs schemas must raise.
        cluster = "my-cluster"
        region = "europe-west3"
        job_properties = {"nameNode": "hdfs://localhost:8020"}
        config = {"dataproc_cluster": cluster, "gcp_region": region}
        with self.assertRaisesRegex(ParseException, "Unknown path format. "):
            normalize_path(oozie_path, props=PropertySet(config=config, job_properties=job_properties))

    @parameterized.expand(
        [("http:///examples/output-data/demo/pig-node2",), ("ftp:///examples/output-data/demo/pig-node2",)]
    )
    def test_normalize_path_red_path_allowed_no_schema(self, oozie_path):
        # Even with allow_no_schema=True, http/ftp schemas are rejected.
        cluster = "my-cluster"
        region = "europe-west3"
        job_properties = {"nameNode": "hdfs://localhost:8020"}
        config = {"dataproc_cluster": cluster, "gcp_region": region}
        with self.assertRaisesRegex(ParseException, "Unknown path format. "):
            normalize_path(
                oozie_path,
                props=PropertySet(config=config, job_properties=job_properties),
                allow_no_schema=True,
            )

    @parameterized.expand(
        [
            (
                "${nameNode}/examples/output-data/demo/pig-node",
                "{{nameNode}}/examples/output-data/demo/pig-node",
            ),
            (
                "${nameNode}/examples/output-data/demo/pig-node2",
                "{{nameNode}}/examples/output-data/demo/pig-node2",
            ),
            ("hdfs:///examples/output-data/demo/pig-node2", "hdfs:///examples/output-data/demo/pig-node2"),
        ]
    )
    def test_replace_url_el_green_path(self, oozie_url, expected_result):
        # Oozie EL ${var} is rewritten to Jinja {{var}}; plain URLs pass through.
        cluster = "my-cluster"
        region = "europe-west3"
        job_properties = {"nameNode": "hdfs://localhost:8020"}
        config = {"dataproc_cluster": cluster, "gcp_region": region}
        result = replace_url_el(oozie_url, props=PropertySet(config=config, job_properties=job_properties))
        self.assertEqual(expected_result, result)

    @parameterized.expand(
        [
            ("test", "'test'"),
            ("ą", "'\\xc4\\x85'"),
            ("'", "'\\''"),
            (
                "This string is \" replaced with 'Escaped one'",
                "'This string is \" replaced with \\'Escaped one\\''",
            ),
            ('"', "'\"'"),
        ]
    )
    def test_escape_python_string(self, input_string, expected_string):
        # Single quotes and non-ASCII bytes are escaped; double quotes are kept.
        self.assertEqual(expected_string, escape_string_with_python_escapes(input_string))
| 42.606557 | 109 | 0.647685 | 889 | 7,797 | 5.498313 | 0.212598 | 0.066489 | 0.092062 | 0.11252 | 0.725655 | 0.703969 | 0.696604 | 0.66653 | 0.652414 | 0.637889 | 0 | 0.011981 | 0.218546 | 7,797 | 182 | 110 | 42.840659 | 0.790251 | 0.112736 | 0 | 0.394161 | 0 | 0 | 0.257542 | 0.159529 | 0 | 0 | 0 | 0 | 0.072993 | 1 | 0.072993 | false | 0 | 0.058394 | 0 | 0.138686 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
52f5fd554efe1d9dc4814f2a802926ad49c6dc96 | 284 | py | Python | style_transfer/layer.py | haonguyen1107/style_transfer | 8df9b20ce8ebc446cf2c0a67393001b3cf318fed | [
"MIT"
] | null | null | null | style_transfer/layer.py | haonguyen1107/style_transfer | 8df9b20ce8ebc446cf2c0a67393001b3cf318fed | [
"MIT"
] | 6 | 2021-05-21T16:38:24.000Z | 2022-02-10T02:01:14.000Z | style_transfer/layer.py | haonguyen1107/style_transfer | 8df9b20ce8ebc446cf2c0a67393001b3cf318fed | [
"MIT"
] | null | null | null | import numpy as np
import tensorflow as tf
def upsample_nearest(inputs, scale):
shape = tf.shape(input=inputs)
n, h, w, c = shape[0], shape[1], shape[2], shape[3]
return tf.image.resize(inputs, tf.stack([h*scale, w*scale]), method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) | 35.5 | 111 | 0.707746 | 47 | 284 | 4.234043 | 0.595745 | 0.070352 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016461 | 0.144366 | 284 | 8 | 111 | 35.5 | 0.802469 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
52f6af1720ba0dec0d1ab65abeda14c9c6638dc8 | 4,827 | py | Python | securedblog/settings.py | Drop-G/SecuredBlog | 7ebe7da91bb744486ccf48b7d86ff7603669c4db | [
"MIT"
] | null | null | null | securedblog/settings.py | Drop-G/SecuredBlog | 7ebe7da91bb744486ccf48b7d86ff7603669c4db | [
"MIT"
] | null | null | null | securedblog/settings.py | Drop-G/SecuredBlog | 7ebe7da91bb744486ccf48b7d86ff7603669c4db | [
"MIT"
] | null | null | null | """
Django settings for securedblog project.
Generated by 'django-admin startproject' using Django 3.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import django_heroku
import os
import environ

# django-environ: read settings from environment variables / a .env file.
env = environ.Env(
    # set casting, default value
    DEBUG=(bool, False)
)
# reading .env file
environ.Env.read_env()

# False if not in os.environ
# NOTE(review): DEBUG is unconditionally reassigned to True further down in
# this file, so this env-derived value is discarded - confirm intent.
DEBUG = env('DEBUG')

# Raises django's ImproperlyConfigured exception if SECRET_KEY not in os.environ
SECRET_KEY = env('SECRET_KEY')

# Parse database connection url strings like psql://user:pass@127.0.0.1:8458/db
# NOTE(review): DATABASES is redefined to a local SQLite config later in this
# file, overriding this env-based configuration - confirm which one is intended.
DATABASES = {
    # read os.environ['DATABASE_URL'] and raises ImproperlyConfigured exception if not found
    'default': env.db(),
    # read os.environ['SQLITE_URL']
    'extra': env.db('SQLITE_URL', default='sqlite:////tmp/my-tmp-sqlite.db')
}

CACHES = {
    # read os.environ['CACHE_URL'] and raises ImproperlyConfigured exception if not found
    'default': env.cache(),
    # read os.environ['REDIS_URL']
    'redis': env.cache('REDIS_URL')
}
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): this overrides the env-controlled DEBUG assigned earlier;
# with the herokuapp host below this looks production-bound - confirm.
DEBUG = True

ALLOWED_HOSTS = ["secured-blog.herokuapp.com"]

# Application definition
INSTALLED_APPS = [
    'blog.apps.BlogConfig',
    'users.apps.UsersConfig',
    'crispy_forms',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'fontawesome-free',
    'storages',
]
# Standard Django middleware stack, in request-processing order.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'securedblog.urls'

# Template engine: app-directory templates only (DIRS is empty).
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'securedblog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# NOTE(review): this overrides the env-based DATABASES defined earlier in the
# file with a local SQLite database - confirm which configuration should win.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'staticfiles')

# django-crispy-forms template pack.
CRISPY_TEMPLATE_PACK = 'bootstrap4'

# User-uploaded media files.
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
MEDIA_URL = '/media/'

# Auth redirects (named URL patterns).
LOGIN_REDIRECT_URL = 'security-home'
LOGIN_URL = 'login'

# Disabled S3 media storage configuration, kept for reference.
# AWS_ACCESS_KEY_ID = env('AWS_ACCESS_KEY_ID')
# AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY')
# AWS_STORAGE_BUCKET_NAME = env('AWS_STORAGE_BUCKET_NAME')
# AWS_DEFAULT_ACL = 'public-read'
# AWS_S3_FILE_OVERWRITE = False
# AWS_DEFAULT_ACL = None
# DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# AWS_S3_REGION_NAME = 'us-east-2'
# AWS_S3_SIGNATURE_VERSION = 's3v4'
# AWS_QUERYSTRING_AUTH=False

# SMTP email via Gmail; credentials come from the environment.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_HOST_USER = env('EMAIL_USER')
EMAIL_HOST_PASSWORD = env('EMAIL_PASS')
django_heroku.settings(locals()) | 26.377049 | 92 | 0.712036 | 585 | 4,827 | 5.707692 | 0.37094 | 0.058401 | 0.046122 | 0.052411 | 0.175801 | 0.175801 | 0.116502 | 0.116502 | 0.072477 | 0.036538 | 0 | 0.012039 | 0.156826 | 4,827 | 183 | 93 | 26.377049 | 0.808354 | 0.368345 | 0 | 0.020202 | 1 | 0 | 0.483882 | 0.365238 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.060606 | 0.040404 | 0 | 0.040404 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
5e000b0b38aee109135c65234832568d1fd44b1b | 518 | py | Python | estampes/tools/__init__.py | jbloino/estampes | 39d248cba3541f684c4684661a1664fc42157bdb | [
"MIT"
] | null | null | null | estampes/tools/__init__.py | jbloino/estampes | 39d248cba3541f684c4684661a1664fc42157bdb | [
"MIT"
] | 2 | 2021-06-07T12:51:35.000Z | 2021-07-14T17:14:24.000Z | estampes/tools/__init__.py | jbloino/estampes | 39d248cba3541f684c4684661a1664fc42157bdb | [
"MIT"
] | null | null | null | """Module with different tools to facilitate data operations in ESTAMPES
This module provides different simple tools, by submodules to facilitate
common operations like conversions, transformations...
The tools are gathered by submodules based on their intended use:
`atom`
Atom-specific tools.
`comp`
Related to basic software/computing operations (ex: mem. units).
`math`
Simple math functions.
`vib`
Related to vibrations, for instance eigenvectors orientations.
See submodules for details.
"""
| 30.470588 | 72 | 0.774131 | 66 | 518 | 6.075758 | 0.712121 | 0.05985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164093 | 518 | 16 | 73 | 32.375 | 0.926097 | 0.984556 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5e110e3b6bd0ef87b51a828ee6028ea35db35d83 | 468 | py | Python | visualDet3D/networks/detectors/yolomono3d_core.py | jovialio/visualDet3D | 5a54e4547576e4eb23d768746b2bdb2b4f25753f | [
"Apache-2.0"
] | 250 | 2021-02-02T02:23:18.000Z | 2022-03-31T11:00:10.000Z | visualDet3D/networks/detectors/yolomono3d_core.py | jovialio/visualDet3D | 5a54e4547576e4eb23d768746b2bdb2b4f25753f | [
"Apache-2.0"
] | 56 | 2021-02-03T08:32:11.000Z | 2022-03-30T01:41:46.000Z | visualDet3D/networks/detectors/yolomono3d_core.py | jovialio/visualDet3D | 5a54e4547576e4eb23d768746b2bdb2b4f25753f | [
"Apache-2.0"
] | 45 | 2021-02-25T02:01:15.000Z | 2022-03-03T10:32:58.000Z | import numpy as np
import torch.nn as nn
import torch
import math
import time
from visualDet3D.networks.backbones import resnet
class YoloMono3DCore(nn.Module):
    """Feature-extraction core for YoloMono3D: wraps a ResNet backbone."""

    def __init__(self, backbone_arguments=dict()):
        super(YoloMono3DCore, self).__init__()
        # Build the backbone from the supplied keyword configuration.
        self.backbone = resnet(**backbone_arguments)

    def forward(self, x):
        # x is a dict carrying the input tensor under the 'image' key; the
        # backbone returns a sequence of feature maps - keep only the first.
        features = self.backbone(x['image'])
        return features[0]
| 24.631579 | 51 | 0.675214 | 58 | 468 | 5.275862 | 0.534483 | 0.117647 | 0.104575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013774 | 0.224359 | 468 | 18 | 52 | 26 | 0.829201 | 0.07906 | 0 | 0 | 0 | 0 | 0.011765 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.428571 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
5e260803c3c6580bff0c9245566a0a51c9544936 | 1,302 | py | Python | LeetCode/Problems/667_beautiful_arrangement.py | hooyao/LeetCode-Py3 | f462b66ae849f4332a4b150f206dd49c7519e83b | [
"MIT"
] | null | null | null | LeetCode/Problems/667_beautiful_arrangement.py | hooyao/LeetCode-Py3 | f462b66ae849f4332a4b150f206dd49c7519e83b | [
"MIT"
] | null | null | null | LeetCode/Problems/667_beautiful_arrangement.py | hooyao/LeetCode-Py3 | f462b66ae849f4332a4b150f206dd49c7519e83b | [
"MIT"
] | null | null | null | import sys
class Solution:
    def constructArray(self, n, k):
        """
        :type n: int
        :type k: int
        :rtype: List[int]
        """
        # LeetCode 667 (Beautiful Arrangement II): build a permutation of
        # 1..n whose adjacent absolute differences take exactly k distinct
        # values. Strategy: zigzag between the low end (i) and high end (j)
        # to spend the first k-1 large differences, then finish with a
        # monotone run so every remaining difference is 1.
        result = []
        k_left = k  # distinct differences still to be produced
        i = 1       # next unused value from the low end
        j = n       # next unused value from the high end
        idx = 0     # position being filled
        while i <= j:
            if k == 1:
                # k == 1: plain ascending order gives a single difference of 1.
                result.append(i)
                i += 1
            else:
                if idx == 0:
                    # Always start from the smallest value.
                    result.append(i)
                    i += 1
                else:
                    if k_left > 1:
                        # Zigzag phase: alternate high/low picks; each pick
                        # creates one new, strictly smaller difference.
                        if idx % 2 == 1:
                            result.append(j)
                            j -= 1
                            k_left -= 1
                        else:
                            result.append(i)
                            i += 1
                            k_left -= 1
                    else:
                        # Monotone phase: continue in whichever direction the
                        # zigzag ended so all further differences equal 1.
                        last = result[-1]
                        if last > j:
                            result.append(j)
                            j -= 1
                        else:
                            result.append(i)
                            i += 1
            idx += 1
        return result
def main(*args):
    """Demo driver: print one arrangement for n=6 with k=3 distinct gaps."""
    demo = Solution()
    print(demo.constructArray(6, 3))
if __name__ == '__main__':
    # Extra CLI arguments are accepted but currently unused by main().
    main(*sys.argv[1:])
| 24.111111 | 44 | 0.284946 | 113 | 1,302 | 3.176991 | 0.300885 | 0.200557 | 0.144847 | 0.155989 | 0.350975 | 0.228412 | 0.228412 | 0 | 0 | 0 | 0 | 0.041237 | 0.627496 | 1,302 | 53 | 45 | 24.566038 | 0.698969 | 0.033026 | 0 | 0.463415 | 0 | 0 | 0.006563 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0 | 0.02439 | 0 | 0.121951 | 0.02439 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
eaae66bedd922a4e82d4370e92ec3578413a7199 | 478 | py | Python | querybook/server/lib/engine_status_checker/null_checker.py | darapuk/querybook | 5ff8ab9d093505b1c7474f3ad5dbc48da8ae7bed | [
"Apache-2.0"
] | null | null | null | querybook/server/lib/engine_status_checker/null_checker.py | darapuk/querybook | 5ff8ab9d093505b1c7474f3ad5dbc48da8ae7bed | [
"Apache-2.0"
] | null | null | null | querybook/server/lib/engine_status_checker/null_checker.py | darapuk/querybook | 5ff8ab9d093505b1c7474f3ad5dbc48da8ae7bed | [
"Apache-2.0"
] | null | null | null | from .base_checker import BaseEngineStatusChecker, EngineStatus
from const.query_execution import QueryEngineStatus
class NullChecker(BaseEngineStatusChecker):
    """Fallback status checker that performs no real check: it always
    reports the engine as unavailable with no messages."""

    @classmethod
    def NAME(cls) -> str:
        # Identifier under which this checker is registered / looked up.
        return "NullChecker"

    @classmethod
    def check(cls, engine_id: int, uid: int) -> EngineStatus:
        """Perform the check
        Override if you want custom results
        """
        return {"status": QueryEngineStatus.UNAVAILABLE.value, "messages": []}
| 29.875 | 78 | 0.698745 | 47 | 478 | 7.042553 | 0.744681 | 0.084592 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.211297 | 478 | 15 | 79 | 31.866667 | 0.877984 | 0.110879 | 0 | 0.222222 | 0 | 0 | 0.062189 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.222222 | 0.111111 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
eabed358854f4e132cc5dcdc9eb98532c1a9b517 | 528 | py | Python | tests/example_wf/test_handler_python_version.py | MacHu-GWU/afwf-project | 09ba89da9ec03c125031bb7cc01bcdf0042d3408 | [
"MIT"
] | null | null | null | tests/example_wf/test_handler_python_version.py | MacHu-GWU/afwf-project | 09ba89da9ec03c125031bb7cc01bcdf0042d3408 | [
"MIT"
] | null | null | null | tests/example_wf/test_handler_python_version.py | MacHu-GWU/afwf-project | 09ba89da9ec03c125031bb7cc01bcdf0042d3408 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import pytest
from afwf.example_wf.handlers.python_version import handler
class TestHandler:
    """Tests for the python_version workflow handler."""

    def test_get_all_python_version(self):
        # The handler should list at least one known release from each of
        # these Python minor lines.
        versions = handler.get_all_python_version()
        assert "2.7.8" in versions
        assert "3.6.15" in versions
        assert "3.7.13" in versions
        assert "3.8.13" in versions
        assert "3.9.9" in versions
if __name__ == "__main__":
import os
basename = os.path.basename(__file__)
pytest.main([basename, "-s", "--tb=native"])
| 24 | 59 | 0.649621 | 75 | 528 | 4.293333 | 0.533333 | 0.15528 | 0.198758 | 0.21118 | 0.118012 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046569 | 0.227273 | 528 | 21 | 60 | 25.142857 | 0.742647 | 0.039773 | 0 | 0 | 0 | 0 | 0.09703 | 0 | 0 | 0 | 0 | 0 | 0.357143 | 1 | 0.071429 | false | 0 | 0.214286 | 0 | 0.357143 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
eabf94fdd828f484110a645f4effc81e590226b8 | 6,050 | py | Python | nebula2/data/ResultSet.py | knwng/nebula-python | b37877736eb8cd740110797d7923442e74435b32 | [
"Apache-2.0"
] | null | null | null | nebula2/data/ResultSet.py | knwng/nebula-python | b37877736eb8cd740110797d7923442e74435b32 | [
"Apache-2.0"
] | null | null | null | nebula2/data/ResultSet.py | knwng/nebula-python | b37877736eb8cd740110797d7923442e74435b32 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# --coding:utf-8--
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
from nebula2.common.ttypes import ErrorCode
from nebula2.data.DataObject import DataSetWrapper
class ResultSet(object):
    """Wrapper around a query response from the graph service.

    Exposes status, timing and (when present) the decoded result data set.
    """
    def __init__(self, resp, all_latency, decode_type='utf-8', timezone_offset: int = 0):
        """Constructor method
        :param resp: the response from the service
        :param all_latency: the execution time from the time the client
        sends the request to the time the response is received and the data is decoded
        :param decode_type: the decode_type for decoding binary values; it should come
        from the service, but the service does not return it yet, so utf-8 is the default
        :param timezone_offset: the timezone offset for calculating local time,
        it comes from the service
        """
        self._decode_type = decode_type
        self._resp = resp
        self._data_set_wrapper = None
        self._all_latency = all_latency
        self._timezone_offset = timezone_offset
        if self._resp.data is not None:
            self._data_set_wrapper = DataSetWrapper(data_set=resp.data,
                                                    decode_type=self._decode_type,
                                                    timezone_offset=self._timezone_offset)
    def is_succeeded(self):
        """check whether the response from the service succeeded
        :return: bool
        """
        return self._resp.error_code == ErrorCode.SUCCEEDED
    def error_code(self):
        """if the response failed, the service returns the error code
        :return: nebula2.common.ttypes.ErrorCode
        """
        return self._resp.error_code
    def space_name(self):
        """get the space for the current operation
        :return: space name or ''
        """
        if self._resp.space_name is None:
            return ''
        return self._resp.space_name.decode(self._decode_type)
    def error_msg(self):
        """if the response failed, the service returns the error message
        :return: error message or ''
        """
        if self._resp.error_msg is None:
            return ''
        return self._resp.error_msg.decode(self._decode_type)
    def comment(self):
        """the comment returned by the service, it may be a warning message
        :return: comment message or ''
        """
        # Bug fix: this previously checked `error_msg` instead of `comment`,
        # so a comment was dropped (or decode crashed) whenever the two differed.
        if self._resp.comment is None:
            return ''
        return self._resp.comment.decode(self._decode_type)
    def latency(self):
        """the time the server spent processing the request, in microseconds
        :return: latency
        """
        return self._resp.latency_in_us
    def whole_latency(self):
        """the execution time from the time the client
        sends the request to the time the response is received and the data is decoded
        :return: all_latency
        """
        return self._all_latency
    def plan_desc(self):
        """get the plan description, for when the user wants the execution plan
        via `PROFILE` or `EXPLAIN`
        :return: plan desc
        """
        return self._resp.plan_desc
    def is_empty(self):
        """whether the data of the response is empty
        :return: True or False
        """
        return self._data_set_wrapper is None or self._data_set_wrapper.get_row_size() == 0
    def keys(self):
        """get the column names
        :return: column names ([] when there is no data)
        """
        if self._data_set_wrapper is None:
            return []
        return self._data_set_wrapper.get_col_names()
    def row_size(self):
        """get the row size
        :return: row size (0 when there is no data)
        """
        if self._data_set_wrapper is None:
            return 0
        return len(self._data_set_wrapper.get_rows())
    def col_size(self):
        """get the column size
        :return: column size (0 when there is no data)
        """
        if self._data_set_wrapper is None:
            return 0
        return len(self._data_set_wrapper.get_col_names())
    def get_row_types(self):
        """get the value type of the row
        :return: list<int>
          ttypes.Value.__EMPTY__ = 0
          ttypes.Value.NVAL = 1
          ttypes.Value.BVAL = 2
          ttypes.Value.IVAL = 3
          ttypes.Value.FVAL = 4
          ttypes.Value.SVAL = 5
          ttypes.Value.DVAL = 6
          ttypes.Value.TVAL = 7
          ttypes.Value.DTVAL = 8
          ttypes.Value.VVAL = 9
          ttypes.Value.EVAL = 10
          ttypes.Value.PVAL = 11
          ttypes.Value.LVAL = 12
          ttypes.Value.MVAL = 13
          ttypes.Value.UVAL = 14
          ttypes.Value.GVAL = 15
          ttypes.Value.GGVAL = 16
        """
        if self._data_set_wrapper is None:
            return []
        return self._data_set_wrapper.get_row_types()
    def row_values(self, row_index):
        """get row values
        :param row_index: the index of the row
        :return: list<ValueWrapper>
        """
        if self._data_set_wrapper is None:
            return []
        return self._data_set_wrapper.row_values(row_index)
    def column_values(self, key):
        """get column values
        :param key: the specified column name
        :return: list<ValueWrapper>
        """
        if self._data_set_wrapper is None:
            return []
        return self._data_set_wrapper.column_values(key)
    def rows(self):
        """get all rows
        :return: list<Row>
        """
        if self._data_set_wrapper is None:
            return []
        return self._data_set_wrapper.get_rows()
    def __iter__(self):
        """iterate over the rows
        :return: iter (empty when there is no data)
        """
        # Robustness fix: iterating an empty result used to raise TypeError
        # (iter(None)); now it yields nothing, consistent with rows()/keys().
        if self._data_set_wrapper is None:
            return iter([])
        return iter(self._data_set_wrapper)
    def __repr__(self):
        return "{}({})".format(self.__class__.__name__, self._data_set_wrapper)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
| 29.086538 | 95 | 0.600496 | 767 | 6,050 | 4.494133 | 0.230769 | 0.042646 | 0.063824 | 0.104439 | 0.343777 | 0.284305 | 0.261967 | 0.257905 | 0.251523 | 0.251523 | 0 | 0.010152 | 0.316198 | 6,050 | 207 | 96 | 29.227053 | 0.82306 | 0.370083 | 0 | 0.253333 | 0 | 0 | 0.003439 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.28 | false | 0 | 0.026667 | 0.026667 | 0.733333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
eaca64e35014042a9f7e5fd5db5aa4ea086467ed | 3,031 | py | Python | tests/managers/test_custom_managers.py | brosner/django-sqlalchemy | 3f427d726acac2469ba2b39c59210bde574591e5 | [
"BSD-3-Clause"
] | 4 | 2015-11-14T13:54:56.000Z | 2020-06-05T15:04:48.000Z | tests/managers/test_custom_managers.py | brosner/django-sqlalchemy | 3f427d726acac2469ba2b39c59210bde574591e5 | [
"BSD-3-Clause"
] | null | null | null | tests/managers/test_custom_managers.py | brosner/django-sqlalchemy | 3f427d726acac2469ba2b39c59210bde574591e5 | [
"BSD-3-Clause"
] | 3 | 2015-11-25T10:56:46.000Z | 2021-07-13T19:19:42.000Z | from django_sqlalchemy.test import *
from django_sqlalchemy.backend import metadata
from django.db import models
# An example of a custom manager called "objects".
class PersonManager(models.Manager):
    """Custom default manager exposing a convenience filter."""
    def get_fun_people(self):
        """Return a queryset of people flagged as fun."""
        fun_people = self.filter(fun=True)
        return fun_people
class Person(models.Model):
    """A person with a custom default manager (``objects``)."""
    # Field declaration order matters to Django's model metaclass.
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    fun = models.BooleanField()
    # The first declared manager becomes the model's default manager.
    objects = PersonManager()
    def __unicode__(self):
        # Human-readable representation used by the Django shell/admin.
        return u"%s %s" % (self.first_name, self.last_name)
# An example of a custom manager that sets get_query_set().
class PublishedBookManager(models.Manager):
    """Manager whose base queryset is restricted to published books."""
    def get_query_set(self):
        """Return only rows where ``is_published`` is True."""
        base_qs = super(PublishedBookManager, self).get_query_set()
        return base_qs.filter(is_published=True)
class Book(models.Model):
    """A book whose only declared manager filters to published rows."""
    title = models.CharField(max_length=50)
    author = models.CharField(max_length=30)
    is_published = models.BooleanField()
    # Declaring only a custom manager means Book gets no ``objects`` manager.
    published_objects = PublishedBookManager()
    authors = models.ManyToManyField(Person, related_name='books')
    def __unicode__(self):
        return self.title
# An example of providing multiple custom managers.
class FastCarManager(models.Manager):
    """Manager whose base queryset keeps only cars faster than 150 mph."""
    def get_query_set(self):
        """Return only rows whose ``top_speed`` exceeds 150."""
        base_qs = super(FastCarManager, self).get_query_set()
        return base_qs.filter(top_speed__gt=150)
class Car(models.Model):
    """A car exposing two managers: ``cars`` (all rows) and ``fast_cars``."""
    name = models.CharField(max_length=10)
    mileage = models.IntegerField()
    top_speed = models.IntegerField(help_text="In miles per hour.")
    # The first declared manager (``cars``) becomes _default_manager.
    cars = models.Manager()
    fast_cars = FastCarManager()
    def __unicode__(self):
        return self.name
# Create the tables for the models declared above.
metadata.create_all()
# Fixture rows exercised by TestCustomManager below.
p1 = Person(first_name='Bugs', last_name='Bunny', fun=True)
p1.save()
p2 = Person(first_name='Droopy', last_name='Dog', fun=False)
p2.save()
b1 = Book(title='How to program', author='Rodney Dangerfield', is_published=True)
b1.save()
b2 = Book(title='How to be smart', author='Albert Einstein', is_published=False)
b2.save()
c1 = Car(name='Corvette', mileage=21, top_speed=180)
c1.save()
c2 = Car(name='Neon', mileage=31, top_speed=100)
c2.save()
class TestCustomManager(object):
    """Behavioral tests for the custom-manager models above."""
    def setup(self):
        # No per-test setup needed; fixtures are created at module import.
        pass
    def test_should_see_custom_manager_method(self):
        assert_list_same([p1], Person.objects.get_fun_people())
    def test_should_extend_default_manager(self):
        # Related managers inherit from the model's custom manager class.
        assert_instance_of(PublishedBookManager, p2.books)
    @raises(AttributeError)
    def test_should_not_contain_a_default_manager_if_custom_provided(self):
        # Book declares only published_objects, so Book.objects must not exist.
        Book.objects
    def test_should_extend_default_manager_with_related_manager(self):
        assert_instance_of(PersonManager, b2.authors)
    def test_should_only_return_published_objects(self):
        assert_list_same([b1], Book.published_objects.all())
    def test_should_order_by(self):
        assert_list_same([c1, c2], Car.cars.order_by('name'))
        assert_list_same([c1], Car.fast_cars.all())
    def test_should_return_default_manager_as_first_manager_in_class(self):
        assert_list_same([c1, c2], Car._default_manager.order_by('name'))
| 31.905263 | 90 | 0.734411 | 414 | 3,031 | 5.091787 | 0.311594 | 0.023245 | 0.043169 | 0.056926 | 0.237666 | 0.147059 | 0.063567 | 0.039848 | 0.039848 | 0 | 0 | 0.017167 | 0.154404 | 3,031 | 94 | 91 | 32.244681 | 0.805306 | 0.051468 | 0 | 0.074627 | 0 | 0 | 0.044584 | 0 | 0 | 0 | 0 | 0 | 0.104478 | 1 | 0.208955 | false | 0.014925 | 0.044776 | 0.089552 | 0.656716 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
eace1818ea0e4bfbaf3d5f8eb0e2206012c172db | 258 | py | Python | main.py | Aulig/easee-kostal-control | b14c66e90f1bfaaebfc5db124f861cdc69c6d166 | [
"MIT"
] | null | null | null | main.py | Aulig/easee-kostal-control | b14c66e90f1bfaaebfc5db124f861cdc69c6d166 | [
"MIT"
] | null | null | null | main.py | Aulig/easee-kostal-control | b14c66e90f1bfaaebfc5db124f861cdc69c6d166 | [
"MIT"
] | null | null | null | import asyncio
import platform
import telegram_helper
if platform.system() == "Windows":
    # otherwise some "RuntimeError: Event loop is closed" occur with the
    # default proactor event loop on Windows
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
# Start the bot (blocking call).
telegram_helper.run_bot()
| 23.454545 | 75 | 0.794574 | 30 | 258 | 6.633333 | 0.7 | 0.140704 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.124031 | 258 | 10 | 76 | 25.8 | 0.880531 | 0.22093 | 0 | 0 | 0 | 0 | 0.035176 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
eaceda788ea8f95ceee341a5354da8160c0cf03f | 3,930 | py | Python | mara_page/acl.py | mara/mara-page | 1756505593ed77587312ab06c325265fc87403ab | [
"MIT"
] | 4 | 2018-02-07T13:21:16.000Z | 2020-02-27T09:38:44.000Z | mara_page/acl.py | mara/mara-page | 1756505593ed77587312ab06c325265fc87403ab | [
"MIT"
] | 4 | 2017-05-19T10:22:58.000Z | 2020-10-14T15:29:56.000Z | mara_page/acl.py | mara/mara-page | 1756505593ed77587312ab06c325265fc87403ab | [
"MIT"
] | 2 | 2017-07-27T16:14:57.000Z | 2020-02-06T12:55:55.000Z | """A minimal API for defining ACL protected resources and querying permissions"""
import functools
import typing
import flask
class AclResource:
    """A node in a tree of ACL-protected resources."""
    def __init__(self,
                 name: str,
                 rank: int = 1,
                 children: typing.Optional[typing.List['AclResource']] = None):
        """
        A resource that is protected by an acl

        Args:
            name: An identifier of the resource, unique among siblings
            rank: Sort order among sibling entries
            children: Optional list of sub-resources to attach
        """
        self.name = name
        self.rank = rank
        self.children = []
        self.parent = None
        for sub_resource in (children or []):
            self.add_child(sub_resource)

    def add_child(self, child: 'AclResource'):
        """Attach a sub-resource and register this node as its parent."""
        self.children.append(child)
        child.set_parent(self)

    def set_parent(self, parent: 'AclResource'):
        """Record the parent of this resource."""
        self.parent = parent

    def __repr__(self):
        return '<AclResource "{}">'.format(self.name)
def current_user_has_permission(resource: AclResource) -> bool:
    """
    Whether the current user is allowed to access a specific resource.
    """
    permissions = current_user_has_permissions([resource])
    _, allowed = permissions[0]
    return allowed
def current_user_has_permissions(resources: typing.List['AclResource']) -> typing.List[list]:
    """
    Determines whether the currently logged in user has permissions for a
    list of resources. Implement actual behavior by patching the function.

    Args:
        resources: The resources to check

    Returns:
        A list of ``[resource, allowed]`` pairs, one per input resource.
    """
    # Fix: the original annotations used bare list literals ([AclResource]),
    # which are not valid PEP 484 type hints. Default (unpatched) policy
    # allows everything.
    return [[resource, True] for resource in resources]
def current_user_email():
    """
    Return the email address of the currently logged in user.
    Implement actual behavior by patching the function.
    """
    default_email = 'guest@localhost'
    return default_email
def user_has_permission(email: str, resource: AclResource) -> bool:
    """
    Whether a user is allowed to access a specific resource.
    """
    results = user_has_permissions(email, [resource])
    return results[0][1]
def user_has_permissions(email: str, resources: typing.List['AclResource']) -> typing.List[list]:
    """
    Determines whether a user has permissions for a list of resources.
    Implement actual behavior by patching the function.

    Args:
        email: The email address identifying the user
        resources: The resources to check

    Returns:
        A list of ``[resource, allowed]`` pairs, one per input resource.
    """
    # Bug fix: this previously returned a lazy ``map`` object, which broke the
    # ``[0][1]`` indexing in user_has_permission. Return a real list, matching
    # current_user_has_permissions. Also replaces the invalid list-literal
    # annotations with PEP 484 hints.
    return [[resource, True] for resource in resources]
def require_permission(resource: AclResource, do_abort: bool = True,
                       abort_message="Sorry, but you don't have enough permissions to view this page.",
                       return_message="Not enough permissions.") \
        -> typing.Callable:
    """
    A decorator for protecting a resource by acl

    Args:
        resource: The resource for which user permissions are required
        do_abort: When true, a http exception is raised if the the user does not have permission
                  (useful when protecting whole pages).
                  When false, a small error message is returned (useful for ajax handlers).
        abort_message: The text of the "permission denied" http exception
        return_message: The text of the returned "permission denied" inline content

    Returns: The wrapped function
    """

    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            # Allowed: run the protected function unchanged.
            if current_user_has_permission(resource):
                return f(*args, **kwargs)
            # Denied: either abort the request or return an inline notice.
            if do_abort:
                flask.abort(403, abort_message)
            return inline_permission_denied_message(return_message)

        return wrapper

    return decorator
def inline_permission_denied_message(message="Not enough permissions"):
    """Render a small inline html snippet signalling insufficient permissions"""
    lock_icon = '<span class="fa fa-lock"> </span>'
    return f'<span style="font-style:italic;color:#aaa">{lock_icon} {message}</span>'
| 32.75 | 107 | 0.642494 | 468 | 3,930 | 5.290598 | 0.316239 | 0.025444 | 0.043619 | 0.030291 | 0.260097 | 0.201131 | 0.159128 | 0.12601 | 0.105816 | 0.070275 | 0 | 0.002799 | 0.272774 | 3,930 | 119 | 108 | 33.02521 | 0.863541 | 0.368448 | 0 | 0.04 | 0 | 0.02 | 0.116792 | 0.01851 | 0 | 0 | 0 | 0 | 0 | 1 | 0.26 | false | 0 | 0.06 | 0.02 | 0.56 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
eaeb0aafeda9486cc364b3613b1b2607d5038f02 | 180 | py | Python | lachesis/__init__.py | j1fig/lachesis | ac732db825e5fd411da2ea280401a099e93ce2ce | [
"MIT"
] | null | null | null | lachesis/__init__.py | j1fig/lachesis | ac732db825e5fd411da2ea280401a099e93ce2ce | [
"MIT"
] | null | null | null | lachesis/__init__.py | j1fig/lachesis | ac732db825e5fd411da2ea280401a099e93ce2ce | [
"MIT"
] | null | null | null | from flask import Flask
# Flask application instance shared by the package.
app = Flask(__name__)
# Imported after app creation so the view module can register routes on it.
import lachesis.views
# NOTE(review): init_db/clear_db look unused here — presumably re-exported
# for convenience; confirm against callers before removing.
from lachesis.models.database import init_db, clear_db
# Run the development server only when executed directly.
if __name__ == '__main__':
    app.run(debug=True)
| 13.846154 | 54 | 0.75 | 26 | 180 | 4.653846 | 0.653846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.161111 | 180 | 12 | 55 | 15 | 0.801325 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
eaffe84271bc67de67a5a8f9f5947f5334e627da | 3,067 | py | Python | rarity_lookup.py | Demonliquid/ero-drop-bot | 6b8358cc5a9bca57e7e3f7c99b80be162cfe85b3 | [
"MIT"
] | null | null | null | rarity_lookup.py | Demonliquid/ero-drop-bot | 6b8358cc5a9bca57e7e3f7c99b80be162cfe85b3 | [
"MIT"
] | null | null | null | rarity_lookup.py | Demonliquid/ero-drop-bot | 6b8358cc5a9bca57e7e3f7c99b80be162cfe85b3 | [
"MIT"
] | null | null | null | # %%
import pandas as pd
import requests
from collections import Counter
# %%
def collection1_add_ids(coll1):
    """Populate a ``coll1_ids`` column with a comma-separated list of the
    collection-1 NFT ids currently held by each address in *coll1*.

    Addresses whose lookup fails for any reason are skipped (best effort),
    leaving their cell untouched.
    """
    for address in coll1["address"]:
        try:
            held_ids = coll1_add_columns(address)
            coll1.loc[coll1["address"] == address, "coll1_ids"] = ",".join(held_ids)
        except Exception:
            # Fix: the bare ``except:`` also swallowed KeyboardInterrupt and
            # SystemExit; keep the best-effort behavior for real errors only.
            continue
def collection2_add_ids(coll2):
    """Populate a ``coll2_ids`` column with a comma-separated list of the
    collection-2 NFT ids currently held by each address in *coll2*.

    Addresses whose lookup fails for any reason are skipped (best effort),
    leaving their cell untouched.
    """
    for address in coll2["address"]:
        try:
            held_ids = coll2_add_columns(address)
            coll2.loc[coll2["address"] == address, "coll2_ids"] = ",".join(held_ids)
        except Exception:
            # Fix: the bare ``except:`` also swallowed KeyboardInterrupt and
            # SystemExit; keep the best-effort behavior for real errors only.
            continue
def coll1_add_columns(holder_address):
    """Return the collection-1 NFT ids currently held by *holder_address*."""
    transfer_history = get_coll1_NFT_balance(holder_address)
    return get_coll1_nft_ids_holders(transfer_history)
def coll2_add_columns(holder_address):
    """Return the collection-2 NFT ids currently held by *holder_address*."""
    transfer_history = get_coll2_NFT_balance(holder_address)
    return get_coll2_nft_ids_holders(transfer_history)
def get_coll1_NFT_balance(holder_address):
    """Fetch the NFT transfer history of *holder_address* for the
    collection-1 contract from the BscScan API.
    """
    # NOTE(review): API key is hard-coded in source — move to config/env.
    api_key = "T6R9ZY3EPH1D7MRH2X3V8VW6WRK6F4YH2F"
    contract = "0xb0dfe92fc62b48611716dc9fa0d3a2187c1c854d"
    url = (
        "https://api.bscscan.com/api?module=account&action=tokennfttx"
        f"&contractaddress={contract}&address={holder_address}"
        "&page=1&offset=100&startblock=0&endblock=999999999&sort=asc"
        f"&apikey={api_key}"
    )
    payload = requests.get(url).json()
    return payload["result"]
def get_coll2_NFT_balance(holder_address):
    """Fetch the NFT transfer history of *holder_address* for the
    collection-2 contract from the BscScan API.
    """
    # NOTE(review): API key is hard-coded in source — move to config/env.
    api_key = "T6R9ZY3EPH1D7MRH2X3V8VW6WRK6F4YH2F"
    contract = "0x4502eb52a587d58b339576dbe3b09f96aeb54dd1"
    url = (
        "https://api.bscscan.com/api?module=account&action=tokennfttx"
        f"&contractaddress={contract}&address={holder_address}"
        "&page=1&offset=100&startblock=0&endblock=999999999&sort=asc"
        f"&apikey={api_key}"
    )
    payload = requests.get(url).json()
    return payload["result"]
def get_coll1_nft_ids_holders(account_nft_balance):
    """Return the collection-1 ("ECLC") token ids currently held, derived
    from a transfer history: an odd number of transfers touching a token id
    means the wallet currently holds it.
    """
    transfer_ids = [tx["tokenID"] for tx in account_nft_balance
                    if tx["tokenSymbol"] == "ECLC"]
    id_counts = Counter(transfer_ids)
    return [token_id for token_id, count in id_counts.items() if count % 2 != 0]
def get_coll2_nft_ids_holders(account_nft_balance):
    """Return the collection-2 ("ECLC2") token ids currently held, derived
    from a transfer history: an odd number of transfers touching a token id
    means the wallet currently holds it.
    """
    transfer_ids = [tx["tokenID"] for tx in account_nft_balance
                    if tx["tokenSymbol"] == "ECLC2"]
    id_counts = Counter(transfer_ids)
    return [token_id for token_id, count in id_counts.items() if count % 2 != 0]
# %%
# Load the holder snapshot exported for each collection.
coll1 = pd.read_csv(r"collection1_holders.csv")
coll2 = pd.read_csv(r"collection2_holders.csv")
# %%
# Enrich each snapshot with the NFT ids currently held per address.
collection1_add_ids(coll1)
collection2_add_ids(coll2)
# %%
# Display the enriched frame (notebook-style cell output).
coll1
# %%
| 31.618557 | 215 | 0.727747 | 395 | 3,067 | 5.281013 | 0.202532 | 0.057526 | 0.081496 | 0.095877 | 0.739693 | 0.739693 | 0.701342 | 0.698466 | 0.579099 | 0.534995 | 0 | 0.057087 | 0.171829 | 3,067 | 96 | 216 | 31.947917 | 0.764173 | 0.005543 | 0 | 0.515152 | 0 | 0.030303 | 0.232337 | 0.065067 | 0 | 0 | 0.027604 | 0 | 0 | 1 | 0.121212 | false | 0.030303 | 0.045455 | 0 | 0.257576 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d82f9146a4d781d967c08f8b48f65831718062ad | 235 | py | Python | Numeric Patterns/numericpattern6.py | Daksh777/Python-PatternHouse | ab801631c2e1f5ed3cc12a26c959d41a5e51273d | [
"MIT"
] | 8 | 2021-03-20T11:26:35.000Z | 2022-01-05T02:39:15.000Z | Numeric Patterns/numericpattern6.py | Daksh777/Python-PatternHouse | ab801631c2e1f5ed3cc12a26c959d41a5e51273d | [
"MIT"
] | 851 | 2021-04-02T09:08:15.000Z | 2022-01-12T11:26:57.000Z | Numeric Patterns/numericpattern6.py | Daksh777/Python-PatternHouse | ab801631c2e1f5ed3cc12a26c959d41a5e51273d | [
"MIT"
] | 15 | 2021-04-13T06:10:17.000Z | 2022-01-08T05:07:21.000Z | # Numeric Pattern 6
"""
1 3 5 7 9
11 13 15 17 19
21 23 25 27 29
31 33 35 37 39
41 43 45 47 49
"""
# Print the first 25 odd numbers, five per row.
odd_numbers = list(range(1, 50, 2))
for start in range(0, len(odd_numbers), 5):
    chunk = odd_numbers[start:start + 5]
    print(" ".join(str(value) for value in chunk))
| 11.75 | 39 | 0.523404 | 54 | 235 | 2.259259 | 0.796296 | 0.032787 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.335443 | 0.32766 | 235 | 19 | 40 | 12.368421 | 0.436709 | 0.374468 | 0 | 0 | 0 | 0 | 0.007299 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d8321955bdfea3d14eb1f1c1b9dd4346c9f70150 | 1,137 | py | Python | custom_components/example_load_platform/sensor.py | satori99/example-custom-config | 157f46ff2aca689902d4199a2d0ed23055cec4b6 | [
"Apache-2.0"
] | null | null | null | custom_components/example_load_platform/sensor.py | satori99/example-custom-config | 157f46ff2aca689902d4199a2d0ed23055cec4b6 | [
"Apache-2.0"
] | null | null | null | custom_components/example_load_platform/sensor.py | satori99/example-custom-config | 157f46ff2aca689902d4199a2d0ed23055cec4b6 | [
"Apache-2.0"
] | null | null | null | """Platform for sensor integration."""
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from . import DOMAIN
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the sensor platform.

    This platform is registered only through discovery; a direct YAML setup
    (discovery_info is None) is ignored.
    """
    if discovery_info is not None:
        add_entities([ExampleSensor()])
class ExampleSensor(Entity):
    """Representation of a sensor backed by shared integration data."""
    def __init__(self):
        """Initialize the sensor."""
        # None until the first update() call supplies a reading.
        self._state = None
    @property
    def name(self):
        """Return the name of the sensor."""
        return 'Example Temperature'
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS
    def update(self):
        """Fetch new state data for the sensor.
        This is the only method that should fetch new data for Home Assistant.
        """
        # Reads the value the integration stored under hass.data[DOMAIN].
        self._state = self.hass.data[DOMAIN]['temperature']
| 25.840909 | 78 | 0.650836 | 140 | 1,137 | 5.171429 | 0.421429 | 0.062155 | 0.053867 | 0.046961 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.24978 | 1,137 | 43 | 79 | 26.44186 | 0.848769 | 0.326297 | 0 | 0.142857 | 0 | 0 | 0.042373 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.142857 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
d8377ddf37c94bba76e26d842cc8f2b8bd0c5ac2 | 8,210 | py | Python | tccli/services/__init__.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | tccli/services/__init__.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | tccli/services/__init__.py | ws0416/tencentcloud-cli | 0a90fa77c8be1efa30b196a3eeb31b8be1f6a325 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import imp
def action_caller(service):
    """Locate the sub-module ``tccli.services.<service>`` in this package
    directory and return its ``action_caller`` attribute.

    :param service: short service code, e.g. ``"cvm"``
    :raises ImportError: if no matching module exists in this directory
    """
    cur_path = os.path.dirname(os.path.abspath(__file__))
    fp, pathname, desc = imp.find_module(service, [cur_path])
    try:
        mod = imp.load_module("tccli.services." + service, fp, pathname, desc)
    finally:
        # Fix: imp.find_module leaves the file open and makes the caller
        # responsible for closing it; the original leaked this handle.
        if fp:
            fp.close()
    return mod.action_caller
# Mapping from short service code to the API version strings ("YYYY-MM-DD")
# supported by this CLI for that service (newest first where several exist).
SERVICE_VERSIONS = {
    "aa": [
        "2020-02-24"
    ],
    "aai": [
        "2018-05-22"
    ],
    "af": [
        "2020-02-26"
    ],
    "afc": [
        "2020-02-26"
    ],
    "ame": [
        "2019-09-16"
    ],
    "ams": [
        "2020-12-29",
        "2020-06-08"
    ],
    "antiddos": [
        "2020-03-09"
    ],
    "apcas": [
        "2020-11-27"
    ],
    "ape": [
        "2020-05-13"
    ],
    "api": [
        "2020-11-06"
    ],
    "apigateway": [
        "2018-08-08"
    ],
    "asr": [
        "2019-06-14"
    ],
    "asw": [
        "2020-07-22"
    ],
    "autoscaling": [
        "2018-04-19"
    ],
    "ba": [
        "2020-07-20"
    ],
    "batch": [
        "2017-03-12"
    ],
    "bda": [
        "2020-03-24"
    ],
    "billing": [
        "2018-07-09"
    ],
    "bizlive": [
        "2019-03-13"
    ],
    "bm": [
        "2018-04-23"
    ],
    "bmeip": [
        "2018-06-25"
    ],
    "bmlb": [
        "2018-06-25"
    ],
    "bmvpc": [
        "2018-06-25"
    ],
    "bri": [
        "2019-03-28"
    ],
    "btoe": [
        "2021-05-14",
        "2021-03-03"
    ],
    "cam": [
        "2019-01-16"
    ],
    "captcha": [
        "2019-07-22"
    ],
    "cat": [
        "2018-04-09"
    ],
    "cbs": [
        "2017-03-12"
    ],
    "ccc": [
        "2020-02-10"
    ],
    "cdb": [
        "2017-03-20"
    ],
    "cdn": [
        "2018-06-06"
    ],
    "cds": [
        "2018-04-20"
    ],
    "cfs": [
        "2019-07-19"
    ],
    "cfw": [
        "2019-09-04"
    ],
    "chdfs": [
        "2020-11-12",
        "2019-07-18"
    ],
    "cii": [
        "2021-04-08",
        "2020-12-10"
    ],
    "cim": [
        "2019-03-18"
    ],
    "cis": [
        "2018-04-08"
    ],
    "ckafka": [
        "2019-08-19"
    ],
    "clb": [
        "2018-03-17"
    ],
    "cloudaudit": [
        "2019-03-19"
    ],
    "cloudhsm": [
        "2019-11-12"
    ],
    "cls": [
        "2020-10-16"
    ],
    "cme": [
        "2019-10-29"
    ],
    "cmq": [
        "2019-03-04"
    ],
    "cms": [
        "2019-03-21"
    ],
    "cpdp": [
        "2019-08-20"
    ],
    "cr": [
        "2018-03-21"
    ],
    "cvm": [
        "2017-03-12"
    ],
    "cwp": [
        "2018-02-28"
    ],
    "cws": [
        "2018-03-12"
    ],
    "cynosdb": [
        "2019-01-07"
    ],
    "dayu": [
        "2018-07-09"
    ],
    "dbbrain": [
        "2021-05-27",
        "2019-10-16"
    ],
    "dc": [
        "2018-04-10"
    ],
    "dcdb": [
        "2018-04-11"
    ],
    "dlc": [
        "2021-01-25"
    ],
    "dnspod": [
        "2021-03-23"
    ],
    "domain": [
        "2018-08-08"
    ],
    "drm": [
        "2018-11-15"
    ],
    "ds": [
        "2018-05-23"
    ],
    "dtf": [
        "2020-05-06"
    ],
    "dts": [
        "2018-03-30"
    ],
    "ecc": [
        "2018-12-13"
    ],
    "ecdn": [
        "2019-10-12"
    ],
    "ecm": [
        "2019-07-19"
    ],
    "eiam": [
        "2021-04-20"
    ],
    "eis": [
        "2021-06-01",
        "2020-07-15"
    ],
    "emr": [
        "2019-01-03"
    ],
    "es": [
        "2018-04-16"
    ],
    "facefusion": [
        "2018-12-01"
    ],
    "faceid": [
        "2018-03-01"
    ],
    "fmu": [
        "2019-12-13"
    ],
    "ft": [
        "2020-03-04"
    ],
    "gaap": [
        "2018-05-29"
    ],
    "gme": [
        "2018-07-11"
    ],
    "gpm": [
        "2020-08-20"
    ],
    "gs": [
        "2019-11-18"
    ],
    "gse": [
        "2019-11-12"
    ],
    "habo": [
        "2018-12-03"
    ],
    "hcm": [
        "2018-11-06"
    ],
    "iai": [
        "2020-03-03",
        "2018-03-01"
    ],
    "ic": [
        "2019-03-07"
    ],
    "ie": [
        "2020-03-04"
    ],
    "iir": [
        "2020-04-17"
    ],
    "ims": [
        "2020-12-29",
        "2020-07-13"
    ],
    "iot": [
        "2018-01-23"
    ],
    "iotcloud": [
        "2021-04-08",
        "2018-06-14"
    ],
    "iotexplorer": [
        "2019-04-23"
    ],
    "iottid": [
        "2019-04-11"
    ],
    "iotvideo": [
        "2020-12-15",
        "2019-11-26"
    ],
    "iotvideoindustry": [
        "2020-12-01"
    ],
    "kms": [
        "2019-01-18"
    ],
    "lighthouse": [
        "2020-03-24"
    ],
    "live": [
        "2018-08-01"
    ],
    "lp": [
        "2020-02-24"
    ],
    "mariadb": [
        "2017-03-12"
    ],
    "market": [
        "2019-10-10"
    ],
    "memcached": [
        "2019-03-18"
    ],
    "mgobe": [
        "2020-10-14",
        "2019-09-29"
    ],
    "mna": [
        "2021-01-19"
    ],
    "mongodb": [
        "2019-07-25",
        "2018-04-08"
    ],
    "monitor": [
        "2018-07-24"
    ],
    "mps": [
        "2019-06-12"
    ],
    "mrs": [
        "2020-09-10"
    ],
    "ms": [
        "2018-04-08"
    ],
    "msp": [
        "2018-03-19"
    ],
    "mvj": [
        "2019-09-26"
    ],
    "nlp": [
        "2019-04-08"
    ],
    "npp": [
        "2019-08-23"
    ],
    "oceanus": [
        "2019-04-22"
    ],
    "ocr": [
        "2018-11-19"
    ],
    "organization": [
        "2018-12-25"
    ],
    "partners": [
        "2018-03-21"
    ],
    "postgres": [
        "2017-03-12"
    ],
    "privatedns": [
        "2020-10-28"
    ],
    "rce": [
        "2020-11-03"
    ],
    "redis": [
        "2018-04-12"
    ],
    "rkp": [
        "2019-12-09"
    ],
    "rp": [
        "2020-02-24"
    ],
    "rum": [
        "2021-06-22"
    ],
    "scf": [
        "2018-04-16"
    ],
    "ses": [
        "2020-10-02"
    ],
    "smpn": [
        "2019-08-22"
    ],
    "sms": [
        "2021-01-11",
        "2019-07-11"
    ],
    "soe": [
        "2018-07-24"
    ],
    "solar": [
        "2018-10-11"
    ],
    "sqlserver": [
        "2018-03-28"
    ],
    "ssa": [
        "2018-06-08"
    ],
    "ssl": [
        "2019-12-05"
    ],
    "sslpod": [
        "2019-06-05"
    ],
    "ssm": [
        "2019-09-23"
    ],
    "sts": [
        "2018-08-13"
    ],
    "taf": [
        "2020-02-10"
    ],
    "tag": [
        "2018-08-13"
    ],
    "tat": [
        "2020-10-28"
    ],
    "tav": [
        "2019-01-18"
    ],
    "tbaas": [
        "2018-04-16"
    ],
    "tbm": [
        "2018-01-29"
    ],
    "tbp": [
        "2019-06-27",
        "2019-03-11"
    ],
    "tcaplusdb": [
        "2019-08-23"
    ],
    "tcb": [
        "2018-06-08"
    ],
    "tcex": [
        "2020-07-27"
    ],
    "tci": [
        "2019-03-18"
    ],
    "tcr": [
        "2019-09-24"
    ],
    "tdmq": [
        "2020-02-17"
    ],
    "tem": [
        "2021-07-01",
        "2020-12-21"
    ],
    "tia": [
        "2018-02-26"
    ],
    "tic": [
        "2020-11-17"
    ],
    "ticm": [
        "2018-11-27"
    ],
    "tics": [
        "2018-11-15"
    ],
    "tiems": [
        "2019-04-16"
    ],
    "tiia": [
        "2019-05-29"
    ],
    "tione": [
        "2019-10-22"
    ],
    "tiw": [
        "2019-09-19"
    ],
    "tke": [
        "2018-05-25"
    ],
    "tkgdq": [
        "2019-04-11"
    ],
    "tms": [
        "2020-12-29",
        "2020-07-13"
    ],
    "tmt": [
        "2018-03-21"
    ],
    "trtc": [
        "2019-07-22"
    ],
    "tse": [
        "2020-12-07"
    ],
    "tsf": [
        "2018-03-26"
    ],
    "tsw": [
        "2021-04-12",
        "2020-09-24"
    ],
    "tts": [
        "2019-08-23"
    ],
    "ump": [
        "2020-09-18"
    ],
    "vm": [
        "2020-12-29",
        "2020-07-09"
    ],
    "vms": [
        "2020-09-02"
    ],
    "vod": [
        "2018-07-17"
    ],
    "vpc": [
        "2017-03-12"
    ],
    "waf": [
        "2018-01-25"
    ],
    "wav": [
        "2021-01-29"
    ],
    "wss": [
        "2018-04-26"
    ],
    "youmall": [
        "2018-02-28"
    ],
    "yunjing": [
        "2018-02-28"
    ],
    "yunsou": [
        "2019-11-15",
        "2018-05-04"
    ],
    "zj": [
        "2019-01-21"
    ]
}
dc256420565ff5137b20577f0eef257e2096fca7 | 3,096 | py | Python | glue/python/HudiInitLoadNYTaxiData.py | bhavintandel/aws-glue-hudi-cdc | fe82b4ec9f50a8a2c2bd3243a4fc48dfb4f9194b | [
"MIT"
] | null | null | null | glue/python/HudiInitLoadNYTaxiData.py | bhavintandel/aws-glue-hudi-cdc | fe82b4ec9f50a8a2c2bd3243a4fc48dfb4f9194b | [
"MIT"
] | null | null | null | glue/python/HudiInitLoadNYTaxiData.py | bhavintandel/aws-glue-hudi-cdc | fe82b4ec9f50a8a2c2bd3243a4fc48dfb4f9194b | [
"MIT"
] | null | null | null | import sys
from pyspark.context import SparkContext
from pyspark.sql.session import SparkSession
from awsglue.context import GlueContext
from awsglue.job import Job
from awsglue.dynamicframe import DynamicFrame
from pyspark.sql.functions import col, to_timestamp, monotonically_increasing_id, to_date, when
from awsglue.utils import getResolvedOptions
from pyspark.sql.types import *
from datetime import datetime
# Glue job arguments: job name plus the S3 bucket for the curated Hudi table.
args = getResolvedOptions(sys.argv, ['JOB_NAME','curated_bucket'])
# Kryo serialization and disabling metastore parquet conversion are required
# for the Hudi Spark integration.
spark = SparkSession.builder.config('spark.serializer','org.apache.spark.serializer.KryoSerializer').config('spark.sql.hive.convertMetastoreParquet','false').getOrCreate()
sc = spark.sparkContext
glueContext = GlueContext(sc)
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
# Explicit schema for the NYC yellow-taxi CSVs; pk_col is filled in below.
yellow_tripdata_schema = StructType([ StructField("vendorid",IntegerType(),True), StructField("tpep_pickup_datetime",TimestampType(),True), StructField("tpep_dropoff_datetime",TimestampType(),True), StructField("passenger_count", IntegerType(), True), StructField("trip_distance", DoubleType(), True), StructField("ratecodeid", IntegerType(), True), StructField("store_and_fwd_flag", StringType(), True), StructField("pulocationid", IntegerType(), True), StructField("dolocationid", IntegerType(), True), StructField("payment_type", IntegerType(), True), StructField("fare_amount", DoubleType(), True), StructField("extra", DoubleType(), True), StructField("mta_tax", DoubleType(), True), StructField("tip_amount", DoubleType(), True), StructField("tolls_amount", DoubleType(), True), StructField("improvement_surcharge", DoubleType(), True), StructField("total_amount", DoubleType(), True), StructField("congestion_surcharge", DoubleType(), True), StructField("pk_col", LongType(), True)])
# Read 2018-2020 trip files and synthesize a surrogate record key (pk_col).
# NOTE(review): monotonically_increasing_id is only unique per run, not stable
# across re-runs — acceptable for a one-time initial bulk load.
inputDf = spark.read.schema(yellow_tripdata_schema).option("header", "true").csv("s3://nyc-tlc/trip data/yellow_tripdata_{2018,2019,2020}*.csv").withColumn("pk_col",monotonically_increasing_id() + 1)
# Hudi write settings: record key = pk_col, precombine on pickup time, and
# Hive sync into default.ny_yellow_trip_data under the curated bucket.
commonConfig = {'className' : 'org.apache.hudi', 'hoodie.datasource.hive_sync.use_jdbc':'false', 'hoodie.datasource.write.precombine.field': 'tpep_pickup_datetime', 'hoodie.datasource.write.recordkey.field': 'pk_col', 'hoodie.table.name': 'ny_yellow_trip_data', 'hoodie.consistency.check.enabled': 'true', 'hoodie.datasource.hive_sync.database': 'default', 'hoodie.datasource.hive_sync.table': 'ny_yellow_trip_data', 'hoodie.datasource.hive_sync.enable': 'true', 'path': 's3://' + args['curated_bucket'] + '/default/ny_yellow_trip_data'}
# The table is unpartitioned, so use the non-partitioned extractor/generator.
unpartitionDataConfig = {'hoodie.datasource.hive_sync.partition_extractor_class': 'org.apache.hudi.hive.NonPartitionedExtractor', 'hoodie.datasource.write.keygenerator.class': 'org.apache.hudi.keygen.NonpartitionedKeyGenerator'}
# bulk_insert is the Hudi operation intended for initial loads.
initLoadConfig = {'hoodie.bulkinsert.shuffle.parallelism': 68, 'hoodie.datasource.write.operation': 'bulk_insert'}
combinedConf = {**commonConfig, **unpartitionDataConfig, **initLoadConfig}
# Write the frame through the Hudi marketplace connector.
glueContext.write_dynamic_frame.from_options(frame = DynamicFrame.fromDF(inputDf, glueContext, "inputDf"), connection_type = "marketplace.spark", connection_options = combinedConf)
| 110.571429 | 990 | 0.791667 | 352 | 3,096 | 6.789773 | 0.403409 | 0.112971 | 0.094142 | 0.050209 | 0.01841 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005828 | 0.057817 | 3,096 | 27 | 991 | 114.666667 | 0.813507 | 0 | 0 | 0 | 0 | 0 | 0.37823 | 0.226098 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.043478 | 0.434783 | 0 | 0.434783 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
dc2a7e5a9f05bb3e39315ef03bc85b79b991924e | 1,792 | py | Python | 6.0001/ps0/helloworld.py | ramanakshay/MITOCW | 4b6e30c2304e9a25586e12eaa105d09635c4476e | [
"CC-BY-4.0",
"MIT"
] | 3 | 2021-04-26T09:15:04.000Z | 2022-02-23T10:28:35.000Z | 6.0001/ps0/helloworld.py | ramanakshay/MITOCW | 4b6e30c2304e9a25586e12eaa105d09635c4476e | [
"CC-BY-4.0",
"MIT"
] | null | null | null | 6.0001/ps0/helloworld.py | ramanakshay/MITOCW | 4b6e30c2304e9a25586e12eaa105d09635c4476e | [
"CC-BY-4.0",
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Jul 20 09:42:33 2019
@author: aksha
"""
#printing examples
#print("Hello World")
#
#print("The Lion King")
#print ("was released in 1996")
#print("by Disney.")
#
#print("4*8 is", 4*8)
#print("2**2**2 is", 2**2**2)
#print("1+2+3+4+5 is", 1+2+3+4+5)
#input and variables
#print("Halt!")
#user_input = input("Who goes there? ")
#print("You may pass,", user_input)
#
#a=123.4
#b1 = 'spam'
#first_name = 'bill'
#b = 432
#c = a+b
#print('a+b =', c)
#print('first name is', first_name)
#print('Emails have a lot of', b1)
#assignment
#a = 1
#print(a)
#a = a + 1
#print(a)
#a = a*3
#print(a)
#number = float(input("Type in a number: "))
#integer = int(input("Type in an integer: "))
#text = input("Type in a string: ")
#print("Number is", number)
#print("number is a", type(number))
#print("number*2 =", number*2)
#print('integer is', integer)
#print('it is a', type(integer))
#print('integer*2 =', integer*2)
#print(text)
#print(type(text))
#print(text*5)
#F to C
#fahr_temp = float(input("Fahrenheit Temperature: "))
#celc_temp = (fahr_temp - 32)*(5/9)
#print("Celsius Temperature: ", celc_temp)
#math
#weightkg = float(input('Enter weight in kilograms: '))
#stonemass = round(weightkg*2.2/14)
#print('you weigh', stonemass,'stones. ')
#variables
#variablesareboxes = 1
#print(variablesareboxes)
#variablesareboxes = 288
#print(variablesareboxes)
#red = 10
#blue = 8
#print(red, blue)
#red = blue
#print(red, blue)
#yellow = red + blue
#print(yellow)
#red = red+3
#print(yellow)
#print(red)
#stringsandvariables
#question = "What did you have for breakfast?"
#print(question)
#answer = input()
#print("nice. you had " + answer) #concatenation
| 20.134831 | 56 | 0.614397 | 268 | 1,792 | 4.078358 | 0.391791 | 0.009149 | 0.030192 | 0.007319 | 0.026532 | 0.017383 | 0 | 0 | 0 | 0 | 0 | 0.048176 | 0.189174 | 1,792 | 88 | 57 | 20.363636 | 0.704061 | 0.853237 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
dc3d3954443643a675f92d6039955453e5e5708f | 356 | py | Python | strongr/secretsdomain/handler/listsecretshandler.py | bigr-erasmusmc/StrongR | 48573e170771a251f629f2d13dba7173f010a38c | [
"Apache-2.0"
] | null | null | null | strongr/secretsdomain/handler/listsecretshandler.py | bigr-erasmusmc/StrongR | 48573e170771a251f629f2d13dba7173f010a38c | [
"Apache-2.0"
] | null | null | null | strongr/secretsdomain/handler/listsecretshandler.py | bigr-erasmusmc/StrongR | 48573e170771a251f629f2d13dba7173f010a38c | [
"Apache-2.0"
] | null | null | null | import strongr.core.gateways
from strongr.secretsdomain.model import Secret
import itertools
class ListSecretsHandler():
    """Query handler that lists the keys of all stored secrets."""

    def __call__(self, query):
        """Return every secret key as a flat list, sorted ascending.

        ``query`` carries no parameters used by this handler; it is accepted
        only to satisfy the handler-call convention.
        """
        db_session = strongr.core.gateways.Gateways.sqlalchemy_session()
        rows = db_session.query(Secret.key).order_by(Secret.key).all()
        # Each row is a one-element tuple (key,); flatten into a plain list.
        keys = []
        for row in rows:
            keys.extend(row)
        return keys
| 25.428571 | 69 | 0.747191 | 42 | 356 | 6.166667 | 0.619048 | 0.084942 | 0.146718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151685 | 356 | 13 | 70 | 27.384615 | 0.857616 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.375 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
dc4715c494e22dfca736cca8075c6e5f3be6f505 | 1,364 | py | Python | src/oaipmh/tests/createdata_deleted_records.py | unt-libraries/pyoai | a8160d73ff3b5de20c9a5b358f9c555316245523 | [
"BSD-3-Clause"
] | 58 | 2015-02-06T17:54:33.000Z | 2022-02-03T11:30:08.000Z | src/oaipmh/tests/createdata_deleted_records.py | unt-libraries/pyoai | a8160d73ff3b5de20c9a5b358f9c555316245523 | [
"BSD-3-Clause"
] | 40 | 2015-01-15T15:20:36.000Z | 2022-03-09T09:12:09.000Z | src/oaipmh/tests/createdata_deleted_records.py | unt-libraries/pyoai | a8160d73ff3b5de20c9a5b358f9c555316245523 | [
"BSD-3-Clause"
] | 45 | 2015-02-08T14:14:54.000Z | 2022-02-03T16:15:33.000Z | from fakeserver import FakeCreaterServerProxy
# tied to the server at EUR..
# Smoke-test script (Python 2 print-statement syntax) exercising the fake
# OAI-PMH proxy: fetch a deleted record, then a normal record, then iterate
# ListRecords, and finally persist the captured responses via server.save().
server = FakeCreaterServerProxy(
    'http://dspace.ubib.eur.nl/oai/',
    '/home/eric/CVS_checkouts/oai/tests/fake2')
#deleted record
print "GetRecord"
header, metadata, about = server.getRecord(
    metadataPrefix='oai_dc', identifier='hdl:1765/1160')
print "identifier:", header.identifier()
print "datestamp:", header.datestamp()
print "setSpec:", header.setSpec()
print "isDeleted:", header.isDeleted()
print
#normal record
print "GetRecord"
header, metadata, about = server.getRecord(
    metadataPrefix='oai_dc', identifier='hdl:1765/1162')
print "identifier:", header.identifier()
print "datestamp:", header.datestamp()
print "setSpec:", header.setSpec()
print "isDeleted:", header.isDeleted()
print
print "ListRecords"
# NOTE(review): `datetime` is used below but no import of it is visible in
# this file, and the 01/02 literals are Python-2-only syntax -- confirm this
# script still runs as intended.
for header, metadata, about in server.listRecords(
    from_=datetime(2004, 01, 01), until=datetime(2004, 02, 01),
    metadataPrefix='oai_dc'):
    print "header"
    print "identifier:", header.identifier()
    print "datestamp:", header.datestamp()
    print "setSpec:", header.setSpec()
    print "isDeleted:", header.isDeleted()
    print "metadata"
    # metadata can be None (the None check below guards the field dump).
    if metadata is not None:
        for fieldname in metadata.getMap().keys():
            print "%s:" % fieldname, metadata.getField(fieldname)
    print "about"
    print about
    print
server.save()
| 29.652174 | 65 | 0.711144 | 157 | 1,364 | 6.146497 | 0.363057 | 0.043523 | 0.059067 | 0.096373 | 0.556477 | 0.556477 | 0.556477 | 0.556477 | 0.556477 | 0.556477 | 0 | 0.028351 | 0.146628 | 1,364 | 45 | 66 | 30.311111 | 0.800687 | 0.039589 | 0 | 0.513514 | 0 | 0 | 0.215926 | 0.030628 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027027 | null | null | 0.621622 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
dc475c60813e74e92a1e2d038ce43f9cf10f51dd | 464 | py | Python | Bioinformatics-Armory/src/Read_Quality_Distribution.py | crf1111/Bio-Informatics-Learning | 2ccc02d7a23584c12aee44c5620160cdcaf70bd4 | [
"MIT"
] | 1 | 2018-10-10T19:03:52.000Z | 2018-10-10T19:03:52.000Z | Bioinformatics-Armory/src/Read_Quality_Distribution.py | crf1111/Bio-Informatics-Learning | 2ccc02d7a23584c12aee44c5620160cdcaf70bd4 | [
"MIT"
] | null | null | null | Bioinformatics-Armory/src/Read_Quality_Distribution.py | crf1111/Bio-Informatics-Learning | 2ccc02d7a23584c12aee44c5620160cdcaf70bd4 | [
"MIT"
] | null | null | null | from Bio import SeqIO
# Quality threshold: reads whose mean Phred score falls below this are counted.
avg = 21
def main(fname):
    """Count FASTQ reads in *fname* whose mean Phred quality is below ``avg``.

    Prints the count (Python 2 ``print``-statement syntax).
    """
    records = list(SeqIO.parse(fname,'fastq'))
    num = 0
    for record in records:
        # Per-base Phred scores exposed by Biopython for FASTQ records.
        vs = record.letter_annotations['phred_quality']
        if sum(vs)/float(len(vs)) < avg:
            num += 1
    print num
# record = records[0]
# print record.letter_annotations['phred_quality']
if __name__ == '__main__':
    # Resolve the last command-line argument relative to the current
    # working directory and hand it to main().
    import os
    import sys
    fname = os.path.join(os.getcwd(), sys.argv[-1])
    main(fname)
dc49162e6a22e46043d1be7363c0e347e677374d | 2,856 | py | Python | synapse/util/debug.py | mweinelt/synapse | 42a9ea37e4c6ff9d91b530c40d366446b9fc2234 | [
"Apache-2.0"
] | 1 | 2017-02-03T18:58:29.000Z | 2017-02-03T18:58:29.000Z | synapse/util/debug.py | mweinelt/synapse | 42a9ea37e4c6ff9d91b530c40d366446b9fc2234 | [
"Apache-2.0"
] | null | null | null | synapse/util/debug.py | mweinelt/synapse | 42a9ea37e4c6ff9d91b530c40d366446b9fc2234 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer, reactor
from functools import wraps
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
def debug_deferreds():
    """Cause all deferreds to wait for a reactor tick before running their
    callbacks. This increases the chance of getting a stack trace out of
    a defer.inlineCallback since the code waiting on the deferred will get
    a chance to add an errback before the deferred runs."""
    # Helper method for retrieving and restoring the current logging context
    # around a callback.
    def with_logging_context(fn):
        # Capture the logging context active at wrap time...
        context = LoggingContext.current_context()
        def restore_context_callback(x):
            # ...and reinstate it around the actual invocation of fn.
            with PreserveLoggingContext(context):
                return fn(x)
        return restore_context_callback
    # We are going to modify the __init__ method of defer.Deferred so we
    # need to get a copy of the old method so we can still call it.
    old__init__ = defer.Deferred.__init__
    # We need to create a deferred to bounce the callbacks through the reactor
    # but we don't want to add a callback when we create that deferred so we
    # we create a new type of deferred that uses the old __init__ method.
    # This is safe as long as the old __init__ method doesn't invoke an
    # __init__ using super.
    class Bouncer(defer.Deferred):
        __init__ = old__init__
    # We'll add this as a callback to all Deferreds. Twisted will wait until
    # the bouncer deferred resolves before calling the callbacks of the
    # original deferred.
    def bounce_callback(x):
        bouncer = Bouncer()
        # callLater(0, ...) postpones resolution until the next reactor tick.
        reactor.callLater(0, with_logging_context(bouncer.callback), x)
        return bouncer
    # We'll add this as an errback to all Deferreds. Twisted will wait until
    # the bouncer deferred resolves before calling the errbacks of the
    # original deferred.
    def bounce_errback(x):
        bouncer = Bouncer()
        reactor.callLater(0, with_logging_context(bouncer.errback), x)
        return bouncer
    @wraps(old__init__)
    def new__init__(self, *args, **kargs):
        # Run the original constructor, then register the bouncing
        # callback/errback pair on the freshly created Deferred.
        old__init__(self, *args, **kargs)
        self.addCallbacks(bounce_callback, bounce_errback)
    # Monkey-patch: every Deferred created after this point bounces through
    # the reactor before its callbacks fire.
    defer.Deferred.__init__ = new__init__
| 39.666667 | 78 | 0.721989 | 407 | 2,856 | 4.899263 | 0.400491 | 0.03009 | 0.027081 | 0.016048 | 0.17653 | 0.16349 | 0.1334 | 0.1334 | 0.1334 | 0.1334 | 0 | 0.006726 | 0.219188 | 2,856 | 71 | 79 | 40.225352 | 0.887444 | 0.585084 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.115385 | 0 | 0.576923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
dc4edcd32b38488ad3df20ac8d2685f6b5dcb947 | 474 | py | Python | src/genie/libs/parser/junos/tests/ShowOspfInterfaceBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/junos/tests/ShowOspfInterfaceBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/junos/tests/ShowOspfInterfaceBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
    # Golden parse result for ShowOspfInterfaceBrief: one OSPF interface
    # (ge-0/0/2.0) in area 0.0.0.1 of the master routing instance.
    "instance": {
        "master": {
            "areas": {
                "0.0.0.1": {
                    "interfaces": {
                        "ge-0/0/2.0": {
                            # Per the key names: interface state, DR/BDR
                            # router IDs, and the neighbour count.
                            "state": "BDR",
                            "dr_id": "10.16.2.2",
                            "bdr_id": "10.64.4.4",
                            "nbrs_count": 5,
                        }
                    }
                }
            }
        }
    }
}
| 24.947368 | 50 | 0.202532 | 32 | 474 | 2.875 | 0.65625 | 0.065217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128049 | 0.654008 | 474 | 18 | 51 | 26.333333 | 0.432927 | 0 | 0 | 0 | 0 | 0 | 0.196203 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
dc5faf9776d8e685857ee4d038850a2f228ca354 | 2,959 | py | Python | raven/utils/serializer/base.py | alex/raven | 28227282891d7eb7e3600a458d0d8c7164f63dcb | [
"BSD-3-Clause"
] | 1 | 2015-11-08T12:45:40.000Z | 2015-11-08T12:45:40.000Z | raven/utils/serializer/base.py | alex/raven | 28227282891d7eb7e3600a458d0d8c7164f63dcb | [
"BSD-3-Clause"
] | null | null | null | raven/utils/serializer/base.py | alex/raven | 28227282891d7eb7e3600a458d0d8c7164f63dcb | [
"BSD-3-Clause"
] | null | null | null | """
raven.utils.serializer.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from raven.utils.encoding import to_string, to_unicode
from raven.utils.serializer.manager import register
from types import ClassType, TypeType
from uuid import UUID
def has_sentry_metadata(value):
    """Return True if *value* exposes a callable ``__sentry__`` hook.

    Objects can opt into custom serialization by defining a ``__sentry__``
    method; anything without one (or where looking it up fails) is treated
    as plain data.
    """
    try:
        return callable(value.__getattribute__('__sentry__'))
    except Exception:
        # A missing attribute (or an exotic __getattribute__) just means "no
        # metadata". The original bare `except:` would also have swallowed
        # KeyboardInterrupt/SystemExit, so catch Exception only.
        return False
class Serializer(object):
    """Base class for type-specific serializers.

    Subclasses declare the types they handle via ``types`` and override
    ``serialize`` to coerce values into JSON-safe representations.
    """
    types = ()

    def __init__(self, manager):
        self.manager = manager

    def can(self, value):
        """Return True when ``value`` is an instance of a handled type."""
        return isinstance(value, self.types)

    def serialize(self, value):
        """Coerce ``value`` into a JSON-safe type (identity by default)."""
        return value

    def recurse(self, value):
        """Hand ``value`` back to the manager so nested values are coerced."""
        return self.manager.transform(value)
@register
class IterableSerializer(Serializer):
    """Serialize builtin iterable containers, recursing into each member."""
    types = (tuple, list, set, frozenset)

    def serialize(self, value):
        try:
            # Rebuild the same container type around the transformed members.
            return type(value)(self.recurse(item) for item in value)
        except Exception:
            # Likely a namedtuple, whose constructor rejects a single
            # iterable argument; fall back to a list subclass that keeps
            # the original type name for display purposes.
            class _fallback(list):
                __name__ = type(value).__name__
            return _fallback(self.recurse(item) for item in value)
@register
class UUIDSerializer(Serializer):
    """Serialize uuid.UUID instances to their repr() string."""
    types = (UUID,)
    def serialize(self, value):
        # repr() yields e.g. "UUID('...')", keeping the type visible.
        return repr(value)
@register
class DictSerializer(Serializer):
    """Serialize dicts: keys coerced to strings, values recursed."""
    types = (dict,)

    def serialize(self, value):
        result = {}
        for key, val in value.iteritems():
            result[to_string(key)] = self.recurse(val)
        return result
@register
class UnicodeSerializer(Serializer):
    """Serialize unicode strings (Python 2) via to_unicode()."""
    types = (unicode,)
    def serialize(self, value):
        return to_unicode(value)
@register
class StringSerializer(Serializer):
    """Serialize byte strings (Python 2 str) via to_string()."""
    types = (str,)
    def serialize(self, value):
        return to_string(value)
@register
class TypeSerializer(Serializer):
    """Serialize objects that carry ``__sentry__`` metadata."""
    types = (ClassType, TypeType,)

    def can(self, value):
        # Only claim values the plain isinstance check does NOT match,
        # provided they expose a callable __sentry__ hook.
        if super(TypeSerializer, self).can(value):
            return False
        return has_sentry_metadata(value)

    def serialize(self, value):
        """Delegate to the object's own __sentry__() representation."""
        return self.recurse(value.__sentry__())
@register
class BooleanSerializer(Serializer):
    """Serialize booleans to plain bool."""
    types = (bool,)
    def serialize(self, value):
        return bool(value)
@register
class FloatSerializer(Serializer):
    """Serialize floats to plain float."""
    types = (float,)
    def serialize(self, value):
        return float(value)
@register
class IntegerSerializer(Serializer):
    """Serialize ints to plain int."""
    types = (int,)
    def serialize(self, value):
        return int(value)
@register
class LongSerializer(Serializer):
    """Serialize longs (Python 2) to plain long."""
    types = (long,)
    def serialize(self, value):
        return long(value)
| 21.757353 | 88 | 0.64515 | 336 | 2,959 | 5.577381 | 0.324405 | 0.067236 | 0.093917 | 0.123266 | 0.15635 | 0.055496 | 0.024546 | 0 | 0 | 0 | 0 | 0.00179 | 0.244677 | 2,959 | 135 | 89 | 21.918519 | 0.836689 | 0.158499 | 0 | 0.328947 | 0 | 0 | 0.004172 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0.052632 | 0.131579 | 0.789474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
dc66d84e1bf16da4de12b75a38558fd627c4ead0 | 192 | py | Python | PyCharm/Aulas/Aula10.py | gabrieldtc/CursoEmVideoPython | 01a2540bb3754c1182837cef52bbf59541e6e845 | [
"MIT"
] | null | null | null | PyCharm/Aulas/Aula10.py | gabrieldtc/CursoEmVideoPython | 01a2540bb3754c1182837cef52bbf59541e6e845 | [
"MIT"
] | null | null | null | PyCharm/Aulas/Aula10.py | gabrieldtc/CursoEmVideoPython | 01a2540bb3754c1182837cef52bbf59541e6e845 | [
"MIT"
] | null | null | null | nome = str(input('Qual é o seu nome? ')).strip().upper()
# Pick the compliment (or not) based on the uppercased name read above.
mensagem = 'Que lindo nome você tem!' if nome == 'GABRIEL' else 'Seu nome é tão normal!'
print(mensagem)
print('Bom dia, {}'.format(nome))
dc6c1da4d4280d16c5d6cf72dbcc85984b19b939 | 660 | py | Python | assets/views.py | continual-delivery/stratahq | 64d9f42fc4298f6d0854441f0e514ecb042bfd9d | [
"Unlicense"
] | 1 | 2018-12-05T00:13:49.000Z | 2018-12-05T00:13:49.000Z | assets/views.py | continual-delivery/stratahq | 64d9f42fc4298f6d0854441f0e514ecb042bfd9d | [
"Unlicense"
] | null | null | null | assets/views.py | continual-delivery/stratahq | 64d9f42fc4298f6d0854441f0e514ecb042bfd9d | [
"Unlicense"
] | null | null | null | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from management.models import Customer, Environment, CustomerEnvironment
from assets.models import TaskHistory, Server, Application
class IndexView(LoginRequiredMixin, generic.ListView):
    """Dashboard index listing the five most recently started tasks.

    Anonymous visitors are redirected to ``/login`` (with a ``next``
    parameter) by LoginRequiredMixin.
    """
    login_url = '/login'
    redirect_field_name = 'next'
    template_name = 'home/index.html'
    context_object_name = 'latest_task_list'
    def get_queryset(self):
        """Return the five most recently started TaskHistory records."""
        return TaskHistory.objects.order_by('-start_time')[:5]
dc6d99d2b7168731ff2c1981f4a5025f54380833 | 30,636 | py | Python | qccalculator/qcplots.py | bigbio/qccalculator | 40b8637caacac774b963f2d1f81f838880581a58 | [
"Apache-2.0"
] | 1 | 2022-01-11T19:40:44.000Z | 2022-01-11T19:40:44.000Z | qccalculator/qcplots.py | bigbio/qccalculator | 40b8637caacac774b963f2d1f81f838880581a58 | [
"Apache-2.0"
] | 1 | 2020-10-05T14:52:25.000Z | 2020-10-05T14:52:25.000Z | qccalculator/qcplots.py | bigbio/qccalculator | 40b8637caacac774b963f2d1f81f838880581a58 | [
"Apache-2.0"
] | 2 | 2020-06-01T22:00:39.000Z | 2021-04-24T08:57:21.000Z | # import math, datetime
# import tempfile
# import base64
# import re
# import numpy as np
# from enum import Enum
# import os
# from statistics import mean, median, stdev
# from typing import List, Dict, Set, Any, Optional, Callable
# import pyopenms as oms
# from collections import defaultdict
# import itertools
#
# class PlotType(Enum):
# PNG = 1
# SVG = 2
# PLOTLY = 3
#
# def handle_plot_format(pp, plot_type: PlotType, hosturl="http://localhost", port=5000):
# if plot_type == PlotType.PLOTLY:
# plotly = importr('plotly')
# ppp = plotly.ggplotly(pp)
# htmlwidgets = importr('htmlwidgets')
# serverstructure_library_destination = "lib"
# with tempfile.NamedTemporaryFile() as t:
# htmlwidgets.saveWidget(ppp, t.name, libdir="replaceme", selfcontained = False)
# # start stupid fix to get all the recent libs written in the flask lib directory
# htmlwidgets.saveWidget(ppp, 'bof', libdir=serverstructure_library_destination, selfcontained = False)
# os.remove('bof')
# # end stupid fix
# with open(t.name, "r") as f:
# s = f.read()
# s = s.replace("replaceme", "{h}{p}/{l}".format(h=hosturl,
# l=serverstructure_library_destination,
# p="" if port is None else ":"+str(port)))
# return s
# else:
# with tempfile.NamedTemporaryFile() as t:
# if plot_type == PlotType.SVG:
# grdevices = importr('grDevices')
# grdevices.svg(file=t.name)
# pp.plot()
# grdevices.dev_off()
#
# with open(t.name, "r") as f:
# s = f.read()
# else:
# grdevices = importr('grDevices')
# grdevices.png(file=t.name, width=512, height=512)
# pp.plot()
# grdevices.dev_off()
#
# with open(t.name, "rb") as fb:
# s = base64.b64encode(fb.read()).decode()
# return s
#
# def plot_TIC(tic_table, start_time, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'RT': robjects.POSIXct((tuple([start_time + datetime.timedelta(seconds=i) for i in tic_table.value['RT']]))),
# 'int': robjects.FloatVector(tuple(tic_table.value['int'])) }
# dataf = robjects.DataFrame(d)
# scales = importr('scales')
# c0 = robjects.r('c(0,0)')
#
# lim_maj=int(max(tic_table.value['RT'])//(60*30))
# lim_min=int(max(tic_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([start_time + datetime.timedelta(seconds=60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([start_time + datetime.timedelta(seconds=60*10* i) for i in range(0,lim_min+1)]))
#
# axislabels = robjects.StrVector(tuple([(datetime.datetime.fromtimestamp(60*30* i)).strftime("%H:%M") for i in range(0,lim_maj+1)]))
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_line() + \
# ggplot2.aes_string(x='RT', y='int') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = axislabels, expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Intensity", x="Time") + \
# ggplot2.ggtitle("TIC")
# #does not work: date_minor_breaks=scales.date_breaks("5 minutes")
# # scales.date_format("%H:%M")
#
# # ltb = robjects.r('theme(plot.margin = unit(c(.1,1,.1,.1), "cm"))')
# # pp = pp + ltb
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_SN(table, mslevel=2, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'SN': robjects.FloatVector(tuple(table.value['SN'])) }
# dataf = robjects.DataFrame(d)
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# m = median(table.value['SN'])
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(binwidth=.5, colour="black", fill="white") + \
# ggplot2.aes_string(x='SN', y='..density..') + \
# ggplot2.geom_density(alpha=.1, fill="green") + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='median(SN, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'median={}'".format(str(round(m,2))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(x="S/N") + \
# ggplot2.ggtitle("S/N distribution in MS{} spectra".format(str(mslevel)))
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_dppm(psm_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'deltaPPM': robjects.FloatVector(tuple(psm_table.value['delta_ppm'])) }
# dataf = robjects.DataFrame(d)
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# m = median(psm_table.value['delta_ppm'])
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(binwidth=.5, colour="black", fill="white") + \
# ggplot2.aes_string(x='deltaPPM', y='..density..') + \
# ggplot2.geom_density(alpha=.1, fill="green") + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='median(deltaPPM, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'median={}'".format(str(round(m,2))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(expand=c0, limit=c10) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(x=parse('paste(Delta, "ppm")'), y="Frequency density") + \
# ggplot2.ggtitle("Mass error distribution")
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_dppm_over_time(psm_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in psm_table.value['RT']]))),
# 'deltaPPM': robjects.FloatVector(tuple(psm_table.value['delta_ppm'])) }
# dataf = robjects.DataFrame(d)
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
# b=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10)]))
# lim_maj=int(max(psm_table.value['RT'])//(60*30))
# lim_min=int(max(psm_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10* i) for i in range(0,lim_min+1)]))
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_point(alpha=0.5) + \
# ggplot2.aes_string(x='RT',y='deltaPPM') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = scales.date_format("%H:%M"), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.stat_smooth(colour="red", method="loess", span=0.2) + \
# ggplot2.geom_hline(yintercept=0, colour="blue") + \
# ggplot2.geom_hline(yintercept=stdev(psm_table.value['delta_ppm']) ,linetype="dotted", colour="green") + \
# ggplot2.geom_hline(yintercept=-stdev(psm_table.value['delta_ppm']) ,linetype="dotted", colour="green") + \
# ggplot2.labs(y=parse('paste(Delta, "ppm")'), x="Time") + \
# ggplot2.ggtitle(parse('paste(Delta, "ppm over time")'))
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# # TODO needs cv refinement
# def plot_scorecorrelatenoise(psm_table, prec_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# scrtyp = list(filter(lambda x: x!="RT" and x!="c", psm_table.value.keys()))[0] # mah!
#
# npa_psm = np.array([psm_table.value['RT'],
# psm_table.value[scrtyp]])
# npa_psm = npa_psm[:,npa_psm[0].argsort()]
#
# npa_prc = np.array([prec_table.value['RT'],
# prec_table.value['SN']])
# npa_prc = npa_prc[:,npa_prc[0].argsort()]
#
# # which of these are identified???
# idinter = np.intersect1d(np.around(npa_prc[0], decimals=4),np.around(npa_psm[0], decimals=4), assume_unique=True, return_indices=True)
# rpy2.robjects.numpy2ri.activate()
# dataf = robjects.DataFrame( {'SN': npa_prc[:,idinter[1]][1] ,
# 'score': npa_psm[:,idinter[2]][1] })
#
# stats = importr('stats')
# base = importr('base')
# r2 = np.around(np.float(base.summary(stats.lm('SN~score^2', data=dataf))[7][0]), decimals=4) # 7 is r.squared, 8 is adj.r.squared - find out more with items()
# # TODO check
# c0 = robjects.r('c(0,0)')
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.aes_string(x='score', y='SN') + \
# ggplot2.geom_point() + \
# ggplot2.geom_smooth(method = "lm", formula = "y~poly(x,2)", se=False) + \
# ggplot2.scale_x_continuous(expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(x="score({})".format(scrtyp), y="S/N") + \
# ggplot2.ggtitle("ID score and noise correlation (quadratic R.squared={r2})".format(r2=r2))
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_lengths(seq_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# regex_mod = r'(\([^\(]*\))'
# regex_noaa = r'([^A-Za-z])'
# # TODO test this: '.(iTRAQ4plex)M(Oxidation)C(Carbamidomethyl)HNVNR'
# d= {'PeptideSequence': robjects.StrVector(tuple(seq_table.value['peptide'])),
# 'Length': robjects.IntVector(tuple([len(re.sub(regex_noaa, '', re.sub(regex_mod, '', x))) for x in seq_table.value['peptide']])) }
# dataf = robjects.DataFrame(d)
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# m = mean(d['Length'])
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(binwidth=1, origin = -0.5, colour="black", fill="white") + \
# ggplot2.aes_string(x='Length') + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='mean(Length, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'mean={}'".format(str(round(m,2))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count") + \
# ggplot2.ggtitle("Length distribution of identified peptide sequences")
# # parse('paste(Delta, "ppm")' does not work in ggplot2.ggtitle
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_topn(prec_table, surv_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# h = np.histogram(prec_table.value['RT'], bins=surv_table.value['RT']+[surv_table.value['RT'][-1]+surv_table.value['RT'][-2]])
#
# d= {'SN': robjects.FloatVector(tuple(surv_table.value['SN'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in surv_table.value['RT']]))),
# 'TopN': robjects.IntVector(tuple(h[0])) }
# dataf = robjects.DataFrame(d)
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# m = median(d['TopN'])
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(binwidth=1, origin = -0.5, colour="black", fill="white") + \
# ggplot2.aes_string(x='TopN') + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='median(TopN, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'median={}'".format(str(round(m))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(breaks=scales.pretty_breaks(), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count") + \
# ggplot2.ggtitle("TopN sampling")
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_topn_sn(prec_table, surv_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# h = np.histogram(prec_table.value['RT'], bins=surv_table.value['RT']+[surv_table.value['RT'][-1]+surv_table.value['RT'][-2]])
# qs = np.quantile(surv_table.value['SN'], [.25,.5,.75])
# d= {'SN': robjects.FloatVector(tuple(surv_table.value['SN'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in surv_table.value['RT']]))),
# 'TopN': robjects.IntVector(tuple(h[0])),
# 'SN.quartile': robjects.FactorVector((tuple( ['q1' if v < qs[0] else 'q2' if qs[0]<v<qs[1] else 'q3' if qs[1]<v<qs[2] else 'q4' for v in surv_table.value['SN']] )))}
# dataf = robjects.DataFrame(d)
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# m = median(d['TopN'])
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.aes_string(x='TopN', fill='SN.quartile') + \
# ggplot2.geom_histogram(binwidth=1, origin = -0.5) + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='median(TopN, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'median={}'".format(str(round(m))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(breaks=scales.pretty_breaks(), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count") + \
# ggplot2.ggtitle("TopN sampling")
# # parse('paste(Delta, "ppm")' does not work in ggplot2.ggtitle
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_topn_rt(prec_table, surv_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# h = np.histogram(prec_table.value['RT'], bins=surv_table.value['RT']+[surv_table.value['RT'][-1]+surv_table.value['RT'][-2]])
#
# d= {'SN': robjects.FloatVector(tuple(surv_table.value['SN'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in surv_table.value['RT']]))),
# 'TopN': robjects.IntVector(tuple(h[0])) }
# dataf = robjects.DataFrame(d)
#
# b=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10)]))
# lim_maj=int(max(surv_table.value['RT'])//(60*30))
# lim_min=int(max(surv_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10* i) for i in range(0,lim_min+1)]))
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_col(width = 1) + \
# ggplot2.aes_string(x='RT',y='TopN') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = scales.date_format("%H:%M"), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count", x="TopN sampling range") + \
# ggplot2.ggtitle("TopN utilisation")
#
#
# # TODO also plot real histogram with color of SN or target/decoy?
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_idmap(prec_table, psm_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d_psm= {'MZ': robjects.FloatVector(tuple(psm_table.value['MZ'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in psm_table.value['RT']]))),
# 'col': robjects.FactorVector(tuple(["identified"]*len(psm_table.value['MZ']))) }
# dataf_psm = robjects.DataFrame(d_psm)
#
# d_prc= {'MZ': robjects.FloatVector(tuple(prec_table.value['precursor_mz'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in prec_table.value['RT']]))),
# 'col': robjects.FactorVector(tuple(["recorded"]*len(prec_table.value['precursor_mz']))) }
# dataf_prc = robjects.DataFrame(d_prc)
#
# b=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10)]))
# lim_maj=int(max(prec_table.value['RT'])//(60*30))
# lim_min=int(max(prec_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10* i) for i in range(0,lim_min+1)]))
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
# pp = ggplot2.ggplot(dataf_prc) + \
# ggplot2.geom_point() + \
# ggplot2.aes_string(x='RT',y='MZ', colour='col') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = scales.date_format("%H:%M"), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="m/z", x="Time") + \
# ggplot2.geom_point(data=dataf_psm) + \
# ggplot2.ggtitle("ID map")
#
# ltb = robjects.r('theme(legend.title=element_blank())')
# pp = pp + ltb
#
# # TODO also plot real histogram with color of SN or target/decoy?
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_gravy(gravy_table, start_time, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'RT': robjects.POSIXct((tuple([start_time + datetime.timedelta(seconds=i) for i in gravy_table.value['RT']]))),
# 'gravy': robjects.FloatVector(tuple(gravy_table.value['gravy'])) }
# dataf = robjects.DataFrame(d)
# scales = importr('scales')
# c0 = robjects.r('c(0,0)')
#
# lim_maj=int(max(gravy_table.value['RT'])//(60*30))
# lim_min=int(max(gravy_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([start_time + datetime.timedelta(seconds=60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([start_time + datetime.timedelta(seconds=60*10* i) for i in range(0,lim_min+1)]))
#
# axislabels = robjects.StrVector(tuple([(datetime.datetime.fromtimestamp(60*30* i)).strftime("%H:%M") for i in range(0,lim_maj+1)]))
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_point(alpha=0.5) + \
# ggplot2.aes_string(x='RT', y='gravy') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = axislabels, expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="GRAVY", x="Time") + \
# ggplot2.ylim(robjects.r('c(-3,3)')) + \
# ggplot2.geom_line(y=0, colour="blue") + \
# ggplot2.stat_smooth(colour="red", method="loess", span=0.2) + \
# ggplot2.ggtitle("Hydropathy index (Kyte-Doolittle)")
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_charge(prec_table, psm_table=None, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d_prc= {'c': robjects.IntVector(tuple(prec_table.value['precursor_c'])),
# 'col': robjects.FactorVector(tuple(["recorded"]*len(prec_table.value['precursor_c']))) }
# dataf = robjects.DataFrame(d_prc)
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_bar() + \
# ggplot2.aes_string(x='c',fill='col') + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.scale_x_continuous(breaks=scales.pretty_breaks(), expand=c0) + \
# ggplot2.labs(x="Charge state", y="Count") + \
# ggplot2.ggtitle("Charge states") + \
# robjects.r('theme(legend.title=element_blank())')
#
# # TODO N/A handling???
#
# if psm_table:
# d_prc= {'c': robjects.IntVector(tuple(psm_table.value['c'])),
# 'col': robjects.FactorVector(tuple(["identified"]*len(psm_table.value['c']))) }
# dataf_id = robjects.DataFrame(d_prc)
# pp = pp + ggplot2.geom_bar(data=dataf_id)
#
# # TODO also plot real histogram with color of SN or target/decoy?
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_peaknum(table, mslevel=2, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'peakcount': robjects.IntVector(tuple(table.value['peakcount']))}
# dataf = robjects.DataFrame(d)
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# m = mean(d['peakcount'])
# binw = round(m/15)+1 #+1 avoids 0 binwidth
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(binwidth=binw, origin = -0.5, colour="black", fill="white") + \
# ggplot2.aes_string(x='peakcount') + \
# ggplot2.geom_vline(ggplot2.aes(xintercept='mean(peakcount, na.rm=TRUE)'), color="red", linetype="dashed", size=1) + \
# ggplot2.geom_text(ggplot2.aes_string(x=str(m), y=rinf, label="'mean={}'".format(str(round(m,2))) ), hjust="left", vjust="top") + \
# ggplot2.scale_x_continuous(expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count") + \
# ggplot2.ggtitle("Peak count distribution for MS{} spectra".format(str(mslevel)))
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# # TODO unfinished
# def plot_intensities(table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# grdevices = importr('grDevices')
# d= {'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in table.value['RT']]))),
# 'int': robjects.FloatVector(tuple(table.value['int']))}
# dataf = robjects.DataFrame(d)
# scales = importr('scales')
# cow = importr('cowplot')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# combiaxis1 = robjects.r('theme(axis.text.x = element_blank(), axis.ticks.x = element_blank(), plot.margin=unit(c(1,1,-0.1,1), "cm"))')
# combiaxis2 = robjects.r('theme(axis.text.y = element_blank(), axis.ticks = element_blank(), plot.margin=unit(c(-0.1,1,1,1), "cm"))')
#
# ht = ggplot2.ggplot(dataf) + \
# ggplot2.geom_histogram(bins=100) + \
# ggplot2.aes_string(x='int') + \
# combiaxis1 + \
# ggplot2.labs(y="Count", x= robjects.r('element_blank()') ) + \
# ggplot2.ggtitle("Intensity distribution")
#
# bx = ggplot2.ggplot(dataf) + \
# ggplot2.geom_boxplot() + \
# ggplot2.aes_string(x=1, y='int') + \
# ggplot2.coord_flip() + \
# combiaxis2 + \
# ggplot2.labs(x="", y="Intensities")
#
# # pracma = importr('pracma')
# # AUC <- trapz(QCTIC$MS.1000894_.sec.,QCTIC$MS.1000285)
# # auc = np.trapz(table.value['int'], x=table.value['RT'])
#
# # Qs <- quantile(QCTIC$MS.1000285,prob = c(0.25, 0.5, 0.75))
# # Qs <- data.frame(Qs)
# # qs = np.quantile(table.value['int'], [.25,.5,.75])
#
# pp = cow.plot_grid(ht,bx, ncol = 1, align = 'v', axis = 'l', rel_heights = robjects.r('c(1,.25)'))
#
# # grdevices.png(file="tests/grid_png_func.png", width=512, height=512)
# # c = cow.plot_grid(ht,bx, ncol = 1, align = 'v', axis = 'l', rel_heights = robjects.r('c(1,.25)'))
# # c.plot()
# # grdevices.dev_off()
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# # TODO unfinished
# def plot_events(tic_table, surv_table, prec_table, psm_table=None, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# datasources = [("Chromatogram", tic_table),("MS1",surv_table),("MS2", prec_table)]
# if psm_table:
# datasources.append(("Identifications",psm_table))
#
# dataf = robjects.DataFrame({})
# for annot, tab in datasources:
# if annot == "Chromatogram":
# #quartiles of total intensity recorded
# tic = np.sum(tab.value['int'])
# vec = (np.cumsum(tab.value['int'])/tic)
# qs = [.25,.5,.75]
# else:
# vec = [i[0]+1 for i in enumerate(tab.value['RT'])]
# qs = np.quantile(vec, [.25,.5,.75])
#
# d_c= {'Time': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in tab.value['RT']]))),
# 'Quartile': robjects.FactorVector((tuple( ['q1' if v < qs[0] else 'q2' if qs[0]<v<qs[1] else 'q3' if qs[1]<v<qs[2] else 'q4' for v in vec] )))
# }
# td = (DataFrame(d_c).
# group_by('Quartile').
# summarize(n='n()', mx='max(Time)', mi='min(Time)').
# mutate(group="'"+annot+"'", dt='hms::as.hms(mx-mi)') )
# dataf = dataf.rbind(td)
#
# dataf = ( DataFrame(dataf).arrange('Quartile') )
# scales = importr('scales')
# chron = importr('chron')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
#
# # TODO time axis fix and Hz annotations
# b=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10)]))
# lim_maj=int(max(tic_table.value['RT'])//(60*30))
# lim_min=int(max(tic_table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10* i) for i in range(0,lim_min+1)]))
#
# # TODO shitty time scale still does not work with stacked bars
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_bar(stat='identity', position = ggplot2.position_fill(reverse = True)) + \
# ggplot2.aes_string(x='group',y='mx',fill='Quartile') + \
# ggplot2.coord_flip() + \
# chron.scale_y_chron(format ="%H:%M:%S") + \
# ggplot2.labs(x="Events", y="Time") + \
# ggplot2.ggtitle("Quartiles of Chromatographic, MS1, MS2, and identification events over RT")
#
# dataf.to_csvfile('tests/events.csv')
# return handle_plot_format(pp, plot_type, hosturl, port)
# # ggplot2.scale_y_datetime(breaks=b_maj, minor_breaks=b_min,labels = scales.date_format("%H:%M"), expand=c0) + \
#
# def plot_targetdecoy(peptideids: List[oms.PeptideIdentification], plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# rid: Dict[int,Any] = defaultdict(lambda: defaultdict(int))
#
# for idspec in peptideids:
# for rank,psm in enumerate(idspec.getHits()):
# rid[rank+1][psm.getMetaValue('target_decoy').decode()] += 1
#
# # TODO beware no decoy in test data
#
# scales = importr('scales')
# c0 = robjects.r('c(0,0)')
#
# transposed_rid = list(zip(*[(list(itertools.chain(*sorted(x[1].items()))))+[x[0]] for x in rid.items()]))
# d= {'type': robjects.StrVector(transposed_rid[0]),
# 'count': robjects.IntVector(transposed_rid[1]) ,
# 'rank': robjects.IntVector(transposed_rid[2]) }
# dataf = robjects.DataFrame(d)
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.aes_string(fill='type', y='count', x='rank') + \
# ggplot2.geom_bar(position='dodge', stat='identity') + \
# ggplot2.scale_x_continuous(breaks=scales.pretty_breaks(max(transposed_rid[2]))) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Count", x='Rank') + \
# ggplot2.ggtitle("Target/Decoy")
#
#
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_coverage(coverage_table, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d = {'Accession': robjects.StrVector(tuple(coverage_table['Accession'])),
# 'Coverage': robjects.FloatVector(tuple(coverage_table['Coverage'])),
# 'TD': robjects.FactorVector(tuple(coverage_table['TD'])),
# 'Length': robjects.FloatVector(tuple(coverage_table['Length']))}
# dataf = robjects.DataFrame(d)
# c0 = robjects.r('c(0,0)')
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.aes_string(y='Coverage', x='Length', text='Accession', color='TD') + \
# ggplot2.geom_point() + \
# ggplot2.scale_x_continuous(expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.labs(y="Coverage", x='Protein length') + \
# ggplot2.ggtitle("Protein DB Coverage")
#
# # with open("tests/plotly_test/deleteme.html","w") as f:
# # f.write(qcplots.handle_plot_format(pp, qcplots.PlotType.PLOTLY,hosturl="",port=""))
# return handle_plot_format(pp, plot_type, hosturl, port)
#
# def plot_traptime(table, mslevel=2, plot_type=PlotType.PNG, hosturl="http://localhost", port=5000):
# d= {'ioninjectiontime': robjects.FloatVector(tuple(table.value['iontraptime'])),
# 'RT': robjects.POSIXct((tuple([datetime.datetime.fromtimestamp(i) for i in table.value['RT']]))) }
# dataf = robjects.DataFrame(d)
#
# m = mean(table.value['iontraptime'])
#
# b=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10)]))
# lim_maj=int(max(table.value['RT'])//(60*30))
# lim_min=int(max(table.value['RT'])//(60*10))+1
# b_maj=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*30* i) for i in range(0,lim_maj+1)]))
# b_min=robjects.POSIXct(tuple([datetime.datetime.fromtimestamp(60*10* i) for i in range(0,lim_min+1)]))
#
# scales = importr('scales')
# rinf = robjects.r('Inf')
# c0 = robjects.r('c(0,0)')
# c10 = robjects.r('c(-10,10)')
# lt = robjects.r("as.POSIXct('{}', tz = '')".format(str(datetime.datetime.fromtimestamp(table.value['RT'][ len(table.value['RT'])//2 ]))))
#
# pp = ggplot2.ggplot(dataf) + \
# ggplot2.geom_line() + \
# ggplot2.aes_string(x='RT',y='ioninjectiontime') + \
# ggplot2.scale_x_datetime(breaks=b_maj, minor_breaks=b_min, labels = scales.date_format("%H:%M"), expand=c0) + \
# ggplot2.scale_y_continuous(expand=c0) + \
# ggplot2.stat_smooth(colour="red", method="loess", span=0.2) + \
# ggplot2.labs(y="Ion injection time", x="Time") + \
# ggplot2.geom_text(ggplot2.aes_string(x=lt, y=rinf, label="'mean={}'".format(str(round(m,2))) ), hjust="left", vjust="top") + \
# ggplot2.ggtitle("Ion injection time over RT")
#
# return handle_plot_format(pp, plot_type, hosturl, port)
| 51.925424 | 175 | 0.611666 | 4,105 | 30,636 | 4.44458 | 0.11084 | 0.040559 | 0.026966 | 0.048452 | 0.685996 | 0.660455 | 0.645821 | 0.622582 | 0.614744 | 0.586681 | 0 | 0.032667 | 0.193628 | 30,636 | 589 | 176 | 52.013582 | 0.705878 | 0.960275 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0.001698 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
dc9aa560f4da61f8d0128a006844e3fa51e8e950 | 466 | py | Python | organizations/views.py | tarequeh/django-modularblog | 08b111fd12ab102d16224569e8064c6e6c76cd89 | [
"MIT"
] | null | null | null | organizations/views.py | tarequeh/django-modularblog | 08b111fd12ab102d16224569e8064c6e6c76cd89 | [
"MIT"
] | null | null | null | organizations/views.py | tarequeh/django-modularblog | 08b111fd12ab102d16224569e8064c6e6c76cd89 | [
"MIT"
] | 1 | 2021-02-05T18:49:27.000Z | 2021-02-05T18:49:27.000Z | from rest_framework import viewsets
from organizations.models import Organization
from organizations.permissions import OrgPermissions
from organizations.serializers import OrgSerializer
class OrgViewSet(viewsets.ModelViewSet):
"""
API endpoints for Organizations
"""
http_method_names = ['options', 'head', 'get', 'patch']
queryset = Organization.objects.all()
permission_classes = (OrgPermissions,)
serializer_class = OrgSerializer
| 27.411765 | 59 | 0.76824 | 45 | 466 | 7.844444 | 0.688889 | 0.144476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.152361 | 466 | 16 | 60 | 29.125 | 0.893671 | 0.066524 | 0 | 0 | 0 | 0 | 0.045346 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.444444 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
dc9b687754219df96a73d5c05f99287d7a3dcefa | 3,006 | py | Python | test/tumblr_post_extractor.py | imsesaok/tumbl-down | 2faba20b58778e4f40344ff199f067b0914aa28c | [
"BSD-3-Clause"
] | null | null | null | test/tumblr_post_extractor.py | imsesaok/tumbl-down | 2faba20b58778e4f40344ff199f067b0914aa28c | [
"BSD-3-Clause"
] | 1 | 2022-03-29T01:24:36.000Z | 2022-03-29T01:24:36.000Z | test/tumblr_post_extractor.py | cherryband/tumbl-down | 2faba20b58778e4f40344ff199f067b0914aa28c | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
import unittest
import extractor.tumblr_post as extractor
class TumblrPostTestCase(unittest.TestCase):
# noinspection SpellCheckingInspection
def test_extract_tumblr_image_links(self):
# test single image
self.assertEqual(extractor.extract_tumblr_images("kiminukii", '656015203710976000'),
[
"https://64.media.tumblr.com/656c1b0e9008c5daa813c1530c7b4610/f994f9125257a26d-3f/s2048x3072/4a9f313d9f73f21bcc53facda98ad2fb681530c2.jpg"])
# test multiple images
self.assertEqual(
extractor.extract_tumblr_images("zandraart", '662604327475707904'),
[
"https://64.media.tumblr.com/7ce02d1c06accb2db65ffae410592807/b718ff429b370a25-d4/s640x960/dff9bb177424a46c598be9768abf6d7ace8276af.png",
"https://64.media.tumblr.com/8173aec87f24dad743c8607721c5fe06/b718ff429b370a25-c4/s640x960/f64312a68a8f8999dc3efad84b7777f8f62db00a.png",
"https://64.media.tumblr.com/c95be592fed81a52e597f524930207f8/b718ff429b370a25-36/s640x960/189ff2d22791c6110e2d2bbf9c0c8f5338b3a786.png"])
self.assertEqual(
extractor.extract_tumblr_images("kittenwitchandthebadvibes", '661792902683525120'),
[
"https://64.media.tumblr.com/c1500c8ad3c53c8dd3768b70bed09134/ccffdfdc14aead67-2f/s1280x1920/1837f31750522ceaed01f0b6679fc71492de6afe.gifv",
"https://64.media.tumblr.com/0b3958305e93b69bd446fa5ad8c6f4b2/ccffdfdc14aead67-f6/s1280x1920/e0e346f72539cf303e9ea3a7cb8e2185ace9c2f3.gifv",
"https://64.media.tumblr.com/b8a53976248ae44c1a595ffacbbe483d/ccffdfdc14aead67-e4/s1280x1920/53b3a4f0e34550b07dd0da44e2f25caa1ad18612.gifv"])
# test single image without viewer
self.assertEqual(
extractor.extract_tumblr_images("maruti-bitamin", '663149423546777600'),
[
"https://64.media.tumblr.com/bdf1a1b5dd22c7bb961452c86994e9bb/f4a9103f1a6f2e3c-6c/s1280x1920/3319e9b6809c8f9d7d56d3da49951762d0e70e54.jpg"])
# test video
self.assertEqual(extractor.extract_tumblr_images("simonalkenmayer", '664168073150988288'),
[])
# test post without media
self.assertEqual(extractor.extract_tumblr_images("stuckwith-harry", '640948223894290432'),
[])
# test tumblr post with custom domain
self.assertEqual(
extractor.extract_tumblr_images("dailyskyfox", '656342309663801344'),
[
"https://64.media.tumblr.com/6f508a4fc1a07ca20cdf786b730c982d/61ae23b5425caadf-52/s2048x3072/cf2cbfec7d5323d1cbd345eeb442d70eeed8c5d9.png"])
# test invalid blog and post id
with self.assertRaises(ValueError):
extractor.extract_tumblr_images("temmiechang", 'temmiechang')
with self.assertRaises(ValueError):
extractor.extract_tumblr_images("I am a teapot.", '346409543733251134') | 56.716981 | 169 | 0.711577 | 233 | 3,006 | 9.081545 | 0.429185 | 0.061437 | 0.093573 | 0.119093 | 0.293006 | 0.243384 | 0.05482 | 0.05482 | 0 | 0 | 0 | 0.291097 | 0.196607 | 3,006 | 53 | 170 | 56.716981 | 0.585093 | 0.075183 | 0 | 0.228571 | 0 | 0.257143 | 0.540765 | 0.009019 | 0 | 0 | 0 | 0 | 0.257143 | 1 | 0.028571 | false | 0 | 0.057143 | 0 | 0.114286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
dc9cb878c2c40ddd0c9b5c031b4df49295f39d5a | 1,019 | py | Python | twitoff/models.py | ik-okoro/Twitoff-20 | 0a3484298e55bbdb1e9dc2e336360d0c1a52cb82 | [
"MIT"
] | null | null | null | twitoff/models.py | ik-okoro/Twitoff-20 | 0a3484298e55bbdb1e9dc2e336360d0c1a52cb82 | [
"MIT"
] | null | null | null | twitoff/models.py | ik-okoro/Twitoff-20 | 0a3484298e55bbdb1e9dc2e336360d0c1a52cb82 | [
"MIT"
] | null | null | null | """
SQLAlchemy models and utility functions for Twitoff Application
"""
from flask_sqlalchemy import SQLAlchemy
DB = SQLAlchemy()
class User(DB.Model):
"""
Twitter User Table that will correspond to tweets - SQLAlchemy syntax
"""
id= DB.Column(DB.BigInteger, primary_key = True) # id column (primary key)
name = DB.Column(DB.String, nullable = False)
newest_tweet_id = DB.Column(DB.BigInteger) # keeps track of recent tweet
def __repr__(self):
return "<User: {}>".format(self.name)
class Tweet(DB.Model):
"""
Tweet text data - associated with Users Table
"""
id = DB.Column(DB.BigInteger, primary_key = True) # id column (primary key)
text = DB.Column(DB.Unicode(300))
vect = DB.Column(DB.PickleType, nullable = False)
user_id = DB.Column(DB.BigInteger, DB.ForeignKey("user.id"), nullable = False)
user = DB.relationship("User", backref = DB.backref("tweets", lazy = True))
def __repr__(self):
return "<Tweet: {}>".format(self.text) | 32.870968 | 82 | 0.670265 | 134 | 1,019 | 4.992537 | 0.425373 | 0.083707 | 0.104634 | 0.071749 | 0.227205 | 0.161435 | 0.161435 | 0.161435 | 0.161435 | 0.161435 | 0 | 0.003667 | 0.197252 | 1,019 | 31 | 83 | 32.870968 | 0.814181 | 0.251227 | 0 | 0.25 | 0 | 0 | 0.052486 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.0625 | 0.125 | 0.9375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
dcac9a4ad8e38debabf3f0ddbfff75cd5f32b0df | 280 | py | Python | generic_views/display_views/views.py | markbirds/Django-Code-Repo | b55762d2dab00640acf2e8e00ddc66716d53c6b5 | [
"MIT"
] | 1 | 2021-11-25T00:02:36.000Z | 2021-11-25T00:02:36.000Z | generic_views/display_views/views.py | markbirds/Django-Code-Repo | b55762d2dab00640acf2e8e00ddc66716d53c6b5 | [
"MIT"
] | null | null | null | generic_views/display_views/views.py | markbirds/Django-Code-Repo | b55762d2dab00640acf2e8e00ddc66716d53c6b5 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.views.generic import ListView, DetailView
from .models import DisplayViewModel
# Create your views here.
class UserListView(ListView):
model = DisplayViewModel
class DetailListView(DetailView):
model = DisplayViewModel
| 20 | 53 | 0.8 | 30 | 280 | 7.466667 | 0.6 | 0.089286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146429 | 280 | 13 | 54 | 21.538462 | 0.937238 | 0.082143 | 0 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
dcb6ba4f5c7eea84b4921e229dd0a17704f4c997 | 150 | py | Python | class/lect/Lect-18/tf2.py | MikenzieAlasca/F21-1010 | a7c15b8d9bf84f316aa6921f6d8a588c513a22b8 | [
"MIT"
] | 5 | 2021-09-09T21:08:14.000Z | 2021-12-14T02:30:52.000Z | class/lect/Lect-18/tf2.py | MikenzieAlasca/F21-1010 | a7c15b8d9bf84f316aa6921f6d8a588c513a22b8 | [
"MIT"
] | null | null | null | class/lect/Lect-18/tf2.py | MikenzieAlasca/F21-1010 | a7c15b8d9bf84f316aa6921f6d8a588c513a22b8 | [
"MIT"
] | 8 | 2021-09-09T17:46:07.000Z | 2022-02-08T22:41:35.000Z | import tensorflow as tf
c = tf.constant([[1,2,3,4], [-1,-2,-3,-4], [5,6,7,8]])
b = tf.math.segment_sum(c, tf.constant([0, 0, 1]))
print(c)
print(b)
| 18.75 | 54 | 0.58 | 34 | 150 | 2.529412 | 0.588235 | 0.069767 | 0.255814 | 0.093023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114504 | 0.126667 | 150 | 7 | 55 | 21.428571 | 0.541985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.4 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f4a3db561fb8835ed7cbd5dfe1d0b107dfb2d96f | 256 | py | Python | tql/utils/__init__.py | Jie-Yuan/1_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | 14 | 2019-06-25T13:46:32.000Z | 2020-10-27T02:04:59.000Z | tql/utils/__init__.py | Jie-Yuan/2_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | null | null | null | tql/utils/__init__.py | Jie-Yuan/2_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | 7 | 2019-06-25T13:26:16.000Z | 2020-10-27T02:05:03.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
__title__ = '__init__.py'
__author__ = 'JieYuan'
__mtime__ = '18-12-17'
"""
from .string import cprint, cstring
group_by_step = lambda ls, step=3: [ls[idx: idx + step] for idx in range(0, len(ls), step)]
| 21.333333 | 91 | 0.648438 | 40 | 256 | 3.7 | 0.825 | 0.081081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 0.15625 | 256 | 11 | 92 | 23.272727 | 0.643519 | 0.445313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 2 |
f4a7a85ae626d9f02c82466e4ce1b8012dbc35f2 | 6,100 | py | Python | frameworks/compile/slang/tests/test.py | touxiong88/92_mediatek | 5e96a7bb778fd9d9b335825584664e0c8b5ff2c7 | [
"Apache-2.0"
] | 1 | 2022-01-07T01:53:19.000Z | 2022-01-07T01:53:19.000Z | frameworks/compile/slang/tests/test.py | touxiong88/92_mediatek | 5e96a7bb778fd9d9b335825584664e0c8b5ff2c7 | [
"Apache-2.0"
] | null | null | null | frameworks/compile/slang/tests/test.py | touxiong88/92_mediatek | 5e96a7bb778fd9d9b335825584664e0c8b5ff2c7 | [
"Apache-2.0"
] | 1 | 2020-02-28T02:48:42.000Z | 2020-02-28T02:48:42.000Z | #!/usr/bin/python2.4
#
# Copyright 2010-2012 Google Inc. All Rights Reserved.
"""Renderscript Compiler Test.
Runs subdirectories of tests for the Renderscript compiler.
"""
import filecmp
import glob
import os
import re
import shutil
import subprocess
import sys
__author__ = 'Android'
class Options(object):
def __init__(self):
return
verbose = 0
cleanup = 1
updateCTS = 0
def CompareFiles(actual, expect):
"""Compares actual and expect for equality."""
if not os.path.isfile(actual):
if Options.verbose:
print 'Could not find %s' % actual
return False
if not os.path.isfile(expect):
if Options.verbose:
print 'Could not find %s' % expect
return False
return filecmp.cmp(actual, expect, False)
def UpdateFiles(src, dst):
"""Update dst if it is different from src."""
if not CompareFiles(src, dst):
print 'Copying from %s to %s' % (src, dst)
shutil.copyfile(src, dst)
def GetCommandLineArgs(filename):
"""Extracts command line arguments from first comment line in a file."""
f = open(filename, 'r')
line = f.readline()
if line[0] == '/' and line [1] == '/':
return line[2:].strip()
else:
return ''
def ExecTest(dirname):
"""Executes an llvm-rs-cc test from dirname."""
passed = True
if Options.verbose != 0:
print 'Testing %s' % dirname
os.chdir(dirname)
stdout_file = open('stdout.txt', 'w+')
stderr_file = open('stderr.txt', 'w+')
cmd_string = ('../../../../../../out/host/linux-x86/bin/llvm-rs-cc '
'-o tmp/ -p tmp/ '
'-MD '
'-I ../../../../../../frameworks/rs/scriptc/ '
'-I ../../../../../../mediatek/external/clang/lib/Headers/')
base_args = cmd_string.split()
rs_files = glob.glob('*.rs')
fs_files = glob.glob('*.fs')
rs_files += fs_files;
rs_files.sort()
# Extra command line arguments can be placed as // comments at the start of
# any .rs file. We automatically bundle up all of these extra args and invoke
# llvm-rs-cc with them.
extra_args_str = ''
for rs_file in rs_files:
extra_args_str += GetCommandLineArgs(rs_file)
extra_args = extra_args_str.split()
args = base_args + extra_args + rs_files
if Options.verbose > 1:
print 'Executing:',
for arg in args:
print arg,
print
# Execute the command and check the resulting shell return value.
# All tests that are expected to FAIL have directory names that
# start with 'F_'. Other tests that are expected to PASS have
# directory names that start with 'P_'.
ret = 0
try:
ret = subprocess.call(args, stdout=stdout_file, stderr=stderr_file)
except:
passed = False
stdout_file.flush()
stderr_file.flush()
if Options.verbose > 1:
stdout_file.seek(0)
stderr_file.seek(0)
for line in stdout_file:
print 'STDOUT>', line,
for line in stderr_file:
print 'STDERR>', line,
stdout_file.close()
stderr_file.close()
if dirname[0:2] == 'F_':
if ret == 0:
passed = False
if Options.verbose:
print 'Command passed on invalid input'
elif dirname[0:2] == 'P_':
if ret != 0:
passed = False
if Options.verbose:
print 'Command failed on valid input'
else:
passed = (ret == 0)
if Options.verbose:
print 'Test Directory name should start with an F or a P'
if not CompareFiles('stdout.txt', 'stdout.txt.expect'):
passed = False
if Options.verbose:
print 'stdout is different'
if not CompareFiles('stderr.txt', 'stderr.txt.expect'):
passed = False
if Options.verbose:
print 'stderr is different'
if Options.updateCTS:
# Copy resulting files to appropriate CTS directory (if different).
if passed and glob.glob('IN_CTS'):
cts_path = '../../../../../cts/'
cts_res_raw_path = cts_path + 'tests/res/raw/'
cts_src_path = cts_path + 'tests/tests/renderscript/src/'
for bc_src in glob.glob('tmp/*.bc'):
bc_dst = re.sub('tmp\/', cts_res_raw_path, bc_src, 1)
UpdateFiles(bc_src, bc_dst)
for java_src in glob.glob('tmp/android/renderscript/cts/*.java'):
java_dst = re.sub('tmp\/', cts_src_path, java_src, 1)
UpdateFiles(java_src, java_dst)
if Options.cleanup:
try:
os.remove('stdout.txt')
os.remove('stderr.txt')
shutil.rmtree('tmp/')
except:
pass
os.chdir('..')
return passed
def Usage():
"""Print out usage information."""
print ('Usage: %s [OPTION]... [TESTNAME]...'
'Renderscript Compiler Test Harness\n'
'Runs TESTNAMEs (all tests by default)\n'
'Available Options:\n'
' -h, --help Help message\n'
' -n, --no-cleanup Don\'t clean up after running tests\n'
' -u, --update-cts Update CTS test versions\n'
' -v, --verbose Verbose output\n'
) % (sys.argv[0]),
return
def main():
passed = 0
failed = 0
files = []
failed_tests = []
for arg in sys.argv[1:]:
if arg in ('-h', '--help'):
Usage()
return 0
elif arg in ('-n', '--no-cleanup'):
Options.cleanup = 0
elif arg in ('-u', '--update-cts'):
Options.updateCTS = 1
elif arg in ('-v', '--verbose'):
Options.verbose += 1
else:
# Test list to run
if os.path.isdir(arg):
files.append(arg)
else:
print >> sys.stderr, 'Invalid test or option: %s' % arg
return 1
if not files:
tmp_files = os.listdir('.')
# Only run tests that are known to PASS or FAIL
# Disabled tests can be marked D_ and invoked explicitly
for f in tmp_files:
if os.path.isdir(f) and (f[0:2] == 'F_' or f[0:2] == 'P_'):
files.append(f)
for f in files:
if os.path.isdir(f):
if ExecTest(f):
passed += 1
else:
failed += 1
failed_tests.append(f)
print 'Tests Passed: %d\n' % passed,
print 'Tests Failed: %d\n' % failed,
if failed:
print 'Failures:',
for t in failed_tests:
print t,
return failed != 0
if __name__ == '__main__':
sys.exit(main())
| 25.738397 | 79 | 0.609672 | 853 | 6,100 | 4.260258 | 0.264947 | 0.029719 | 0.044029 | 0.040451 | 0.131536 | 0.093561 | 0.066043 | 0.066043 | 0.024766 | 0.024766 | 0 | 0.010748 | 0.252623 | 6,100 | 236 | 80 | 25.847458 | 0.786357 | 0.106885 | 0 | 0.156069 | 0 | 0 | 0.209028 | 0.041021 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.080925 | 0.040462 | null | null | 0.115607 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f4ab38ac0d5423a09cca0a2ccc135af8a2198949 | 809 | py | Python | utils/quad.py | comp-physics/PyQBMMlib | ab53f49bd797b147a75e38bd7b64217940b5af8b | [
"MIT"
] | 4 | 2021-09-27T07:57:37.000Z | 2022-03-26T17:29:53.000Z | PyCav/unused/quad.py | comp-physics/PyCav | d118f64bb318055f751f96f92a52295c3a4edbb5 | [
"MIT"
] | 2 | 2020-11-18T03:10:14.000Z | 2020-12-13T05:31:19.000Z | PyCav/unused/quad.py | comp-physics/PyCav | d118f64bb318055f751f96f92a52295c3a4edbb5 | [
"MIT"
] | 2 | 2020-11-09T22:05:13.000Z | 2021-09-17T19:00:19.000Z | import math
import numpy as np
def quadrature_1d(weights, abscissas, moment_index):
    """Evaluate a 1-D quadrature sum_i w_i * xi_i ** k.

    ``weights`` and ``abscissas`` are equal-length array-likes and
    ``moment_index`` is the moment order ``k``.
    """
    powered_abscissas = abscissas ** moment_index
    return np.dot(weights, powered_abscissas)
def quadrature_2d(weights, abscissas, moment_index, num_quadrature_nodes):
    """Evaluate a 2-D quadrature sum_i w_i * x_i**k0 * y_i**k1.

    ``abscissas[axis][node]`` holds the node coordinates and
    ``moment_index`` the per-axis moment orders.
    """
    total = 0.0
    for node in range(num_quadrature_nodes):
        term = weights[node]
        term = term * (abscissas[0][node] ** moment_index[0])
        term = term * (abscissas[1][node] ** moment_index[1])
        total += term
    return total
def quadrature_3d(weights, abscissas, moment_index, num_quadrature_nodes):
    """Evaluate a 3-D quadrature sum_i w_i * x_i**k0 * y_i**k1 * z_i**k2.

    ``abscissas[axis][node]`` holds the node coordinates and
    ``moment_index`` the per-axis moment orders.

    Uses ``abscissas[axis][node]`` indexing rather than the NumPy-only
    ``abscissas[axis, node]`` so both 2-D ndarrays and nested sequences
    are accepted, consistent with quadrature_2d.
    """
    q = 0.0
    for i in range(num_quadrature_nodes):
        q += (
            weights[i]
            * (abscissas[0][i] ** moment_index[0])
            * (abscissas[1][i] ** moment_index[1])
            * (abscissas[2][i] ** moment_index[2])
        )
    return q
| 25.28125 | 74 | 0.588381 | 108 | 809 | 4.185185 | 0.268519 | 0.219027 | 0.132743 | 0.168142 | 0.584071 | 0.584071 | 0.584071 | 0.584071 | 0.584071 | 0.584071 | 0 | 0.029877 | 0.296663 | 809 | 31 | 75 | 26.096774 | 0.764499 | 0 | 0 | 0.44 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.08 | 0 | 0.32 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f4aeaf181496d5324218c96bec0a06941cb33a68 | 3,345 | py | Python | test_algorithm.py | tykkz/hasherapp | 6ba44b0714021e405add5f326e777268ce135a04 | [
"MIT"
] | null | null | null | test_algorithm.py | tykkz/hasherapp | 6ba44b0714021e405add5f326e777268ce135a04 | [
"MIT"
] | 1 | 2020-11-03T19:30:39.000Z | 2020-11-03T19:30:39.000Z | test_algorithm.py | tykkz/hasherapp | 6ba44b0714021e405add5f326e777268ce135a04 | [
"MIT"
] | null | null | null | import unittest
from algorithm import hash_text
class TestHashText(unittest.TestCase):
    """Tests for algorithm.hash_text.

    hash_text(algorithms, text, passes) appears to return a tuple whose
    element [0] is a success flag and element [1] a {algorithm: digest}
    mapping.  NOTE(review): this contract is inferred from usage below —
    confirm against algorithm.hash_text itself.
    """

    def setUp(self):
        # Shared fixtures: algorithm name lists, the input text, and the
        # expected digests of "hello" after 1 and after 100 hashing passes.
        self.avail_func_array = ['md5', 'sha1', 'sha256']
        self.avail_func_array2 = ['sha256', 'sha224']
        self.text = "hello"
        self.hello_pass_1 = {'sha1': 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d',
                             'sha256': '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824',
                             'md5': '5d41402abc4b2a76b9719d911017c592'}
        self.hello_pass_100 = {'sha1': '3005a7cebc1edda34127c87c7bc79074e113015d',
                               'sha256': '0c7e20c4751943a4cf4ec6a2d17a520c8aaf1d2a714e2af1ad6667fd06c056de',
                               'md5': '3b8ca2de573942e58426494e9fcf8766'}

    def test_successful(self):
        # Known digests must be reproduced for 1 and 100 passes.
        # NOTE(review): assertCountEqual compares the dicts as unordered
        # collections of keys only, not the digest values — assertEqual
        # would be a stronger check for the 100-pass case.
        self.assertEqual(self.hello_pass_1, hash_text(self.avail_func_array, self.text, 1)[1],
                         "Correct hash values for one (1) pass of given hash algorithms")
        self.assertCountEqual(self.hello_pass_100, hash_text(self.avail_func_array, self.text, 100)[1],
                              "Correct hash values for hundred (100) pass of given hash algorithms")

    def test_empty_function_array(self):
        # An empty algorithm list must set the failure flag.
        self.assertFalse(hash_text([], self.text, 1)[0], "One should not use empty array for the hash algorithm list.")

    def test_none_function_array_type(self):
        # None instead of a list of algorithm names must fail.
        self.assertFalse(hash_text(None, self.text, 1)[0], "Incorrect type for hash algorithms list.")

    def test_diferent_function_array(self):
        # NOTE(review): compares the expected dict against hash_text's full
        # return value (missing [1]), so this inequality holds regardless of
        # the digests; also "diferent" is a typo in the method name.
        self.assertNotEqual(self.hello_pass_1, hash_text(self.avail_func_array2, self.text, 1),
                            "Incorrect list of results, different hash algorithms used.")

    def test_none_text_type(self):
        # None instead of the text to hash must fail.
        self.assertFalse(hash_text(self.avail_func_array, None, 1)[0], "Incorrect type for text to be hashed.")

    def test_incorrect_text_type(self):
        # A non-string text argument must fail.
        self.assertFalse(hash_text(self.avail_func_array, 9999, 1)[0],
                         "Incorrect type for the text parameter; should be of string type.")

    def test_none_pass_count_type(self):
        # None instead of an integer pass count must fail.
        self.assertFalse(hash_text(self.avail_func_array, self.text, None)[0], "Incorrect type for pass count.")

    def test_different_pass_count(self):
        # NOTE(review): same issue as test_diferent_function_array — compares
        # a dict to the full return value (missing [1]), so it cannot fail.
        self.assertNotEqual(self.hello_pass_1, hash_text(self.avail_func_array, self.text, 9999),
                            "Incorrect list of results, different hash algorithms used.")

    def test_incorrect_pass_count_type(self):
        # A non-integer pass count must fail.
        self.assertFalse(hash_text(self.avail_func_array, self.text, self.text)[0],
                         "Incorrect type for the pass count parameter; should be of integer type.")

    def test_pass_count_value(self):
        # The pass count must be > 0 and < 1000000.
        self.assertFalse(hash_text(self.avail_func_array, self.text, 0)[0],
                         "Incorrect value for the pass count parameter; should be larger than zero.")
        self.assertFalse(hash_text(self.avail_func_array, self.text, -9999)[0],
                         "Incorrect value for the pass count parameter; should be larger than zero.")
        self.assertFalse(hash_text(self.avail_func_array, self.text, 99999999)[0],
                         "Incorrect value for the pass count parameter; should be smaller than 1000000.")
if __name__ == '__main__':
    # Allow running this test module directly: python test_algorithm.py
    unittest.main()
| 53.951613 | 119 | 0.666966 | 404 | 3,345 | 5.29703 | 0.190594 | 0.052336 | 0.078972 | 0.092523 | 0.534579 | 0.458411 | 0.429439 | 0.414486 | 0.398598 | 0.39486 | 0 | 0.097609 | 0.237369 | 3,345 | 61 | 120 | 54.836066 | 0.741278 | 0 | 0 | 0.085106 | 0 | 0 | 0.330045 | 0.081315 | 0 | 0 | 0 | 0 | 0.276596 | 1 | 0.234043 | false | 0.361702 | 0.042553 | 0 | 0.297872 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f4bb0cf77323eccc4671b98f9bb87e3bba54bed2 | 827 | py | Python | class3.py | itsforbasu/Python_class_files | 05890fddb7287e8b558dc4931fa725367dbf9ad5 | [
"MIT"
] | 1 | 2019-03-04T08:50:53.000Z | 2019-03-04T08:50:53.000Z | class3.py | itsforbasu/Python_class_files | 05890fddb7287e8b558dc4931fa725367dbf9ad5 | [
"MIT"
] | null | null | null | class3.py | itsforbasu/Python_class_files | 05890fddb7287e8b558dc4931fa725367dbf9ad5 | [
"MIT"
] | 2 | 2018-10-17T15:10:24.000Z | 2020-06-27T04:00:02.000Z | # print(range(5))
# a = range(1,10,5)
# for x in a:
# print(x)
# num =1
# rem = num%2
# if rem==0:
# print('The number is even')
# else:
# print('The number is odd')
# print("I am not inside if statement")
# num = -2
# if num>0:
# print("Number is positive")
# if (num % 2) == 0:
# print('Number is even')
# else:
# print('Number is odd')
# elif num<0:
# print("Number is negative")
# else:
# print("Number is zero")
# var = False
# if var:
# print('Hello')
# amount = int(input("Enter amount: "))
# if amount >= 1000:
# discount = amount*0.10
# print ("Discount",discount)
# else:
# discount = amount*0.05
# print ("Discount",discount)
# print ("Net payable:",amount-discount)
# Final live example: the same conditional, written out conventionally.
var = 100
if var == 100:
    print("This is 100")
| 18.795455 | 40 | 0.53688 | 117 | 827 | 3.794872 | 0.367521 | 0.126126 | 0.146396 | 0.094595 | 0.171171 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053963 | 0.28295 | 827 | 44 | 41 | 18.795455 | 0.694772 | 0.818622 | 0 | 0 | 0 | 0 | 0.094017 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
f4ce3a8bfc78005eb207f76ea8594a1f71e6b623 | 2,439 | py | Python | parsons/controlshift/controlshift.py | cmc333333/parsons | 50804a3627117797570f1e9233c9bbad583f7831 | [
"Apache-2.0"
] | 3 | 2019-09-05T16:57:15.000Z | 2019-10-01T19:56:58.000Z | parsons/controlshift/controlshift.py | cmc333333/parsons | 50804a3627117797570f1e9233c9bbad583f7831 | [
"Apache-2.0"
] | 22 | 2019-09-03T13:23:37.000Z | 2019-10-03T20:32:48.000Z | parsons/controlshift/controlshift.py | cmc333333/parsons | 50804a3627117797570f1e9233c9bbad583f7831 | [
"Apache-2.0"
] | 2 | 2019-09-01T18:30:10.000Z | 2019-10-03T20:07:46.000Z | from parsons.etl.table import Table
from parsons.utilities import check_env
from parsons.utilities.oauth_api_connector import OAuth2APIConnector
class Controlshift(object):
    """Client for the Controlshift authenticated REST API.

    Requires an API Application integration.  For setup details see:
    https://developers.controlshiftlabs.com/#authenticated-rest-api-quickstart-guide

    `Args:`
        hostname: str
            URL of the homepage/login page of the organization's
            Controlshift instance (e.g. demo.controlshift.app).  Falls back
            to the ``CONTROLSHIFT_HOSTNAME`` env variable.
        client_id: str
            Client ID of the REST API Application.  Falls back to the
            ``CONTROLSHIFT_CLIENT_ID`` env variable.
        client_secret: str
            Client Secret of the REST API Application.  Falls back to the
            ``CONTROLSHIFT_CLIENT_SECRET`` env variable.
    `Returns:`
        Controlshift Class
    """

    def __init__(self, hostname=None, client_id=None, client_secret=None):
        self.hostname = check_env.check('CONTROLSHIFT_HOSTNAME', hostname)
        # The same OAuth endpoint serves both initial and refresh tokens.
        oauth_endpoint = f'{self.hostname}/oauth/token'
        resolved_id = check_env.check('CONTROLSHIFT_CLIENT_ID', client_id)
        resolved_secret = check_env.check('CONTROLSHIFT_CLIENT_SECRET',
                                          client_secret)
        self.client = OAuth2APIConnector(
            self.hostname,
            client_id=resolved_id,
            client_secret=resolved_secret,
            token_url=oauth_endpoint,
            auto_refresh_url=oauth_endpoint)

    def get_petitions(self) -> Table:
        """Fetch the full list of petitions, page by page, including ones
        that are unlaunched or otherwise not visible to the public.

        `Return:`
            Table Class
        """
        endpoint = f'{self.hostname}/api/v1/petitions'
        collected = []
        page = 1
        # The API reports the next page number in meta; a falsy value ends
        # the pagination loop.
        while page:
            payload = self.client.get_request(endpoint, {'page': page})
            collected.extend(payload['petitions'])
            page = payload['meta']['next_page']
        return Table(collected)

    def get_signatures(self):
        pass

    def get_members(self):
        pass

    def get_partnerships(self):
        pass

    def get_events(self):
        pass

    def get_attendees(self):
        pass

    def get_calendars(self):
        pass

    def get_local_groups(self):
        pass
| 31.675325 | 93 | 0.624846 | 279 | 2,439 | 5.286738 | 0.369176 | 0.032542 | 0.044746 | 0.056949 | 0.147797 | 0.075932 | 0.075932 | 0.075932 | 0.075932 | 0.075932 | 0 | 0.00233 | 0.296023 | 2,439 | 76 | 94 | 32.092105 | 0.856727 | 0.360804 | 0 | 0.189189 | 0 | 0 | 0.107242 | 0.089136 | 0 | 0 | 0 | 0 | 0 | 1 | 0.243243 | false | 0.189189 | 0.081081 | 0 | 0.378378 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f4d99c61b6c0161e836ae357ed392385bb80625b | 365 | py | Python | output/models/ms_data/particles/particles_jj004_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/particles/particles_jj004_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/particles/particles_jj004_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.ms_data.particles.particles_jj004_xsd.particles_jj004 import (
B as B,
R,
Doc,
)
from output.models.ms_data.particles.particles_jj004_xsd.particles_jj004_imp import (
B as ImpB,
ExtRefType,
ImpElem1,
ImpElem2,
)
__all__ = [
"B",
"R",
"Doc",
"ImpB",
"ExtRefType",
"ImpElem1",
"ImpElem2",
]
| 16.590909 | 85 | 0.632877 | 44 | 365 | 4.954545 | 0.431818 | 0.256881 | 0.146789 | 0.165138 | 0.568807 | 0.568807 | 0.568807 | 0.568807 | 0.568807 | 0.568807 | 0 | 0.057971 | 0.243836 | 365 | 21 | 86 | 17.380952 | 0.731884 | 0 | 0 | 0 | 0 | 0 | 0.09589 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f4ef53015e7ac8df146831a06f0b38895ccaf08d | 804 | py | Python | JSS/CustomCallbacks.py | Sa3doun13/PADL-2022 | a0581e07179f7d28617275ac8134fd03d1e69f4b | [
"MIT"
] | 29 | 2021-07-19T02:42:08.000Z | 2022-03-29T07:17:45.000Z | JSS/CustomCallbacks.py | Sa3doun13/PADL-2022 | a0581e07179f7d28617275ac8134fd03d1e69f4b | [
"MIT"
] | 15 | 2021-02-08T17:11:53.000Z | 2021-06-18T09:00:30.000Z | JSS/CustomCallbacks.py | Sa3doun13/PADL-2022 | a0581e07179f7d28617275ac8134fd03d1e69f4b | [
"MIT"
] | 18 | 2021-07-15T03:29:22.000Z | 2022-03-29T06:58:12.000Z | from typing import Dict
from ray.rllib.agents.callbacks import DefaultCallbacks
from ray.rllib.env import BaseEnv
from ray.rllib.evaluation import MultiAgentEpisode, RolloutWorker
from ray.rllib.policy import Policy
from ray.rllib.utils.typing import PolicyID
class CustomCallbacks(DefaultCallbacks):
def __init__(self, legacy_callbacks_dict: Dict[str, callable] = None):
super(CustomCallbacks, self).__init__(legacy_callbacks_dict)
def on_episode_end(self, worker: "RolloutWorker", base_env: BaseEnv,
policies: Dict[PolicyID, Policy],
episode: MultiAgentEpisode, **kwargs):
env = base_env.get_unwrapped()[0]
if env.last_time_step != float('inf'):
episode.custom_metrics['make_span'] = env.last_time_step
| 38.285714 | 74 | 0.716418 | 96 | 804 | 5.760417 | 0.489583 | 0.063291 | 0.108499 | 0.05425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001546 | 0.195274 | 804 | 20 | 75 | 40.2 | 0.853168 | 0 | 0 | 0 | 0 | 0 | 0.031133 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.4 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
f4fee38270658ece767acc57d0141477a47c6026 | 4,264 | py | Python | schema_org/models.py | jermnelson/aristotle-library-apps | f742847cd20c5b5c3b46dd53dfc395a2e1caa240 | [
"Apache-2.0"
] | 2 | 2015-03-30T16:36:51.000Z | 2016-06-15T01:39:47.000Z | schema_org/models.py | jermnelson/aristotle-library-apps | f742847cd20c5b5c3b46dd53dfc395a2e1caa240 | [
"Apache-2.0"
] | 2 | 2021-06-10T17:43:54.000Z | 2021-12-13T19:40:08.000Z | schema_org/models.py | jermnelson/aristotle-library-apps | f742847cd20c5b5c3b46dd53dfc395a2e1caa240 | [
"Apache-2.0"
] | 1 | 2015-11-08T00:40:11.000Z | 2015-11-08T00:40:11.000Z | __author__ = "Jeremy Nelson"
import json, os
from stdnet import odm
from aristotle.settings import PROJECT_HOME
from aristotle.settings import ANNOTATION_REDIS, AUTHORITY_REDIS, CREATIVE_WORK_REDIS, INSTANCE_REDIS
SCHEMA_RDFS = json.load(open(os.path.join(PROJECT_HOME,
'schema_org',
'fixures',
'all.json'),
'rb'))
class Thing(odm.StdModel):
"""
Schema.org Thing Base Model available at http://schema.org/Thing
"""
additionalType = odm.ForeignField()
description = odm.CharField()
image = odm.CharField()
name = odm.SymbolField()
url = odm.SymbolField()
def __unicode__(self):
return self.name
class Meta:
abstract = True
class CreativeWork(Thing):
"""
Schema.org Creative Work Model available at http://schema.org/CreativeWork
"""
about = odm.ForeignField()
accountablePerson = odm.ForeignField('Person')
aggregateRating = odm.SymbolField()
alternativeHeadline = odm.SymbolField()
associatedMedia = odm.SymbolField()
audience = odm.SymbolField()
audio = odm.CharField()
author = odm.ManyToManyField()
award = odm.ListField()
comment = odm.ManyToManyField('UserComments')
contentLocation = odm.ForeignField('Place')
contentRating = odm.SymbolField()
contributor = odm.ManyToManyField()
copyrightHolder = odm.ForeignField()
copyrightYear = odm.DateField()
creator = odm.ManyToManyField()
dateCreated = odm.SymbolField()
dateModified = odm.SymbolField()
datePublished = odm.SymbolField()
discussionUrl = odm.SymbolField()
editor = odm.ForeignField('Person')
encoding = odm.ForeignField('MediaObject')
genre = odm.SymbolField()
headline = odm.CharField()
inLanguage = odm.SymbolField()
interactionCount = odm.SymbolField()
isFamilyFriendly = odm.BooleanField()
keywords = odm.SetField()
mentions = odm.ManyToManyField()
offers = odm.ManyToManyField('Offer')
provider = odm.ManyToManyField()
publisher = odm.ManyToManyField()
publishingPrinciples = odm.CharField()
review = odm.SymbolField('Review')
sourceOrganization = odm.ForeignField('Organization')
text = odm.CharField()
thumbnailUrl = odm.CharField()
version = odm.FloatField()
video = odm.ForeignField('VideoObject')
class Person(Base):
additionalType = odm.SymbolField()
description = odm.SymbolField()
image = odm.SymbolField()
name = odm.SymbolField()
url = odm.SymbolField()
additionalName = odm.SymbolField()
address = odm.SymbolField()
affiliation = odm.SymbolField()
alumniOf = odm.SymbolField()
award = odm.SymbolField()
awards = odm.SymbolField()
birthDate = odm.SymbolField()
brand = odm.SymbolField()
children = odm.SymbolField()
colleague = odm.SymbolField()
colleagues = odm.SymbolField()
contactPoint = odm.SymbolField()
contactPoints = odm.SymbolField()
deathDate = odm.SymbolField()
duns = odm.SymbolField()
email = odm.SymbolField()
familyName = odm.SymbolField()
faxNumber = odm.SymbolField()
follows = odm.SymbolField()
gender = odm.SymbolField()
givenName = odm.SymbolField()
globalLocationNumber = odm.SymbolField()
hasPOS = odm.SymbolField()
homeLocation = odm.SymbolField()
honorificPrefix = odm.SymbolField()
honorificSuffix = odm.SymbolField()
interactionCount = odm.SymbolField()
isicV4 = odm.SymbolField()
jobTitle = odm.SymbolField()
knows = odm.SymbolField()
makesOffer = odm.SymbolField()
memberOf = odm.SymbolField()
naics = odm.SymbolField()
nationality = odm.SymbolField()
owns = odm.SymbolField()
parent = odm.SymbolField()
parents = odm.SymbolField()
performerIn = odm.SymbolField()
relatedTo = odm.SymbolField()
seeks = odm.SymbolField()
sibling = odm.SymbolField()
siblings = odm.SymbolField()
spouse = odm.SymbolField()
taxID = odm.SymbolField()
telephone = odm.SymbolField()
vatID = odm.SymbolField()
workLocation = odm.SymbolField()
worksFor = odm.SymbolField()
| 32.8 | 101 | 0.662523 | 387 | 4,264 | 7.255814 | 0.387597 | 0.339031 | 0.014957 | 0.019231 | 0.076923 | 0.045584 | 0 | 0 | 0 | 0 | 0 | 0.000303 | 0.225375 | 4,264 | 129 | 102 | 33.054264 | 0.849833 | 0.032599 | 0 | 0.052632 | 0 | 0 | 0.027846 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008772 | false | 0 | 0.035088 | 0.008772 | 0.938596 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
760464bc93238c76444c656b97568d6883bcc5fe | 2,885 | py | Python | tests/index_jsonfile_test.py | Stidsty/dismantle | 26fb8fe7ba97349a67498715bb47a19329b1a4c7 | [
"Apache-2.0"
] | null | null | null | tests/index_jsonfile_test.py | Stidsty/dismantle | 26fb8fe7ba97349a67498715bb47a19329b1a4c7 | [
"Apache-2.0"
] | null | null | null | tests/index_jsonfile_test.py | Stidsty/dismantle | 26fb8fe7ba97349a67498715bb47a19329b1a4c7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Gary Stidston-Broadbent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from json import JSONDecodeError
import pytest
from dismantle.index import IndexHandler, JsonFileIndexHandler
def test_notfound(datadir):
    # A missing index file must raise FileNotFoundError at construction.
    with pytest.raises(FileNotFoundError):
        JsonFileIndexHandler(datadir.join('index_notfound.json'))
def test_blank(datadir):
    # A zero-byte file is not valid JSON and must raise JSONDecodeError.
    with pytest.raises(JSONDecodeError):
        JsonFileIndexHandler(datadir.join('index_blank.json'))
def test_empty(datadir):
    # A valid but empty JSON index yields a handler with zero entries.
    index = JsonFileIndexHandler(datadir.join('index_empty.json'))
    assert len(index) == 0
def test_broken(datadir):
    # Malformed JSON must surface as JSONDecodeError, not be swallowed.
    with pytest.raises(JSONDecodeError):
        JsonFileIndexHandler(datadir.join('index_broken.json'))
def test_populated(datadir):
    # A populated index constructs and satisfies the IndexHandler interface.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    assert isinstance(index, IndexHandler) is True
def test_rfc8089(datadir):
    # An RFC 8089 file:// URL is accepted in place of a plain path.
    path = 'file://' + str(datadir.join('index_populated.json'))
    index = JsonFileIndexHandler(path)
    assert isinstance(index, IndexHandler) is True
def test_outdated(datadir):
    # A freshly loaded local index should not report itself as outdated.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    assert index.outdated is False
def test_update(datadir):
    # update() on a local file index must succeed and report True.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    assert index.update() is True
def test_length(datadir):
    # The populated fixture holds 6 packages; find() matches by full
    # "@scope/name", by bare package name, and by bare scope.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    assert len(index) == 6
    assert len(index.find('@scope-one/package-one')) == 1
    assert len(index.find('package-one')) == 3
    assert len(index.find('package-two')) == 2
    assert len(index.find('package-three')) == 1
    assert len(index.find('@scope-one')) == 3
    assert len(index.find('@scope-two')) == 2
    assert len(index.find('@scope-three')) == 1
def test_populated_package_exists(datadir):
    # Subscripting by full package name returns that package's metadata.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    package = index["@scope-one/package-one"]
    assert package["name"] == "@scope-one/package-one"
    assert package["version"] == "0.1.0"
    assert package["path"] == "@scope-one/package-one"
def test_populated_package_nonexistant(datadir):
    # Subscripting with an unknown package name must raise KeyError.
    index = JsonFileIndexHandler(datadir.join('index_populated.json'))
    with pytest.raises(KeyError):
        index["@scope-four/package-one"]
| 33.941176 | 79 | 0.710572 | 357 | 2,885 | 5.669468 | 0.305322 | 0.038043 | 0.086957 | 0.177866 | 0.474802 | 0.435277 | 0.313241 | 0.313241 | 0.210474 | 0.071146 | 0 | 0.009728 | 0.144887 | 2,885 | 84 | 80 | 34.345238 | 0.810701 | 0.221144 | 0 | 0.208333 | 0 | 0 | 0.185036 | 0.049731 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.229167 | false | 0 | 0.0625 | 0 | 0.291667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
76065c48cf54d50698fe18038c4e3fed59bd2812 | 1,549 | py | Python | 07-for-loop.py | deepaksamuel/python-tutorials | 9e08c7b589e6b00c2c1781ad09fc76944c2f7c87 | [
"MIT"
] | 1 | 2022-02-20T17:31:09.000Z | 2022-02-20T17:31:09.000Z | 07-for-loop.py | deepaksamuel/python-tutorials | 9e08c7b589e6b00c2c1781ad09fc76944c2f7c87 | [
"MIT"
] | null | null | null | 07-for-loop.py | deepaksamuel/python-tutorials | 9e08c7b589e6b00c2c1781ad09fc76944c2f7c87 | [
"MIT"
] | 1 | 2021-02-09T14:41:56.000Z | 2021-02-09T14:41:56.000Z | # in this tutorial, you will learn how to use for loop statement in python
import numpy as np
# Aim: We want to print "Hello" 10 times:
# np.arange creates a sequence from 0-9.
# in each loop i is given a number in the sequence (in order)
# the ":" is the beginning of the loop"
# The moment you press enter after the ":", a tab space (indent) is created for you in Spyder (and most python IDEs)
# All statments with an indent are considered to be a part of that loop.
# One should be careful not to create/delete extra spaces in the beginning of such statements. This will lead to problems. The indent space plays the role similar to that of curly brackets in C/C++
# To come out of the loop, at the end of the last statement of the loop, press shift + tab
for i in np.arange(0,10):
print("{0} Hello".format(i)) # observe the space before this statement (Don't delete it). Its called a indent which is auto created when you press enter after pressing the : characater. You can also press the tab button to create one.
print("loop over")
# Assignment
# write a python code using loops to print out series like:
# 1,1
# 1,2
# 1,4
# 1,5
# 2,1
# 2,2
# 2,3
# 2,4
# 2,5
# Hint: You will require a loop inside a loop. The second for loop statement must be singe indented and the content of the second for loop must be double indented.
# In case, you could not do it, the answer is given below.
#
#
#
#
#
#
#
# for i in np.arange(1,3):
# for j in np.arange(1,6):
# print("{0},{1}".format(i,j))
# print("loop over")
| 33.673913 | 239 | 0.696578 | 290 | 1,549 | 3.72069 | 0.448276 | 0.02317 | 0.025023 | 0.033364 | 0.02595 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026381 | 0.216914 | 1,549 | 45 | 240 | 34.422222 | 0.863149 | 0.886378 | 0 | 0 | 0 | 0 | 0.129496 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
760cca81cf5a888224f8ca8f9ef1f4746f77c966 | 215 | py | Python | app.py | VerdantFox/flask_authomatic_example | 4aaa35bfb9b16675b561541151019b7528a25757 | [
"MIT"
] | 1 | 2020-06-10T22:46:07.000Z | 2020-06-10T22:46:07.000Z | app.py | VerdantFox/flask_authomatic_example | 4aaa35bfb9b16675b561541151019b7528a25757 | [
"MIT"
] | null | null | null | app.py | VerdantFox/flask_authomatic_example | 4aaa35bfb9b16675b561541151019b7528a25757 | [
"MIT"
] | 1 | 2022-02-06T10:58:56.000Z | 2022-02-06T10:58:56.000Z | """This is the main file called to run the flask application"""
from dotenv import load_dotenv
from root.factory import create_app
if __name__ == "__main__":
    # Load environment variables from a local .env file first — presumably
    # so the app factory can read them (confirm in root.factory.create_app) —
    # then build the Flask app and start its built-in development server.
    load_dotenv()
    app = create_app()
    app.run()
| 21.5 | 63 | 0.711628 | 32 | 215 | 4.40625 | 0.625 | 0.141844 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195349 | 215 | 9 | 64 | 23.888889 | 0.815029 | 0.265116 | 0 | 0 | 0 | 0 | 0.052632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
7616c352204219b85bd25e7b51be85319a9dbf88 | 159 | py | Python | stringexercicolista1.1.py | CarolShiny/Estudo-Python | 3b9f791e7034abd83589d02e40695f1d972c045d | [
"MIT"
] | null | null | null | stringexercicolista1.1.py | CarolShiny/Estudo-Python | 3b9f791e7034abd83589d02e40695f1d972c045d | [
"MIT"
] | null | null | null | stringexercicolista1.1.py | CarolShiny/Estudo-Python | 3b9f791e7034abd83589d02e40695f1d972c045d | [
"MIT"
] | null | null | null | import stringExercicio
# Exercise 6: read a sentence and report its punctuation count.
sentence = input("Digite uma frase:").lower()
print(stringExercicio.quantPontuacao(sentence))
| 15.9 | 49 | 0.773585 | 17 | 159 | 7.235294 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007143 | 0.119497 | 159 | 9 | 50 | 17.666667 | 0.871429 | 0.119497 | 0 | 0 | 0 | 0 | 0.124088 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
762107436d6dd73e94bc0688afb514afbd72703e | 680 | py | Python | setup.py | tikazyq/google-analytics-api-wrapper | 91ffc1d6b4cb810aea20541153d64730873903b9 | [
"MIT"
] | 6 | 2015-03-11T01:38:54.000Z | 2021-07-09T21:40:12.000Z | setup.py | QPC-database/google-analytics-api-wrapper | 91ffc1d6b4cb810aea20541153d64730873903b9 | [
"MIT"
] | null | null | null | setup.py | QPC-database/google-analytics-api-wrapper | 91ffc1d6b4cb810aea20541153d64730873903b9 | [
"MIT"
] | 1 | 2021-07-09T21:40:13.000Z | 2021-07-09T21:40:13.000Z | from distutils.core import setup
# Distribution metadata for the google-analytics-api-wrapper package.
# NOTE(review): distutils is deprecated (removed in Python 3.12) — consider
# migrating to setuptools.setup.  Also note the "Goolge" typo in the
# description string (left untouched here; changing it alters the published
# metadata).
setup(
    name='google-analytics-api-wrapper',
    version='0.1.4',
    packages=['analytics_query'],
    url='https://github.com/tikazyq/google-analytics-api-wrapper',
    download_url='https://github.com/tikazyq/google-analytics-api-wrapper/tarball/master',
    license='http://opensource.org/licenses/MIT',
    author='Yeqing Zhang',
    author_email='tikazyq@gmail.com',
    description='The Goolge Analytics wrapper is a convenient tool to extract data from GA via API. It is especially '
                'useful when the user has many GA profiles / web properties.',
    keywords=['google-analytics', 'ga', 'pandas', 'dataframe', 'api']
)
| 42.5 | 118 | 0.7 | 89 | 680 | 5.314607 | 0.674157 | 0.12685 | 0.114165 | 0.158562 | 0.207188 | 0.207188 | 0.207188 | 0.207188 | 0.207188 | 0 | 0 | 0.005236 | 0.157353 | 680 | 15 | 119 | 45.333333 | 0.820244 | 0 | 0 | 0 | 0 | 0 | 0.633824 | 0.041176 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
762bd538244a3b2885df930cc12fdb20bebc5149 | 1,415 | py | Python | hippy/objects/boolobject.py | shendel/hippyvm | 26cc6675612e4ddc4d1b425d2731d34c97355c45 | [
"MIT"
] | 1 | 2021-07-28T01:10:41.000Z | 2021-07-28T01:10:41.000Z | hippy/objects/boolobject.py | shendel/hippyvm | 26cc6675612e4ddc4d1b425d2731d34c97355c45 | [
"MIT"
] | null | null | null | hippy/objects/boolobject.py | shendel/hippyvm | 26cc6675612e4ddc4d1b425d2731d34c97355c45 | [
"MIT"
] | null | null | null |
from hippy.objects.base import W_Object
class W_BoolObject(W_Object):
    """Boxed boolean value used by the interpreter."""

    _immutable_fields_ = ['boolval']
    supports_arithmetics = True

    def __init__(self, boolval):
        self.boolval = boolval

    def is_true(self, space):
        return self.boolval

    def str(self, space, quiet=False):
        # String conversion: true -> '1', false -> empty string.
        return '1' if self.boolval else ''

    def repr(self):
        if self.boolval:
            return 'true'
        return 'false'

    def as_number(self, space):
        return space.newint(int(self.boolval))

    def int_w(self, space):
        return int(self.boolval)

    def __repr__(self):
        return 'W_BoolObject(%s)' % self.boolval

    def var_dump(self, space, indent, recursion):
        if not self.boolval:
            return '%sbool(false)\n' % indent
        return '%sbool(true)\n' % indent

    def var_export(self, space, indent, recursion, suffix):
        if not self.boolval:
            return '%sfalse%s' % (indent, suffix)
        return '%strue%s' % (indent, suffix)

    def is_empty_value(self):
        return not self.boolval

    def overflow_convert(self, space):
        return self

    def eval_static(self, space):
        return self

    def serialize(self, space, builder, memo):
        # %d renders the bool as 0/1.
        builder.append("b:%d;" % self.boolval)
        return True
# Module-level shared instances for the two boolean values.
w_True = W_BoolObject(True)
w_False = W_BoolObject(False)
| 22.822581 | 59 | 0.592226 | 178 | 1,415 | 4.544944 | 0.314607 | 0.163164 | 0.092707 | 0.070457 | 0.054388 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001006 | 0.297527 | 1,415 | 61 | 60 | 23.196721 | 0.812877 | 0 | 0 | 0.209302 | 0 | 0 | 0.059406 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.302326 | false | 0 | 0.023256 | 0.186047 | 0.697674 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
520dd3fd29330a1573443514868205af963608f5 | 1,425 | py | Python | day8/task1.py | kohoutekkk/advent_of_code_2019 | 84ec02616e1f52a63281fb7333ffbaeaeacdc15e | [
"MIT"
] | null | null | null | day8/task1.py | kohoutekkk/advent_of_code_2019 | 84ec02616e1f52a63281fb7333ffbaeaeacdc15e | [
"MIT"
] | null | null | null | day8/task1.py | kohoutekkk/advent_of_code_2019 | 84ec02616e1f52a63281fb7333ffbaeaeacdc15e | [
"MIT"
] | null | null | null |
op_str = 'acc +7'  # sample instruction used by the commented-out smoke test below
def parse_operation(operation_str):
    """Parse one instruction line like 'acc +7' into an execution record.

    Returns a dict holding the mnemonic, the sign character, the unsigned
    step count, and a 'visited' flag used by the loop detector below.
    """
    tokens = operation_str.strip().split()
    mnemonic = tokens[0]
    argument = tokens[1]
    return {
        'operation': mnemonic,
        'sign': argument[0],
        'steps': int(argument[1:]),
        'visited': False,
    }
# Execute the boot code until any instruction is about to run a second time
# (i.e. the program loops forever), then report the accumulator value.
# Fixes vs. original: file handled with a context manager instead of
# open/readlines/close, idiomatic `while True` instead of `while 1 == 1`,
# unused `cycle` flag removed, and the per-instruction dict lookup hoisted.
with open('data/operations.txt') as operations_file:
    operations_dicts = [parse_operation(line) for line in operations_file]

accumulator = 0
location = 0
while True:
    op = operations_dicts[location]
    if op['visited']:
        break  # this instruction already ran once: infinite loop detected
    op['visited'] = True
    # 'steps' is stored unsigned; 'sign' carries the direction ('+' or '-').
    delta = op['steps'] if op['sign'] == '+' else -op['steps']
    if op['operation'] == 'acc':
        accumulator += delta
        location += 1
    elif op['operation'] == 'nop':
        location += 1
    elif op['operation'] == 'jmp':
        location += delta
print(accumulator)
| 26.886792 | 91 | 0.654035 | 161 | 1,425 | 5.583851 | 0.254658 | 0.250278 | 0.332592 | 0.111235 | 0.400445 | 0.395996 | 0.395996 | 0.295884 | 0.295884 | 0 | 0 | 0.011265 | 0.190175 | 1,425 | 52 | 92 | 27.403846 | 0.767764 | 0.074386 | 0 | 0.193548 | 0 | 0 | 0.106789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032258 | false | 0 | 0 | 0 | 0.064516 | 0.032258 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
521faf2d7c3f99ab052738f2fa0d81a2572dc62c | 228 | py | Python | components/espcoredump/corefile/soc_headers/esp32s2.py | lovyan03/esp-idf | cd5d30b56a13b8f0933e8879be1f97724a88004a | [
"Apache-2.0"
] | 8,747 | 2016-08-18T14:58:24.000Z | 2022-03-31T20:58:55.000Z | components/espcoredump/corefile/soc_headers/esp32s2.py | lovyan03/esp-idf | cd5d30b56a13b8f0933e8879be1f97724a88004a | [
"Apache-2.0"
] | 8,603 | 2016-08-20T08:55:56.000Z | 2022-03-31T23:04:01.000Z | components/espcoredump/corefile/soc_headers/esp32s2.py | lovyan03/esp-idf | cd5d30b56a13b8f0933e8879be1f97724a88004a | [
"Apache-2.0"
] | 6,380 | 2016-08-18T18:17:00.000Z | 2022-03-31T22:25:57.000Z | SOC_IRAM_LOW = 0x40020000
SOC_IRAM_HIGH = 0x40070000
SOC_DRAM_LOW = 0x3ffb0000
SOC_DRAM_HIGH = 0x40000000
SOC_RTC_DRAM_LOW = 0x3ff9e000
SOC_RTC_DRAM_HIGH = 0x3ffa0000
SOC_RTC_DATA_LOW = 0x50000000
SOC_RTC_DATA_HIGH = 0x50002000
| 25.333333 | 30 | 0.859649 | 36 | 228 | 4.888889 | 0.416667 | 0.136364 | 0.113636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.308824 | 0.105263 | 228 | 8 | 31 | 28.5 | 0.553922 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.350877 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5238806be3b3ab2fa0f62f2c7a279c42b1dcd8cb | 669 | py | Python | companies/models.py | MattYu/ConcordiaAce | 35eff7614652eb548e532dcf00e3a7296855285c | [
"MIT"
] | 1 | 2021-06-14T06:54:16.000Z | 2021-06-14T06:54:16.000Z | companies/models.py | MattYu/ConcordiaAce | 35eff7614652eb548e532dcf00e3a7296855285c | [
"MIT"
] | 34 | 2020-04-05T01:14:31.000Z | 2022-03-12T00:23:02.000Z | dockerizing-django/web/companies/models.py | MattYu/ConcordiaAce | 35eff7614652eb548e532dcf00e3a7296855285c | [
"MIT"
] | null | null | null | from django.db import models
from ace.constants import COMPANY_STATUS
# Create your models here.
class Company(models.Model):
name = models.CharField(max_length = 100, default= "")
address = models.CharField(max_length = 100, default= "")
website = models.CharField(max_length = 100, default= "")
profile = models.TextField(max_length = 1000, default= "")
image = models.ImageField(upload_to='images/company/', default="images/company/company-logo-1")
status = models.CharField(max_length = 20, default= "Pending", choices= COMPANY_STATUS)
is_approved = models.BooleanField(default=False)
def __str__(self):
return self.name | 41.8125 | 101 | 0.718984 | 83 | 669 | 5.638554 | 0.518072 | 0.096154 | 0.153846 | 0.205128 | 0.217949 | 0.217949 | 0 | 0 | 0 | 0 | 0 | 0.02847 | 0.15994 | 669 | 16 | 102 | 41.8125 | 0.80427 | 0.035874 | 0 | 0 | 0 | 0 | 0.079193 | 0.045031 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.166667 | 0.083333 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
523fa16f81bc2455855cae4b25cab7921ab0c142 | 14,438 | py | Python | template/protocol/x509.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | 25 | 2015-04-14T21:53:46.000Z | 2022-03-30T19:15:24.000Z | template/protocol/x509.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | 5 | 2020-03-23T20:19:59.000Z | 2021-05-24T19:38:31.000Z | template/protocol/x509.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | 7 | 2015-07-31T13:26:37.000Z | 2021-03-05T19:35:37.000Z | import ptypes
from . import ber
from ptypes import *
# Private copy of the BER protocol — presumably so the classes registered by
# the @define decorators below don't leak into the shared ber.Protocol tables.
Protocol = ber.Protocol.copy(recurse=True)
# Per-class registries used as decorators (@Universal.define, etc.) below.
Universal = Protocol.lookup(ber.Universal.Class)
Context = Protocol.lookup(ber.Context.Class)
Application = Protocol.lookup(ber.Application.Class)
@Universal.define
class OBJECT_IDENTIFIER(ber.OBJECT_IDENTIFIER):
    """BER OBJECT IDENTIFIER with friendly names for the X.520 attribute
    types and common PKCS #1/#7/#9 algorithm/attribute OIDs.
    """
    # Builds a ('joint-iso-itu-t.ds.attributeType.<name>', '2.5.4.<oid>')
    # pair, i.e. a named entry under the X.520 attribute-type arc 2.5.4.
    id_at = lambda name, oid: tuple('.'.join([item, value]) for item, value in zip(['joint-iso-itu-t.ds.attributeType', '2.5.4'], [name, "{:d}".format(oid)]))
    _values_ = [
        id_at('title', 12),
        id_at('dnQualifier', 46),
        id_at('countryName', 6),
        id_at('serialNumber', 5),
        id_at('pseudonym', 65),
        id_at('name', 41),
        id_at('surname', 4),
        id_at('givenName', 42),
        id_at('initials', 43),
        id_at('generationQualifier', 44),
        id_at('commonName', 3),
        id_at('localityName', 7),
        id_at('stateOrProvinceName', 8),
        id_at('organizationName', 10),
        id_at('organizationUnitName', 11),
        # NOTE(review): 'title' and 'dnQualifier' are duplicated (they already
        # appear at the top of this list) — looks redundant; verify whether
        # ber.OBJECT_IDENTIFIER._values_ tolerates duplicates before removing.
        id_at('title', 12),
        id_at('dnQualifier', 46),
    ]
    # PKCS #7 & #9
    _values_ += [
        ('md5', '1.2.840.113549.2.5'),
        ('rsa', '1.3.14.3.2.1.1'),
        ('desMAC', '1.3.14.3.2.10'),
        ('rsaSignature', '1.3.14.3.2.11'),
        ('dsa', '1.3.14.3.2.12'),
        ('dsaWithSHA', '1.3.14.3.2.13'),
        ('mdc2WithRSASignature', '1.3.14.3.2.14'),
        ('shaWithRSASignature', '1.3.14.3.2.15'),
        ('dhWithCommonModulus', '1.3.14.3.2.16'),
        ('desEDE', '1.3.14.3.2.17'),
        ('sha', '1.3.14.3.2.18'),
        ('mdc-2', '1.3.14.3.2.19'),
        ('dsaCommon', '1.3.14.3.2.20'),
        ('dsaCommonWithSHA', '1.3.14.3.2.21'),
        ('rsaKeyTransport', '1.3.14.3.2.22'),
        ('keyed-hash-seal', '1.3.14.3.2.23'),
        ('md2WithRSASignature', '1.3.14.3.2.24'),
        ('md5WithRSASignature', '1.3.14.3.2.25'),
        ('sha1', '1.3.14.3.2.26'),
        ('dsaWithSHA1', '1.3.14.3.2.27'),
        ('dsaWithCommandSHA1', '1.3.14.3.2.28'),
        ('sha-1WithRSAEncryption', '1.3.14.3.2.29'),
        ('contentType', '1.2.840.113549.1.9.3'),
        ('messageDigest', '1.2.840.113549.1.9.4'),
        ('signingTime', '1.2.840.113549.1.9.5'),
        ('counterSignature', '1.2.840.113549.1.9.6'),
        ('challengePassword', '1.2.840.113549.1.9.7'),
        ('unstructuredAddress', '1.2.840.113549.1.9.8'),
        ('extendedCertificateAttributes', '1.2.840.113549.1.9.9'),
        ('rsaEncryption', '1.2.840.113549.1.1.1'),
        ('md2withRSAEncryption', '1.2.840.113549.1.1.2'),
        ('md4withRSAEncryption', '1.2.840.113549.1.1.3'),
        ('md5withRSAEncryption', '1.2.840.113549.1.1.4'),
        ('sha1withRSAEncryption', '1.2.840.113549.1.1.5'),
        ('rsaOAEPEncryptionSET', '1.2.840.113549.1.1.6'),
        ('dsa', '1.2.840.10040.4.1'),
        ('dsaWithSha1', '1.2.840.10040.4.3'),
        # Only entry whose OID is a tuple rather than a dotted string.
        ('joint-iso-itu-t.ds.certificateExtension.authorityKeyIdentifier', (2,5,29,1)),
    ]
class AttributeType(OBJECT_IDENTIFIER):
    """X.501 AttributeType: an OBJECT IDENTIFIER naming a directory attribute."""
    pass
class AttributeValue(ber.PrintableString):
    """X.501 AttributeValue, modeled here as a PrintableString."""
    pass
class AttributeTypeAndValue(ber.SEQUENCE):
    """X.501 AttributeTypeAndValue: SEQUENCE { type OID, value }."""
    def __AttributeValue(self):
        # NOTE(review): the 'type' OID is fetched but unused — presumably
        # intended to select the value type per attribute; today the value is
        # always decoded as AttributeValue (a PrintableString).
        p = self.getparent(AttributeTypeAndValue)
        res = p['type']['value']
        return AttributeValue
    __AttributeValue.type = ber.PrintableString.type
    _fields_ = [
        (AttributeType, 'type'),
        (__AttributeValue, 'value'),
    ]
class RelativeDistinguishedName(ber.SET):
    """An RDN: a SET whose elements decode as AttributeTypeAndValue."""
    def _object_(self):
        element = dyn.clone(Packet, __object__=lambda self, _: AttributeTypeAndValue)
        return element
class RDNSequence(ber.SEQUENCE):
    """SEQUENCE of RelativeDistinguishedName elements (an X.501 name path)."""
    def _object_(self):
        select = lambda self, _: RelativeDistinguishedName
        return dyn.clone(Packet, __object__=select)
class Name(RDNSequence):
    """X.509 Name: an RDNSequence (issuer/subject distinguished names)."""
    pass
class Version(pint.enum, ber.INTEGER):
    """Certificate version number; the wire value is one less than the name."""
    _values_ = [
        ('v1', 0),
        ('v2', 1),
        ('v3', 2),
    ]
class Version_DEFAULT(ber.Constructed):
    """Wrapper for the EXPLICIT [0] version field of TBSCertificate."""
    _fields_ = [
        (Version, 'DEFAULT'),
    ]
class CertificateSerialNumber(ber.INTEGER):
    """RFC 5280 CertificateSerialNumber ::= INTEGER."""
    pass
class Time(ber.UTCTime):
    """Validity timestamp, modeled as UTCTime (GeneralizedTime not handled)."""
    pass
class Validity(ber.SEQUENCE):
    """Certificate validity period: SEQUENCE { notBefore, notAfter }."""
    _fields_ = [
        (Time, 'notBefore'),
        (Time, 'notAfter'),
    ]
class UniqueIdentifier(ber.BIT_STRING):
    """RFC 5280 UniqueIdentifier ::= BIT STRING (issuer/subject unique IDs)."""
    pass
class AlgorithmIdentifier(ber.SEQUENCE):
    """RFC 5280 AlgorithmIdentifier: SEQUENCE { algorithm OID, parameters }."""
    def __parameters(self):
        # Parameters are algorithm-specific; decoded generically as a SEQUENCE.
        return ber.SEQUENCE
    __parameters.type = ber.SEQUENCE.type
    _fields_ = [
        (OBJECT_IDENTIFIER, 'algorithm'),
        (__parameters, 'parameters'),
    ]
class RSAPublicKey(ber.SEQUENCE):
    """PKCS #1 RSAPublicKey: SEQUENCE { modulus n, publicExponent e }."""
    _fields_ = [
        (ber.INTEGER, 'modulus'),
        (ber.INTEGER, 'publicExponent'),
    ]
class SubjectPublicKeyInfo(ber.SEQUENCE):
    """RFC 5280 SubjectPublicKeyInfo: algorithm identifier + key BIT STRING."""
    def __subjectPublicKey(self):
        # FIXME: this won't always be an RSAPublicKey as it depends on the
        # sibling 'algorithm' field — dispatch on its OID once more key types
        # are implemented.
        # Bug fix: the original fetched the algorithm OID here and called
        # .identifier() on it; the result was never used and the call raised
        # AttributeError whenever the 'algorithm' sub-field was absent
        # (the lookup substituted None). The dead lookup has been removed.
        t = dyn.clone(Packet, __object__=lambda self, _: RSAPublicKey)
        return dyn.clone(ber.BIT_STRING, _object_=t)
    __subjectPublicKey.type = ber.BIT_STRING.type
    _fields_ = [
        (AlgorithmIdentifier, 'algorithm'),
        (__subjectPublicKey, 'subjectPublicKey'),
    ]
class KeyIdentifier(ber.OCTET_STRING):
    """RFC 5280 KeyIdentifier ::= OCTET STRING."""
    pass
@Application.define
class CountryName(ber.Constructed):
    """X.400 CountryName ([APPLICATION 1]): X.121 DCC or ISO 3166 code."""
    tag = 1
    _fields_ = [
        (ber.NumericString, 'x121-dcc-code'),
        (ber.PrintableString, 'iso-3166-alpha2-code'),
    ]
@Application.define
class AdministrationDomainName(ber.Constructed):
    """X.400 AdministrationDomainName ([APPLICATION 2])."""
    tag = 2
    _fields_ = [
        (ber.NumericString, 'numeric'),
        (ber.PrintableString, 'printable'),
    ]
class X121Address(ber.NumericString):
    """X.121 network address digits."""
    pass
class NetworkAddress(ber.Constructed):
    """X.400 NetworkAddress wrapping an X121Address."""
    _fields_ = [
        (X121Address, 'address'),
    ]
class TerminalIdentifier(ber.PrintableString):
    """X.400 TerminalIdentifier."""
    pass
class PrivateDomainName(ber.Constructed):
    """X.400 PrivateDomainName: numeric or printable form."""
    _fields_ = [
        (ber.NumericString, 'numeric'),
        (ber.PrintableString, 'printable'),
    ]
class OrganizationName(ber.PrintableString):
    """X.400 OrganizationName."""
    pass
class NumericUserIdentifier(ber.NumericString):
    """X.400 NumericUserIdentifier."""
    pass
class PersonalName(ber.SET):
    """X.400 PersonalName with context-tagged [0]-[3] name components."""
    _fields_ = [
        (dyn.clone(ber.PrintableString, type=(Context, 0)), 'surname'),
        (dyn.clone(ber.PrintableString, type=(Context, 1)), 'given-name'),
        (dyn.clone(ber.PrintableString, type=(Context, 2)), 'initials'),
        (dyn.clone(ber.PrintableString, type=(Context, 3)), 'generation-qualifier'),
    ]
class OrganizationalUnitName(ber.PrintableString):
    """X.400 OrganizationalUnitName."""
    pass
class OrganizationalUnitNames(ber.SEQUENCE):
    """SEQUENCE of OrganizationalUnitName elements."""
    def _object_(self):
        element = dyn.clone(Packet, __object__=lambda self, _: OrganizationalUnitName)
        return element
class BuiltInStandardAttributes(ber.SEQUENCE):
    """X.400 BuiltInStandardAttributes of an ORAddress; optional components
    carry context tags [0]-[6]."""
    _fields_ = [
        (CountryName, 'country-name'),
        (AdministrationDomainName, 'administration-domain-name'),
        (dyn.clone(NetworkAddress, type=(Context, 0)), 'network-address'),
        (dyn.clone(TerminalIdentifier, type=(Context, 1)), 'terminal-identifier'),
        (dyn.clone(PrivateDomainName, type=(Context, 2)), 'private-domain-name'),
        (dyn.clone(OrganizationName, type=(Context, 3)), 'organization-name'),
        (dyn.clone(NumericUserIdentifier, type=(Context, 4)), 'numeric-user-identifier'),
        (dyn.clone(PersonalName, type=(Context, 5)), 'personal-name'),
        (dyn.clone(OrganizationalUnitNames, type=(Context, 6)), 'organizational-unit-names'),
    ]
class BuiltInDomainDefinedAttribute(ber.SEQUENCE):
    """X.400 BuiltInDomainDefinedAttribute: SEQUENCE { type, value }."""
    _fields_ = [
        (ber.PrintableString, 'type'),
        (ber.PrintableString, 'value'),
    ]
class BuiltInDomainDefinedAttributes(ber.SEQUENCE):
    """SEQUENCE of BuiltInDomainDefinedAttribute (X.400 ORAddress component)."""
    def _object_(self):
        # Bug fix: the element class is BuiltInDomainDefinedAttribute; the
        # original referenced the misspelled name BuiltInDomainDefineAttribute
        # (missing 'd'), which raised NameError when an element was decoded.
        return dyn.clone(Packet, __object__=lambda self, _: BuiltInDomainDefinedAttribute)
class ExtensionAttribute(ber.SEQUENCE):
    """X.400 ExtensionAttribute: [0] type INTEGER, [1] value."""
    def __extension_attribute_value(self):
        # NOTE(review): the attribute type is fetched but unused — presumably
        # intended to select a concrete value type; a generic Constructed is
        # returned for now.
        p = self.getparent(ExtensionAttribute)
        t = p['extension-attribute-type']['value']
        return ber.Constructed
    __extension_attribute_value.type = (Context, 1)
    _fields_ = [
        (dyn.clone(ber.INTEGER, type=(Context, 0)), 'extension-attribute-type'),
        (__extension_attribute_value, 'extension-attribute-value'),
    ]
class ExtensionAttributes(ber.SET):
    """SET of ExtensionAttribute elements."""
    def _object_(self):
        select = lambda self, _: ExtensionAttribute
        return dyn.clone(Packet, __object__=select)
class ORAddress(ber.SEQUENCE):
    """X.400 originator/recipient address (used by GeneralName.x400Address)."""
    _fields_ = [
        (BuiltInStandardAttributes, 'built-in-standard-attributes'),
        (BuiltInDomainDefinedAttributes, 'built-in-domain-defined-attributes'),
        (ExtensionAttributes, 'extension-attributes'),
    ]
class TeletexString(ber.T61String):
    """TeletexString, an alias of the BER T61String type."""
    pass
class DirectoryString(ber.Constructed):
    """RFC 5280 DirectoryString CHOICE of the five directory string types."""
    _fields_ = [
        (TeletexString, 'teletexString'),
        (ber.PrintableString, 'printableString'),
        (ber.UniversalString, 'universalString'),
        (ber.UTF8String, 'utf8String'),
        (ber.BMPString, 'bmpString'),
    ]
class EDIPartyName(ber.SEQUENCE):
    """RFC 5280 EDIPartyName: [0] nameAssigner OPTIONAL, [1] partyName."""
    _fields_ = [
        (dyn.clone(DirectoryString, type=(Context, 0)), 'nameAssigner'),
        (dyn.clone(DirectoryString, type=(Context, 1)), 'partyName'),
    ]
class AnotherName(ber.SEQUENCE):
    """RFC 5280 AnotherName (otherName): type-id OID + [0] value."""
    def __value(self):
        # NOTE(review): the type-id OID is fetched but unused — presumably
        # intended to select the value type; decoded generically for now.
        p = self.getparent(AnotherName)
        t = p['type-id']['value']
        return ber.Constructed
    __value.type = (Context, 0)
    _fields_ = [
        (OBJECT_IDENTIFIER, 'type-id'),
        (__value, 'value'),
    ]
class GeneralName(ber.Constructed):
    """RFC 5280 GeneralName CHOICE; alternatives carry context tags [0]-[8]."""
    _fields_ = [
        (dyn.clone(AnotherName, type=(Context, 0)), 'otherName'),
        (dyn.clone(ber.IA5String, type=(Context, 1)), 'rfc822Name'),
        (dyn.clone(ber.IA5String, type=(Context, 2)), 'dNSName'),
        (dyn.clone(ORAddress, type=(Context, 3)), 'x400Address'),
        (dyn.clone(Name, type=(Context, 4)), 'directoryName'),
        (dyn.clone(EDIPartyName, type=(Context, 5)), 'ediPartyName'),
        (dyn.clone(ber.IA5String, type=(Context, 6)), 'uniformResourceIdentifier'),
        (dyn.clone(ber.OCTET_STRING, type=(Context, 7)), 'iPAddress'),
        (dyn.clone(OBJECT_IDENTIFIER, type=(Context, 8)), 'registeredID'),
    ]
class GeneralNames(ber.SEQUENCE):
    """SEQUENCE of GeneralName elements."""
    def _object_(self):
        element = dyn.clone(Packet, __object__=lambda self, _: GeneralName)
        return element
class AuthorityKeyIdentifier(ber.SEQUENCE):
    """RFC 5280 AuthorityKeyIdentifier extension body ([0]-[2] all optional)."""
    _fields_ = [
        (dyn.clone(KeyIdentifier, type=(Context, 0)), 'keyIdentifier'),
        (dyn.clone(GeneralNames, type=(Context, 1)), 'authorityCertIssuer'),
        (dyn.clone(CertificateSerialNumber, type=(Context, 2)), 'authorityCertSerialNumber'),
    ]
class Extension(ber.SEQUENCE):
    """RFC 5280 Extension: extnID OID, critical BOOLEAN, extnValue OCTET STRING."""
    def __extnValue(self):
        # This octet string is actually der-encoded, so we should return a
        # packet type selected by the extnID OID. However, since that isn't
        # implemented we'll just return a Packet so that we can force its
        # decoding.
        # Cleanup: the original also resolved the extnID's dotted identifier
        # here into an unused local named `id` (shadowing the builtin);
        # that dead lookup has been removed.
        return Packet
    __extnValue.type = ber.OCTET_STRING.type
    _fields_ = [
        (OBJECT_IDENTIFIER, 'extnID'),
        (ber.BOOLEAN, 'critical'),
        (__extnValue, 'extnValue'),
    ]
class ExtensionList(ber.SEQUENCE):
    """SEQUENCE of Extension elements."""
    def _object_(self):
        select = lambda self, _: Extension
        return dyn.clone(Packet, __object__=select)
class Extensions(ber.SEQUENCE):
    """Wrapper holding the certificate's ExtensionList."""
    _fields_ = [
        (ExtensionList, 'items'),
    ]
class TBSCertificate(ber.SEQUENCE):
    """RFC 5280 TBSCertificate (the to-be-signed portion of a certificate);
    optional/tagged fields carry context tags [0]-[3]."""
    _fields_ = [
        (dyn.clone(Version_DEFAULT, type=(Context, 0)), 'version'),
        (CertificateSerialNumber, 'serialNumber'),
        (AlgorithmIdentifier, 'signature'),
        (Name, 'issuer'),
        (Validity, 'validity'),
        (Name, 'subject'),
        (SubjectPublicKeyInfo, 'subjectPublicKeyInfo'),
        (dyn.clone(UniqueIdentifier, type=(Context, 1)), 'issuerUniqueID'),
        (dyn.clone(UniqueIdentifier, type=(Context, 2)), 'subjectUniqueID'),
        (dyn.clone(Extensions, type=(Context, 3)), 'extensions'),
    ]
class Certificate(ber.SEQUENCE):
    """RFC 5280 Certificate: tbsCertificate + signature algorithm + signature."""
    _fields_ = [
        (TBSCertificate, 'tbsCertificate'),
        (AlgorithmIdentifier, 'signatureAlgorithm'),
        (ber.BIT_STRING, 'signatureValue'),
    ]
class Packet(ber.Packet):
    """Top-level BER packet for this module; decodes as a Certificate."""
    Protocol = Protocol
    def __object__(self, _):
        return Certificate
if __name__ == '__main__':
    # Ad-hoc smoke test: decode a hard-coded DER certificate blob below.
    import sys, operator, ptypes, protocol.x509 as x509
    from ptypes import *
    # Hex decoder that works on both py2 (str.decode('hex')) and py3.
    fromhex = operator.methodcaller('decode', 'hex') if sys.version_info.major < 3 else bytes.fromhex
data = '308202a13082020ea003020102021019af5d26ef02acb448ea8886a359af0a300906052b0e03021d0500301e311c301a0603550403131363686d757468752d77372d747374332d636133301e170d3130303231303032323830335a170d3339313233313233353935395a301a311830160603550403130f63686d757468752d77372d7473743330820122300d06092a864886f70d01010105000382010f003082010a0282010100a5583ba38c6a21642334d91657c7cc8f7deea7b2b453cb4bf95a5e537e069036a95ad11700e17cb46340af803b7bbff966fb2af57fddff47f94db6105b63ffaf6bb026fa2a317d4fa652cfaf06f787658f2f1316b38b02eb39c6caf4ca68502f89e23ba8c2fc5e56671fc0d8eb9bc65ae2148df5730ff66cd9f940d22bea4b0b5a17264baf264f34e48c875bf4110a8c1f80647798cc5c54c03bb2b3c534384ad335f48f94a45f39d69508ad7c88f69bbc7d161b3f8e9351b6ba90ac065c2a7f9cbf6da82ef22808cb1c0bca30e15df47d958ac2d726a4c6489c0363459c84940310ce4af43acff707025ca0d502f6ff63b3b94cf78307930b6f38d9d68c7de90203010001a368306630130603551d25040c300a06082b06010505070301304f0603551d010448304680103c8db6418a8b1b208f76cc07c6724d5ca120301e311c301a0603550403131363686d757468752d77372d747374332d6361338210db048065d808f69f48fa85880a505184300906052b0e03021d0500038181002ba86f466e4a180dec1445a021bcd261ea1b31a7cbd8363b9464dc4dac8d9fb40aaab1f78509f048b360c07188c8ae59f8f5be8b7f31da4a4a31b2c16c0cf9e57827b5f1c5b46a4b52c89d6cdde1475e7f00d87cd426b581f989272aefd876edfed253a6e61c8d5a5d1572ecb91a8f4e4f4eba82e66ee3e825410c21e6425751'
z = x509.Packet(source=ptypes.prov.bytes(fromhex(data)))
z=z.l
cert = z['value'][0]
print(cert['value']['version'])
x = z['value']['tbscertificate']['value']['extensions']['value']['items']['value']
k = z.at(0x99).getparent(ber.Element)
print(z['value']['tbsCertificate']['value']['issuer']['value'][0]['value'][0])
print(z['value']['tbsCertificate']['value']['issuer']['value'][0]['value'][0]['value']['type'])
| 36.737913 | 1,367 | 0.663665 | 1,412 | 14,438 | 6.643768 | 0.229462 | 0.033259 | 0.008954 | 0.011193 | 0.165334 | 0.119817 | 0.072913 | 0.072913 | 0.053939 | 0.053939 | 0 | 0.127065 | 0.186729 | 14,438 | 392 | 1,368 | 36.831633 | 0.671862 | 0.023895 | 0 | 0.185759 | 0 | 0 | 0.282346 | 0.126331 | 0 | 1 | 0.000284 | 0.002551 | 0 | 1 | 0.043344 | false | 0.043344 | 0.01548 | 0.027864 | 0.340557 | 0.018576 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
524fcf50fc6494b5d18340e9fd29179b1787171f | 1,073 | py | Python | js_locations/migrations/0007_auto_20190709_2309.py | compoundpartners/js-locations | 3c0f756b2cad3867a7eec30b3c707d69a2dbdbe2 | [
"BSD-3-Clause"
] | null | null | null | js_locations/migrations/0007_auto_20190709_2309.py | compoundpartners/js-locations | 3c0f756b2cad3867a7eec30b3c707d69a2dbdbe2 | [
"BSD-3-Clause"
] | null | null | null | js_locations/migrations/0007_auto_20190709_2309.py | compoundpartners/js-locations | 3c0f756b2cad3867a7eec30b3c707d69a2dbdbe2 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-09 23:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds SEO/sitemap boolean flags to Location.
    dependencies = [
        ('js_locations', '0006_location_featured_image'),
    ]
    operations = [
        migrations.AddField(
            model_name='location',
            name='nofollow',
            field=models.BooleanField(default=False, verbose_name='nofollow'),
        ),
        migrations.AddField(
            model_name='location',
            name='noindex',
            field=models.BooleanField(default=False, verbose_name='noindex'),
        ),
        migrations.AddField(
            model_name='location',
            name='show_on_sitemap',
            field=models.BooleanField(default=True, verbose_name='Show on sitemap'),
        ),
        migrations.AddField(
            model_name='location',
            name='show_on_xml_sitemap',
            field=models.BooleanField(default=True, verbose_name='Show on xml sitemap'),
        ),
    ]
| 29.805556 | 88 | 0.605778 | 108 | 1,073 | 5.814815 | 0.425926 | 0.11465 | 0.146497 | 0.171975 | 0.619427 | 0.598726 | 0.474522 | 0.328025 | 0.184713 | 0.184713 | 0 | 0.028461 | 0.27959 | 1,073 | 35 | 89 | 30.657143 | 0.783959 | 0.064306 | 0 | 0.428571 | 1 | 0 | 0.16983 | 0.027972 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
525e7e9d9138bcf85bfae7005527016132ee3722 | 230 | py | Python | setup.py | fbraza/dsti-adaltas-testing-todo | 1324c3d0f281c2de52ef7ee5d7352b42a0af41ff | [
"MIT"
] | null | null | null | setup.py | fbraza/dsti-adaltas-testing-todo | 1324c3d0f281c2de52ef7ee5d7352b42a0af41ff | [
"MIT"
] | null | null | null | setup.py | fbraza/dsti-adaltas-testing-todo | 1324c3d0f281c2de52ef7ee5d7352b42a0af41ff | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name="clitodo",
version="1.0",
py_modules=["todo"],
install_requires=["Click", "plyvel"],
entry_points="""
[console_scripts]
todo=clitodo:cli
"""
)
| 16.428571 | 41 | 0.573913 | 24 | 230 | 5.333333 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011834 | 0.265217 | 230 | 13 | 42 | 17.692308 | 0.745562 | 0 | 0 | 0 | 0 | 0 | 0.369565 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5260daa4a94ce19c135584869f06cda3d4113aa8 | 1,299 | py | Python | sbvar/utils.py | racng/sbvar | 0db38c2888c88b7c0a51d273e28ddb2955de23b1 | [
"MIT"
] | null | null | null | sbvar/utils.py | racng/sbvar | 0db38c2888c88b7c0a51d273e28ddb2955de23b1 | [
"MIT"
] | null | null | null | sbvar/utils.py | racng/sbvar | 0db38c2888c88b7c0a51d273e28ddb2955de23b1 | [
"MIT"
] | null | null | null | import numpy as np
def meshes_to_meshvector(mesh_list):
    """
    Flatten a list of mesh grids into one tidy mesh vector.

    Parameters
    ----------
    mesh_list: list of arrays
        List of n mesh grids with matching dimension (ixj).

    Returns
    -------
    mesh_vector: np.array
        Reshaped 2D array with ixj rows and n columns.
    """
    stacked = np.array(mesh_list)
    n_meshes = len(mesh_list)
    # One row per grid point, one column per mesh.
    return stacked.reshape(n_meshes, -1).T
def vector_to_mesh(vector, dim1, dim2):
    """
    Reshape a 1D vector into mesh form.

    Parameters
    ----------
    vector: np.array
        1D vector to be reshaped

    Returns
    -------
    mesh: np.array
        Reshaped 2D mesh with `dim1` columns and `dim2` rows.
        This is consistent with np.meshgrid where the first dimension
        varies on the column axis.
    """
    flat = np.array(vector)
    mesh_shape = (dim2, dim1)
    return flat.reshape(mesh_shape)
def meshvector_to_meshes(mesh_vector, dim1, dim2):
    """
    Split a tidy mesh vector back into a list of meshes.

    Parameters
    ----------
    mesh_vector: np.array
        2D array with dim1 x dim2 rows and n columns.

    Returns
    -------
    mesh_list: list of np.array
        List of n mesh grids, each with `dim1` columns and `dim2` rows.
    """
    # Each column of mesh_vector (= row of its transpose) is one mesh.
    return [vector_to_mesh(column, dim1, dim2) for column in mesh_vector.T]
| 27.0625 | 76 | 0.616628 | 180 | 1,299 | 4.35 | 0.316667 | 0.06258 | 0.049808 | 0.03576 | 0.171137 | 0.066411 | 0 | 0 | 0 | 0 | 0 | 0.023158 | 0.268668 | 1,299 | 47 | 77 | 27.638298 | 0.801053 | 0.588915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.375 | false | 0 | 0.125 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
5267cf080c5bf152d6a0ba91dd0ca34026c200f5 | 408 | py | Python | Day 6/hurdle2.py | bhargavpydimalla/100-Days-of-Python | f840b93f8cc67e036dcd0ac7c76b82711f4b641c | [
"MIT"
] | null | null | null | Day 6/hurdle2.py | bhargavpydimalla/100-Days-of-Python | f840b93f8cc67e036dcd0ac7c76b82711f4b641c | [
"MIT"
] | null | null | null | Day 6/hurdle2.py | bhargavpydimalla/100-Days-of-Python | f840b93f8cc67e036dcd0ac7c76b82711f4b641c | [
"MIT"
] | null | null | null | '''
Visit the link : https://reeborg.ca/reeborg.html?lang=en&mode=python&menu=worlds%2Fmenus%2Freeborg_intro_en.json&name=Hurdle%202&url=worlds%2Ftutorial_en%2Fhurdle2.json
'''
def turn_right():
    """Turn right by making three left turns (Reeborg has no native right turn)."""
    for _ in range(3):
        turn_left()
def complete():
    # Clear one hurdle: approach, climb the left side, cross the top,
    # descend the right side, then face forward again.
    move()
    turn_left()
    move()
    turn_right()
    move()
    turn_right()
    move()
    turn_left()
while not at_goal():
complete() | 18.545455 | 168 | 0.656863 | 56 | 408 | 4.571429 | 0.589286 | 0.15625 | 0.09375 | 0.125 | 0.132813 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024169 | 0.188725 | 408 | 22 | 169 | 18.545455 | 0.749245 | 0.411765 | 0 | 0.733333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | true | 0 | 0 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
528995dc6b5ee3e0e1d63b669ee8fbf4f580e54a | 2,082 | py | Python | apps/users/models.py | jhelison/challenge-weef-twitter | 3b3b970d9edd435e780b9747baee944b935eda5c | [
"MIT"
] | null | null | null | apps/users/models.py | jhelison/challenge-weef-twitter | 3b3b970d9edd435e780b9747baee944b935eda5c | [
"MIT"
] | null | null | null | apps/users/models.py | jhelison/challenge-weef-twitter | 3b3b970d9edd435e780b9747baee944b935eda5c | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import (
BaseUserManager,
AbstractBaseUser,
PermissionsMixin,
)
class AccountManager(BaseUserManager):
    """Manager for the email-based User model."""
    def create_user(self, email, name=None, password=None):
        """Create and persist a regular user.

        Args:
            email: Required; normalized before saving.
            name: Optional display name.
            password: Optional raw password (hashed via set_password).

        Raises:
            ValueError: If no email is supplied.
        """
        if not email:
            raise ValueError("Users must have an email address")
        user = self.model(email=self.normalize_email(email), name=name)
        user.set_password(password)
        user.save(using=self._db)
        return user
    def create_superuser(self, name, email, password):
        """Create and persist a superuser (is_superuser and is_staff set)."""
        user = self.create_user(
            name=name, email=self.normalize_email(email), password=password
        )
        user.is_superuser = True
        user.is_staff = True
        user.save(using=self._db)
        # Bug fix: the original omitted this return, so callers (e.g. the
        # createsuperuser management command) received None.
        return user
class UserFollowing(models.Model):
    """Directed follow edge: user_id follows following_user_id."""
    # The follower; appears in the followed user's 'following' relation.
    user_id = models.ForeignKey(
        "User", related_name="following", on_delete=models.CASCADE
    )
    # The followed user; appears in the follower's 'followers' relation.
    following_user_id = models.ForeignKey(
        "User", related_name="followers", on_delete=models.CASCADE
    )
    created_at = models.DateTimeField(auto_now_add=True)
    class Meta:
        # Newest follows first; a pair can only follow once.
        ordering = ["-created_at"]
        unique_together = (("user_id", "following_user_id"),)
    def __str__(self):
        return f"{self.user_id.email} | {self.following_user_id.email}"
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user identified by email instead of a username."""
    email = models.EmailField(verbose_name="email", unique=True)
    name = models.CharField(max_length=50)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    is_active = models.BooleanField(default=True)
    is_superuser = models.BooleanField(default=False)
    is_staff = models.BooleanField(default=False)
    # Authenticate by email; 'name' is additionally prompted by createsuperuser.
    USERNAME_FIELD = "email"
    REQUIRED_FIELDS = ["name"]
    objects = AccountManager()
    def __str__(self):
        return str(f"{self.email} | {self.name}")
    def count_followers(self):
        """Number of UserFollowing rows where this user is being followed by others."""
        return UserFollowing.objects.filter(user_id=self).count()
    def count_following(self):
        """Number of UserFollowing rows where this user is the one followed."""
        return UserFollowing.objects.filter(following_user_id=self).count()
| 30.173913 | 75 | 0.6878 | 248 | 2,082 | 5.560484 | 0.314516 | 0.034808 | 0.04351 | 0.054387 | 0.255257 | 0.114576 | 0.114576 | 0.060914 | 0 | 0 | 0 | 0.001206 | 0.20317 | 2,082 | 68 | 76 | 30.617647 | 0.830018 | 0 | 0 | 0.117647 | 0 | 0 | 0.089337 | 0.014409 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0.078431 | 0.039216 | 0.078431 | 0.588235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
529096a59ae8266c47bcf7f7a53fd55aa10045cf | 5,470 | py | Python | pyews/endpoint/getusersettings.py | swimlane/pyews | 61cc60226b347a881ce653acc7af276c26b37de9 | [
"MIT"
] | 26 | 2019-05-04T03:02:53.000Z | 2022-02-04T14:56:30.000Z | pyews/endpoint/getusersettings.py | swimlane/pyews | 61cc60226b347a881ce653acc7af276c26b37de9 | [
"MIT"
] | 14 | 2019-07-30T15:32:27.000Z | 2022-02-10T20:49:44.000Z | pyews/endpoint/getusersettings.py | swimlane/pyews | 61cc60226b347a881ce653acc7af276c26b37de9 | [
"MIT"
] | 12 | 2019-10-18T14:14:09.000Z | 2021-11-15T09:29:30.000Z | from ..service.autodiscover import Autodiscover, Authentication
class GetUserSettings(Autodiscover):
    """GetUserSettings EWS Autodiscover endpoint.

    Retrieves the settings of the authenticated (or explicitly provided) user.
    """

    RESULTS_KEY = 'UserSettings'

    # Settings requested from the Autodiscover service, in the exact order
    # the request emits them.
    REQUESTED_SETTINGS = (
        'InternalEwsUrl',
        'ExternalEwsUrl',
        'UserDisplayName',
        'UserDN',
        'UserDeploymentId',
        'InternalMailboxServer',
        'MailboxDN',
        'ActiveDirectoryServer',
        'EwsSupportedSchemas',
        'InternalRpcClientServer',
        'InternalEcpUrl',
        'InternalEcpVoicemailUrl',
        'InternalEcpEmailSubscriptionsUrl',
        'InternalEcpTextMessagingUrl',
        'InternalEcpDeliveryReportUrl',
        'InternalEcpRetentionPolicyTagsUrl',
        'InternalEcpPublishingUrl',
        'InternalOABUrl',
        'InternalUMUrl',
        'InternalWebClientUrls',
        'PublicFolderServer',
        'ExternalMailboxServer',
        'ExternalMailboxServerRequiresSSL',
        'ExternalMailboxServerAuthenticationMethods',
        'EcpVoicemailUrlFragment',
        'EcpEmailSubscriptionsUrlFragment',
        'EcpTextMessagingUrlFragment',
        'EcpDeliveryReportUrlFragment',
        'EcpRetentionPolicyTagsUrlFragment',
        'ExternalEcpUrl',
        'EcpPublishingUrlFragment',
        'ExternalEcpVoicemailUrl',
        'ExternalEcpEmailSubscriptionsUrl',
        'ExternalEcpTextMessagingUrl',
        'ExternalEcpDeliveryReportUrl',
        # NOTE(review): duplicated in the original request; kept for parity.
        'EcpEmailSubscriptionsUrlFragment',
        'ExternalEcpRetentionPolicyTagsUrl',
        'ExternalEcpPublishingUrl',
        'ExternalOABUrl',
        'ExternalUMUrl',
        'ExternalWebClientUrls',
        'CrossOrganizationSharingEnabled',
        'AlternateMailboxes',
        'CasVersion',
        'InternalPop3Connections',
        'ExternalPop3Connections',
        'InternalImap4Connections',
        'ExternalImap4Connections',
        'InternalSmtpConnections',
        'ExternalSmtpConnections',
        'InternalServerExclusiveConnect',
        'ExternalServerExclusiveConnect',
        'ExchangeRpcUrl',
        'ShowGalAsDefaultView',
        'AutoDiscoverSMTPAddress',
        'InteropExternalEwsUrl',
        'ExternalEwsVersion',
        'InteropExternalEwsVersion',
        'MobileMailboxPolicyInterop',
        'GroupingInformation',
        'UserMSOnline',
        'MapiHttpEnabled',
    )

    def __init__(self, user=None):
        """Retrieves the user settings for the authenticated or provided user.

        Args:
            user (str, optional): A user to retrieve user settings for. Defaults to None.
        """
        self.user = user

    def soap(self):
        """Build the GetUserSettingsRequestMessage SOAP body."""
        # Default to the authenticated credential's username.
        if not self.user:
            self.user = Authentication.credentials[0]
        requested = [
            self.A_NAMESPACE.Setting(name) for name in self.REQUESTED_SETTINGS
        ]
        return self.A_NAMESPACE.GetUserSettingsRequestMessage(
            self.A_NAMESPACE.Request(
                self.A_NAMESPACE.Users(
                    self.A_NAMESPACE.User(
                        self.A_NAMESPACE.Mailbox(self.user)
                    )
                ),
                self.A_NAMESPACE.RequestedSettings(*requested),
            ),
        )
| 57.578947 | 91 | 0.608044 | 404 | 5,470 | 8.05198 | 0.259901 | 0.104519 | 0.292653 | 0.400246 | 0.045496 | 0.045496 | 0.045496 | 0.045496 | 0.045496 | 0 | 0 | 0.001304 | 0.299269 | 5,470 | 94 | 92 | 58.191489 | 0.847378 | 0.046252 | 0 | 0.049383 | 0 | 0 | 0.269142 | 0.205143 | 0 | 0 | 0 | 0 | 0 | 1 | 0.024691 | false | 0 | 0.012346 | 0 | 0.074074 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5291f1a67e8484b315c8a4244c2a08dbb0a15c56 | 283 | py | Python | app/start.py | somkiatlee/myIoT | 95fa3e721f31b7f7489b390ac53705a552979621 | [
"MIT"
] | null | null | null | app/start.py | somkiatlee/myIoT | 95fa3e721f31b7f7489b390ac53705a552979621 | [
"MIT"
] | 1 | 2021-05-04T03:15:37.000Z | 2021-05-04T03:15:37.000Z | app/start.py | somkiatlee/myIoT | 95fa3e721f31b7f7489b390ac53705a552979621 | [
"MIT"
] | null | null | null | from machine import Pin
from time import sleep
print('Version 2 installed using USB V2')
led = Pin(2, Pin.OUT)
while True:
led.value(1)
sleep(0.4)
led.value(0)
sleep(0.4)
led.value(1)
sleep(0.4)
led.value(0)
sleep(0.4)
led.value(1)
sleep(2) | 15.722222 | 41 | 0.607774 | 51 | 283 | 3.372549 | 0.411765 | 0.232558 | 0.162791 | 0.232558 | 0.453488 | 0.453488 | 0.453488 | 0.453488 | 0.453488 | 0.453488 | 0 | 0.080569 | 0.254417 | 283 | 18 | 42 | 15.722222 | 0.734597 | 0 | 0 | 0.6 | 0 | 0 | 0.112676 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.133333 | 0.066667 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bfe3b77de51b76cba7bb75f120490bdb308fb93e | 181 | py | Python | delog.py | honglan3/dji-sdk-dji-ftpd | d00b1a8c7189f3534dd63a58e4490cec18c67b45 | [
"Unlicense"
] | 23 | 2018-08-07T01:13:16.000Z | 2021-02-25T00:42:37.000Z | delog.py | honglan3/dji-sdk-dji-ftpd | d00b1a8c7189f3534dd63a58e4490cec18c67b45 | [
"Unlicense"
] | 3 | 2018-08-03T12:26:02.000Z | 2018-09-03T12:01:54.000Z | delog.py | honglan3/dji-sdk-dji-ftpd | d00b1a8c7189f3534dd63a58e4490cec18c67b45 | [
"Unlicense"
] | 3 | 2018-08-02T20:29:40.000Z | 2018-08-13T14:53:51.000Z | #!/usr/bin/env python
import sys
import re

# Raw string: the original plain literal relied on the invalid escape
# sequence '\[' (a DeprecationWarning/SyntaxWarning on modern Pythons).
# Pattern matches ANSI cursor-position sequences: ESC [ <row> ; <col> H
# (both numbers optional).
CURSOR_POS_RE = re.compile(r'\x1b\[[0-9]*;[0-9]*H')


def strip_cursor_codes(text):
    """Return *text* with ANSI cursor-position escape sequences removed."""
    return CURSOR_POS_RE.sub('', text)


if __name__ == '__main__':
    # Filter stdin line by line, dropping cursor-position escapes and
    # surrounding whitespace.
    for line in sys.stdin:
        print(strip_cursor_codes(line).strip())
| 18.1 | 44 | 0.61326 | 30 | 181 | 3.433333 | 0.766667 | 0.038835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032895 | 0.160221 | 181 | 9 | 45 | 20.111111 | 0.644737 | 0.110497 | 0 | 0 | 0 | 0 | 0.175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
bfec7dee88c1cd8b2dfb30daf42647bac897d386 | 2,693 | py | Python | OBD2_HACK/lib/obd/protocols/__init__.py | educampos28/OBD2_HACK | 03dfb80f38e867a7447eceacc316ebfa8ead3a32 | [
"MIT"
] | 23 | 2018-06-04T22:26:00.000Z | 2022-03-03T10:31:07.000Z | OBD2_HACK/lib/obd/protocols/__init__.py | educampos28/OBD2_HACK | 03dfb80f38e867a7447eceacc316ebfa8ead3a32 | [
"MIT"
] | null | null | null | OBD2_HACK/lib/obd/protocols/__init__.py | educampos28/OBD2_HACK | 03dfb80f38e867a7447eceacc316ebfa8ead3a32 | [
"MIT"
] | 10 | 2018-12-10T16:17:34.000Z | 2021-12-20T10:09:18.000Z | # -*- coding: utf-8 -*-
########################################################################
# #
# python-OBD: A python OBD-II serial module derived from pyobd #
# #
# Copyright 2004 Donour Sizemore (donour@uchicago.edu) #
# Copyright 2009 Secons Ltd. (www.obdtester.com) #
# Copyright 2009 Peter J. Creath #
# Copyright 2016 Brendan Whitfield (brendan-w.com) #
# #
########################################################################
# #
# protocols/__init__.py #
# #
# This file is part of python-OBD (a derivative of pyOBD) #
# #
# python-OBD is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# python-OBD is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with python-OBD. If not, see <http://www.gnu.org/licenses/>. #
# #
########################################################################
from .protocol import ECU
from .protocol_unknown import UnknownProtocol
from .protocol_legacy import SAE_J1850_PWM, \
SAE_J1850_VPW, \
ISO_9141_2, \
ISO_14230_4_5baud, \
ISO_14230_4_fast
from .protocol_can import ISO_15765_4_11bit_500k, \
ISO_15765_4_29bit_500k, \
ISO_15765_4_11bit_250k, \
ISO_15765_4_29bit_250k, \
SAE_J1939
| 56.104167 | 72 | 0.379502 | 214 | 2,693 | 4.607477 | 0.565421 | 0.054767 | 0.036511 | 0.057809 | 0.083164 | 0.056795 | 0 | 0 | 0 | 0 | 0 | 0.067301 | 0.492388 | 2,693 | 47 | 73 | 57.297872 | 0.653987 | 0.656888 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
bfeeb9cce01b975064d94a00a5fb18acd7106402 | 18,826 | py | Python | library/_builtins.py | creativemindplus/skybison | d1740e08d8de85a0a56b650675717da67de171a0 | [
"CNRI-Python-GPL-Compatible"
] | 278 | 2021-08-31T00:46:51.000Z | 2022-02-13T19:43:28.000Z | library/_builtins.py | creativemindplus/skybison | d1740e08d8de85a0a56b650675717da67de171a0 | [
"CNRI-Python-GPL-Compatible"
] | 9 | 2021-11-05T22:28:43.000Z | 2021-11-23T08:39:04.000Z | library/_builtins.py | tekknolagi/skybison | bea8fc2af0a70e7203b4c19f36c14a745512a335 | [
"CNRI-Python-GPL-Compatible"
] | 12 | 2021-08-31T07:49:54.000Z | 2021-10-08T01:09:01.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
# $builtin-init-module$
# These values are injected by our boot process. flake8 has no knowledge about
# their definitions and will complain without these circular assignments.
_Unbound = _Unbound # noqa: F821
def _ContextVar_guard(obj):
_builtin()
def _Token_guard(obj):
_builtin()
def _builtin():
"""This function acts as a marker to `freeze_modules.py` it should never
actually be called."""
_unimplemented()
def _address(c):
_builtin()
def _anyset_check(obj):
_builtin()
def _async_generator_finalizer(obj):
_builtin()
def _async_generator_guard(obj):
_builtin()
def _async_generator_op_iter_get_state(obj):
_builtin()
def _base_exception_cause(self):
_builtin()
def _base_exception_context(self):
_builtin()
def _base_exception_set_cause(self, value):
_builtin()
def _base_exception_set_context(self, value):
_builtin()
def _base_exception_set_traceback(self, value):
_builtin()
def _base_exception_traceback(self):
_builtin()
def _bool_check(self):
"$intrinsic$"
_builtin()
def _bool_guard(self):
"$intrinsic$"
_builtin()
def _bound_method(fn, owner):
_builtin()
def _bound_method_guard(obj):
_builtin()
def _builtin_type(name):
"""Returns the builtin type with name `name`. This even works before the
type is initialized via a `class` statement and is intended to be used when
a builtin type definition requires to reference itself."""
_builtin()
def _byte_guard(obj):
_builtin()
def _bytearray_append(obj, item):
_builtin()
def _bytearray_check(obj):
"$intrinsic$"
_builtin()
def _bytearray_clear(obj):
_builtin()
def _bytearray_contains(obj, key):
_builtin()
def _bytearray_contains_byteslike(obj, key):
_builtin()
def _bytearray_copy(obj):
_builtin()
def _bytearray_delitem(self, key):
_builtin()
def _bytearray_delslice(self, start, stop, step):
_builtin()
def _bytearray_getitem(self, key):
_builtin()
def _bytearray_getslice(self, start, stop, step):
_builtin()
def _bytearray_guard(obj):
"$intrinsic$"
_builtin()
def _bytearray_join(self, iterable):
_builtin()
def _bytearray_len(self):
"$intrinsic$"
_builtin()
def _bytearray_ljust(self, width, fillbyte):
_builtin()
def _bytearray_rjust(self, width, fillbyte):
_builtin()
def _bytearray_setitem(self, key, value):
_builtin()
def _bytearray_setslice(self, start, stop, step, value):
_builtin()
def _bytes_check(obj):
"$intrinsic$"
_builtin()
def _bytes_contains(obj, key):
_builtin()
def _bytes_decode(obj, encoding):
_builtin()
def _bytes_decode_ascii(obj):
_builtin()
def _bytes_decode_utf_8(obj):
_builtin()
def _bytes_from_bytes(cls, value):
_builtin()
def _bytes_from_ints(source):
_builtin()
def _bytes_getitem(self, index):
_builtin()
def _bytes_getslice(self, start, stop, step):
_builtin()
def _bytes_guard(obj):
"$intrinsic$"
_builtin()
def _bytes_join(self, iterable):
_builtin()
def _bytes_len(self):
"$intrinsic$"
_builtin()
def _bytes_ljust(self, width, fillbyte):
_builtin()
def _bytes_maketrans(frm, to):
_builtin()
def _bytes_repeat(self, count):
_builtin()
def _bytes_replace(self, old, new, count):
_builtin()
def _bytes_split(self, sep, maxsplit):
_builtin()
def _bytes_split_whitespace(self, maxsplit):
_builtin()
def _byteslike_check(obj):
"$intrinsic$"
_builtin()
def _byteslike_compare_digest(a, b):
_builtin()
def _byteslike_count(self, sub, start, end):
_builtin()
def _byteslike_endswith(self, suffix, start, end):
_builtin()
def _byteslike_find_byteslike(self, sub, start, end):
_builtin()
def _byteslike_find_int(self, sub, start, end):
_builtin()
def _byteslike_guard(obj):
"$intrinsic$"
_builtin()
def _byteslike_rfind_byteslike(self, sub, start, end):
_builtin()
def _byteslike_rfind_int(self, sub, start, end):
_builtin()
def _byteslike_startswith(self, prefix, start, end):
_builtin()
def _caller_function():
_builtin()
def _caller_locals():
_builtin()
def _classmethod(function):
_builtin()
def _classmethod_isabstract(self):
_builtin()
def _code_check(obj):
_builtin()
def _code_guard(c):
_builtin()
def _code_new(
cls,
argcount,
posonlyargcount,
kwonlyargcount,
nlocals,
stacksize,
flags,
code,
consts,
names,
varnames,
filename,
name,
firstlineno,
lnotab,
freevars,
cellvars,
):
_builtin()
def _code_set_filename(code, filename):
_builtin()
def _complex_check(obj):
"$intrinsic$"
_builtin()
def _complex_checkexact(obj):
_builtin()
def _complex_imag(c):
_builtin()
def _complex_new(cls, imag, real):
_builtin()
def _complex_real(c):
_builtin()
def _compute_mro(type):
_builtin()
def _deque_guard(obj):
"$intrinsic$"
_builtin()
def _dict_check(obj):
"$intrinsic$"
_builtin()
def _dict_check_exact(obj):
"$intrinsic$"
_builtin()
# TODO(T56301601): Move this into a type-specific file.
def _dict_get(self, key, default=None):
_builtin()
def _dict_guard(obj):
"$intrinsic$"
_builtin()
def _dict_items_guard(self):
_builtin()
def _dict_keys_guard(self):
_builtin()
def _dict_len(self):
"$intrinsic$"
_builtin()
# TODO(T56301601): Move this into a type-specific file.
def _dict_setitem(self, key, value):
_builtin()
# TODO(T56301601): Move this into a type-specific file.
def _dict_update(self, other, kwargs):
_builtin()
def _divmod(number, divisor):
_builtin()
def _exec(code, module, implicit_globals):
_builtin()
def _float_check(obj):
"$intrinsic$"
_builtin()
def _float_check_exact(obj):
"$intrinsic$"
_builtin()
def _float_divmod(number, divisor):
_builtin()
def _float_format(
value, format_code, precision, skip_sign, add_dot_0, use_alt_formatting
):
_builtin()
def _float_guard(obj):
"$intrinsic$"
_builtin()
def _float_new_from_byteslike(cls, obj):
_builtin()
def _float_new_from_float(cls, obj):
_builtin()
def _float_new_from_str(cls, obj):
_builtin()
def _float_signbit(value):
_builtin()
def _frozenset_check(obj):
"$intrinsic$"
_builtin()
def _frozenset_guard(obj):
"$intrinsic$"
_builtin()
def _function_annotations(obj):
_builtin()
def _function_closure(obj):
_builtin()
def _function_defaults(obj):
_builtin()
def _function_globals(obj):
_builtin()
def _function_guard(obj):
"$intrinsic$"
_builtin()
def _function_kwdefaults(obj):
_builtin()
def _function_lineno(function, pc):
_builtin()
def _function_new(self, code, mod, name, defaults, closure):
_builtin()
def _function_set_annotations(obj, annotations):
_builtin()
def _function_set_defaults(obj, defaults):
_builtin()
def _function_set_kwdefaults(obj, kwdefaults):
_builtin()
def _gc():
_builtin()
def _get_asyncgen_hooks():
_builtin()
def _get_member_byte(addr):
_builtin()
def _get_member_char(addr):
_builtin()
def _get_member_double(addr):
_builtin()
def _get_member_float(addr):
_builtin()
def _get_member_int(addr):
_builtin()
def _get_member_long(addr):
_builtin()
def _get_member_pyobject(addr, name):
_builtin()
def _get_member_short(addr):
_builtin()
def _get_member_string(addr):
_builtin()
def _get_member_ubyte(addr):
_builtin()
def _get_member_uint(addr):
_builtin()
def _get_member_ulong(addr):
_builtin()
def _get_member_ushort(addr):
_builtin()
def _heap_dump(filename):
_builtin()
def _instance_dunder_dict_set(obj, dict):
_builtin()
def _instance_delattr(obj, name):
_builtin()
def _instance_getattr(obj, name):
_builtin()
def _instance_guard(obj):
_builtin()
def _instance_overflow_dict(obj):
_builtin()
def _instance_setattr(obj, name, value):
_builtin()
def _instancemethod_func(obj):
_builtin()
def _int_check(obj):
"$intrinsic$"
_builtin()
def _int_check_exact(obj):
"$intrinsic$"
_builtin()
def _int_ctor(cls, x=_Unbound, base=_Unbound):
_builtin()
def _int_ctor_obj(cls, x):
_builtin()
def _int_from_bytes(cls, bytes, byteorder_big, signed):
_builtin()
def _int_guard(obj):
"$intrinsic$"
_builtin()
def _int_new_from_byteslike(cls, x, base):
_builtin()
def _int_new_from_int(cls, value):
_builtin()
def _int_new_from_str(cls, x, base):
_builtin()
def _jit(func):
"""Compile the function's body to native code. Return the function. Useful
as a decorator:
@_jit
def foo:
pass
"""
_builtin()
def _jit_fromlist(funcs):
    """Compile each function object in `funcs` to native code."""
    for fn in funcs:
        _jit(fn)
def _jit_fromtype(type):
    """Compile every member found in `type`'s dict to native code."""
    _type_guard(type)
    members = type.__dict__.values()
    for member in members:
        _jit(member)
def _jit_iscompiled(func):
"""Return True if the given function is compiled and False otherwise."""
_builtin()
def _list_append(self, item):
"$intrinsic$"
_builtin()
def _list_check(obj):
"$intrinsic$"
_builtin()
def _list_check_exact(obj):
"$intrinsic$"
_builtin()
def _list_ctor(cls, iterable=()):
_builtin()
def _list_delitem(self, key):
_builtin()
def _list_delslice(self, start, stop, step):
_builtin()
def _list_extend(self, other):
_builtin()
def _list_getitem(self, key):
"$intrinsic$"
_builtin()
def _list_getslice(self, start, stop, step):
_builtin()
def _list_guard(obj):
"$intrinsic$"
_builtin()
def _list_len(self):
"$intrinsic$"
_builtin()
def _list_new(size, fill=None):
_builtin()
def _list_setitem(self, key, value):
"$intrinsic$"
_builtin()
def _list_setslice(self, start, stop, step, value):
_builtin()
def _list_sort(list):
_builtin()
def _list_sort_by_key(list):
_builtin()
def _list_swap(list, i, j):
_builtin()
def _lt(a, b):
    "Return the result of ``a < b``."
    outcome = a < b
    return outcome
def _lt_key(obj, other):
    "Order two tuple-like objects by comparing their first items."
    lhs = _tuple_getitem(obj, 0)
    rhs = _tuple_getitem(other, 0)
    return lhs < rhs
def _mappingproxy_guard(obj):
_builtin()
def _mappingproxy_mapping(obj):
_builtin()
def _mappingproxy_set_mapping(obj, mapping):
_builtin()
def _memoryview_check(obj):
_builtin()
def _memoryview_getitem(obj, key):
_builtin()
def _memoryview_getslice(self, start, stop, step):
_builtin()
def _memoryview_guard(obj):
"$intrinsic$"
_builtin()
def _memoryview_itemsize(obj):
_builtin()
def _memoryview_nbytes(self):
_builtin()
def _memoryview_setitem(self, key, value):
_builtin()
def _memoryview_setslice(self, start, stop, step, value):
_builtin()
def _memoryview_start(self):
_builtin()
def _mmap_check(obj):
_builtin()
def _module_dir(module):
_builtin()
def _module_proxy(module):
_builtin()
def _module_proxy_check(obj):
_builtin()
def _module_proxy_guard(module):
_builtin()
def _module_proxy_keys(self):
_builtin()
def _module_proxy_setitem(self, key, value):
_builtin()
def _module_proxy_values(self):
_builtin()
def _iter(self):
_builtin()
def _object_class_set(obj, name):
_builtin()
def _object_keys(self):
_builtin()
def _object_type_getattr(obj, name):
"""Looks up the named attribute on the object's type, resolving descriptors.
Behaves like _PyObject_LookupSpecial."""
_builtin()
def _object_type_hasattr(obj, name):
_builtin()
def _os_write(fd, buf):
_builtin()
def _os_error_subclass_from_errno(errno):
_builtin()
def _profiler_install(new_thread_func, call_func, return_func):
_builtin()
def _profiler_exclude(callable):
"""Call `callable` and disable opcode counting in the current thread for the
duration of the call."""
_builtin()
def _property(fget=None, fset=None, fdel=None, doc=None):
"""Has the same effect as property(), but can be used for bootstrapping."""
_builtin()
def _property_isabstract(self):
_builtin()
def _pyobject_offset(instance, offset):
_builtin()
def _range_check(obj):
"$intrinsic$"
_builtin()
def _range_guard(obj):
"$intrinsic$"
_builtin()
def _range_len(self):
_builtin()
def _readline(prompt):
_builtin()
def _repr_enter(obj):
_builtin()
def _repr_leave(obj):
_builtin()
def _seq_index(obj):
"$intrinsic$"
_builtin()
def _seq_iterable(obj):
"$intrinsic$"
_builtin()
def _seq_set_index(obj, index):
"$intrinsic$"
_builtin()
def _seq_set_iterable(obj, iterable):
"$intrinsic$"
_builtin()
def _set_check(obj):
"$intrinsic$"
_builtin()
def _set_function_flag_iterable_coroutine(code):
_builtin()
def _set_guard(obj):
"$intrinsic$"
_builtin()
def _set_len(self):
"$intrinsic$"
_builtin()
def _set_member_double(addr, value):
_builtin()
def _set_member_float(addr, value):
_builtin()
def _set_member_integral(addr, value, num_bytes):
_builtin()
def _set_member_integral_unsigned(addr, value, num_bytes):
_builtin()
def _set_member_pyobject(addr, value):
_builtin()
def _slice_check(obj):
"$intrinsic$"
_builtin()
def _slice_guard(obj):
"$intrinsic$"
_builtin()
def _slice_start(start, step, length):
_builtin()
def _staticmethod(func):
_builtin()
def _slice_start_long(start, step, length):
_builtin()
def _slice_step(step):
_builtin()
def _slice_step_long(step):
_builtin()
def _slice_stop(stop, step, length):
_builtin()
def _slice_stop_long(stop, step, length):
_builtin()
def _staticmethod_isabstract(self):
_builtin()
def _stop_iteration_ctor(cls, *args):
_builtin()
def _str_array_clear(self):
_builtin()
def _str_array_ctor(cls, source=_Unbound):
_builtin()
def _str_array_iadd(self, other):
_builtin()
def _str_center(self, width, fillchar):
_builtin()
def _str_check(obj):
"$intrinsic$"
_builtin()
def _str_check_exact(obj):
"$intrinsic$"
_builtin()
def _str_compare_digest(a, b):
_builtin()
def _str_count(self, sub, start, end):
_builtin()
def _str_ctor(cls, obj=_Unbound, encoding=_Unbound, errors=_Unbound):
"$intrinsic$"
_builtin()
def _str_ctor_obj(cls, obj):
_builtin()
def _str_encode(self, encoding):
_builtin()
def _str_encode_ascii(self):
_builtin()
def _str_endswith(self, suffix, start, end):
_builtin()
def _str_getitem(self, key):
_builtin()
def _str_getslice(self, start, stop, step):
_builtin()
def _str_guard(obj):
"$intrinsic$"
_builtin()
def _str_ischr(obj):
_builtin()
def _str_join(sep, iterable):
_builtin()
def _str_ljust(self, width, fillchar):
_builtin()
def _str_escape_non_ascii(s):
_builtin()
def _str_find(self, sub, start, end):
_builtin()
def _str_from_str(cls, value):
_builtin()
def _str_len(self):
"$intrinsic$"
_builtin()
def _str_mod_fast_path(self, other):
_builtin()
def _str_partition(self, sep):
_builtin()
def _str_replace(self, old, newstr, count):
_builtin()
def _str_rfind(self, sub, start, end):
_builtin()
def _str_rjust(self, width, fillchar):
_builtin()
def _str_rpartition(self, sep):
_builtin()
def _str_split(self, sep, maxsplit):
_builtin()
def _str_splitlines(self, keepends):
_builtin()
def _str_startswith(self, prefix, start, end):
_builtin()
def _str_translate(obj, table):
_builtin()
def _structseq_getitem(structseq, index):
_builtin()
def _structseq_new_type(name, field_names, is_heaptype=True, num_in_sequence=_Unbound):
_builtin()
def _structseq_setitem(structseq, index, value):
_builtin()
def _super(cls):
_builtin()
def _super_ctor(cls, type=_Unbound, type_or_obj=_Unbound):
_builtin()
def _traceback_frame_get(self):
_builtin()
def _traceback_lineno_get(self):
_builtin()
def _traceback_next_get(self):
_builtin()
def _traceback_next_set(self, new_next):
_builtin()
def _tuple_check(obj):
"$intrinsic$"
_builtin()
def _tuple_check_exact(obj):
"$intrinsic$"
_builtin()
def _tuple_getitem(self, index):
"$intrinsic$"
_builtin()
def _tuple_getslice(self, start, stop, step):
_builtin()
def _tuple_guard(obj):
"$intrinsic$"
_builtin()
def _tuple_len(self):
"$intrinsic$"
_builtin()
def _tuple_new(cls, old_tuple):
_builtin()
def _type(obj):
"$intrinsic$"
_builtin()
def _type_ctor(cls, obj):
_builtin()
def _type_abstractmethods_del(self):
_builtin()
def _type_abstractmethods_get(self):
_builtin()
def _type_abstractmethods_set(self, value):
_builtin()
def _type_bases_del(self):
_builtin()
def _type_bases_get(self):
_builtin()
def _type_bases_set(self, value):
_builtin()
def _type_check(obj):
"$intrinsic$"
_builtin()
def _type_check_exact(obj):
"$intrinsic$"
_builtin()
def _type_dunder_call(self, *args, **kwargs):
_builtin()
def _type_guard(obj):
"$intrinsic$"
_builtin()
def _type_issubclass(subclass, superclass):
"$intrinsic$"
_builtin()
def _type_module_get(self):
_builtin()
def _type_module_set(self, value):
_builtin()
def _type_name_get(self):
_builtin()
def _type_name_set(self, value):
_builtin()
def _type_proxy(type_obj):
_builtin()
def _type_new(cls, name, bases, dict, is_heaptype):
_builtin()
def _type_proxy_check(obj):
_builtin()
def _type_proxy_get(self, key, default):
_builtin()
def _type_proxy_guard(obj):
_builtin()
def _type_proxy_keys(self):
_builtin()
def _type_proxy_len(self):
_builtin()
def _type_proxy_values(self):
_builtin()
def _type_qualname_get(self):
_builtin()
def _type_qualname_set(self, value):
_builtin()
def _type_subclass_guard(subclass, superclass):
"$intrinsic$"
_builtin()
def _unimplemented():
"""Prints a message and a stacktrace, and stops the program execution."""
_builtin()
def _warn(message, category=None, stacklevel=1, source=None):
"""Calls warnings.warn."""
_builtin()
def _weakref_callback(self):
_builtin()
def _weakref_check(self):
"$intrinsic$"
_builtin()
def _weakref_guard(self):
"$intrinsic$"
_builtin()
def _weakref_referent(self):
_builtin()
maxunicode = maxunicode # noqa: F821
| 13.71158 | 87 | 0.676405 | 2,306 | 18,826 | 5.042064 | 0.178231 | 0.26146 | 0.098048 | 0.077578 | 0.463662 | 0.197557 | 0.107164 | 0.046272 | 0.015997 | 0.015997 | 0 | 0.002466 | 0.203017 | 18,826 | 1,372 | 88 | 13.721574 | 0.772461 | 0.111389 | 0 | 0.521008 | 0 | 0 | 0.04003 | 0 | 0 | 0 | 0 | 0.000729 | 0 | 1 | 0.438375 | false | 0 | 0 | 0.001401 | 0.441176 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bff76153095890d110b5a4fa0482b74c8d4c65ac | 412 | py | Python | geopoll/settings.py | callowayproject/django-geopoll | 13e84f4d68b5f963bccbb3ca8f2530d1adaee0f9 | [
"Apache-2.0"
] | 1 | 2020-05-19T07:35:35.000Z | 2020-05-19T07:35:35.000Z | geopoll/settings.py | callowayproject/django-geopoll | 13e84f4d68b5f963bccbb3ca8f2530d1adaee0f9 | [
"Apache-2.0"
] | null | null | null | geopoll/settings.py | callowayproject/django-geopoll | 13e84f4d68b5f963bccbb3ca8f2530d1adaee0f9 | [
"Apache-2.0"
] | null | null | null | from django.conf import settings
from django.db.models import get_model

# Built-in defaults; any entry may be overridden by defining a
# GEOPOLL_SETTINGS dict in the project's Django settings module.
DEFAULT_SETTINGS = {
    'MULTIPLE_SITES': False,
    'USER_MODEL': 'auth.User',
    'URL_MONTH_FORMAT': r'%b',
}
# Fix: getattr() needs the attribute *name* as a string — the bare
# identifier GEOPOLL_SETTINGS raised NameError at import time.
DEFAULT_SETTINGS.update(getattr(settings, 'GEOPOLL_SETTINGS', {}))

MULTIPLE_SITES = DEFAULT_SETTINGS['MULTIPLE_SITES']
# Fix: get_model() takes (app_label, model_name), so split the dotted
# "app.Model" path instead of passing it as a single argument.
USER_MODEL = get_model(*DEFAULT_SETTINGS['USER_MODEL'].split('.', 1))
MONTH_FORMAT = DEFAULT_SETTINGS['URL_MONTH_FORMAT'] | 29.428571 | 64 | 0.771845 | 54 | 412 | 5.537037 | 0.425926 | 0.250836 | 0.210702 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106796 | 412 | 14 | 65 | 29.428571 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0.220339 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87110df7b32b5a645d8053aa9955685b80f6cef5 | 4,849 | py | Python | podcast-backend/src/app/pcasts/__init__.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | podcast-backend/src/app/pcasts/__init__.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | podcast-backend/src/app/pcasts/__init__.py | cuappdev/archives | 061d0f9cccf278363ffaeb27fc655743b1052ae5 | [
"MIT"
] | null | null | null | from flask import Blueprint
from app import *
# PCasts Blueprint
pcasts = Blueprint('pcasts', __name__, url_prefix='/api/v1')
# Import all models
from app.pcasts.models._all import *
# Import all controllers
from app.pcasts.controllers.google_sign_in_controller import *
from app.pcasts.controllers.get_me_controller import *
from app.pcasts.controllers.series_subscriptions_controller import *
from app.pcasts.controllers.get_user_subscriptions_controller import *
from app.pcasts.controllers.create_delete_bookmark_controller import *
from app.pcasts.controllers.get_bookmarks_controller import *
from app.pcasts.controllers.get_create_delete_recommendation_controller import *
from app.pcasts.controllers.get_user_recommendations_controller import *
from app.pcasts.controllers.create_delete_following_controller import *
from app.pcasts.controllers.get_user_followers_controller import *
from app.pcasts.controllers.get_user_followings_controller import *
from app.pcasts.controllers.delete_listening_history_controller import *
from app.pcasts.controllers.listening_history_controller import *
from app.pcasts.controllers.clear_listening_history_controller import *
from app.pcasts.controllers.get_feed_controller import *
from app.pcasts.controllers.update_session_controller import *
from app.pcasts.controllers.sign_out_controller import *
from app.pcasts.controllers.search_episode_controller import *
from app.pcasts.controllers.search_series_controller import *
from app.pcasts.controllers.search_users_controller import *
from app.pcasts.controllers.search_all_controller import *
from app.pcasts.controllers.get_episodes_controller import *
from app.pcasts.controllers.series_controller import *
from app.pcasts.controllers.get_user_by_id_controller import *
from app.pcasts.controllers.update_username_controller import *
from app.pcasts.controllers.discover_series_controller import *
from app.pcasts.controllers.discover_episodes_controller import *
from app.pcasts.controllers.facebook_sign_in_controller import *
from app.pcasts.controllers.merge_account_controller import *
from app.pcasts.controllers.search_itunes_controller import *
from app.pcasts.controllers.get_facebook_friends import *
from app.pcasts.controllers.discover_series_for_topic_controller import *
from app.pcasts.controllers.discover_series_for_user_controller import *
from app.pcasts.controllers.discover_episodes_for_topic_controller import *
from app.pcasts.controllers.discover_episodes_for_user_controller import *
from app.pcasts.controllers.search_facebook_friends_controller import *
from app.pcasts.controllers.get_topics_controller import *
from app.pcasts.controllers.get_shares_controller import *
from app.pcasts.controllers.create_delete_share_controller import *
from app.pcasts.controllers.listening_history_dismiss_controller import *
from app.pcasts.controllers.ignore_facebook_friends_controller import *
from app.pcasts.controllers.subscribe_new_episode_controller import *
from app.pcasts.controllers.get_new_episode_notification_controller import *
from app.pcasts.controllers.update_new_episode_has_read_controller import *
# One instance of every route controller; each entry is registered on the
# blueprint by the loop below.
# NOTE(review): SearchFacebookFriends / GetFacebookFriends lack the
# *Controller suffix — confirm the class names match their modules.
controllers = [
  GoogleSignInController(),
  GetMeController(),
  SeriesSubscriptionsController(),
  GetUserSubscriptionsController(),
  CreateDeleteBookmarkController(),
  GetBookmarksController(),
  GetCreateDeleteRecommendationController(),
  GetUserRecommendationsController(),
  CreateDeleteFollowingController(),
  GetUserFollowersController(),
  GetUserFollowingsController(),
  DeleteListeningHistoryController(),
  ListeningHistoryController(),
  ClearListeningHistoryController(),
  GetFeedController(),
  UpdateSessionController(),
  SignOutController(),
  SearchEpisodeController(),
  SearchSeriesController(),
  SearchUsersController(),
  SearchAllController(),
  GetEpisodesController(),
  SeriesController(),
  GetUserByIdController(),
  UpdateUsernameController(),
  DiscoverSeriesController(),
  DiscoverEpisodesController(),
  FacebookSignInController(),
  MergeAccountController(),
  SearchiTunesController(),
  GetFacebookFriends(),
  DiscoverSeriesForTopicController(),
  DiscoverSeriesForUserController(),
  DiscoverEpisodesForTopicController(),
  DiscoverEpisodesForUserController(),
  SearchFacebookFriends(),
  GetTopicsController(),
  GetSharesController(),
  CreateDeleteShareController(),
  ListeningHistoryDismissController(),
  IgnoreFacebookFriendsController(),
  SubscribeNewEpisodeController(),
  GetNewEpisodeNotificationController(),
  UpdateNewEpisodeHasReadController(),
]
# Wire every controller's route into the blueprint.
for ctrl in controllers:
  path = ctrl.get_path()
  endpoint = ctrl.get_name()
  methods = ctrl.get_methods()
  pcasts.add_url_rule(path, endpoint, ctrl.response, methods=methods)
| 43.294643 | 80 | 0.829656 | 480 | 4,849 | 8.110417 | 0.2375 | 0.082713 | 0.15027 | 0.271256 | 0.553301 | 0.553301 | 0.522476 | 0.310044 | 0.04598 | 0 | 0 | 0.000229 | 0.098165 | 4,849 | 111 | 81 | 43.684685 | 0.890007 | 0.016292 | 0 | 0 | 0 | 0 | 0.002728 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.465347 | 0 | 0.465347 | 0.019802 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
871c4c6416472ba9266b92945dd4db0eed61a195 | 1,101 | py | Python | cryptoanalysis/cipher/caesar.py | CermakM/cryptoanalysis | 02c157daa046915b46451eeec3bde93860082abc | [
"MIT"
] | null | null | null | cryptoanalysis/cipher/caesar.py | CermakM/cryptoanalysis | 02c157daa046915b46451eeec3bde93860082abc | [
"MIT"
] | null | null | null | cryptoanalysis/cipher/caesar.py | CermakM/cryptoanalysis | 02c157daa046915b46451eeec3bde93860082abc | [
"MIT"
] | null | null | null | """
Python API for encoding and decoding a stream with a given key using Caesar cipher
Makes use of Vigener cipher - Caesar is just a simplification to key of lenght 1
"""
import cryptoanalysis.cipher.vigener as vigener
def encode(string: str, key: str) -> str:
  """
  Encode string using the Caesar cipher with the given key

  :param string: string to be encoded
  :param key: single letter to be used as given shift
  :return: encoded string
  :raises: ValueError if key length is not exactly 1
  """
  # A Caesar key is exactly one letter: the original check only rejected
  # keys longer than 1 and silently forwarded an empty key to vigener.
  if len(key) != 1:
    raise ValueError("[ERROR] Length of a key must be exactly 1 for Caesar cipher")
  return vigener.encode(string, key)
def decode(cipher: str, key: str) -> str:
  """
  Decode string using the Caesar cipher with the given key

  :param cipher: ciphered text stream to be decoded
  :param key: single letter to be used as given shift
  :return: decoded string
  :raises: ValueError if key length is not exactly 1
  """
  # Mirror of encode(): a Caesar key must be exactly one letter, so an
  # empty key is rejected too (the original check only caught len > 1).
  if len(key) != 1:
    raise ValueError("[ERROR] Length of a key must be exactly 1 for Caesar cipher")
  return vigener.decode(cipher, key)
| 30.583333 | 86 | 0.686649 | 168 | 1,101 | 4.5 | 0.321429 | 0.079365 | 0.02381 | 0.031746 | 0.547619 | 0.547619 | 0.547619 | 0.547619 | 0.547619 | 0.547619 | 0 | 0.005974 | 0.239782 | 1,101 | 35 | 87 | 31.457143 | 0.897252 | 0.532243 | 0 | 0.444444 | 0 | 0 | 0.260674 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
871fdd8a110c57802360f6844430070e0d89001f | 1,436 | py | Python | lib/django-1.5/tests/regressiontests/queryset_pickle/models.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | 3 | 2019-01-28T03:57:20.000Z | 2020-02-20T01:37:33.000Z | lib/django-1.5/tests/regressiontests/queryset_pickle/models.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | 1 | 2021-09-15T12:25:30.000Z | 2021-09-15T12:25:30.000Z | lib/django-1.5/tests/regressiontests/queryset_pickle/models.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | 3 | 2019-01-18T11:33:56.000Z | 2020-01-05T10:44:05.000Z | from __future__ import absolute_import
import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
def standalone_number(self):
    """Return the constant 1 (used below as a callable field default)."""
    value = 1
    return value
class Numbers(object):
    """Helper exposing static, class and instance methods that each return
    a distinct constant; used below as callable field defaults on
    Happening."""
    @staticmethod
    def get_static_number(self):
        # NOTE(review): declares a `self` parameter despite @staticmethod,
        # so callers must pass one explicit argument.
        return 2
    @classmethod
    def get_class_number(self):
        # `self` here is actually bound to the class (classmethod).
        return 3
    def get_member_number(self):
        return 4
nn = Numbers()
class Group(models.Model):
    # Display name, translated lazily via ugettext_lazy.
    name = models.CharField(_('name'), max_length=100)
class Event(models.Model):
    # Each event belongs to exactly one Group.
    group = models.ForeignKey(Group)
class Happening(models.Model):
    # Field defaults of several callable flavors (function reference,
    # lambda, standalone function, static/class/bound methods) —
    # presumably a fixture for queryset pickling of defaults (per the
    # app name); confirm against the test module.
    when = models.DateTimeField(blank=True, default=datetime.datetime.now)
    name = models.CharField(blank=True, max_length=100, default=lambda:"test")
    number1 = models.IntegerField(blank=True, default=standalone_number)
    number2 = models.IntegerField(blank=True, default=Numbers.get_static_number)
    number3 = models.IntegerField(blank=True, default=Numbers.get_class_number)
    number4 = models.IntegerField(blank=True, default=nn.get_member_number)
class Person(models.Model):
    # Simple named entity referenced by SocialProfile.
    name = models.CharField(max_length=200)
class SocialProfile(models.Model):
    person = models.ForeignKey(Person)
    # Self-referential many-to-many between profiles.
    friends = models.ManyToManyField('self')
class Post(models.Model):
    # Defaults to creation time via the callable datetime.datetime.now.
    post_date = models.DateTimeField(default=datetime.datetime.now)
class Material(models.Model):
post = models.ForeignKey(Post, related_name='materials')
| 27.615385 | 80 | 0.747214 | 180 | 1,436 | 5.811111 | 0.355556 | 0.073614 | 0.076482 | 0.10325 | 0.206501 | 0.08413 | 0.08413 | 0 | 0 | 0 | 0 | 0.013934 | 0.150418 | 1,436 | 51 | 81 | 28.156863 | 0.843443 | 0 | 0 | 0 | 0 | 0 | 0.014624 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0.111111 | 0.916667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
872fd7e3c6cbd1ca7524bbde6c5bb81e7d12765b | 751 | py | Python | src/model.py | Lukasz1928/sentence-analogies | 3168f0e41bde941ccf311d49d3d0024b2293d65f | [
"MIT"
] | null | null | null | src/model.py | Lukasz1928/sentence-analogies | 3168f0e41bde941ccf311d49d3d0024b2293d65f | [
"MIT"
] | null | null | null | src/model.py | Lukasz1928/sentence-analogies | 3168f0e41bde941ccf311d49d3d0024b2293d65f | [
"MIT"
] | null | null | null | from gensim.models import KeyedVectors
class WordVectorModel:
def __init__(self):
self.model = KeyedVectors.load_word2vec_format('resources/cc.en.300.vec', binary=False)
def get_word_vector(self, word):
return self.model[word]
def get_sentence_vectors(self, sentence):
return [self.get_word_vector(w) for w in sentence]
def predict_word(self, s1, s2, s3):
s1 = self.get_sentence_vectors(s1)
s2 = self.get_sentence_vectors(s2)
s3 = self.get_sentence_vectors(s3)
s1_sum = sum(s1)
s2_sum = sum(s2)
s3_sum = sum(s3)
predicted_word_vector = [s2_sum - s1_sum + s3_sum]
ms = self.model.most_similar(predicted_word_vector, topn=3)
return ms
| 30.04 | 95 | 0.663116 | 107 | 751 | 4.373832 | 0.383178 | 0.08547 | 0.153846 | 0.141026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04028 | 0.23968 | 751 | 24 | 96 | 31.291667 | 0.779335 | 0 | 0 | 0 | 0 | 0 | 0.030626 | 0.030626 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.055556 | 0.111111 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.