hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
24b7c41cba674d3c5a96735fae3e61b4d3db7195 | 1,376 | py | Python | i3configger/bindings.py | obestwalter/i3-configger | c981a01f5b89fd37ab5a98af1229819cea305f6a | [
"MIT"
] | 30 | 2017-05-20T12:27:37.000Z | 2021-09-27T17:01:20.000Z | i3configger/bindings.py | obestwalter/i3configger | c981a01f5b89fd37ab5a98af1229819cea305f6a | [
"MIT"
] | 9 | 2017-05-21T23:17:04.000Z | 2019-05-09T13:24:32.000Z | i3configger/bindings.py | obestwalter/i3-configger | c981a01f5b89fd37ab5a98af1229819cea305f6a | [
"MIT"
] | 3 | 2017-06-04T20:29:29.000Z | 2021-09-27T17:01:23.000Z | """WARNING Just an experiment - please ignore this."""
from i3configger import config
# i3 config keywords that introduce a key-binding line.
BINDCODE = "bindcode"
BINDSYM = "bindsym"
class Bindings:
    """Extract key-binding lines from an i3 config.

    Grammar of the lines of interest::

        bindsym | bindcode
        [--release] [<Group>+][<Modifiers>+]<keysym> command
        [--release] [--border] [--whole-window] [<Modifiers>+]button<n> command
    """

    def __init__(self, content):
        # Raw text of the i3 config file.
        self.content = content

    def get_all_bindings(self):
        """Return a sorted list of unique, uncommented binding lines."""
        stripped = (line.strip() for line in self.content.splitlines())
        with_binding = (
            line for line in stripped if BINDCODE in line or BINDSYM in line
        )
        unique = {
            line
            for line in with_binding
            if not line.startswith(config.MARK.COMMENT)
        }
        return sorted(unique)

    def translate_bindings(self):
        """Translate bindcode to bindsym assignments.

        This needs to be done the moment the information is asked for,
        because it depends on the currently active keyboard layout.
        """
        raise NotImplementedError()

    def write_bindings_info(self):
        """Write info in some format that can be nicely displayed."""
        raise NotImplementedError()
if __name__ == "__main__":
    # TODO: use partials and account for modes; a naming convention would
    # make this quite easy: mode-<modename>.conf -> bindings active in <modename>
    target_path = config.I3configgerConfig().targetPath
    bindings = Bindings(target_path.read_text())
    print("\n".join(bindings.get_all_bindings()))
| 30.577778 | 78 | 0.652616 | 173 | 1,376 | 5.075145 | 0.578035 | 0.037585 | 0.020501 | 0.022779 | 0.04328 | 0.04328 | 0.04328 | 0 | 0 | 0 | 0 | 0.001894 | 0.232558 | 1,376 | 44 | 79 | 31.272727 | 0.829545 | 0.389535 | 0 | 0.105263 | 0 | 0 | 0.0323 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0.052632 | 0 | 0.368421 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24ba97b99cd37843fd8b359e53696bd9e75b0abb | 1,867 | py | Python | musketeer/fitSignals.py | TChis/Musketeer | bd67b2e7f4e1827c96d10bbf278c781ce22681f3 | [
"MIT"
] | null | null | null | musketeer/fitSignals.py | TChis/Musketeer | bd67b2e7f4e1827c96d10bbf278c781ce22681f3 | [
"MIT"
] | null | null | null | musketeer/fitSignals.py | TChis/Musketeer | bd67b2e7f4e1827c96d10bbf278c781ce22681f3 | [
"MIT"
] | 2 | 2021-05-07T12:29:02.000Z | 2022-01-10T11:57:22.000Z | import numpy as np
from numpy.linalg import lstsq
from scipy.optimize import lsq_linear
from . import moduleFrame
class FitSignals(moduleFrame.Strategy):
    """Ordinary least-squares fit of the signals, honouring known spectra."""

    def __call__(self, signalVars, knownSpectra):
        # Rows are additions, columns are contributors. A contributor's
        # spectrum is "known" when its first column is not NaN.
        knownMask = ~np.isnan(knownSpectra[:, 0])
        unknownSignals = signalVars[:, ~knownMask]
        # Contribution of the contributors whose spectra are already known.
        knownSpectrum = signalVars[:, knownMask] @ knownSpectra[knownMask, :]
        # Fit only the remainder that the known contributors cannot explain.
        target = self.titration.processedData - knownSpectrum
        fittedSignals, residuals, _, _ = lstsq(
            unknownSignals, target, rcond=None
        )
        fittedCurves = unknownSignals @ fittedSignals + knownSpectrum
        allSignals = knownSpectra.copy()
        allSignals[~knownMask, :] = fittedSignals
        return allSignals, residuals, fittedCurves
class FitSignalsNonnegative(moduleFrame.Strategy):
    """Column-wise non-negative least-squares fit (bounded-variable LS)."""

    # TODO: account for known spectra
    def __call__(self, signalVars, knownSpectra):
        nContributors = signalVars.shape[1]
        fittedSignals = np.empty((0, nContributors))
        residuals = np.empty((1, 0))
        # Solve each observed signal column independently with x >= 0.
        for observed in self.titration.processedData.T:
            fit = lsq_linear(signalVars, observed, (0, np.inf), method="bvls")
            fittedSignals = np.vstack((fittedSignals, fit.x))
            residuals = np.append(residuals, fit.cost)
        fittedSignals = fittedSignals.T
        return fittedSignals, residuals, signalVars @ fittedSignals
class ModuleFrame(moduleFrame.ModuleFrame):
    # GUI frame exposing the choice of signal-fitting strategy.
    frameLabel = "Fit signals"
    dropdownLabelText = "Fit signals to curve using:"
    # TODO: add least squares with linear constraints
    # Maps the human-readable dropdown entry to the strategy class used.
    dropdownOptions = {
        "Ordinary least squares": FitSignals,
        "Nonnegative least squares": FitSignalsNonnegative,
    }
    # NOTE(review): presumably the attribute name under which the chosen
    # strategy is stored on the titration — confirm against moduleFrame.
    attributeName = "fitSignals"
| 38.102041 | 79 | 0.689877 | 170 | 1,867 | 7.505882 | 0.458824 | 0.028213 | 0.017241 | 0.032915 | 0.051724 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004144 | 0.224424 | 1,867 | 48 | 80 | 38.895833 | 0.877072 | 0.066417 | 0 | 0.054054 | 0 | 0 | 0.056929 | 0 | 0 | 0 | 0 | 0.020833 | 0 | 1 | 0.054054 | false | 0 | 0.108108 | 0 | 0.405405 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24bce25b915ba22957eca242f8aa3b075851e90b | 588 | py | Python | translations/migrations/0025_move_category_m2m.py | TranslateForSG/translateforsg-backend | 319d90229fa0b22e4a6e77b321276e0e93cb6413 | [
"MIT"
] | 2 | 2020-05-08T07:18:05.000Z | 2020-05-13T13:22:19.000Z | translations/migrations/0025_move_category_m2m.py | aniruddha-adhikary/translateforsg-backend | 319d90229fa0b22e4a6e77b321276e0e93cb6413 | [
"MIT"
] | 7 | 2021-03-19T02:03:21.000Z | 2021-09-22T18:54:02.000Z | translations/migrations/0025_move_category_m2m.py | aniruddha-adhikary/translateforsg-backend | 319d90229fa0b22e4a6e77b321276e0e93cb6413 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2020-04-21 11:08
from django.db import migrations
def move_category_m2m(apps, schema_editor):
    """Copy each Phrase's legacy ``category`` FK into its ``categories`` M2M."""
    # Use the historical model state provided by `apps`, never live models.
    Category = apps.get_model('translations', 'Category')
    Phrase = apps.get_model('translations', 'Phrase')
    for cat in Category.objects.all():
        linked_phrases = Phrase.objects.filter(category=cat)
        for phrase in linked_phrases:
            phrase.categories.add(cat)
class Migration(migrations.Migration):
    """Data migration: copy the legacy Phrase.category FK into the M2M field."""

    dependencies = [
        ('translations', '0024_auto_20200421_1908'),
    ]

    operations = [
        # FIX: supply a noop reverse so the migration is reversible.
        # Without reverse_code, unapplying raises IrreversibleError; the
        # noop simply leaves the copied M2M rows in place.
        migrations.RunPython(move_category_m2m, migrations.RunPython.noop)
    ]
| 25.565217 | 63 | 0.692177 | 70 | 588 | 5.671429 | 0.614286 | 0.060453 | 0.075567 | 0.120907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069915 | 0.197279 | 588 | 22 | 64 | 26.727273 | 0.771186 | 0.076531 | 0 | 0 | 1 | 0 | 0.134935 | 0.042514 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.071429 | 0 | 0.357143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24c51f4ab48e95013feb31263b800f59b0194d64 | 4,028 | py | Python | mipkit/dl/metrics.py | congvmit/mipkit | d65a5083852dcfc5db766175aa402a5e3a506f21 | [
"MIT"
] | 8 | 2021-06-17T08:13:51.000Z | 2022-02-21T13:31:18.000Z | mipkit/dl/metrics.py | congvmit/mipkit | d65a5083852dcfc5db766175aa402a5e3a506f21 | [
"MIT"
] | null | null | null | mipkit/dl/metrics.py | congvmit/mipkit | d65a5083852dcfc5db766175aa402a5e3a506f21 | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2021 Cong Vo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
Provided license texts might have their own copyrights and restrictions
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import scipy.stats as scs
def pearsonr(x, y, dim):
    r"""Compute the Pearson Correlation Coefficient along dimension `dim`.

    Pearson's :math:`\rho` (Linear Correlation Coefficient) is:

    .. math::
        \rho = \frac {E[(X-\mu_X)(Y-\mu_Y)]} {\sigma_X\sigma_Y}

    For a 2D input of shape ``(batch, n)`` with ``dim=1``, the coefficient
    is computed independently for each row.

    FIX: the previous docstring documented a nonexistent ``batch_first``
    parameter and showed examples calling ``pearsonr(input, target)``
    without the required ``dim`` argument, which would raise a TypeError.

    Args:
        x (torch.Tensor): input tensor.
        y (torch.Tensor): target tensor; must have the same shape as `x`.
        dim (int): dimension along which the correlation is computed.

    Returns:
        torch.Tensor: correlation coefficient(s) between `x` and `y`, with
        dimension ``dim`` kept at size 1 (``keepdim`` semantics), e.g.
        shape ``(N, 1)`` for ``(N, M)`` inputs and ``dim=1``.

    Raises:
        AssertionError: if `x` and `y` differ in shape.

    Note:
        :math:`\sigma_X` is computed with **Tensor.std()**, which by default
        applies Bessel's correction:

        .. math::
            \sigma_X=\displaystyle\frac{1}{N-1}\sum_{i=1}^N({x_i}-\bar{x})^2

        The covariance is therefore also divided by :math:`N-1` so that the
        corrections cancel in the ratio.

    Examples:
        >>> import torch
        >>> x = torch.tensor([[1.0, 2.0, 3.0]])
        >>> y = torch.tensor([[2.0, 4.0, 6.0]])
        >>> bool(pearsonr(x, y, dim=1) > 0.999)
        True
    """
    # noqa: E501
    assert x.shape == y.shape
    centered_x = x - x.mean(dim=dim, keepdim=True)
    centered_y = y - y.mean(dim=dim, keepdim=True)
    covariance = (centered_x * centered_y).sum(dim=dim, keepdim=True)
    # Bessel's correction: .std() below divides by N-1, so match it here.
    bessel_corrected_covariance = covariance / (x.shape[dim] - 1)
    x_std = x.std(dim=dim, keepdim=True)
    y_std = y.std(dim=dim, keepdim=True)
    # Epsilon (10e-7 == 1e-6) guards against division by zero for
    # constant inputs (zero standard deviation).
    corr = bessel_corrected_covariance / (x_std * y_std + 10e-7)
    return corr
| 41.958333 | 93 | 0.680238 | 584 | 4,028 | 4.64726 | 0.405822 | 0.032424 | 0.02395 | 0.031319 | 0.08622 | 0.056006 | 0.030214 | 0 | 0 | 0 | 0 | 0.013518 | 0.228649 | 4,028 | 95 | 94 | 42.4 | 0.859994 | 0.815789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.0625 | false | 0 | 0.0625 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24cccb183a4ac3f9434412a945c1d0bccd279086 | 4,709 | py | Python | app/scripts/config_check.py | PromoFaux/plex-utills | 570e2e4525b992978780b6a195df94c674c94ac3 | [
"MIT"
] | 179 | 2020-02-27T01:09:32.000Z | 2022-03-28T21:56:20.000Z | app/scripts/config_check.py | PromoFaux/plex-utills | 570e2e4525b992978780b6a195df94c674c94ac3 | [
"MIT"
] | 94 | 2020-03-03T03:22:42.000Z | 2022-03-28T20:13:22.000Z | app/scripts/config_check.py | PromoFaux/plex-utills | 570e2e4525b992978780b6a195df94c674c94ac3 | [
"MIT"
] | 36 | 2020-02-28T13:58:54.000Z | 2022-03-26T10:04:25.000Z | #!/usr/local/bin/python3
import os
import subprocess
from subprocess import Popen, PIPE, STDOUT
from configparser import ConfigParser
import subprocess
import plexapi
import schedule
import time
from datetime import datetime
import re
from colorama import Fore, Back, Style
import socket
from urllib import parse
from plexapi.server import PlexServer
config_object = ConfigParser()
config_object.read("/config/config.ini")
server = config_object["PLEXSERVER"]
schedules = config_object["SCHEDULES"]
options = config_object["OPTIONS"]
hdr_4k_posters = str.lower((options["4k_hdr_posters"]))
poster_3d = str.lower((options["3D_posters"]))
Disney = str.lower((options["Disney"]))
Pixar = (str.lower(options["Pixar"]))
hide_4k = str.lower((options["hide_4k"]))
pbak = str.lower((options["POSTER_BU"]))
HDR_BANNER = str.lower((options["HDR_BANNER"]))
optimise = str.lower((options["transcode"]))
mini_4k = str.lower((options["mini_4k"]))
mini_3d = str.lower((options["mini_3D"]))
t1 = (schedules["4k_poster_schedule"])
t2 = (schedules["disney_schedule"])
t3 = (schedules["pixar_schedule"])
t4 = (schedules["hide_poster_schedule"])
t5 = (schedules["3d_poster_schedule"])
url = parse.urlparse(server["PLEX_URL"]).hostname
try:
url = parse.urlparse(server["PLEX_URL"]).hostname
socket.inet_aton(url)
except socket.error:
raise Exception("Uh-Oh, it looks like your PLEX_URL is not correct in the config file \n Make sure you enter it as 'http://ip-address:plex-port'")
if server["TOKEN"] == '<token>':
raise Exception("You must add your Plex Token to the config file.")
try:
print("Your Server's Friendly name is ", PlexServer((server["PLEX_URL"]), (server["TOKEN"])).friendlyName)
except :
print('Cannot access your Plex account, please make sure that your Plex URL and Token are correct')
exit()
if pbak == 'true':
pass
elif pbak == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if HDR_BANNER == 'true':
pass
elif HDR_BANNER == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if mini_4k == 'true':
pass
elif mini_4k == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if hdr_4k_posters == 'true':
pass
elif hdr_4k_posters == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if poster_3d == 'true':
pass
elif poster_3d == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if Disney == 'true':
pass
elif Disney == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if Pixar == 'true':
pass
elif Pixar == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if hide_4k == 'true':
pass
elif hide_4k == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
if optimise == 'true':
pass
elif optimise == 'false':
pass
else:
raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')
a = re.compile("^[0-9]{2}:[0-9]{2}$")
if a.match(t1) and hdr_4k_posters == 'true':
pass
elif hdr_4k_posters != 'true':
pass
else:
raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')
if a.match(t5) and poster_3d == 'true':
pass
elif poster_3d != 'true':
pass
else:
raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')
if a.match(t2) and Disney == 'true':
pass
elif Disney != 'true':
pass
else:
raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')
if a.match(t3) and Pixar == 'true':
pass
elif Pixar != 'true':
pass
else:
raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')
if a.match(t4) and hide_4k == 'true':
pass
elif hide_4k != 'true':
pass
else:
raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')
print('Config check passed')
p = Popen('python -u ./run_all.py', shell=True)
output = p.communicate()
print(output[0])
| 30.185897 | 151 | 0.674241 | 668 | 4,709 | 4.672156 | 0.208084 | 0.048702 | 0.053829 | 0.103172 | 0.534124 | 0.497597 | 0.497597 | 0.441205 | 0.441205 | 0.419096 | 0 | 0.011705 | 0.201741 | 4,709 | 156 | 152 | 30.185897 | 0.818569 | 0.004884 | 0 | 0.455882 | 0 | 0.007353 | 0.401898 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.213235 | 0.102941 | 0 | 0.102941 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
24cf41a00623d50e881d78fdc44ceca7f693a7f5 | 1,371 | py | Python | feed/models.py | mehDkhan/zngol | 3a6449cb1b83e4a9707d103e8b9c9748a8ba5810 | [
"MIT"
] | null | null | null | feed/models.py | mehDkhan/zngol | 3a6449cb1b83e4a9707d103e8b9c9748a8ba5810 | [
"MIT"
] | 8 | 2020-02-12T01:08:53.000Z | 2022-02-10T08:31:06.000Z | feed/models.py | mehDkhan/zngol | 3a6449cb1b83e4a9707d103e8b9c9748a8ba5810 | [
"MIT"
] | null | null | null | from django.db import models
from account.models import User
from django.utils import timezone
from django.utils.text import slugify
class Post(models.Model):
    """A user-authored feed post."""

    # SET_NULL keeps the post alive when its author account is deleted.
    author = models.ForeignKey(to=User,
                               on_delete=models.SET_NULL,
                               related_name='feed_posts',
                               null=True
                               )
    title = models.CharField(max_length=140,blank=False,null=False)
    body = models.TextField(max_length=250,blank=False,null=False)
    # Slug only needs to be unique among posts created on the same date.
    slug = models.SlugField(max_length=250,unique_for_date='created')
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        # Most recently created (then updated) posts first.
        ordering = ('-created','-updated')

    def __str__(self):
        return self.title

    def save(self, *args, **kwargs):
        # Auto-generate the slug from the title when it is empty (first save).
        if not self.slug:
            self.slug = slugify(self.title)
        super().save(*args, **kwargs)
class Comment(models.Model):
    """A user comment attached to a Post."""

    post = models.ForeignKey(Post, related_name='comments', on_delete=models.CASCADE)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    body = models.TextField()
    # FIX: auto_now_add expects a boolean flag. Passing timezone.now()
    # called the clock once at class-definition time; the (truthy) datetime
    # merely behaved like True while looking like it set a default value.
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Newest comments first.
        ordering = ('-created',)

    def __str__(self):
        return 'Comment by {} on {}'.format(self.author,self.post)
24d041644a0972c3d43a61ae3680b9999e219c63 | 898 | py | Python | setup.py | ebruagbay/dsmlbc6_ebruagbay | b58ade5c808ded057595b9c607b745971580b3dd | [
"MIT"
] | null | null | null | setup.py | ebruagbay/dsmlbc6_ebruagbay | b58ade5c808ded057595b9c607b745971580b3dd | [
"MIT"
] | null | null | null | setup.py | ebruagbay/dsmlbc6_ebruagbay | b58ade5c808ded057595b9c607b745971580b3dd | [
"MIT"
] | null | null | null | import setuptools
# Package metadata for dsmlbc6_ebruagbay ("Data Science Tools").
setuptools.setup(name="dsmlbc6_ebruagbay",
                 version="0.0.2",
                 license="MIT",
                 author="Ebru Topsakal Agbay",
                 # FIX: was `author_mail`, an unknown option that setuptools
                 # silently ignores (with a warning) — the e-mail never made
                 # it into the package metadata.
                 author_email="ebrugeo@gmail.com",
                 description="Data Science Tools",
                 url="https://github.com/ebruagbay/dsmlbc6_ebruagbay.git",
                 keywords=["datascience","machine learning","bootcamp"],
                 classifiers=[
                     "Development Status :: 4 - Beta",
                     "License :: OSI Approved :: MIT License",
                     "Programming Language :: Python",
                     "Programming Language :: Python :: 2.7",
                     "Programming Language :: Python :: 3.5",
                     "Topic :: Scientific/Engineering",
                 ],
                 # FIX: was `package=` (singular), another unknown option, so
                 # no packages were ever included in the built distribution.
                 packages=setuptools.find_packages()
                 )
24d17cdc96014255022efc31f4e73c1c00af18cc | 996 | py | Python | outputs/admin.py | jayvdb/django-outputs | fcd1386b5dd95d71655e44fa49b766941bbcad43 | [
"Apache-2.0"
] | null | null | null | outputs/admin.py | jayvdb/django-outputs | fcd1386b5dd95d71655e44fa49b766941bbcad43 | [
"Apache-2.0"
] | null | null | null | outputs/admin.py | jayvdb/django-outputs | fcd1386b5dd95d71655e44fa49b766941bbcad43 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from outputs.models import Export, Scheduler
@admin.register(Export)
class ExportAdmin(admin.ModelAdmin):
    """Admin changelist for Export records with a bulk re-send action."""

    date_hierarchy = 'created'
    search_fields = ['creator__first_name', 'creator__last_name']
    # Join creator/content_type up front to avoid N+1 queries in the list.
    list_select_related = ['creator', 'content_type']
    list_filter = ['format', 'context', 'content_type']
    list_display = ('id', 'content_type', 'format', 'creator', 'total', 'created')
    actions = ['send_mail']

    def send_mail(self, request, queryset):
        """Re-send every selected export by e-mail in the requester's language."""
        language = request.LANGUAGE_CODE
        for export in queryset.all():
            export.send_mail(language=language)
@admin.register(Scheduler)
class SchedulerAdmin(admin.ModelAdmin):
    # Purely declarative changelist configuration for Scheduler records.
    date_hierarchy = 'created'
    search_fields = ['creator__first_name', 'creator__last_name']
    # Join creator/content_type up front to avoid N+1 queries in the list.
    list_select_related = ['creator', 'content_type']
    list_filter = ['routine', 'is_active', 'format', 'context', 'content_type']
    list_display = ('id', 'routine', 'is_active', 'content_type', 'format', 'creator', 'created')
| 36.888889 | 97 | 0.699799 | 115 | 996 | 5.747826 | 0.434783 | 0.099849 | 0.090772 | 0.08472 | 0.484115 | 0.484115 | 0.484115 | 0.372163 | 0.372163 | 0.372163 | 0 | 0 | 0.15261 | 996 | 26 | 98 | 38.307692 | 0.783175 | 0 | 0 | 0.3 | 0 | 0 | 0.291165 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.1 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
24d19ac974b3536f36cf25e1c8b1eefc826bb152 | 1,124 | py | Python | scrapeProject/spiders/pragativadi.py | OdiaNLP/DataScraper | a065d350602fc370cacde3f8ab62c3cc5b9e1ba9 | [
"MIT"
] | null | null | null | scrapeProject/spiders/pragativadi.py | OdiaNLP/DataScraper | a065d350602fc370cacde3f8ab62c3cc5b9e1ba9 | [
"MIT"
] | null | null | null | scrapeProject/spiders/pragativadi.py | OdiaNLP/DataScraper | a065d350602fc370cacde3f8ab62c3cc5b9e1ba9 | [
"MIT"
] | null | null | null | from scrapy.linkextractors import LinkExtractor
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose, Join
from scrapy.spiders import CrawlSpider, Rule
from scrapeProject.items import ScrapeprojectItem
class PragativadiSpider(CrawlSpider):
    """Crawl the pragativadinews.com blog and extract article header/content."""

    name = 'pragativadi'
    start_urls = ["https://pragativadinews.com/blog/"]

    # First rule follows the "older" pagination link; the second follows
    # individual article links and hands them to parse_item.
    rules = (
        Rule(LinkExtractor(restrict_xpaths="//div[@class='older']/a"), follow=True),
        Rule(LinkExtractor(restrict_xpaths="//h2/a[@class='post-url post-title']"), follow=True, callback='parse_item'),
    )

    def parse_item(self, response):
        """Populate a ScrapeprojectItem from a single article page."""
        loader = ItemLoader(item=ScrapeprojectItem(), response=response)
        # Both fields use the same pipeline: strip each text node, then join.
        loader.add_xpath('header', "//h1[@class='single-post-title']//text()",
                         MapCompose(lambda text: text.strip()), Join())
        loader.add_xpath('content', "//div[@class='entry-content clearfix single-post-content']/p//text()",
                         MapCompose(lambda text: text.strip()), Join())
        return loader.load_item()
| 40.142857 | 120 | 0.682384 | 128 | 1,124 | 5.929688 | 0.523438 | 0.052701 | 0.042161 | 0.081686 | 0.097497 | 0.097497 | 0.097497 | 0 | 0 | 0 | 0 | 0.002169 | 0.179715 | 1,124 | 27 | 121 | 41.62963 | 0.821041 | 0.070285 | 0 | 0.105263 | 0 | 0 | 0.224568 | 0.138196 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.263158 | 0 | 0.578947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
24dba7886b98a74cae8aad667f6a9c05f67ebc42 | 1,745 | py | Python | deeplearning/ml4pl/filesystem_paths.py | island255/ProGraML | 6c4ea50639773009e7c287feb62c6994fa4f3445 | [
"Apache-2.0"
] | 1 | 2020-07-14T12:17:45.000Z | 2020-07-14T12:17:45.000Z | deeplearning/ml4pl/filesystem_paths.py | island255/ProGraML | 6c4ea50639773009e7c287feb62c6994fa4f3445 | [
"Apache-2.0"
] | null | null | null | deeplearning/ml4pl/filesystem_paths.py | island255/ProGraML | 6c4ea50639773009e7c287feb62c6994fa4f3445 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 the ProGraML authors.
#
# Contact Chris Cummins <chrisc.101@gmail.com>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for generating filesystem paths.
We occasionally want to create or read files. When doing so, use this module to
generate the path. This module contains a default hardcoded location for files
which can be overridden by setting the ${ML4PL_TMP_ROOT} environment variable.
"""
import os
import pathlib
from typing import Union
from labm8.py import app
# Module-level alias for labm8's command-line flags object.
FLAGS = app.FLAGS
# The root directory for storing temporary files. Defaults to a per-user
# path under /tmp/ because other plausible locations (e.g. ~/.cache) are
# sandboxed by bazel during testing; override via ${ML4PL_TMP_ROOT}.
_default_tmp_root = f"/tmp/ml4pl/{os.environ.get('USER', 'anon')}"
TMP_ROOT = pathlib.Path(
  os.environ.get("ML4PL_TMP_ROOT", _default_tmp_root)
).absolute()
# Re-export the resolved path so that subprocesses see the same root.
os.environ["ML4PL_TMP_ROOT"] = str(TMP_ROOT)
def TemporaryFilePath(relpath: Union[str, pathlib.Path]):
  """Return ${ML4PL_TMP_ROOT}/relpath as an absolute path.

  Args:
    relpath: A relative path.

  Returns:
    The relative path appended to the ${ML4PL_TMP_ROOT} directory. No
    assumption is made on the type of path, or whether it (or any parent
    directories) exist.
  """
  return TMP_ROOT.joinpath(relpath)
| 32.924528 | 80 | 0.750143 | 266 | 1,745 | 4.879699 | 0.556391 | 0.03775 | 0.03698 | 0.024653 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014493 | 0.169628 | 1,745 | 52 | 81 | 33.557692 | 0.881297 | 0.75702 | 0 | 0 | 0 | 0 | 0.186842 | 0.089474 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.307692 | 0 | 0.461538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
24e20217d793c96b06ef2e09e523e4525409a89a | 4,565 | py | Python | var/www/cgi-bin/ewmethodConf.py | DanielAndreasen/FASMA-web | 4b87b2ac0be98817825fc94e5f287e5eb968d392 | [
"MIT"
] | 1 | 2017-01-24T14:15:22.000Z | 2017-01-24T14:15:22.000Z | var/www/cgi-bin/ewmethodConf.py | DanielAndreasen/FASMA-web | 4b87b2ac0be98817825fc94e5f287e5eb968d392 | [
"MIT"
] | 2 | 2016-11-02T15:07:13.000Z | 2018-03-10T12:20:09.000Z | var/www/cgi-bin/ewmethodConf.py | DanielAndreasen/FASMA-web | 4b87b2ac0be98817825fc94e5f287e5eb968d392 | [
"MIT"
] | null | null | null | #!/home/daniel/Software/anaconda3/bin/python
# Import modules for CGI handling
import os
import cgi, cgitb
from ewDriver import ewdriver
from emailSender import sendEmail
def cgi2dict(form):
    """Convert the form from cgi.FieldStorage to a python dictionary"""
    # Boolean switches that may be absent from the submitted form default
    # to False.
    flag_names = ('initial', 'fixteff', 'fixlogg', 'fixfeh', 'fixvt',
                  'refine', 'outlier', 'teffrange', 'autofixvt')
    params = dict((name, False) for name in flag_names)
    # Human-readable outlier choices -> FASMA keywords.
    outlier_map = {'None': None,
                   'All once': 'allOnce',
                   'All iteratively': 'allIter',
                   'One iteratively': '1Iter'}
    # Copy every submitted field over the defaults.
    for key in form.keys():
        params[key] = form[key].value
    params['outlier'] = outlier_map[params['outlier']]  # Translate to FASMA
    # Adjust the model atmosphere name for FASMA.
    if params['atmosphere'] == 'Kurucz':
        params['atmosphere'] = 'kurucz95'
    params['atmosphere'] = params['atmosphere'].lower()
    return params
def ew(form, name=None):
    """Create the configuration file for running the ARES driver"""
    # The options field starts with the model atmosphere and then appends
    # key:value pairs in the order FASMA expects.
    options = 'model:%s' % form['atmosphere']
    for keyword, field in (('iterations', 'iterations'),
                           ('EPcrit', 'EPslope'),
                           ('RWcrit', 'RWslope'),
                           ('Abdiffcrit', 'feDiff')):
        options += ',%s:%s' % (keyword, form[field])
    # Optional boolean switches.
    for flag in ('teffrange', 'autofixvt', 'refine'):
        if form[flag]:
            options += ',%s' % flag
    if form['initial']:
        options += ',tmcalc'
    if form['outlier']:
        options += ',outlier:%s' % form['outlier']
        options += ',sigma:%s' % form['sigma']
    # Parameters the user asked to keep fixed during the minimisation.
    for flag, keyword in (('fixteff', 'teff'), ('fixlogg', 'logg'),
                          ('fixfeh', 'feh'), ('fixvt', 'vt')):
        if form[flag]:
            options += ',%s' % keyword
    # Config line: line-list file, the four initial parameters, options.
    fout = ' '.join(['linelist.moog',
                     '%s' % form['Teff'], '%s' % form['logg'],
                     '%s' % form['feh'], '%s' % form['vt'],
                     options])
    with open('/tmp/StarMe_ew.cfg', 'w') as f:
        f.writelines(fout + '\n')
    # Hand the config file to the equivalent-width driver.
    parameters = ewdriver(starLines='/tmp/StarMe_ew.cfg', overwrite=True, name=name)
    return parameters
def parameters2HTML(parameters):
    """Convert the parameters to HTML in a table"""
    # The driver returns value/error pairs in the order:
    # Teff, logg, [Fe/H], microturbulence — presumably; TODO confirm against
    # ewdriver's return contract.
    data = {'teff': parameters[0], 'tefferr': parameters[1],
            'logg': parameters[2], 'loggerr': parameters[3],
            'feh': parameters[4], 'feherr': parameters[5],
            'vt': parameters[6], 'vterr': parameters[7]}
    # Bootstrap-styled table; the {name} placeholders are filled from `data`.
    table = '''<table class="table table-hover table-bordered table-striped">
<thead>
<tr>
<th>Parameters</th>
<th>Value</th>
</tr>
</thead>
<tbody>
<tr>
<td>T<sub>eff</sub></td>
<td>{teff}±{tefferr}</td>
</tr>
<tr>
<td>logg</td>
<td>{logg}±{loggerr}</td>
</tr>
<tr>
<td>[Fe/H]</td>
<td>{feh}±{feherr}</td>
</tr>
<tr>
<td>ξ<sub>tur</sub></td>
<td>{vt}±{vterr}</td>
</tr>
</tbody>
</table>'''.format(**data)
    # Python 2 print statement: emits the table into the CGI response body.
    print table
if __name__ == '__main__':
    # Enable debugging
    cgitb.enable()
    form = cgi.FieldStorage()
    # Run the minimization for a line list
    formDict = cgi2dict(form)
    parameters = ew(formDict, name=formDict['linelist'])
    # Mail the result file to the requester, then remove every temporary
    # artefact produced by the driver run.
    sendEmail(to=formDict['email'], driver='EWmethod', data='/tmp/EWresults.dat')
    os.remove('/tmp/EWresults.dat')
    os.remove('/tmp/linelist.moog')
    os.remove('/tmp/StarMe_ew.cfg')
    os.remove('/tmp/batch.par')
    os.remove('/tmp/out.atm')
    os.remove('/tmp/result.out')
    os.remove('/tmp/summary.out')
    os.remove('/tmp/error_summary.out')
    # Show the finished html page
    print 'Content-type: text/html\n\n'
    with open('../html/finish.html', 'r') as lines:
        for line in lines:
            if 'Congratulations' in line:
                # Inject the per-run results right after the banner line.
                print line,
                print '<h2 class="text-secondary text-center">Results for %s</h2>' % formDict['linelist'].rpartition('.')[0]
                print '<br>'
                parameters2HTML(parameters)
                continue
            print line,
| 31.923077 | 124 | 0.50471 | 483 | 4,565 | 4.745342 | 0.351967 | 0.023997 | 0.038394 | 0.018325 | 0.021379 | 0.021379 | 0 | 0 | 0 | 0 | 0 | 0.006201 | 0.328806 | 4,565 | 142 | 125 | 32.147887 | 0.741841 | 0.046878 | 0 | 0.104348 | 0 | 0 | 0.421886 | 0.051836 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.034783 | null | null | 0.052174 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24e30f0e72c5758202069450d33b77fffecaba08 | 5,964 | py | Python | common/utils.py | quentin-xia/Maticv | 76d599b68ef5bdab10e8dbc0c120657610933ad8 | [
"MIT"
] | null | null | null | common/utils.py | quentin-xia/Maticv | 76d599b68ef5bdab10e8dbc0c120657610933ad8 | [
"MIT"
] | null | null | null | common/utils.py | quentin-xia/Maticv | 76d599b68ef5bdab10e8dbc0c120657610933ad8 | [
"MIT"
] | null | null | null | #/usr/bin/env python
#-*- coding:utf-8 -*-
import math,os
import numpy as np
from adb import Adb
from screencap import MinicapStream
import tempfile
import hashlib
import gl
import platform
if platform.system() is "Windows":
try:
import maticv.common.opencv.x32.cv2 as cv2
except:
import maticv.common.opencv.x64.cv2 as cv2
else:
import maticv.common.opencv.linux.cv2 as cv2
class Utils(Adb):
    """Image-geometry helpers plus APK install/reset management on a device.

    Mixes OpenCV image utilities (rotation, scaling, coordinate mapping)
    with adb-based application management inherited from Adb.
    """
    def __init__(self):
        pass
        #super(Utils,self).__init__()

    # Rotate an image about its centre, enlarging the canvas so no corner
    # is clipped.
    def rotate_about_center(self,src,angle=90,scale=1.0):
        w,h = src.shape[1::-1]
        rangle = np.deg2rad(angle)
        # New canvas size that fully contains the rotated (and scaled) image.
        nw = (abs(np.sin(rangle)*h) + abs(np.cos(rangle)*w))*scale
        nh = (abs(np.cos(rangle)*h) + abs(np.sin(rangle)*w))*scale
        rot_mat = cv2.getRotationMatrix2D((nw*0.5, nh*0.5), angle, scale)
        # Shift the rotation matrix so the image is centred on the new canvas.
        rot_move = np.dot(rot_mat, np.array([(nw-w)*0.5, (nh-h)*0.5,0]))
        rot_mat[0,2] += rot_move[0]
        rot_mat[1,2] += rot_move[1]
        return cv2.warpAffine(src, rot_mat, (int(math.ceil(nw)), int(math.ceil(nh))), flags=cv2.INTER_LANCZOS4)

    # Normalise two corner points into an (x1, y1, x2, y2) rectangle,
    # optionally mapping the coordinates back to full resolution via `rate`.
    def get_rectangle_point(self,strX,strY,endX,endY,rate):
        if strX > endX:
            x1,x2 = endX,strX
        else:
            x1,x2 = strX,endX
        if strY > endY:
            y1,y2 = endY,strY
        else:
            y1,y2 = strY,endY
        if rate:
            x1 = self.reduction_point(x1,rate)
            x2 = self.reduction_point(x2,rate)
            y1 = self.reduction_point(y1,rate)
            y2 = self.reduction_point(y2,rate)
        return x1,y1,x2,y2

    # Centre point of the target rectangle; with `window` set, the point is
    # remapped — presumably for a rotated screen of width `width`; TODO
    # confirm the intended orientation convention.
    def get_circle_point(self,strX,strY,endX,endY,width=0,window=0):
        centerX = int((endX - strX) / 2) + strX
        centerY = int((endY - strY) / 2) + strY
        if window:
            centerX = centerY
            centerY = int((endX - strX) / 2) + (width - endX)
        return centerX,centerY

    # Map a coordinate taken on a scaled-down image back to full size.
    def reduction_point(self,point,rate):
        return int(round(point * (1 / float(rate))))

    # Scale an image by the given ratio (INTER_AREA suits downscaling).
    def zoom(self,image,rate):
        return cv2.resize(image,None,fx=rate,fy=rate,interpolation=cv2.INTER_AREA)

    # Create a unique .png file name under the project's directory.
    def get_img_name(self,project):
        path = tempfile.mktemp(".png","%s_" % project,"projects/%s" % project)
        path = path.replace("\\","/")
        return path

    # Capture the device screen via the minicap stream into the shared
    # temporary image path.
    def screenshot(self):
        #self.shell("screencap -p /sdcard/screenshot.png")
        #self.pull("/sdcard/screenshot.png",gl.TEMP_IMAGE_PATH)
        screencap = MinicapStream()
        screencap.ReadImageStream(gl.TEMP_IMAGE_PATH)

    # Ensure the app under test is installed and in a known state. With an
    # apk path: reuse the copy already on the device when present, otherwise
    # push and install it. Without one: just stop (and optionally clear) it.
    def install_app_for_test(self,apk=None,pkg=None,clr=True):
        if apk:
            installed = self.is_app_installed(pkg)
            remoteApk = self._remote_apk_is_exists(apk)
            if installed and remoteApk:
                self._reset_app(pkg,clr)
            else:
                self._mk_remote_dir()
                remoteApk,md5 = self._get_remote_path(apk)
                #print remoteApk,md5
                self._remove_temp_apks(md5)
                self._install_remote_with_retry(remoteApk,pkg,apk)
        else:
            self._reset_app(pkg,clr)

    #private
    # Push the APK to the device and install it, uninstalling any previous
    # version of the package first.
    def _install_remote_with_retry(self,remoteApk,pkg,localApk):
        installed = self.is_app_installed(pkg)
        if installed:
            self.uninstall_app(pkg)
        print "Install APK should to wait for a few minutes."
        self.push(localApk,remoteApk)
        self.install_remote(remoteApk)

    # Remove APKs from the device temp dir based on an md5 match.
    def _remove_temp_apks(self,md5):
        remoteTempPath = self._remote_temp_path()
        cmd = "ls %s*.apk" % remoteTempPath
        try:
            stdout = self.shell(cmd)
            if "No such file" in stdout:
                apks = []
            else:
                apks = stdout.split("\n")
        except:
            # NOTE(review): if self.shell() raised before `apks` was bound,
            # this len() call itself raises NameError — confirm intent.
            if len(apks) < 1:
                #print "No apks to examine"
                return False
        noMd5Matched = True
        for path in apks:
            path = path.strip()
            if path != "":
                noMd5Matched = True
                if not md5 in path:
                    noMd5Matched = False
                # NOTE(review): as written this deletes the file whose name
                # DOES contain `md5`, which looks inverted relative to the
                # caller's "remove stale APKs, keep the current one" intent —
                # confirm before relying on it.
                if noMd5Matched:
                    filePath = remoteTempPath + path
                    self.rimraf(filePath)

    # Create the temporary directory on the device.
    def _mk_remote_dir(self):
        path = self._remote_temp_path()
        self.mkdir(path)

    # Stop the app; optionally clear its stored data as well.
    def _reset_app(self,pkg,clr):
        if clr:
            self.stop_and_clear(pkg)
        else:
            self.force_stop(pkg)

    # Return the `ls` output for the expected remote APK path when it exists
    # on the device, otherwise False.
    def _remote_apk_is_exists(self,apk):
        remoteApk,appMd5Hash = self._get_remote_path(apk)
        cmd = "ls %s" % remoteApk
        stdout = self.shell(cmd)
        if not "No such file" in stdout:
            return stdout.strip()
        else:
            return False

    # Remote APK path (named after the file's padded md5) plus that md5.
    def _get_remote_path(self,apk):
        appMd5 = self._get_md5(apk)
        remoteTempPath = self._remote_temp_path()
        remoteApk = "%s%s.apk" % (remoteTempPath,appMd5)
        return remoteApk,appMd5

    # Temporary directory on the device.
    def _remote_temp_path(self):
        return "/data/local/tmp/"

    # md5 digest padded with its own first and last characters.
    def _get_md5(self,apk):
        appMd5Hash = self._get_app_md5(apk)
        appMd5 = "%s%s%s" % (appMd5Hash[0],appMd5Hash,appMd5Hash[-1])
        return appMd5

    # md5 of the APK file contents, read in 8096-byte chunks; returns ""
    # if the file cannot be read.
    def _get_app_md5(self,apk):
        app = None
        ret = False
        strMd5 = ""
        try:
            app = open(apk,"rb")
            md5 = hashlib.md5()
            strRead = ""
            while True:
                strRead = app.read(8096)
                if not strRead:
                    break
                md5.update(strRead)
            ret = True
            strMd5 = md5.hexdigest()
        except Exception,ex:
            #print ex
            ret = False
        finally:
            if app:
                app.close()
        return strMd5
| 29.092683 | 111 | 0.557176 | 736 | 5,964 | 4.361413 | 0.285326 | 0.011215 | 0.02243 | 0.02243 | 0.101558 | 0.034268 | 0 | 0 | 0 | 0 | 0 | 0.026606 | 0.331992 | 5,964 | 204 | 112 | 29.235294 | 0.779116 | 0.058015 | 0 | 0.178808 | 0 | 0 | 0.026118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.006623 | 0.072848 | null | null | 0.006623 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24ec8172b6e1c2fe9947b2843a4b87484a6b677e | 6,510 | py | Python | code/ner/model/net.py | gilbert98xD/mtextos2122 | 4811575134344c0bf255fa592c3b82efdb59d867 | [
"CC-BY-4.0"
] | 2 | 2022-03-07T13:47:36.000Z | 2022-03-07T16:10:34.000Z | code/ner/model/net.py | gilbert98xD/mtextos2122 | 4811575134344c0bf255fa592c3b82efdb59d867 | [
"CC-BY-4.0"
] | null | null | null | code/ner/model/net.py | gilbert98xD/mtextos2122 | 4811575134344c0bf255fa592c3b82efdb59d867 | [
"CC-BY-4.0"
] | null | null | null | """
## Minería de textos
Universidad de Alicante, curso 2021-2022
Esta documentación forma parte de la práctica "[Lectura y documentación de un sistema de
extracción de entidades](https://jaspock.github.io/mtextos2122/bloque2_practica.html)" y se
basa en el código del curso [CS230](https://github.com/cs230-stanford/cs230-code-examples)
de la Universidad de Stanford.
**Autores de los comentarios:** Gilbert Lurduy & Enrique Moreno
Este módulo define la red neuronal, la función de pérdida y la métrica de aciertos
para la evaluación del modelo. Se hace uso de la libería torch.
"""
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Recurrent network for named-entity recognition.

    Architecture: token embedding -> single-layer LSTM -> per-token linear
    projection onto the tag space, followed by a log-softmax.

    Note: the original code used free-floating triple-quoted strings as
    comments; those are no-op expression statements in Python and have been
    converted into real comments/docstrings (in English).
    """

    def __init__(self, params):
        """Build the three layers of the network.

        Args:
            params: object exposing ``vocab_size``, ``embedding_dim``,
                ``lstm_hidden_dim`` and ``number_of_tags``.
        """
        super(Net, self).__init__()
        # Maps each token id to a dense vector of size embedding_dim.
        self.embedding = nn.Embedding(params.vocab_size, params.embedding_dim)
        # batch_first=True: input/output tensors are (batch, seq, feature).
        # The hidden size need not equal the embedding size.
        self.lstm = nn.LSTM(params.embedding_dim,
                            params.lstm_hidden_dim, batch_first=True)
        # Final fully-connected layer projecting each LSTM state onto the
        # per-tag scores.
        self.fc = nn.Linear(params.lstm_hidden_dim, params.number_of_tags)

    def forward(self, s):
        """Compute per-token log-probabilities for a batch of sentences.

        Args:
            s: LongTensor of shape (batch_size, max_seq_len) with token
                indices; shorter sentences are padded to the batch maximum.

        Returns:
            FloatTensor of shape (batch_size * max_seq_len, number_of_tags)
            containing log-softmax scores, one row per token.
        """
        s = self.embedding(s)         # (batch, seq, embedding_dim)
        s, _ = self.lstm(s)           # (batch, seq, lstm_hidden_dim)
        s = s.contiguous()            # dense layout required before view()
        s = s.view(-1, s.shape[2])    # flatten to one row per token
        s = self.fc(s)                # (batch * seq, number_of_tags)
        # Log-probabilities (log-softmax) for numerical stability with the
        # NLL-style loss used downstream.
        return F.log_softmax(s, dim=1)
def loss_fn(outputs, labels):
    """Cross-entropy loss over the non-padding tokens of a batch.

    Args:
        outputs: (num_tokens, num_tags) log-softmax scores from the model.
        labels: tensor of gold tag indices (any shape; it is flattened),
            with -1 marking padding positions.

    Returns:
        Scalar tensor: negative mean log-likelihood of the gold tags,
        averaged over real (non-padding) tokens only.

    Note: the original had a no-op triple-quoted string after the return
    statement; it has been folded into these comments.
    """
    labels = labels.view(-1)
    # Padding tokens carry the label -1; mask them out of the loss.
    mask = (labels >= 0).float()
    # Make every label a valid column index (masked positions contribute 0
    # anyway, but indexing with -1 would silently pick the last column).
    labels = labels % outputs.shape[1]
    num_tokens = int(torch.sum(mask))
    # Gather the log-probability of the gold tag for every token, zero out
    # the padding, and average over the real tokens.
    return -torch.sum(outputs[range(outputs.shape[0]), labels]*mask)/num_tokens
def accuracy(outputs, labels):
    """Fraction of non-padding tokens whose predicted tag matches the label.

    Args:
        outputs: (num_tokens, num_tags) ndarray of (log-)probabilities.
        labels: ndarray of gold tag indices, -1 for padding positions.

    Returns:
        float: accuracy computed over real tokens only.

    Note: the original used free-floating string statements as comments;
    they are converted to real comments here.
    """
    labels = labels.ravel()
    # Padding positions (label -1) are excluded from the denominator; they
    # can never inflate the numerator because np.argmax is always >= 0.
    mask = (labels >= 0)
    # Most probable class per token.
    outputs = np.argmax(outputs, axis=1)
    return np.sum(outputs == labels)/float(np.sum(mask))
# Registry of evaluation metrics, keyed by name; each entry maps
# (outputs, labels) -> float.
metrics = {
    'accuracy': accuracy,
}
| 29.726027 | 119 | 0.618126 | 862 | 6,510 | 4.62993 | 0.37007 | 0.012027 | 0.013029 | 0.00902 | 0.061138 | 0.061138 | 0.061138 | 0.061138 | 0.061138 | 0.041092 | 0 | 0.008564 | 0.300461 | 6,510 | 218 | 120 | 29.862385 | 0.867809 | 0.286022 | 0 | 0 | 0 | 0 | 0.005895 | 0 | 0 | 0 | 0 | 0.018349 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24f4d7e1005a990eff108660ee3c6c03d85c23a0 | 439 | py | Python | neosis_telephone_directory/telephone_directory/migrations/0003_contacts_profile_pic.py | borkarfaiz/neosis_telephone_directory | d4a0f7197ac15f4993488e21459a744c370fde0f | [
"MIT"
] | null | null | null | neosis_telephone_directory/telephone_directory/migrations/0003_contacts_profile_pic.py | borkarfaiz/neosis_telephone_directory | d4a0f7197ac15f4993488e21459a744c370fde0f | [
"MIT"
] | null | null | null | neosis_telephone_directory/telephone_directory/migrations/0003_contacts_profile_pic.py | borkarfaiz/neosis_telephone_directory | d4a0f7197ac15f4993488e21459a744c370fde0f | [
"MIT"
] | null | null | null | # Generated by Django 3.0.11 on 2020-12-09 06:05
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds an optional profile-picture
    # image field to the Contacts model.

    dependencies = [
        ('telephone_directory', '0002_auto_20201208_1801'),
    ]

    operations = [
        migrations.AddField(
            model_name='contacts',
            name='profile_pic',
            # blank/null make the field optional both at form validation and
            # database level; uploads land under MEDIA_ROOT/profile_pic/.
            field=models.ImageField(blank=True, null=True, upload_to='profile_pic/'),
        ),
    ]
| 23.105263 | 85 | 0.626424 | 49 | 439 | 5.44898 | 0.816327 | 0.074906 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09816 | 0.257403 | 439 | 18 | 86 | 24.388889 | 0.720859 | 0.104784 | 0 | 0 | 1 | 0 | 0.186701 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24fce3d07a8f2fe74c6a87ee177cf440ad8e2e09 | 1,704 | py | Python | backend/pah_fm/urls.py | w1stler/pah-fm | e69600ba602715ae0b61dfa0bead934a0ed7f36f | [
"MIT"
] | 8 | 2019-08-09T11:06:16.000Z | 2021-10-05T14:56:31.000Z | backend/pah_fm/urls.py | w1stler/pah-fm | e69600ba602715ae0b61dfa0bead934a0ed7f36f | [
"MIT"
] | 382 | 2018-10-17T19:05:30.000Z | 2022-02-10T07:09:45.000Z | backend/pah_fm/urls.py | w1stler/pah-fm | e69600ba602715ae0b61dfa0bead934a0ed7f36f | [
"MIT"
] | 45 | 2018-10-17T17:04:04.000Z | 2021-10-05T14:30:35.000Z | """pah_fm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from rest_framework.documentation import include_docs_urls
from pah_fm.views import CustomObtainJSONWebToken
from fleet_management.api import (
CarListView,
CurrentUserRetrieveView,
DriveView,
PassengerListView,
ProjectView,
RefuelView,
)
urlpatterns = [
    # Django admin site.
    path("admin/", admin.site.urls),
    # Browsable API documentation (public=False: login required).
    path("api/docs/", include_docs_urls(title="PAH-FM", public=False)),
    # JWT token endpoint used for authentication.
    path("api/api-token-auth/", CustomObtainJSONWebToken.as_view(), name="jwt"),
    # Profile of the currently authenticated user.
    path("api/users/me", CurrentUserRetrieveView.as_view(), name="me"),
    # Fleet-management resources.
    path("api/passengers", PassengerListView.as_view(), name="passengers"),
    path("api/cars", CarListView.as_view(), name="cars"),
    path("api/drives", DriveView.as_view(), name="drives"),
    path("api/projects", ProjectView.as_view(), name="projects"),
    path("api/refuels", RefuelView.as_view(), name="refuels"),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# The static() helper serves STATIC_ROOT during development (no-op when
# DEBUG is off).
| 37.866667 | 80 | 0.723592 | 231 | 1,704 | 5.246753 | 0.354978 | 0.039604 | 0.066007 | 0.019802 | 0.096535 | 0.096535 | 0.061881 | 0 | 0 | 0 | 0 | 0.005476 | 0.142606 | 1,704 | 44 | 81 | 38.727273 | 0.824093 | 0.365023 | 0 | 0 | 0 | 0 | 0.136744 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.08 | 0.28 | 0 | 0.28 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
24fd6c41fe4fbfb1df5a007cfef9baca40a53801 | 172 | py | Python | deciphon_cli/console/env.py | EBI-Metagenomics/deciphon-cli | aa090c886db1f4dacc6bc88b46b6ebcecb79eaab | [
"MIT"
] | null | null | null | deciphon_cli/console/env.py | EBI-Metagenomics/deciphon-cli | aa090c886db1f4dacc6bc88b46b6ebcecb79eaab | [
"MIT"
] | null | null | null | deciphon_cli/console/env.py | EBI-Metagenomics/deciphon-cli | aa090c886db1f4dacc6bc88b46b6ebcecb79eaab | [
"MIT"
] | null | null | null | import typer
import deciphon_cli.data as data
__all__ = ["app"]
app = typer.Typer()
@app.command()
def default():
typer.echo(data.env_example_content(), nl=False)
| 13.230769 | 52 | 0.703488 | 25 | 172 | 4.56 | 0.68 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151163 | 172 | 12 | 53 | 14.333333 | 0.780822 | 0 | 0 | 0 | 0 | 0 | 0.017442 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0 | 0.428571 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24fd822b165bd2a82075658d81849e6e2daa014f | 4,158 | py | Python | wftests/ci/pylint_checker.py | YutakaMizugaki/warriorframework | 685761cf044182ec88ce86a942d4be1e150a1256 | [
"Apache-2.0"
] | 24 | 2017-06-06T15:48:08.000Z | 2021-03-17T07:52:52.000Z | wftests/ci/pylint_checker.py | YutakaMizugaki/warriorframework | 685761cf044182ec88ce86a942d4be1e150a1256 | [
"Apache-2.0"
] | 272 | 2017-05-19T20:39:12.000Z | 2021-12-13T19:34:51.000Z | wftests/ci/pylint_checker.py | pavithra-gowda/warrior | 19b153310552b986b86b5470fcfea9547a74c3a9 | [
"Apache-2.0"
] | 37 | 2017-05-17T21:24:37.000Z | 2021-07-24T18:09:22.000Z | """
Get a list of file and run pylint on each of the files
on pull request source branch and target branch
"""
import sys
import subprocess
# from pylint import epylint as lint
def process_file_list(input_file, rc_file):
    """
    Generate a list of files that need to be pylint

    input_file: text file with one candidate path per line.
    rc_file: pylintrc whose "ignore=" entry filters the candidates.
    Exits the process (code 0) when nothing is left to check.
    """
    # NOTE(review): the two open() handles are never closed.
    filelist = open(input_file).readlines()
    filelist = [x.strip() for x in filelist]
    pylintrc = open(rc_file).readlines()
    # Honour the "ignore=" line of the pylintrc: drop any file whose path
    # contains one of the ignored names.
    ignore = [x for x in pylintrc if x.startswith("ignore=")]
    if ignore:
        ignore = ignore[0][7:].replace('\n', '').split(',')
    result = [x for x in filelist if all([y not in x for y in ignore])]
    if result:
        print "The following files will be tested with Pylint:\n", "\n".join(result), "\n"
    else:
        print "No file requires pylint check, exiting"
        exit(0)
    return result
def pylint(file_list):
    """
    Pylint files from file list

    Returns a dict mapping file path -> [score string, full pylint report]
    for every file that received a rating.
    """
    file_score = {}
    for fi in file_list:
        print "linting", fi
        try:
            output = subprocess.check_output('pylint --rcfile=.pylintrc {}'.format(fi), shell=True)
        except subprocess.CalledProcessError as e:
            # pylint exits non-zero when it finds issues; the report is still
            # on stdout, so recover it from the exception object.
            output = e.output
        # Extract the "Your code has been rated at X/10 ..." line, keeping
        # only the part after the fixed prefix.
        score = output.split('\n')
        score = [x.replace("Your code has been rated at ", "") for x in score if x.startswith("Your code has been")]
        if score:
            # code has been rated
            file_score[fi] = [score[0], output]
        else:
            print fi, "doesn't get a Pylint score on this branch"
    return file_score
def report(branch_file_score):
    """
    print out pylint result for each file

    branch_file_score: dict of path -> [score string, full pylint report],
    as produced by pylint().
    """
    print "\n\n\n!---------- Detail score for branch {} ----------!\n".format(sys.argv[4])
    for k, v in branch_file_score.items():
        # v[1] is the full pylint report text.
        print k, "\n", v[1]
    print "\n\n\n!---------- Summary score for branch {} ----------!\n".format(sys.argv[4])
    for k, v in branch_file_score.items():
        # v[0] is the one-line score string.
        print k, v[0]
def judge(branch_file_score):
    """
    Check the score and difference for each file

    A file fails when its score is below 5/10 or when it decreased by more
    than 0.1 relative to the previous run. Returns True when all pass.
    """
    status = True
    for k, v in branch_file_score.items():
        # print k, v[0]
        # Score strings look like "7.50/10 (previous run: 7.40/10, +0.10)" —
        # presumably; TODO confirm against pylint's output format.
        score = v[0].split("/")[0]
        if float(score) < 5:
            status = False
            print k, "failed with a score lower than 5"
        if "previous" in v[0]:
            # Delta vs. the previous run, e.g. "+0.10" (trailing ")" cut off).
            improvement = float(v[0].split(",")[1][:-1])
            if improvement < -0.1:
                status = False
                print k, "failed with a decreasing score"
        # `improvement` is only read when "previous" is present, so the
        # short-circuit keeps this safe within the current iteration.
        if float(score) >= 5 and "previous" in v[0] and improvement >= -0.1:
            print k, "pass"
    return status
def custom_rules(file_list):
    """
    Invoke custom rules checker on each file

    Runs wftests/ci/custom_rules.py on every file; any non-zero exit marks
    the overall status as failed. Returns True when every file passes.
    """
    status = True
    for fi in file_list:
        try:
            output = subprocess.check_output('python wftests/ci/custom_rules.py {}'.format(fi), shell=True)
        except subprocess.CalledProcessError as e:
            # Non-zero exit: keep the checker's output but record the failure.
            output = e.output
            status = False
        print output
    return status
def main():
    """
    main function to process logic

    Expected argv: filenames_list pylintrc_file target_branch source_branch.
    Lints the changed files on both branches, compares the scores, and
    exits 0 on success / 1 on failure.
    """
    if len(sys.argv) > 4:
        file_list = process_file_list(sys.argv[1], sys.argv[2])
        print "target branch:", sys.argv[3], "\nsource branch:", sys.argv[4]
        # Score the target branch first; pylint caches "previous run" scores,
        # presumably so the source-branch run reports the delta — TODO confirm.
        subprocess.check_output("git checkout {}".format(sys.argv[3]), shell=True)
        print "Running pylint on", sys.argv[3]
        pylint(file_list)
        print "\n"
        # Then score the source (pull-request) branch and judge it.
        subprocess.check_output("git checkout {}".format(sys.argv[4]), shell=True)
        print "\nRunning pylint on", sys.argv[4]
        branch_file_score = pylint(file_list)
        report(branch_file_score)
        print "\n\n\n!---------- Judging score for branch {} ----------!\n".format(sys.argv[4])
        status = judge(branch_file_score)
        print "\n\n\n!---------- Custom Rules Checker for branch {} ----------!\n".format(sys.argv[4])
        status &= custom_rules(file_list)
        if status:
            exit(0)
        else:
            exit(1)
    else:
        print "Missing arguments, require filenames, pylintrc_file, target_branch, source_branch"


if __name__ == "__main__":
    main()
| 31.5 | 116 | 0.575998 | 565 | 4,158 | 4.145133 | 0.238938 | 0.038856 | 0.051238 | 0.029889 | 0.30316 | 0.235696 | 0.235696 | 0.192143 | 0.125961 | 0.125961 | 0 | 0.012097 | 0.284271 | 4,158 | 131 | 117 | 31.740458 | 0.774866 | 0.016354 | 0 | 0.275862 | 0 | 0 | 0.213715 | 0.007161 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.011494 | 0.022989 | null | null | 0.218391 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24ff72339ada2faf39d38cc9fe1209fcafd6136d | 3,269 | py | Python | draw_macros/drawEnvelope.py | nkarast/WWTheoryUncertainties | d2d3e5cab4cd72256cdc572fee05acfbe3372f5a | [
"MIT"
] | null | null | null | draw_macros/drawEnvelope.py | nkarast/WWTheoryUncertainties | d2d3e5cab4cd72256cdc572fee05acfbe3372f5a | [
"MIT"
] | null | null | null | draw_macros/drawEnvelope.py | nkarast/WWTheoryUncertainties | d2d3e5cab4cd72256cdc572fee05acfbe3372f5a | [
"MIT"
] | null | null | null | import ROOT as rt
import glob
debug = 1
def setstyle():
    """Apply the global ROOT plotting style used for every canvas."""
    style = rt.gStyle
    style.SetOptStat(0)
    # White-ish (color 10) fills everywhere, no borders.
    style.SetFillColor(10)
    style.SetFrameFillColor(10)
    style.SetCanvasColor(10)
    style.SetPadColor(10)
    style.SetTitleFillColor(0)
    style.SetStatColor(10)
    style.SetCanvasBorderMode(0)
    style.SetFrameBorderMode(0)
    style.SetPadBorderMode(0)
    style.SetDrawBorder(0)
    style.SetTitleBorderSize(0)
    # Line widths and function colour.
    style.SetFuncWidth(2)
    style.SetHistLineWidth(2)
    style.SetFuncColor(2)
    # Pad margins and tick marks on all four sides.
    style.SetPadTopMargin(0.05)
    style.SetPadBottomMargin(0.16)
    style.SetPadLeftMargin(0.16)
    style.SetPadRightMargin(0.05)
    style.SetPadTickX(1)
    style.SetPadTickY(1)
    style.SetFrameLineWidth(1)
    style.SetLineWidth(1)
def HSG3HistStyle(histo):
    """Apply the HSG3 histogram cosmetics (titles, divisions, labels)."""
    histo.SetTitle("")
    histo.SetLineWidth(3)
    # Identical size/offset/division settings are applied to both axes.
    for setter, value in ((histo.SetTitleSize, 0.07),
                          (histo.SetNdivisions, 505),
                          (histo.SetLabelSize, 0.07),
                          (histo.SetTitleOffset, 1.)):
        setter(value, "x")
        setter(value, "y")
# For every systematic-variation table, draw the envelope band around the
# nominal (=1) prediction and save the canvas as a PDF.
for filename in glob.glob("/Users/nkarast/Documents/Higgs/Work/CPMixing/TheoryNtuples/finalVersionOfCode/UEPS/*.dat"):
    setstyle()
    # NOTE(review): 'read' is not a standard open() mode string; Python 2's
    # open() effectively honours only the leading 'r' — confirm intent.
    file = open(filename, 'read')
    print 'Working with ', filename
    # bin_contents = bincontents[]
    # nbins = len(bincontents)
    bincontents = []
    nominalContent = []
    # First column of each non-comment line is the per-bin variation.
    for line in file.readlines():
        if "#" in line : continue
        bincontents.append(float(line.split()[0]))
        nominalContent.append(1.)
    file.close()
    # hist_nom carries the band (bin error = variation); hist_one is the
    # flat nominal line at 1.
    hist_nom = rt.TH1F("Nominal","Nominal", len(bincontents), 0, len(bincontents))
    hist_one = rt.TH1F("Ones","Ones", len(bincontents), 0, len(bincontents))
    for bin in range(len(bincontents)):
        hist_nom.SetBinContent(bin+1, 1.)
        hist_one.SetBinContent(bin+1, 1.)
        # A stored value of exactly 1 means "no variation": zero error bar.
        if bincontents[bin]==1 :
            bincontents[bin]=0.
        hist_nom.SetBinError(bin+1, bincontents[bin])
    canvas = rt.TCanvas("WW","WW", 800, 600)
    canvas.cd()
    # NOTE(review): styling is applied to hist_nom twice and never to
    # hist_one — likely a copy/paste slip; harmless but confirm.
    HSG3HistStyle(hist_nom)
    HSG3HistStyle(hist_nom)
    hist_nom.SetTitle("")
    rt.gStyle.SetOptStat(0)
    hist_nom.SetMinimum(0)
    hist_nom.GetXaxis().SetTitle("BDT Output")
    hist_nom.GetYaxis().SetTitle("Variation")
    hist_nom.SetMaximum(2.5)
    # Hatched red band ("E2" draws the error rectangles).
    hist_nom.SetFillColor(46)
    hist_nom.SetFillStyle(3001)
    hist_nom.SetLineColor(46)
    hist_nom.Draw("E2")
    hist_one.SetLineColor(rt.kBlack)
    hist_one.SetLineWidth(2)
    hist_one.Draw("same")
    leg = rt.TLegend(0.60, 0.70, 0.90, 0.90)
    leg.SetBorderSize(0)
    leg.SetTextFont(42)
    leg.SetTextSize(0.045)
    leg.SetFillColor(0)
    leg.SetNColumns(1)
    leg.AddEntry(hist_one, "Nominal", "l")
    leg.AddEntry(hist_nom, "WW UE/PS", "f")
    leg.Draw("same")
    # ATLAS-style annotation text.
    lumi = rt.TLatex();
    lumi.SetNDC();
    lumi.SetTextFont(42);
    lumi.SetTextSize(0.045);
    lumi.SetTextColor(1);
    lumi.DrawLatex(0.22, 0.8, "Simulation #sqrt{s} = 8 TeV")
    lumi.DrawLatex(0.2, 0.70, "#sqrt{s} = 8 TeV, #int L dt = 20.3 fb^{-1}")
    # Save next to the input file, swapping ".dat" for the PDF suffix.
    savename = filename[:-4]+"_wwUEPS.pdf"
    canvas.SaveAs(savename)
    print bincontents
7002ef7c1c1b47d0e90a43b630eeacf894b6e9ba | 3,071 | py | Python | code/nearest_neighbor_classify.py | lionelmessi6410/Scene-recognition-with-bag-of-words | 1bbc11cd060f792b54b86baa8a5f7483133b9f2c | [
"MIT"
] | 22 | 2019-11-27T04:14:07.000Z | 2022-01-10T08:16:58.000Z | code/nearest_neighbor_classify.py | lionelmessi6410/Scene-recognition-with-bag-of-words | 1bbc11cd060f792b54b86baa8a5f7483133b9f2c | [
"MIT"
] | null | null | null | code/nearest_neighbor_classify.py | lionelmessi6410/Scene-recognition-with-bag-of-words | 1bbc11cd060f792b54b86baa8a5f7483133b9f2c | [
"MIT"
] | 10 | 2019-12-13T07:31:09.000Z | 2021-12-18T18:21:20.000Z | from __future__ import print_function
import numpy as np
import scipy.spatial.distance as distance
def nearest_neighbor_classify(train_image_feats, train_labels, test_image_feats, k=1):
    '''
    Predict a category for every test image via k-nearest-neighbour voting
    over the training features.

    Input :
        train_image_feats :
            (N, d) matrix, where d is the dimensionality of the feature
            representation.
        train_labels :
            list of N strings, the ground-truth category of each training
            image.
        test_image_feats :
            (M, d) matrix of test-image features.
        k :
            number of nearest neighbours that vote (default 1, which keeps
            the original behaviour and signature backward compatible).

    Output :
        test_predicts :
            a list (M) of strings, the predicted category for each testing
            image.

    Bug fix vs. the original: the running best vote count (`amount`) was
    never updated, so with k > 1 the *last* category receiving any vote won
    instead of the most frequent one. Unused locals (N, M, d) removed.
    '''
    CATEGORIES = ['Kitchen', 'Store', 'Bedroom', 'LivingRoom', 'Office',
                  'Industrial', 'Suburb', 'InsideCity', 'TallBuilding', 'Street',
                  'Highway', 'OpenCountry', 'Coast', 'Mountain', 'Forest']

    # dist[i, j] is the Euclidean distance from test image i to training
    # image j.
    dist = distance.cdist(test_image_feats, train_image_feats, metric='euclidean')

    test_predicts = []
    for row in dist:
        # Labels of the k nearest training images.
        neighbours = [train_labels[i] for i in np.argsort(row)[:k]]
        # Majority vote; ties are resolved by CATEGORIES order (strict '>'
        # keeps the first category reaching the maximum count).
        best_label, best_count = None, 0
        for category in CATEGORIES:
            votes = neighbours.count(category)
            if votes > best_count:
                best_label, best_count = category, votes
        test_predicts.append(best_label)
    return test_predicts
700d715f42f1fc60d05299634ce5489d3b1d246c | 1,178 | py | Python | programs/pygame/dwarf_fight/utils.py | xzpeter/pylibs | d4aa451e5ecb1cfb160a7e39846f9ae148e5c3d6 | [
"BSD-3-Clause"
] | null | null | null | programs/pygame/dwarf_fight/utils.py | xzpeter/pylibs | d4aa451e5ecb1cfb160a7e39846f9ae148e5c3d6 | [
"BSD-3-Clause"
] | null | null | null | programs/pygame/dwarf_fight/utils.py | xzpeter/pylibs | d4aa451e5ecb1cfb160a7e39846f9ae148e5c3d6 | [
"BSD-3-Clause"
] | null | null | null | import time
import math
import random
# Define some colors
# (r, g, b) tuples, one 0-255 value per channel, used as drawing colors.
black = ( 0, 0, 0)
white = ( 255, 255, 255)
green = ( 0, 255, 0)
blue = ( 0, 0, 255)
red = ( 255, 0, 0)
yellow = ( 255, 255, 0)
def debug (msg):
print "%s: %s" % (time.strftime("%D %H:%m:%S"), msg)
def warn (msg):
    # Convenience wrapper: log `msg` with a WARN prefix via debug().
    debug("WARN: " + msg)
def err (msg):
    # Log `msg` with an ERROR prefix, then abort by raising; this
    # function never returns normally.
    debug("ERROR: " + msg)
    raise Exception(msg)
def point_in_rect (point, rect):
    '''Return True when `point` (an (x, y) pair) lies inside `rect`,
    with all four edges treated as inclusive.'''
    px = point[0]
    py = point[1]
    return (rect.left <= px <= rect.right) and (rect.top <= py <= rect.bottom)
def rect_collide (rect1, rect2):
    '''Return True when any corner of `rect1` lies inside `rect2`.

    Corner containment only: the case where `rect2` sits entirely
    inside `rect1` (no corner of rect1 inside rect2) reports False,
    exactly as the original check did.'''
    corners = (rect1.topleft, rect1.topright,
               rect1.bottomleft, rect1.bottomright)
    return any(point_in_rect(corner, rect2) for corner in corners)
def vector_norm (vector):
    # Return the unit (length-1) vector pointing along the 2-D `vector`.
    # Fixes: the local was named `len`, shadowing the builtin; math.hypot
    # now computes sqrt(x*x + y*y) without intermediate overflow.
    # A zero vector still raises ZeroDivisionError (unchanged behavior).
    x = vector[0]
    y = vector[1]
    length = math.hypot(x, y)
    return [x/length, y/length]
def vector_mul (vector, n):
    # Scale every component of `vector` by the scalar `n`.
    # Generalized from the hard-coded 2-D form to any number of
    # components; 2-D callers get identical results.
    return [component * n for component in vector]
def random_vector (norm):
    '''Return a 2-D vector of length `norm` in a random direction.
    Components are drawn uniformly from [-1, 1) and the result is
    rescaled, so directions are sampled from a square (not
    angle-uniform), matching the original behavior.'''
    direction = [random.random() * 2 - 1,
                 random.random() * 2 - 1]
    return vector_mul(vector_norm(direction), norm)
| 22.653846 | 56 | 0.556876 | 177 | 1,178 | 3.649718 | 0.338983 | 0.012384 | 0.034056 | 0.049536 | 0.074303 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063702 | 0.293718 | 1,178 | 51 | 57 | 23.098039 | 0.71274 | 0.01528 | 0 | 0.097561 | 0 | 0 | 0.025907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.073171 | null | null | 0.02439 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70155480f5510345df2cb67b5bd92fa6a2bec8a6 | 4,564 | py | Python | yearn/vaults_v2.py | nymmrx/yearn-exporter | 64b87128b76cd637965abb56e421bfd67238e2a7 | [
"MIT"
] | null | null | null | yearn/vaults_v2.py | nymmrx/yearn-exporter | 64b87128b76cd637965abb56e421bfd67238e2a7 | [
"MIT"
] | null | null | null | yearn/vaults_v2.py | nymmrx/yearn-exporter | 64b87128b76cd637965abb56e421bfd67238e2a7 | [
"MIT"
] | 1 | 2021-06-04T19:07:16.000Z | 2021-06-04T19:07:16.000Z | from dataclasses import dataclass
from typing import List
from brownie import interface, web3
from brownie.network.contract import InterfaceContainer
from packaging import version
from yearn import strategies
from yearn import uniswap
from yearn.mutlicall import fetch_multicall
# Log topics (32-byte hashes) — presumably the keccak topics of the
# registry's vault-creation events; not referenced in this module chunk.
VAULTS_EVENT_TOPIC = '0xce089905ba4a4d622553bcb5646fd23e895c256f0376eee04e99e61cec1dc7e8'
EXPERIMENTAL_VAULTS_EVENT_TOPIC = '0x57a9cdc2a05e05f66e76769bdbe88e21ec45d9ee0f97d4cb60395d4c75dcbcda'
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# Oldest vault API version this module accepts (enforced in VaultV2.__post_init__).
MIN_VERSION = version.parse("0.2.0")
# Read-only vault view methods fetched in one multicall by VaultV2.describe().
VAULT_VIEWS = [
    "decimals",
    "totalAssets",
    "maxAvailableShares",
    "pricePerShare",
    "debtOutstanding",
    "creditAvailable",
    "expectedReturn",
    "totalSupply",
    "emergencyShutdown",
    "depositLimit",
    "debtRatio",
    "totalDebt",
    "lastReport",
    "managementFee",
    "performanceFee",
]
# Subset of VAULT_VIEWS whose raw on-chain values are divided by
# 10 ** decimals before being reported.
VAULT_VIEWS_SCALED = [
    "totalAssets",
    "maxAvailableShares",
    "pricePerShare",
    "debtOutstanding",
    "creditAvailable",
    "expectedReturn",
    "totalSupply",
    "depositLimit",
    "totalDebt",
]
@dataclass
class VaultV2:
    """A yearn v2 vault contract handle plus its attached strategies."""
    name: str
    vault: InterfaceContainer  # brownie contract wrapper for the vault
    strategies: List[strategies.Strategy]

    def __post_init__(self):
        # Refuse to wrap vaults older than the supported API version.
        api_version = version.parse(self.vault.apiVersion())
        assert api_version >= MIN_VERSION, f"{self.name} unsupported vault api version {api_version}"

    def describe(self):
        """Return a dict snapshot of the vault's view methods, each
        strategy's description, the token price, and (when available) TVL.
        Falls back to strategy info only when the multicall fails."""
        scale = 10 ** self.vault.decimals()
        # NOTE(review): `strats` is built and padded to 40 entries but never
        # used below — looks like leftover code; confirm before removing.
        strats = [str(strat.strategy) for strat in self.strategies]
        strats.extend([ZERO_ADDRESS] * (40 - len(strats)))
        try:
            # One batched RPC call for all view methods.
            results = fetch_multicall(*[[self.vault, view] for view in VAULT_VIEWS])
            info = dict(zip(VAULT_VIEWS, results))
            for name in VAULT_VIEWS_SCALED:
                info[name] /= scale
            info['strategies'] = {}
        except ValueError as e:
            # Multicall failed: report strategies only (no view values).
            info = {"strategies": {}}
        for strat in self.strategies:
            info["strategies"][strat.name] = strat.describe()
        try:
            info["token price"] = uniswap.token_price(self.vault.token())
        except ValueError:
            info["token price"] = 0
        if "totalAssets" in info:
            info["tvl"] = info["token price"] * info["totalAssets"]
        return info
# Production vaults, keyed "<token> <api version>" -> vault address.
vaults = {
    "DAI 0.3.0": "0x19D3364A399d251E894aC732651be8B0E4e85001",
    "USDC 0.3.0": "0x5f18C75AbDAe578b483E5F43f12a39cF75b973a9",
    "HEGIC 0.3.0": "0xe11ba472F74869176652C35D30dB89854b5ae84D",
    "curve.fi/steth 0.3.0": "0xdCD90C7f6324cfa40d7169ef80b12031770B4325",
    "WBTC 0.3.1": "0xcB550A6D4C8e3517A939BC79d0c7093eb7cF56B5",
    "WETH 0.3.2": "0xa9fE4601811213c340e850ea305481afF02f5b28",
    "curve.fi/seth 0.3.2": "0x986b4AFF588a109c09B50A03f42E4110E29D353F",
}

# Experimental / not-yet-promoted vaults.  The Etherscan links appear to
# point at associated strategy contracts — TODO confirm.
experimental_vaults = {
    "sUSD Idle 0.3.1": "0x3466c90017F82DDA939B01E8DBd9b0f97AEF8DfC",
    # https://etherscan.io/address/0xA04fE40eD8a8a8d657E41276ec9e9Ee877675e34#code
    "WETH Gen Lender 0.3.1": "0x5f18C75AbDAe578b483E5F43f12a39cF75b973a9",
    # https://etherscan.io/address/0xac5DA2Ca938A7328dE563D7d7209370e24BFd21e#code
    # "Egyptian God sETH/ETH 0.3.0": "0x0e880118C29F095143dDA28e64d95333A9e75A47",
    # https://etherscan.io/address/0x3B1a1AE6052ccD643a250fa843c1fB20F9246E1a#code
    "WETH Iron Lender 0.3.0": "0xED0244B688cF059f32f45E38A6ac6E479D6755f6",
    # https://etherscan.io/address/0xa35A4972D74d4B3e4486163066E5fFed6d62b213#code
    "yvSushi YFI-ETH 0.2.2": "0x27Eb83254D900AB4F9b15d5652d913963FeC35e3",
    # https://etherscan.io/address/0x3213a6389f3f4c287925a47A6D44fe1148FA0C0d#code
    "DEV Hugger 0.2.2": "0xFeD651936Af7e98F7F2A93c03B1E28a2DA7dfaD4",
    # https://etherscan.io/address/0x2E949057Ce561BAA9d494895235ACCe310a73FDB#code
    # https://etherscan.io/address/0x38a97cB34FCE4FAc87D1F7f8639e3341978613b6#code
    "USDc Idle 0.2.2": "0x33bd0f9618cf38fea8f7f01e1514ab63b9bde64b",
    # https://etherscan.io/address/0xc29CBe79F1a35a6AA00Df70851E36B14316Ab990#code
    "Mushroom Worker 0.3.0": "0x0e8A7717A4FD7694682E7005957dD5d7598bF14A"
    # https://etherscan.io/address/0xE5dc99Cbf841A6721781E592214674A87a1A70BC#code
    # Left out Lido St. Ether Vault, and ApeTrump Vault
}
def get_vaults():
    """Wrap every address in `vaults` in a VaultV2 (no strategies attached)."""
    # TODO: read from registry
    wrapped = []
    for vault_name, address in vaults.items():
        wrapped.append(
            VaultV2(name=vault_name, vault=interface.Vault(address),
                    strategies=[])
        )
    return wrapped
def get_experimental_vaults():
    """Wrap every address in `experimental_vaults` in a VaultV2."""
    wrapped = []
    for vault_name, address in experimental_vaults.items():
        wrapped.append(
            VaultV2(name=vault_name, vault=interface.Vault(address),
                    strategies=[])
        )
    return wrapped
| 36.806452 | 118 | 0.72064 | 420 | 4,564 | 7.754762 | 0.366667 | 0.007369 | 0.044212 | 0.063555 | 0.116672 | 0.101934 | 0.101934 | 0.04237 | 0.04237 | 0.04237 | 0 | 0.214909 | 0.171122 | 4,564 | 123 | 119 | 37.105691 | 0.646048 | 0.182954 | 0 | 0.215054 | 0 | 0 | 0.387645 | 0.205557 | 0 | 0 | 0.205557 | 0.00813 | 0.010753 | 1 | 0.043011 | false | 0 | 0.086022 | 0.021505 | 0.204301 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
702de2efe4664687f5a4403142b920e4ac69ee5e | 22,245 | py | Python | exactdiag_4site2particles/fh.py | PedroMDuarte/thesis-hubbard | c63df1283086267bd3014e084b36408cbdcde5eb | [
"MIT"
] | 2 | 2019-06-08T14:55:24.000Z | 2021-01-18T13:52:17.000Z | exactdiag_4site2particles/fh.py | PedroMDuarte/thesis-hubbard | c63df1283086267bd3014e084b36408cbdcde5eb | [
"MIT"
] | null | null | null | exactdiag_4site2particles/fh.py | PedroMDuarte/thesis-hubbard | c63df1283086267bd3014e084b36408cbdcde5eb | [
"MIT"
] | 2 | 2020-07-08T05:50:34.000Z | 2022-01-17T09:57:31.000Z | import numpy as np
class lattice():
    """Contains functions to help out calculate matrices
    in the Fermi-Hubbard model"""

    def __init__(self, xs,ys,zs):
        '''The dimensions of the grid are given to initialize the lattice.
        Recommended max of 4 sites, otherwise it can take too long to
        complete.'''
        # x, y, and z have the shape of the grid, and contain the
        # respective (x,y,z) coordinates of the lattice sites:
        self.x, self.y, self.z = np.mgrid[ 0:xs, 0:ys, 0:zs]
        self.xs = xs
        self.ys = ys
        self.zs = zs

    def show(self,spins):
        ''' This prints a particular state to the terminal'''
        # NOTE: Python 2 print statements (trailing comma = same line).
        for i in np.ravel(spins):
            print "%d "%i,
        print

    def state(self,m):
        '''
        # Each site can have 4 possible configurations, we have
        # labeled them as follows:
        #
        # 0 = vacuum
        # 1 = spin up
        # 2 = spin down
        # 3 = doubly occupied
        #
        # All possible states are numbered with an index m. This function
        # constructs the m_th state in the lattice. The spin configuration of
        # the m_th state is stored in the 'spins' matrix and returned.
        #
        # Since there are 4 possible states per site (see above) the
        # convention is that m be represented in base-4 (quaternary) and
        # each digit can be assigned using the 0,1,2,3 convention above.
        #
        '''
        spins = np.zeros_like( self.x)
        i = 0
        end = False
        # Peel off base-4 digits of m, one per site; if m needs more
        # digits than there are sites, the state index is out of range.
        while m > 0:
            if i>=spins.size:
                end =True
                break
            spins.flat[i] = (m%4)
            # NOTE: integer division (Python 2 semantics for "/").
            m = m /4
            i = i +1
        if end:
            return None
        else:
            return spins

    def sector(self):
        '''Spin sector S_z of the current state: +1 per up, -1 per down.'''
        # Finds the spin sector for the current state
        s = 0
        for i in self.spins.flat:
            if i == 0 : s = s+0
            elif i == 1 : s = s+1
            elif i == 2 : s = s-1
            elif i == 3 : s = s+0
        return s

    def filling(self):
        '''Total particle number of the current state (doublons count 2).'''
        # Finds the filling for the current state
        f = 0
        for i in self.spins.flat:
            if i == 0 : f = f+0
            elif i == 1 : f = f+1
            elif i == 2 : f = f+1
            elif i == 3 : f = f+2
        return f

    def defstates(self):
        '''This function defines the half filling states of the
        Fermi-Hubbard model in a 3D lattice.
        It creates a dictionary where the keys correspond to the
        different spin sectors available, and the values are a list
        of the states in the spin sector.
        For a balanced spin mixture one only needs to consider the
        spin=0 sector.
        '''
        end = False
        n = 0
        self.states = {}
        # NOTE(review): the 300 cap bounds the enumeration; for 4 sites
        # there are 4**4 = 256 states, so 300 covers them all.
        while n < 300:
            self.spins = self.state(n)
            # ATTENTION: in this code we have changed to HALF-FILLING to
            # QUARTER-FILLING, in order to explore the 2x2 lattice with only
            # 2 particles. We use /2 in the filling check:
            # (integer division — relies on Python 2 "/" semantics)
            if self.spins is not None and self.filling() == self.spins.size/2:
                sec = self.sector()
                if sec in self.states.keys():
                    self.states[ sec].append(self.spins)
                else:
                    self.states[ sec]=[self.spins]
            n = n+1
        for k in self.states.keys():
            print "Sector %d, %d states:"%(k,len(self.states[k]))
            for spins in self.states[k]:
                self.show(spins)

    def nearest(self):
        '''This function makes a list of the nearest neighbor
        pairs in the lattice'''
        print "\nNearest neighbors:"
        # First we create a flat list of all the lattice sites.
        # each element in the list is (x[i], y[i], z[i], i)
        sites = []
        for i in range(self.x.size):
            sites.append( (self.x.flat[i], self.y.flat[i], self.z.flat[i], i))
        # We do a nested iteration over the lists and create a list
        # of pairs which are nearest neighbors (squared distance == 1).
        neighbors = []
        for i,s1 in enumerate(sites):
            for j,s2 in enumerate(sites):
                if j > i:
                    d2 = (s1[0]-s2[0])**2 + (s1[1]-s2[1])**2 + (s1[2]-s2[2])**2
                    print s1,"--",s2," = ",d2
                    if d2 == 1:
                        neighbors.append( (s1[3],s2[3]))
        print
        print "Final neighbor list: "
        print neighbors
        self.neighbors = neighbors

    def kinetic0(self):
        r'''This function calculates the kinetic energy matrix
        in the spin=0 sector.
        The matrix is constructed by iterating over the nearest neighbors.
        As a reminder, the kinertic enrgy is given by
        K = -t \sum_{\langle i j \rangle} a_{i\sigma}^{\dagger} a_{j\sigma}
        So in order to find it's matrix elements we need to apply first an
        annihilation operator and then a creation operator. The tricky part
        is keeping track of the signs.
        '''
        print
        msize = len(self.states[0])
        kinetic = np.zeros((msize,msize))
        for i,s1 in enumerate(self.states[0]):
            for j,s2 in enumerate(self.states[0]):
                # We will calculate the matrix element
                # < s1 | K | s2 >
                # This matrix element involves a sum over nearest neighbors
                # and sum over spins, so we go ahead and iterate:
                t = 0.
                for n in self.neighbors:
                    PRINT = False
                    for spin in ['up','down']:
                        if PRINT:
                            print
                            print "<", np.ravel(s1)," | K | ", np.ravel(s2),">"
                        # Annihilates 'spin' at site n[0]
                        signA, stateA = annihilate( n[0], spin, s2)
                        # Create 'spin' at site n[1]
                        signC, stateC = create(n[1], spin, stateA)
                        if PRINT:
                            print "annihilate %d,%5s"%(n[0],spin)," -->",stateA
                            print " create %d,%5s"%(n[1],spin)," -->",stateC
                        # If K|s2> has a projecton on <s1| then we add it to
                        # t
                        if np.array_equal(stateC,np.ravel(s1)):
                            if PRINT: print " tmatrix --> % d" % (signA*signC )
                            t+= signA*signC
                        r'''
                        Notice that sometimes people write the kinetic energy as
                        K = -t \sum_{\langle i j \rangle}
                        a_{i\sigma}^{\dagger} a_{j\sigma} + c.c.
                        where the letters c.c. refer to the complex conjugate.
                        If they do that, then it means that the sum over nearest
                        neighbors must only occur for one ordering of the
                        neighbor pair, for instance just 1-2 whereas the sum
                        over both orderings includes 1-2 and 2-1.
                        Here we just run the sum over both orderings.
                        '''
                        # We repeat the process with the different neighbor
                        # ordering:
                        signA, stateA = annihilate( n[1], spin, s2)
                        signC, stateC = create(n[0], spin, stateA)
                        if PRINT:
                            print "annihilate %d,%5s"%(n[1],spin)," -->",stateA
                            print " create %d,%5s"%(n[0],spin)," -->",stateC
                        if np.array_equal(stateC,np.ravel(s1)):
                            if PRINT: print " tmatrix --> % d" % (signA*signC )
                            t+= signA*signC
                kinetic[i,j] = t
        print "\nKinetic energy matrix: ",kinetic.shape
        print kinetic
        self.kinetic = kinetic

    def interaction0(self):
        '''This fuction calculates the interaction energy matrix
        in the spin=0 sector'''
        print
        msize = len(self.states[0])
        inter = np.zeros((msize,msize))
        # The basis we have chose is of number states,
        # so the interaction energy is diagonal
        for i,s1 in enumerate(self.states[0]):
            for site in s1.flat:
                if site == 3: # 3=double occupancy
                    inter[i,i] = inter[i,i] + 1
        print "\nInteraction energy matrix:i ",inter.shape
        print inter
        self.inter = inter

    def diagonal0(self):
        '''This fuction calculates a diagonal matrix
        in the spin=0 sector'''
        print
        msize = len(self.states[0])
        diag = np.zeros((msize,msize))
        # The basis we have chose is of number states,
        # so the interaction energy is diagonal
        # NOTE(review): the inner loop just re-assigns diag[i,i] = 1.0 once
        # per site — the result is simply the identity matrix.
        for i,s1 in enumerate(self.states[0]):
            for site in s1.flat:
                diag[i,i] = 1.0
        self.diag = diag
def annihilate( i, spin, state):
    """Apply the fermionic annihilation operator for `spin` at site `i`.

    Returns (sign, new_state): the anticommutation sign picked up by
    commuting the operator past the particles occupying sites left of
    `i`, and the resulting occupation array (flattened copy).  If no
    particle of this spin is present at the site, the zero state is
    returned.  Site codes: 0 vacuum, 1 up, 2 down, 3 doubly occupied.
    Ordering convention: lower site number to the left, spin-up to the
    left within a site.
    """
    occ = np.ravel(state)
    result = np.copy(occ)
    same = {'up': 1, 'down': 2}
    flip = {'up': 2, 'down': 1}
    # One commutation per particle on sites 0..i-1 (a doublon counts twice).
    ncommute = 0.
    for k in range(i):
        if occ[k] == 3:
            ncommute += 2
        elif occ[k] in (1, 2):
            ncommute += 1
    sign = (-1) ** ncommute
    target = occ[i]
    if target == 0 or target == flip[spin]:
        # No particle with this spin at the site: operator kills the state.
        result = np.zeros_like(occ)
    elif target == 3:
        # Remove one spin from a doublon; removing the right-hand (down)
        # particle costs no extra sign, removing past it does.
        result[i] = flip[spin]
        if spin == 'down':
            sign *= -1
    elif target == same[spin]:
        result[i] = 0
    return sign, result
def create( i, spin, state):
    """Apply the fermionic creation operator for `spin` at site `i`.

    Returns (sign, new_state): the anticommutation sign from commuting
    past the particles occupying sites left of `i`, and the resulting
    occupation array (flattened copy).  A Pauli-blocked creation (site
    already holds this spin, or is doubly occupied) returns the zero
    state.  Site codes: 0 vacuum, 1 up, 2 down, 3 doubly occupied.
    Ordering convention: lower site number to the left, spin-up to the
    left within a site.
    """
    occ = np.ravel(state)
    result = np.copy(occ)
    same = {'up': 1, 'down': 2}
    flip = {'up': 2, 'down': 1}
    # One commutation per particle on sites 0..i-1 (a doublon counts twice).
    ncommute = 0.
    for k in range(i):
        if occ[k] == 3:
            ncommute += 2
        elif occ[k] in (1, 2):
            ncommute += 1
    sign = (-1) ** ncommute
    target = occ[i]
    if target == 0:
        result[i] = same[spin]
    elif target == flip[spin]:
        # Form a doublon; creating the left-hand (up) slot past the
        # existing down particle costs no extra sign, the reverse does.
        result[i] = 3
        if spin == 'down':
            sign *= -1
    elif target == 3 or target == same[spin]:
        # Pauli exclusion: the state is annihilated.
        result = np.zeros_like(occ)
    return sign, result
def puretext(state):
    """Render an occupation state as a compact ket string, e.g. '|1,2,0,D>'.
    Assumes every entry uses the site codes 0 (vacuum), 1 (up), 2 (down),
    3 (double occupancy, printed as 'D')."""
    symbols = {0: '0', 1: '1', 2: '2', 3: 'D'}
    flat = np.ravel(state)
    return '|' + ','.join(symbols[int(v)] for v in flat) + '>'
def latex(state):
    '''Render an occupation state as a LaTeX snippet.

    With MATRIX = True the state (whose shape must have exactly one
    singleton axis and two axes > 1) is drawn as a 2-D array with
    column separators; otherwise it is rendered as a flat ket string.
    Site codes: 0 vacuum, 1 up-arrow, 2 down-arrow, 3 up+down.
    '''
    MATRIX = True
    if MATRIX:
        # one of the dimensions needs to be 1 to do matrix output
        dims = []
        idx = []
        one = None
        for ss,s in enumerate(state.shape):
            if s > 1 :
                dims.append( s )
                idx.append( ss )
            if s == 1 : one = ss
        assert( len(dims) == 2 )
        assert( one is not None )
        # Column justification string, e.g. 'c|c' for two columns.
        just=''
        for i in range( dims[0] ) :
            just = just+ 'c'
            if i < dims[0]-1:
                just = just +'|'
        #print state
        out = r"$ \begin{array}{"+just+"} "
        for mm in range(dims[0]):
            for nn in range(dims[1]):
                # Build the full index tuple: 0 on the singleton axis,
                # (mm, nn) on the two non-trivial axes.
                tup = np.empty_like( state.shape )
                tup[one] = 0
                tup[idx[0]] = mm
                tup[idx[1]] = nn
                i = state[ tuple( tup.tolist() ) ]
                if i == 0 : out+='0'
                elif i == 1 : out+=r'\uparrow'
                elif i == 2 : out+=r'\downarrow'
                elif i == 3 : out+=r'\uparrow\! \downarrow'
                if nn < dims[1] - 1 :
                    out += ' & '
                else:
                    out += r' \\ '
            if mm < dims[0] - 1 :
                out += r'\hline'
        out += r"\end{array}$"
        return out
    else:
        # Flat ket rendering, analogous to puretext() but in LaTeX.
        out = r"$|"
        for j,i in enumerate(np.ravel(state)):
            if i == 0 : out+='0'
            elif i == 1 : out+=r'\uparrow'
            elif i == 2 : out+=r'\downarrow'
            elif i == 3 : out+=r'\uparrow\! \downarrow'
            if j+1< state.size:
                out+=','
            else:
                out+= r'\rangle'
        out+=r'$'
        return out
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc
rc('font',**{'family':'serif'})
if __name__=="__main__":

    #a = lattice(2,2,1)
    #a.defstates()
    #a.nearest()
    #a.kinetic0()
    #a.interaction0()
    #np.savetxt('221_t.dat', a.kinetic)
    #np.savetxt('221_U.dat', a.inter)

    # Build the lattice, enumerate states, and compute the kinetic (t)
    # and interaction (U) matrices for the chosen system size.
    SITES = 4
    if SITES == 4 :
        b = lattice(1,2,2)
        b.defstates()
        b.nearest()
        b.kinetic0()
        b.interaction0()
        b.diagonal0()
        np.savetxt('221_t.dat', b.kinetic, fmt='% 01d')
        np.savetxt('221_U.dat', b.inter, fmt='% 01d')
        outfile = 'Ut_eigenvalues_4site.png'
    elif SITES == 2:
        b = lattice(2,1,1)
        b.defstates()
        b.nearest()
        b.kinetic0()
        b.interaction0()
        b.diagonal0()
        np.savetxt('211_t.dat', b.kinetic, fmt='%01d')
        np.savetxt('211_U.dat', b.inter, fmt='%01d')
        outfile = 'Ut_eigenvalues_2site.png'

    # SOLUTION IS CALCULATED FOR A SET OF U VALUES
    t = 1.
    U = np.linspace(0.1,18.,32)
    eva = []
    eve = []
    for u in U:
        # Hamiltonian H = t*K + u*V for this interaction strength.
        H = t*b.kinetic + u*b.inter
        ##print H
        evals,evecs = np.linalg.eigh(H)
        ##print "U = ",u
        ##print evals
        ##print evecs
        SORT = True
        if SORT:
            # Sort the eigenvals and eigenvecs
            index = np.argsort(evals)
            eva.append(evals[index])
            # Ensure the eigenvecs have correct phase
            vecs=[]
            for i in index:
                vec = evecs[:,index[i]]
                #Find first entry that is non-zero
                # (the loop variable i is deliberately reused here)
                i = list(np.abs(vec) > 1e-5).index(True)
                vec = vec / np.sign(vec[i])
                vecs.append(vec)
            vecs = np.transpose( np.array(vecs) )
            eve.append(vecs)
        else:
            eva.append(evals)
            eve.append(evecs)
        if False:
            print
            print evals[index]
            print evecs[index]
            print "#################"
            print index
            print
            for i in index:
                print "Eigenvalue %d = "%i, evals[index[i]]
                print "Eigenvector %d = "%i, evecs[:,index[i]]
                print "H*ev %d = "%i, np.dot(H, evecs[:,index[i]])
                #print np.dot(H, evecs[index[i]]) / evecs[index[i]]
            print
    eva = np.array(eva)
    eve = np.array(eve)
    print "Eigenvalues", eva.shape
    print "Eigenvectors", eve.shape

    # SOLUTIONS ARE PLOTTED
    # Start matplotlib
    from matplotlib import rc
    rc('text', usetex=True)
    plt.rcParams['text.latex.preamble'] = [
        r'\usepackage{bm}', # for bold math
    ]
    plt.rcParams['axes.linewidth'] = 0.6
    plt.rcParams['patch.linewidth'] = 0.4

    nstates = len(b.states[0])
    print "Number of States in Sector 0 = ", nstates
    #This number should be a square:
    if np.abs( np.sqrt(nstates) % 1. ) > 1e-4:
        # NOTE(review): this bare string is a no-op statement; a print or
        # a raised exception was probably intended here.
        "Error, number of states in Sector 0 is not a square."
    SQUARE = False
    if SQUARE:
        plotrows = int(np.sqrt(nstates))
        plotcols = plotrows
    else:
        plotrows = 2
        plotcols = 8

    figure = plt.figure(figsize=(4.5*plotrows,2.8*plotrows))
    print "Making %d x %d figure" % (plotrows, plotcols)
    gs0 = matplotlib.gridspec.GridSpec( 1,1, left=0.3, right=0.7,\
            bottom=0.52, top=0.98)
    gs = matplotlib.gridspec.GridSpec( plotrows, plotcols, \
            left=0.03, right=0.98, bottom=0.05, top=0.42, \
            wspace=0.14, hspace=0.05)
    figure.suptitle('')
    ax = plt.subplot( gs0[0] )
    #ax = plt.subplot( gs[0:plotrows,0:plotcols0] )
    axvs = []
    for i in range(plotrows):
        for j in range(plotcols):
            axvs.append( plt.subplot( gs[i,j]))

    # Find indices for the ground state, and other relevant states
    ground = 0
    high = nstates-1
    if SITES == 4:
        important = [ground, high]
    if SITES == 2:
        important = [ground, high]
    # Find if there is a state with energy U
    Uindex = -1
    for nn in range(nstates):
        if np.abs( eva[Uindex,nn] - U[Uindex] ) < 1e-4:
            #important.append(nn)
            break
    print "Importaant states = ", important
    cc = 0
    c=['blue','red', 'green','red','black','purple','limegreen','orange','brown']
    # Plot eigenvalue curves vs U; "important" states get color and their
    # eigenvector components are drawn in the small subplots.
    for col in range(eva.shape[1]):
        labeltxt = '%d'%col
        if col in important:
            color = c[cc % len(c)]
            ax.plot( U, eva[:,col], '-', c=color,lw=1.5,\
                     label=labeltxt)
            for i,axv in enumerate(axvs):
                if i >= len(eve[0,:,0]):
                    continue
                if col == 6:
                    subset = U > 4
                    axv.plot( U[subset], eve[:,i,col][subset],\
                              '-',c=color,lw=1.1,alpha=1.0)
                else:
                    axv.plot( U, eve[:,i,col],\
                              '-',c=color,lw=1.1,alpha=1.0 )
            cc = cc + 1
        else:
            ax.plot( U, eva[:,col], '-', c='0.5',lw=0.8, alpha=0.4)

    # Print out the ground state for various Us
    Uindex=U.size-1
    #Uindex=0
    Eindex =0
    print
    print "Energies = ", eva[Uindex,:]
    print "Ground state U=",U[Uindex]," E=",eva[Uindex,Eindex], ":"
    # Organize the basis states by the magnitude of their projection
    # onto the ground state
    order = np.argsort(np.abs(eve[Uindex,:,Eindex]))[::-1]
    for i in order:
        print "%02d --> % 02.6f %s" %(i,eve[Uindex,i,Eindex], \
            puretext(b.states[0][i]))
    print "Ground state norm = ", np.linalg.norm( eve[Uindex,:,Eindex] )

    # NOTE(review): 07 and 05 below are Python 2 octal literals (== 7, 5);
    # this syntax is invalid in Python 3.
    frame_coding = { \
        12: 'blue',\
        07: 'blue',\
        05: 'blue',\
        9: 'blue',\
        0: 'red',\
        15: 'red',\
        8: 'red',\
        3: 'red',\
    }
    # Label each eigenvector subplot with the LaTeX picture of its basis
    # state; selected states get a color from frame_coding.
    for i,axv in enumerate(axvs):
        if i in frame_coding.keys():
            if False:
                for spine in axv.spines.values():
                    spine.set_edgecolor( frame_coding[i] )
            axv.text( 0.28,0.16,latex( b.states[0][i]), rotation=0 ,\
                ha='center',va='center', fontsize=9,\
                color = frame_coding[i],\
                bbox=dict(facecolor='white', lw=0., pad=-1.),\
                transform=axv.transAxes)
        else:
            axv.text( 0.28,0.16,latex( b.states[0][i]), rotation=0 ,\
                ha='center',va='center', fontsize=9,\
                bbox=dict(facecolor='white', lw=0., pad=-1.),\
                transform=axv.transAxes)
        axv.yaxis.grid(which='both', alpha=0.3)
        axv.xaxis.grid(which='major', alpha=0.3)
        axv.set_ylim(-1.1,1.1)
        axv.set_xlim(0., 18.4)
        axv.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(6.) )
        axv.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(3.) )
        axv.yaxis.set_major_locator( matplotlib.ticker.MultipleLocator(1.) )
        axv.yaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(0.5) )
        axv.tick_params(axis='both', which='major', labelsize=9., length=1.5)
        axv.tick_params(axis='both', which='minor', labelsize=9., length=1.0)
        if i // plotcols < plotrows-1:
            axv.xaxis.set_ticklabels([])
        if i % plotcols > 0 :
            axv.yaxis.set_ticklabels([])

    # NOTE(review): set_xlabel('$U/t$') is called twice on ax (harmless).
    ax.set_xlabel('$U/t$')
    #ax.plot( U, -4./U , '--', color='black')
    #ax.grid()
    ax.set_ylim(-6., 20.)
    ax.set_xlabel('$U/t$')
    ax.set_ylabel('$E/t$')
    #ax.legend(loc='best',numpoints=1,ncol=int(nstates)//8,\
    #          prop={'size':10}, \
    #          handlelength=1.2,handletextpad=0.5)
    #gs.tight_layout( figure, rect=[0.0, 0.0, 1.00, 0.7])
    figure.savefig(outfile, dpi=250)

    # Print out the analytical ground state calculated by R.Schuman
    # (arXiv:cond-mat/0101476v1)
    t = 1. + 0j
    u = 18. + 0j
    s3 = np.sqrt(3.)
    X = np.sqrt( -1.*( t**2 * ( 512.*t**4 + 26.*t**2*u**2 + u**4 )))
    Y = -36.*t**2*u + u**3. + 6.*np.sqrt(6.) * X
    C1n = 6j *np.sqrt(2.) * t * Y**(1./3.)
    C1d = -48. * ( -1j + s3)*t**2 - ( -1j + s3)*u**2 + 4j*u * Y**(1./3.) \
            + (1j+s3) * Y**(2./3.)
    C1 = C1n / C1d
    C2n = 6j *np.sqrt(2.) * t * Y**(1./3.)
    C2d = -48. * ( -1j + s3)*t**2 + ( -u + Y**(1./3.) ) * \
            ( (-1j+s3)*u + (1j+s3)* Y**(1./3.))
    C2 = C2n / C2d
    C3 = -1 / (2.*np.sqrt(2.) )
    print "Schumman eigenvector result:"
    print "C1 = ",C1
    print "C2 = ",C2
    print "C3 = ",C3
    norm = 4.*np.abs(C1)**2 + 4.*np.abs(C2)**2 \
            + 8.*np.abs(C3)**2
    # NOTE(review): the trailing backslash on the next line glues it to the
    # following print statement and is a syntax error as written — almost
    # certainly a stray character that should be deleted.
    print "Schumman norm = ", norm\
    print "Schumman eigenvector (normalized):"
    print "C1 = ",C1 / np.sqrt(norm)
    print "C2 = ",C2 / np.sqrt(norm)
    print "C3 = ",C3 / np.sqrt(norm)
    # It seems that my eigenvectors are correct, by comparing to the
    # analytical result by schuman
7030d6e9feb4e14bd7bd841d594abc3eb0f18ef0 | 5,422 | py | Python | tests/test_connection.py | miiklay/pymapd | 4665ea704eb7ffabf72048f1cb3519b4497b8830 | [
"Apache-2.0"
] | null | null | null | tests/test_connection.py | miiklay/pymapd | 4665ea704eb7ffabf72048f1cb3519b4497b8830 | [
"Apache-2.0"
] | null | null | null | tests/test_connection.py | miiklay/pymapd | 4665ea704eb7ffabf72048f1cb3519b4497b8830 | [
"Apache-2.0"
] | null | null | null | import pytest
from mapd.ttypes import TColumnType, TTypeInfo
from pymapd import OperationalError, connect
from pymapd.cursor import Cursor
from pymapd.connection import _parse_uri, ConnectionInfo
from pymapd._parsers import ColumnDetails, _extract_column_details
class TestConnect(object):
    """Tests for pymapd.connect().

    NOTE(review): `mock_transport` and `mock_client` are pytest fixtures
    defined outside this file (presumably in conftest.py); they appear to
    stub the thrift transport/client so no server is needed — confirm there.
    """

    def test_host_specified(self):
        # connect() requires a host; user alone is not enough.
        with pytest.raises(TypeError):
            connect(user='foo')

    def test_raises_right_exception(self):
        # An unreachable server surfaces as OperationalError, not a raw
        # thrift/socket error.
        with pytest.raises(OperationalError):
            connect(host='localhost', protocol='binary', port=1234)

    def test_close(self, mock_transport, mock_client):
        # `closed` flips from 0 to 1 after close().
        con = connect(user='user', password='password',
                      host='localhost', dbname='dbname')
        assert con.closed == 0
        con.close()
        assert con.closed == 1

    def test_connect(self, mock_transport, mock_client):
        # Exactly one client is built, and credentials are forwarded in
        # (user, password, dbname) order.
        con = connect(user='user', password='password',
                      host='localhost', dbname='dbname')
        assert mock_client.call_count == 1
        assert con._client.connect.call_args == [
            ('user', 'password', 'dbname')
        ]

    def test_context_manager(self, mock_transport, mock_client):
        # `with con` yields a Cursor and does NOT close the connection.
        con = connect(user='user', password='password',
                      host='localhost', dbname='dbname')
        with con as cur:
            pass
        assert isinstance(cur, Cursor)
        assert con.closed == 0

    def test_commit_noop(self, mock_transport, mock_client):
        # commit() is a DB-API-mandated no-op returning None.
        con = connect(user='user', password='password',
                      host='localhost', dbname='dbname')
        result = con.commit()  # it worked
        assert result is None

    def test_bad_protocol(self, mock_transport, mock_client):
        # Unknown protocol names are rejected and echoed in the message.
        with pytest.raises(ValueError) as m:
            connect(user='user', host='localhost', dbname='dbname',
                    protocol='fake-proto')
        assert m.match('fake-proto')
class TestURI(object):
    """Tests for connection-URI handling."""

    def test_parse_uri(self):
        # A fully-specified URI decomposes into its ConnectionInfo fields.
        uri = 'mapd://mapd:HyperInteractive@localhost:9091/mapd?protocol=binary'
        parsed = _parse_uri(uri)
        assert parsed == ConnectionInfo("mapd", "HyperInteractive",
                                        "localhost", 9091, "mapd", "binary")

    def test_both_raises(self):
        # Supplying a full URI *and* individual parameters is ambiguous
        # and must be rejected.
        uri = 'mapd://mapd:HyperInteractive@localhost:9091/mapd?protocol=binary'
        with pytest.raises(TypeError):
            connect(uri=uri, user='my user')
class TestExtras(object):
    """Tests for the thrift-metadata parsing helpers."""

    def test_extract_row_details(self):
        # One row per column: (name, thrift type id, encoding,
        # comp_param, expected type name).
        specs = [
            ('date_', 6, 4, 32, 'STR'),
            ('trans', 6, 4, 32, 'STR'),
            ('symbol', 6, 4, 32, 'STR'),
            ('qty', 1, 0, 0, 'INT'),
            ('price', 3, 0, 0, 'FLOAT'),
            ('vol', 3, 0, 0, 'FLOAT'),
        ]
        data = [
            TColumnType(
                col_name=name,
                col_type=TTypeInfo(type=type_id, encoding=enc,
                                   nullable=True, is_array=False,
                                   precision=0, scale=0,
                                   comp_param=comp),
                is_reserved_keyword=False, src_name='')
            for name, type_id, enc, comp, _ in specs
        ]
        expected = [
            ColumnDetails(name=name, type=type_name, nullable=True,
                          precision=0, scale=0, comp_param=comp)
            for name, _, _, comp, type_name in specs
        ]
        assert _extract_column_details(data) == expected
| 43.725806 | 79 | 0.536887 | 549 | 5,422 | 5.129326 | 0.200364 | 0.051136 | 0.06392 | 0.068182 | 0.599787 | 0.554332 | 0.554332 | 0.554332 | 0.554332 | 0.554332 | 0 | 0.021203 | 0.356326 | 5,422 | 123 | 80 | 44.081301 | 0.785673 | 0.00166 | 0 | 0.409524 | 0 | 0 | 0.080022 | 0.018111 | 0 | 0 | 0 | 0 | 0.095238 | 1 | 0.095238 | false | 0.057143 | 0.057143 | 0 | 0.180952 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
703107f699c7f26c45918048eaa30145a2e5dcb7 | 1,149 | py | Python | setup.py | tmeiczin/pyautomount | 1e1b01538e8cb15931e63a97633f37f3e55a96b3 | [
"MIT"
] | null | null | null | setup.py | tmeiczin/pyautomount | 1e1b01538e8cb15931e63a97633f37f3e55a96b3 | [
"MIT"
] | null | null | null | setup.py | tmeiczin/pyautomount | 1e1b01538e8cb15931e63a97633f37f3e55a96b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from setuptools import setup, find_packages
from subprocess import Popen, PIPE
setup(
    name='pyautomount',
    version='1.0.0',
    # Bug fix: setuptools expects `author` to be a string, not a list.
    author='Terrence Meiczinger',
    author_email='terrence72@gmail.com',
    license='LICENSE',
    url='https://github.com/tmeiczin/pyautomount',
    # Bug fix: download_url previously pointed at the unrelated pydhcp
    # repository (copy-paste leftover).
    download_url='https://github.com/tmeiczin/pyautomount',
    description='Python Auto Mounter',
    long_description=open('README.md').read(),
    # README.md is markdown; tell PyPI how to render it.
    long_description_content_type='text/markdown',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=False,
    zip_safe=False,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Utilities',
    ],
    install_requires=[
        'pyudev',
    ],
    entry_points={
        'console_scripts': [
            'pyautomounter = pyautomount.__main__:main'
        ],
    },
)
| 29.461538 | 70 | 0.624021 | 112 | 1,149 | 6.258929 | 0.705357 | 0.081312 | 0.10699 | 0.048502 | 0.071327 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00905 | 0.230635 | 1,149 | 38 | 71 | 30.236842 | 0.783937 | 0.017406 | 0 | 0.085714 | 0 | 0 | 0.464539 | 0.022163 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.057143 | 0 | 0.057143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70365219313e16a31641d194a9ebac7e3c52aa71 | 7,190 | py | Python | app/auth/views.py | fushouhai/flask_11_28 | 49a030d8a61a1a3cdd1f32746978781a5266c599 | [
"MIT"
] | null | null | null | app/auth/views.py | fushouhai/flask_11_28 | 49a030d8a61a1a3cdd1f32746978781a5266c599 | [
"MIT"
] | null | null | null | app/auth/views.py | fushouhai/flask_11_28 | 49a030d8a61a1a3cdd1f32746978781a5266c599 | [
"MIT"
] | null | null | null | from flask import render_template, redirect, request, url_for, flash, session, current_app
from flask_login import login_user, logout_user, login_required, \
current_user
from . import auth
from .. import db
from ..models import User
from ..email import send_email
from .forms import LoginForm, RegistrationForm, ChangePasswordForm, ForgetPasswordForm, FPNewPasswordForm, \
ChangeEmailPasswordConfirmForm, ChangeEmailSetForm
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
@auth.before_app_request
def before_request():
    """Run before every request of the whole app.

    Refreshes the logged-in user's last-seen timestamp, and forces
    unconfirmed users to the confirmation page for any endpoint outside
    the auth blueprint.
    """
    if current_user.is_authenticated:
        current_user.ping()
        # request.endpoint is None for unmatched URLs (e.g. 404s); guard
        # before slicing so those requests do not raise a TypeError.
        # Static assets are also exempt so the confirmation page can load
        # its own CSS/JS.
        if not current_user.confirmed \
                and request.endpoint is not None \
                and request.endpoint[:5] != 'auth.' \
                and request.endpoint != 'static':
            return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
    """Show the 'please confirm your account' page.

    Anonymous or already-confirmed users have nothing to confirm and are
    sent back to the home page instead.
    """
    needs_confirmation_page = not (current_user.is_anonymous or current_user.confirmed)
    if needs_confirmation_page:
        return render_template('auth/unconfirmed.html')
    return redirect(url_for('main.index'))
@auth.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form and sign the user in on valid credentials.

    On success, honors an optional ?next= redirect target; otherwise
    returns to the home page.
    """
    form = LoginForm()
    if not form.validate_on_submit():
        return render_template('auth/login.html', form=form)
    candidate = User.query.filter_by(email=form.email.data).first()
    if candidate is None or not candidate.verify_password(form.password.data):
        flash('Invalid username or password.')
        return render_template('auth/login.html', form=form)
    login_user(candidate, form.remember_me.data)
    target = request.args.get('next') or url_for('main.index')
    return redirect(target)
@auth.route('/change_password', methods=['GET', 'POST'])
@login_required
def change_password():
    """Let a logged-in user change their password.

    The old password is verified before the new one is set.  The updated
    user is explicitly added to the session and committed — without the
    commit the new password hash was never persisted.
    """
    form = ChangePasswordForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.old_passwd.data):
            current_user.password = form.new_passwd.data
            # Persist the new password hash; the original omitted this,
            # silently discarding the change at request end.
            db.session.add(current_user)
            db.session.commit()
            flash('Password changed!')
            return redirect(url_for('main.index'))
        flash('Invalid password.')
    return render_template('auth/change_password.html', form=form)
@auth.route('/change_email', methods=['GET', 'POST'])
@login_required
def change_email():
    """Step 1 of changing the email address: re-confirm the password."""
    form = ChangeEmailPasswordConfirmForm()
    if not form.validate_on_submit():
        return render_template('auth/change_email_password_confirm.html', form=form)
    if current_user.verify_password(form.password.data):
        return redirect(url_for('auth.change_email_set'))
    flash('password error')
    return redirect(url_for('main.index'))
@auth.route('/change_email_set', methods=['GET', 'POST'])
@login_required
def change_email_set():
    """Step 2: collect the new address and mail a confirmation token to it."""
    form = ChangeEmailSetForm()
    if not form.validate_on_submit():
        return render_template('auth/change_email_set.html', form=form)
    new_address = form.email.data
    token = current_user.generate_confirmation_token(email=new_address)
    send_email(new_address, 'Confirm',
               'auth/email/change_email_set', user=current_user, token=token)
    flash('A confirm email has been sent to you by new email')
    return redirect(url_for('main.index'))
@auth.route('/change_email_set_done/<token>')
@login_required
def change_email_set_done(token):
    """Step 3: apply the email change carried in the signed token.

    The token must decode with the app secret and must belong to the
    currently logged-in user.  On success the email and the derived
    avatar (MD5) hash are updated and committed — the original never
    committed, so the change was lost; it also used hashlib without
    importing it.
    """
    s = Serializer(current_app.config['SECRET_KEY'])
    try:
        data = s.loads(token)
    except Exception:  # expired or tampered token; was a bare except
        flash('confirm error!')
        return redirect(url_for('main.index'))
    if current_user.id == data.get('confirm'):
        current_user.email = data.get('email')
        current_user.avatar_hash = hashlib.md5(
            current_user.email.encode('utf-8')).hexdigest()
        # Persist the new address and avatar hash.
        db.session.add(current_user)
        db.session.commit()
        flash('email changed')
        return redirect(url_for('main.index'))
    flash('user changed or logout')
    return redirect(url_for('main.index'))
@auth.route('/forget_password', methods=['GET', 'POST'])
def forget_password():
    """Ask for an email address and send a password-reset token to it."""
    form = ForgetPasswordForm()
    if not form.validate_on_submit():
        return render_template('auth/forget_password.html', form=form)
    account = User.query.filter_by(email=form.email.data).first()
    if account is not None:
        reset_token = account.generate_confirmation_token()
        send_email(account.email, 'Confirm Your Account',
                   'auth/email/forget_password', user=account, token=reset_token)
        flash('A confirmation email has been sent to you by email, which is about forgetting password')
        return redirect(url_for('main.index'))
    # Unknown address: fall through and show the form again.
    return render_template('auth/forget_password.html', form=form)
@auth.route('/confirm_forget_password/<token>')
def confirm_forget_password(token):
    """Entry point of the emailed reset link.

    Stashes the signed token in the session so the follow-up form view
    can validate it, then redirects to the new-password form.
    """
    session['token'] = token
    return redirect(url_for('auth.forget_password_set_password'))
@auth.route('/forget_password_set_password', methods=['GET', 'POST'])
def forget_password_set_password():
    """Final reset step: validate the session token and set the new password.

    The token placed in the session by confirm_forget_password must decode
    with the app secret and carry a 'confirm' user id that matches an
    existing user.
    """
    form = FPNewPasswordForm()
    if form.validate_on_submit():
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            token = session.get('token')
            data = s.loads(token)
        except Exception:  # missing, expired or tampered token; was a bare except
            flash('confirm error!')
            return redirect(url_for('main.index'))
        val_id = data.get('confirm')
        if val_id is not None:
            user = User.query.filter_by(id=val_id).first()
            if user is not None:
                user.password = form.password.data
                db.session.add(user)
                db.session.commit()
                flash('password has changed!')
                return redirect(url_for('auth.login'))
            flash('can not find user!')
            return redirect(url_for('main.index'))
        flash('user is error!')
        return redirect(url_for('main.index'))
    return render_template('auth/forget_password_set_password.html', form=form)
@auth.route('/logout')
@login_required
def logout():
    """Sign the current user out and return to the home page."""
    logout_user()
    flash('You have been logged out.')
    return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account and email a confirmation token to it."""
    form = RegistrationForm()
    # Shown on every visit, before validation, as in the original flow.
    flash('Warning:Email@163.com does not work!')
    if not form.validate_on_submit():
        return render_template('auth/register.html', form=form)
    new_user = User(email=form.email.data,
                    username=form.username.data,
                    password=form.password.data)
    db.session.add(new_user)
    db.session.commit()
    token = new_user.generate_confirmation_token()
    send_email(new_user.email, 'Confirm Your Account',
               'auth/email/confirm', user=new_user, token=token)
    flash('A confirmation email has been sent to you by email.')
    return redirect(url_for('auth.login'))
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
    """Validate an account-confirmation token for the logged-in user.

    Always lands on the home page; already-confirmed users skip the
    token check entirely.
    """
    if not current_user.confirmed:
        if current_user.confirm(token):
            flash('You have confirmed your account. Thanks!')
        else:
            flash('The confirmation link is invalid or has expired.')
    return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
    """Send a fresh confirmation token to the logged-in user's address."""
    fresh_token = current_user.generate_confirmation_token()
    send_email(current_user.email, 'Confirm Your Account',
               'auth/email/confirm', user=current_user, token=fresh_token)
    flash('A new confirmation email has been sent to you by email.')
    return redirect(url_for('main.index'))
| 38.864865 | 108 | 0.665229 | 889 | 7,190 | 5.20135 | 0.150731 | 0.028547 | 0.073529 | 0.086505 | 0.588235 | 0.532007 | 0.443123 | 0.384516 | 0.292388 | 0.260164 | 0 | 0.001059 | 0.212239 | 7,190 | 184 | 109 | 39.076087 | 0.815325 | 0 | 0 | 0.345455 | 0 | 0 | 0.211266 | 0.054659 | 0 | 0 | 0 | 0 | 0 | 1 | 0.084848 | false | 0.193939 | 0.048485 | 0 | 0.309091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
703c18b3d77e9deeb40500573ea0b2b61e8d29d8 | 12,733 | py | Python | src/evaluation_system/model/user.py | FREVA-CLINT/Freva | 53c6d0951a8dcfe985c8f33cbb3fbac7e8a3db04 | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2020-06-12T18:18:48.000Z | 2021-12-18T03:35:08.000Z | src/evaluation_system/model/user.py | FREVA-CLINT/Freva | 53c6d0951a8dcfe985c8f33cbb3fbac7e8a3db04 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | src/evaluation_system/model/user.py | FREVA-CLINT/Freva | 53c6d0951a8dcfe985c8f33cbb3fbac7e8a3db04 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | '''
.. moduleauthor:: Sebastian Illing / estani
This module manages the abstraction of a user providing thus all information about him/her that
might be required anywhere else.
'''
import pwd
import os
import sys
from ConfigParser import SafeConfigParser as Config
from evaluation_system.misc import config, utils
from evaluation_system.model.db import UserDB
class User(object):
    '''
    This Class encapsulates a user (configurations, etc).
    '''

    CONFIG_DIR = 'config'
    "The directory name where all plug-in/system configurations will be stored."
    CACHE_DIR = 'cache'
    "The temporary directory where plug-ins can store files while performing some computation."
    OUTPUT_DIR = 'output'
    "The directory where output files are stored. Intended for files containing data and thus taking much space."
    PLOTS_DIR = 'plots'
    """The directory where just plots are stored. Plots are assumed to be much smaller in size than data and might
    therefore live longer"""
    PROCESSES_DIR = 'processes'
    "The directory might handle information required for each running process."
    EVAL_SYS_CONFIG = os.path.join(CONFIG_DIR, 'evaluation_system.config')
    """The file containing a central configuration for the whole system (user-wise)"""
    EVAL_SYS_DEFAULT_CONFIG = os.path.normpath(os.path.dirname(sys.modules[__name__].__file__) + '/../../etc/system_default.config')
    """The central default configuration file for all users. It should not be confused with the system configuration
    file that is handled by :class:`evaluation_system.api.config`."""

    def __init__(self, uid=None, email=None):
        '''Creates a user object for the provided id. If no id is given, a user object for
        the current user, i.e. the one that started the application, is created instead.

        :type uid: int
        :param uid: user id in the local system, if not provided the current user is used.
        :type email: str
        :param email: user's email address
        '''
        self._dir_type = config.get(config.DIRECTORY_STRUCTURE_TYPE)
        if uid is None:
            uid = os.getuid()
        self._userdata = None
        # uid may be either a login name or a numeric id (Python 2 basestring).
        if isinstance(uid, basestring):
            self._userdata = pwd.getpwnam(uid)
        else:
            self._userdata = pwd.getpwuid(uid)
        if self._userdata is None:
            raise Exception("Cannot find user %s" % uid)
        if email is None:
            self._email = ''
        else:
            self._email = email

        self._userconfig = Config()
        # try to load the configuration from the very first time.
        self._userconfig.read([User.EVAL_SYS_DEFAULT_CONFIG, os.path.join(self._userdata.pw_dir, User.EVAL_SYS_CONFIG)])

        self._db = UserDB(self)

        # Register the user in the DB on first sight; update login otherwise.
        row_id = self._db.getUserId(self.getName())
        if row_id:
            try:
                self._db.updateUserLogin(row_id, email)
            # NOTE(review): except-that-reraises is a no-op wrapper; kept as-is.
            except:
                raise
            pass
        else:
            self._db.createUser(self.getName(), email=self._email)

    #-------------------------- self._meta = metadict(compact_creation=True,
    #--------------------------------- USER_BASE_DIR=)
    # """Expand the user specific values in the given string. Those values might be one of:
    # $USER_BASE_DIR := central directory for this user in the evaluation system.
    # $USER_OUTPUT_DIR := directory where the output data for this user is stored.
    #------ $USER_PLOT_DIR := directory where the plots for this user is stored.
    # $USER_CACHE_DIR := directory where the cached data for this user is stored."""

    def __str__(self):
        return "<User (username:%s, info:%s)>" % (self._userdata[0], str(self._userdata[2:]))

    def getUserConfig(self):
        """:returns: the user configuration object :py:class:`ConfigParser.SafeConfigParser`"""
        return self._userconfig

    def getUserDB(self):
        """:returns: the db abstraction for this user.
        :rtype: :class:`evaluation_system.model.db.UserDB`"""
        return self._db

    def reloadConfig(self):
        """Reloads user central configuration from disk (not the plug-in related one)."""
        self._userconfig = Config()
        self._userconfig.read([User.EVAL_SYS_DEFAULT_CONFIG, os.path.join(self.getUserBaseDir(), User.EVAL_SYS_CONFIG)])
        return self._userconfig

    def writeConfig(self):
        """Writes the user central configuration to disk according to :class:`EVAL_SYS_CONFIG`"""
        fp = open(os.path.join(self.getUserBaseDir(), User.EVAL_SYS_CONFIG), 'w')
        self._userconfig.write(fp)
        fp.close()

    def getName(self):
        """:returns: the user name
        :rtype: str"""
        return self._userdata.pw_name

    def getEmail(self):
        """
        :returns: user's email address. Maybe None. :rtype: str
        """
        return self._email

    def getUserID(self):
        """:returns: the user id.
        :rtype: int"""
        return self._userdata.pw_uid

    def getUserHome(self):
        """:returns: the path to the user home directory.
        :rtype: str"""
        return self._userdata.pw_dir

    def getUserScratch(self):
        """:returns: the path to the user's scratch directory.
        :rtype: str"""
        path = config.get(config.SCRATCH_DIR)
        # The configured path template may contain a $USER placeholder.
        path = path.replace('$USER', self.getName())
        return path

    def _getUserBaseDir(self):
        # Resolve the per-user base directory depending on the configured
        # directory-structure type (home-local, central tree, or scratch).
        # NOTE(review): returns None for any unrecognized type — verify
        # callers never hit that case.
        if self._dir_type == config.DIRECTORY_STRUCTURE.LOCAL:
            return os.path.join(self.getUserHome(), config.get(config.BASE_DIR))
        elif self._dir_type == config.DIRECTORY_STRUCTURE.CENTRAL:
            return os.path.join(config.get(config.BASE_DIR_LOCATION), config.get(config.BASE_DIR), str(self.getName()))
        elif self._dir_type == config.DIRECTORY_STRUCTURE.SCRATCH:
            return os.path.join(config.get(config.BASE_DIR_LOCATION), str(self.getName()), config.get(config.BASE_DIR))

    def _getUserDir(self, dir_type, tool=None, create=False):
        # Map the symbolic dir_type to a relative (or, for scheduler dirs,
        # possibly absolute) path fragment.
        base_dir = dict(base='', config=User.CONFIG_DIR, cache=User.CACHE_DIR, output=User.OUTPUT_DIR, \
                        plots=User.PLOTS_DIR, processes=User.PROCESSES_DIR, \
                        scheduler_in=config.get(config.SCHEDULER_INPUT_DIR), \
                        scheduler_out=config.get(config.SCHEDULER_OUTPUT_DIR))
        if tool is None:
            bd = base_dir[dir_type]
            # concatenate relative paths only
            if bd and bd[0] == '/':
                dir_name = bd
            else:
                # return the directory where the tool configuration files are stored
                dir_name = os.path.join(self._getUserBaseDir(), bd)
        else:
            # It's too confusing if we create case sensitive directories...
            tool = tool.lower()
            # return the specific directory for the given tool
            dir_name = os.path.join(self._getUserBaseDir(), base_dir[dir_type], tool)
        # make sure we have a canonical path
        dir_name = os.path.abspath(dir_name)
        if create and not os.path.isdir(dir_name):
            # we are letting this fail in case of problems.
            # 0755 is Python 2 octal notation (rwxr-xr-x).
            utils.supermakedirs(dir_name, 0755)
        return dir_name

    def getUserBaseDir(self, **kwargs):
        """Returns path to where this system is managing this user data.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :returns: (str) path"""
        return self._getUserDir('base', **kwargs)

    def getUserSchedulerInputDir(self, **kwargs):
        """Returns path to the directory holding scheduler input files for this user.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :returns: (str) path"""
        return self._getUserDir('scheduler_in', **kwargs)

    def getUserSchedulerOutputDir(self, **kwargs):
        """Returns path to the directory holding scheduler output files for this user.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :returns: (str) path"""
        return self._getUserDir('scheduler_out', **kwargs)

    def getUserToolConfig(self, tool, **kwargs):
        """Returns the path to the configuration file.

        :param kwargs: ``create`` := If ``True`` assure the underlaying directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned.
        :type tool: str
        :returns: path to the configuration file."""
        config_dir = self._getUserDir('config', tool, **kwargs)
        return os.path.join(config_dir, '%s.conf' % tool)

    def getUserConfigDir(self, tool=None, **kwargs):
        """Return the path to the directory where all configurations for this user are stored.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned. If None, then the directory
                     where all information for all tools reside is returned insted (normally, that would
                     be the parent directrory).
        :type tool: str
        :returns: path to the directory."""
        return self._getUserDir('config', tool, **kwargs)

    def getUserCacheDir(self, tool=None, **kwargs):
        """Return directory where cache files for this user (might not be "only" for this user though).

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned. If None, then the directory
                     where all information for all tools reside is returned insted (normally, that would
                     be the parent directrory).
        :type tool: str
        :returns: path to the directory."""
        return self._getUserDir('cache', tool, **kwargs)

    def getUserProcessDir(self, tool=None, **kwargs):
        """Return directory where files required for processes can be held. Is not clear what this will
        be used for, but it should at least serve as a possibility for the future.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned. If None, then the directory
                     where all information for all tools reside is returned insted (normally, that would
                     be the parent directrory).
        :type tool: str
        :returns: path to the directory."""
        return self._getUserDir('processes', tool, **kwargs)

    def getUserOutputDir(self, tool=None, **kwargs):
        """Return directory where output data for this user is stored.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned. If None, then the directory
                     where all information for all tools reside is returned insted (normally, that would
                     be the parent directrory).
        :type tool: str
        :returns: path to the directory."""
        return self._getUserDir('output', tool, **kwargs)

    def getUserPlotsDir(self, tool=None, **kwargs):
        """Return directory where all plots for this user are stored.

        :param kwargs: ``create`` := If ``True`` assure the directory exists after the call is done.
        :param tool: tool/plug-in for which the information is returned. If None, then the directory
                     where all information for all tools reside is returned insted (normally, that would
                     be the parent directrory).
        :type tool: str
        :returns: path to the directory."""
        return self._getUserDir('plots', tool, **kwargs)

    def prepareDir(self):
        """Prepares the configuration directory for this user if it's not already been done."""
        if os.path.isdir(self.getUserBaseDir()):
            # we assume preparation was successful... but we might to be sure though...
            # return
            pass

        if not os.path.isdir(self.getUserHome()):
            raise Exception("Can't create configuration, user HOME doesn't exist (%s)" % self.getUserHome())

        # create directory for the framework
        # create all required subdirectories
        dir_creators = [self.getUserBaseDir,
                        self.getUserConfigDir,
                        self.getUserCacheDir,
                        self.getUserOutputDir,
                        self.getUserPlotsDir,
                        self.getUserSchedulerInputDir,
                        self.getUserSchedulerOutputDir, ]

        for f in dir_creators:
            f(create=True)
| 42.872054 | 130 | 0.641404 | 1,612 | 12,733 | 4.968983 | 0.183002 | 0.035955 | 0.015106 | 0.021348 | 0.402497 | 0.369164 | 0.348315 | 0.290262 | 0.290262 | 0.280774 | 0 | 0.000741 | 0.258305 | 12,733 | 296 | 131 | 43.016892 | 0.847416 | 0.079636 | 0 | 0.085271 | 0 | 0 | 0.090721 | 0.008274 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.015504 | 0.046512 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
703c2951dba5449a5db47926cbcfb472f6a8c13d | 622 | py | Python | python/sortalgorithm/quickSort.py | Turingu/leetcode | ac75c14604b29df394b768b23a94bb7bf310777b | [
"MIT"
] | 1 | 2020-01-01T17:46:11.000Z | 2020-01-01T17:46:11.000Z | python/sortalgorithm/quickSort.py | Turingu/leetcode | ac75c14604b29df394b768b23a94bb7bf310777b | [
"MIT"
] | null | null | null | python/sortalgorithm/quickSort.py | Turingu/leetcode | ac75c14604b29df394b768b23a94bb7bf310777b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class QuickSort:
    """Recursive quicksort over Python lists.

    Bug fixed: the original partitioned only with ``>`` and ``<``, so any
    element *equal* to the pivot (other than the pivot itself) was silently
    dropped — inputs with duplicates came back shorter.  Equal elements are
    now collected in their own partition and kept.
    """

    def __init__(self):
        pass

    def quicksort(self, nums):
        """Return the elements of ``nums`` in ascending order.

        Note: ``nums`` is consumed destructively (the pivot is popped from
        it), matching the original behaviour.
        """
        if len(nums) <= 1:
            return nums
        pivot = nums.pop()
        less = [n for n in nums if n < pivot]
        equal = [n for n in nums if n == pivot]
        greater = [n for n in nums if n > pivot]
        return self.quicksort(less) + equal + [pivot] + self.quicksort(greater)
if __name__ == '__main__':
    # Quick manual smoke test.
    sorter = QuickSort()
    print(sorter.quicksort([5, 6, 1, 3, 4, 2]))
| 19.4375 | 76 | 0.5 | 73 | 622 | 4.041096 | 0.506849 | 0.122034 | 0.074576 | 0.094915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022613 | 0.360129 | 622 | 31 | 77 | 20.064516 | 0.718593 | 0.075563 | 0 | 0 | 0 | 0 | 0.014337 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0.058824 | 0 | 0 | 0.294118 | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
7041ada191d0f5730f5d5dde284d632d5177384a | 24,098 | py | Python | src/SegnetModel.py | JasonChu1313/Satellite-Segmentation | c41727af305d09ce93f745b841d228b7a4f24a9c | [
"MIT"
] | 1 | 2019-01-29T04:35:39.000Z | 2019-01-29T04:35:39.000Z | src/SegnetModel.py | JasonChu1313/Satellite-Segmentation | c41727af305d09ce93f745b841d228b7a4f24a9c | [
"MIT"
] | null | null | null | src/SegnetModel.py | JasonChu1313/Satellite-Segmentation | c41727af305d09ce93f745b841d228b7a4f24a9c | [
"MIT"
] | null | null | null | from Model import Model
from Config import Config
from math import ceil
import readfile
import customer_init
import numpy as np
import time
import datetime
import util
import os
import random
from tempfile import TemporaryFile
from customer_init import orthogonal_initializer
import tensorflow as tf
from tensorflow.core.protobuf import saver_pb2
# import the inspect_checkpoint library
from tensorflow.python.tools import inspect_checkpoint as chkp
import math
class SegnetModel(Model):
    def __init__(self):
        # All hyper-parameters and path settings are centralised in Config.
        self.config = Config()
    def add_placeholders(self):
        """Create the tf.placeholder nodes fed at train/eval time.

        Images are (BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_DEPTH)
        floats; labels are integer per-pixel class maps with a trailing
        singleton channel.  phase_train toggles batch-norm between
        training and inference statistics.
        """
        self.train_data_node = tf.placeholder(tf.float32, shape=[self.config.BATCH_SIZE,
                                                                 self.config.IMAGE_HEIGHT, self.config.IMAGE_WIDTH, self.config.IMAGE_DEPTH])
        self.train_label_node = tf.placeholder(tf.int32, shape=[self.config.BATCH_SIZE, self.config.IMAGE_HEIGHT, self.config.IMAGE_WIDTH, 1])
        self.phase_train = tf.placeholder(tf.bool, name="phase_train")
        # Scalar summary inputs (average loss, accuracy, mean IU).
        self.average_pl = tf.placeholder(tf.float32)
        self.acc_pl = tf.placeholder(tf.float32)
        self.iu_pl = tf.placeholder(tf.float32)
        self.test_data_node = tf.placeholder(
            tf.float32,
            shape=[self.config.TEST_BATCH_SIZE,
                   self.config.IMAGE_HEIGHT, self.config.IMAGE_WIDTH, self.config.IMAGE_DEPTH])
        self.test_labels_node = tf.placeholder(tf.int64, shape=[self.config.TEST_BATCH_SIZE, self.config.IMAGE_HEIGHT, self.config.IMAGE_WIDTH, 1])
    def add_loss_op(self, pred):
        """Unimplemented stub inherited from Model; loss is built in cal_loss instead."""
        pass
    def add_training_op(self, total_loss):
        """Build the Adam training op with a fixed learning rate.

        Also attaches loss-averaging summaries, per-variable and
        per-gradient histograms, and an exponential moving average over
        all trainable variables; the returned no-op depends on both the
        gradient application and the EMA update.
        """
        lr = self.config.INITIAL_LEARNING_RATE
        loss_averages_op = util._add_loss_summaries(total_loss)
        # Compute gradients only after the loss summaries have run.
        with tf.control_dependencies([loss_averages_op]):
            opt = tf.train.AdamOptimizer(lr)
            grads = opt.compute_gradients(total_loss)
        apply_gradient_op = opt.apply_gradients(grads, global_step=self.global_step)
        # Add histograms for trainable variables.
        for var in tf.trainable_variables():
            tf.summary.histogram(var.op.name, var)
        # Add histograms for gradients.
        for grad, var in grads:
            if grad is not None:
                tf.summary.histogram(var.op.name + '/gradients', grad)
        # Track the moving averages of all trainable variables.
        variable_averages = tf.train.ExponentialMovingAverage(
            self.config.MOVING_AVERAGE_DECAY, self.global_step)
        variables_averages_op = variable_averages.apply(tf.trainable_variables())
        with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
            train_op = tf.no_op(name='train')
        return train_op
    def train_on_batch(self, sess, inputs_batch, labels_batch):
        """Unimplemented stub inherited from Model; training is driven elsewhere."""
        pass
    def add_prediction_op(self):
        """Build the SegNet encoder/decoder graph.

        Encoder: local response norm, then four conv+BN blocks each
        followed by 2x2 max-pooling.  Decoder: four bilinear transposed
        convolutions, each followed by a conv+BN block without
        activation.  A final 1x1 convolution maps the 64 decoder channels
        to the 2 output classes.

        :returns: (loss, logits) where logits has shape
            (BATCH_SIZE, H, W, 2).
        """
        # norm1
        norm1 = tf.nn.lrn(self.train_data_node, depth_radius=5, bias=1.0, alpha=0.0001, beta=0.75,
                          name='norm1')
        # conv1
        conv1 = self.conv_layer_with_bn(norm1, [7, 7, self.train_data_node.get_shape().as_list()[3], 64], self.phase_train, name="conv1")
        # pool1
        pool1, pool1_indices = tf.nn.max_pool_with_argmax(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
                                                          padding='SAME', name='pool1')
        # conv2
        conv2 = self.conv_layer_with_bn(pool1, [7, 7, 64, 64], self.phase_train, name="conv2")
        # pool2
        pool2, pool2_indices = tf.nn.max_pool_with_argmax(conv2, ksize=[1, 2, 2, 1],
                                                          strides=[1, 2, 2, 1], padding='SAME', name='pool2')
        # conv3
        conv3 = self.conv_layer_with_bn(pool2, [7, 7, 64, 64], self.phase_train, name="conv3")
        # pool3
        pool3, pool3_indices = tf.nn.max_pool_with_argmax(conv3, ksize=[1, 2, 2, 1],
                                                          strides=[1, 2, 2, 1], padding='SAME', name='pool3')
        # conv4
        conv4 = self.conv_layer_with_bn(pool3, [7, 7, 64, 64], self.phase_train, name="conv4")
        """ End of encoder """
        """ start upsample """
        # pool4
        pool4, pool4_indices = tf.nn.max_pool_with_argmax(conv4, ksize=[1, 2, 2, 1],
                                                          strides=[1, 2, 2, 1], padding='SAME', name='pool4')
        # upsample4 — the pooling indices are computed but unused; upsampling
        # is done with learned-free bilinear transposed convolutions instead.
        # NOTE(review): output sizes 64->128->256->512 assume 512x512 inputs.
        # upsample4 = upsample_with_pool_indices(pool4, pool4_indices, pool4.get_shape(), out_w=45, out_h=60, scale=2, name='upsample4')
        upsample4 = self.deconv_layer(pool4, [2, 2, 64, 64], [self.config.BATCH_SIZE, 64, 64, 64], 2, "up4")
        # decode 4
        conv_decode4 = self.conv_layer_with_bn(upsample4, [7, 7, 64, 64], self.phase_train, False, name="conv_decode4")
        # upsample 3
        # upsample3 = upsample_with_pool_indices(conv_decode4, pool3_indices, conv_decode4.get_shape(), scale=2, name='upsample3')
        upsample3 = self.deconv_layer(conv_decode4, [2, 2, 64, 64], [self.config.BATCH_SIZE, 128, 128, 64], 2, "up3")
        # decode 3
        conv_decode3 = self.conv_layer_with_bn(upsample3, [7, 7, 64, 64], self.phase_train, False, name="conv_decode3")
        # upsample2
        # upsample2 = upsample_with_pool_indices(conv_decode3, pool2_indices, conv_decode3.get_shape(), scale=2, name='upsample2')
        upsample2 = self.deconv_layer(conv_decode3, [2, 2, 64, 64], [self.config.BATCH_SIZE, 256, 256, 64], 2, "up2")
        # decode 2
        conv_decode2 = self.conv_layer_with_bn(upsample2, [7, 7, 64, 64], self.phase_train, False, name="conv_decode2")
        # upsample1
        # upsample1 = upsample_with_pool_indices(conv_decode2, pool1_indices, conv_decode2.get_shape(), scale=2, name='upsample1')
        upsample1 = self.deconv_layer(conv_decode2, [2, 2, 64, 64], [self.config.BATCH_SIZE, 512, 512, 64], 2, "up1")
        # decode4
        conv_decode1 = self.conv_layer_with_bn(upsample1, [7, 7, 64, 64], self.phase_train, False, name="conv_decode1")
        """ Start Classify """
        # output predicted class number (here: 2 classes)
        with tf.variable_scope('conv_classifier', reuse=tf.AUTO_REUSE) as scope:
            kernel = util._variable_with_weight_decay('weights',
                                                      shape=[1, 1, 64, 2],
                                                      initializer=customer_init.msra_initializer(1, 64),
                                                      wd=0.0005)
            conv = tf.nn.conv2d(conv_decode1, kernel, [1, 1, 1, 1], padding='SAME')
            biases = util._variable('biases', [2], tf.constant_initializer(0.0))
            conv_classifier = tf.nn.bias_add(conv, biases, name=scope.name)
        logit = conv_classifier
        loss = self.cal_loss(conv_classifier, self.train_label_node)
        return loss, logit
def cal_loss(self, conv_classifier, labels):
with tf.name_scope("loss"):
logits = tf.reshape(conv_classifier, (-1, self.config.NUM_CLASSES))
epsilon = tf.constant(value=1e-10)
logits = logits + epsilon
softmax = tf.nn.softmax(logits)
# consturct one-hot label array
label_flat = tf.reshape(labels, (-1, 1))
# should be [batch ,num_classes]
labels = tf.reshape(tf.one_hot(label_flat, depth=self.config.NUM_CLASSES), (-1, self.config.NUM_CLASSES))
w1_n = tf.ones([softmax.shape[0],1],tf.float32)
w2_n = tf.slice(softmax,[0,0],[-1,1])
_T = 0.3
T = tf.ones(softmax.shape[0],1) * _T
condition = tf.greater(w2_n, 0.5)
w2_n = tf.where(condition, tf.math.maximum(_T, w2_n), tf.ones(w2_n.shape))
#w2_n = tf.cond(tf.greater(w2_n, 0.5), lambda : 1-w2_n, lambda : [1])
#tf.cond(tf.greater(w2_n,0.5) , lambda : 1, lambda : 0)
weight = tf.concat([w2_n,w1_n],1)
cross_entropy = -tf.reduce_sum(weight * labels * tf.log(softmax + epsilon), axis=[1])
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
loss = tf.add_n(tf.get_collection('losses'), name='total_loss')
return loss
    def conv_layer_with_bn(self, inputT, shape, train_phase, activation=True, name=None):
        """Conv2D (orthogonally initialized) + bias + batch norm, optional ReLU.

        :param inputT: input tensor, NHWC.
        :param shape: kernel shape [k, k, in_channels, out_channels].
        :param train_phase: bool placeholder toggling BN train/inference mode.
        :param activation: apply ReLU after BN when True (encoder); decoder
            blocks pass False.
        :param name: variable scope name (shared via AUTO_REUSE).
        """
        # NOTE(review): in_channel and k_size are computed but never used.
        in_channel = shape[2]
        out_channel = shape[3]
        k_size = shape[0]
        with tf.variable_scope(name, reuse=tf.AUTO_REUSE) as scope:
            kernel = util._variable_with_weight_decay('ort_weights', shape=shape, initializer=orthogonal_initializer(), wd=None)
            conv = tf.nn.conv2d(inputT, kernel, [1, 1, 1, 1], padding='SAME')
            biases = util._variable('biases', [out_channel], tf.constant_initializer(0.0))
            bias = tf.nn.bias_add(conv, biases)
            if activation is True:
                conv_out = tf.nn.relu(self.batch_norm_layer(bias, train_phase, scope.name))
            else:
                conv_out = self.batch_norm_layer(bias, train_phase, scope.name)
        return conv_out
    def batch_norm_layer(self, inputT, is_training, scope):
        """Batch-normalize inputT, choosing train vs. inference statistics.

        Both branches of the tf.cond share variables through the
        "<scope>_bn" scope; the inference branch reuses the variables
        created by the training branch.
        """
        return tf.cond(is_training,
                       lambda: tf.contrib.layers.batch_norm(inputT, is_training=True,
                                                            center=False, updates_collections=None,
                                                            scope=scope + "_bn"),
                       lambda: tf.contrib.layers.batch_norm(inputT, is_training=False,
                                                            updates_collections=None, center=False, scope=scope + "_bn",
                                                            reuse=True))
def deconv_layer(self, inputT, f_shape, output_shape, stride=2, name=None):
# output_shape = [b, w, h, c]
# sess_temp = tf.InteractiveSession()
sess_temp = tf.global_variables_initializer()
strides = [1, stride, stride, 1]
with tf.variable_scope(name):
weights = self.get_deconv_filter(f_shape)
deconv = tf.nn.conv2d_transpose(inputT, weights, output_shape,
strides=strides, padding='SAME')
return deconv
def get_deconv_filter(self, f_shape):
"""
reference: https://github.com/MarvinTeichmann/tensorflow-fcn
"""
width = f_shape[0]
heigh = f_shape[0]
f = ceil(width / 2.0)
c = (2 * f - 1 - f % 2) / (2.0 * f)
bilinear = np.zeros([f_shape[0], f_shape[1]])
for x in range(width):
for y in range(heigh):
value = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
bilinear[x, y] = value
weights = np.zeros(f_shape)
for i in range(f_shape[2]):
weights[:, :, i, i] = bilinear
init = tf.constant_initializer(value=weights,
dtype=tf.float32)
return tf.get_variable(name="up_filter", initializer=init,
shape=weights.shape)
def get_train_val(self, image_filenames, label_filenames):
val_size = int(len(image_filenames) * 0.06)
val_image_filenames = []
val_label_filenames = []
for i in range(val_size):
pop_index = random.randint(0, len(image_filenames)-1)
val_image_filenames.append(image_filenames.pop(pop_index))
val_label_filenames.append(label_filenames.pop(pop_index))
val_image_filenames.pop(0)
val_label_filenames.pop(0)
return image_filenames, label_filenames, val_image_filenames, val_label_filenames
def training(self, is_finetune=False):
    """Run the training loop.

    Builds the graph (placeholders, prediction op, training op), then runs
    `self.config.maxsteps` optimization steps: logs loss/throughput and
    per-class accuracy every 50 steps, checkpoints every 1000 steps and at
    the final step. The validation pass is currently commented out.

    Args:
        is_finetune: when True, restore weights from `self.config.finetune`
            instead of initializing, and resume the step counter parsed from
            the checkpoint file name (suffix after the last '-').
    """
    batch_size = self.config.BATCH_SIZE
    train_dir = self.config.log_dir  # ../data/Logs
    image_dir = self.config.image_dir  # ../data/train
    val_dir = self.config.val_dir  # ../data/val
    finetune_ckpt = self.config.finetune
    # NOTE(review): image_w/h/c are read from config but never used below.
    image_w = self.config.IMAGE_WIDTH
    image_h = self.config.IMAGE_HEIGHT
    image_c = self.config.IMAGE_DEPTH
    image_filenames, label_filenames = readfile.get_filename_list(image_dir, prefix = "../data/train")
    print "total file size {}".format(len(image_filenames))
    #val_image_filenames, val_label_filenames = readfile.get_filename_list(val_dir, prefix = "../data/val", is_train=False)
    # image_filenames, label_filenames, val_image_filenames, val_label_filenames = self.get_train_val(image_filenames, label_filenames)
    # print "train size {}".format(len(image_filenames))
    # print "test size {}".format(len(val_image_filenames))
    # should be changed if your model stored by different convention
    startstep = 0 if not is_finetune else int(self.config.finetune.split('-')[-1])
    #with tf.device('/device:GPU:0'):
    with tf.Graph().as_default():
        self.add_placeholders()
        self.global_step = tf.Variable(0, trainable=False)
        # tf.data input pipeline over the training file lists.
        train_dataset = readfile.get_dataset(image_filenames, label_filenames, self.config.BATCH_SIZE, True)
        # val_dataset = readfile.get_dataset(val_image_filenames, val_label_filenames, self.config.EVAL_BATCH_SIZE)
        train_iterator = train_dataset.make_one_shot_iterator()
        next_train_element = train_iterator.get_next()
        # val_iterator = val_dataset.make_one_shot_iterator()
        # next_val_element = val_iterator.get_next()
        # Build a Graph that computes the logits predictions from the inference model.
        loss, eval_prediction = self.add_prediction_op()
        # Build a Graph that trains the model with one batch of examples and updates the model parameters.
        train_op = self.add_training_op(loss)
        # V1 checkpoint format — presumably for compatibility with older tooling.
        saver = tf.train.Saver(tf.global_variables(),write_version= saver_pb2.SaverDef.V1)
        summary_op = tf.summary.merge_all()
        with tf.Session() as sess:
            # Restore a checkpoint when fine-tuning, else initialize all variables.
            if (is_finetune == True):
                saver.restore(sess, finetune_ckpt)
            else:
                init = tf.global_variables_initializer()
                sess.run(init)
            # Summary placeholders (fed only by the commented-out validation pass).
            summary_writer = tf.summary.FileWriter(train_dir, sess.graph)
            average_pl = tf.placeholder(tf.float32)
            acc_pl = tf.placeholder(tf.float32)
            iu_pl = tf.placeholder(tf.float32)
            average_summary = tf.summary.scalar("test_average_loss", average_pl)
            acc_summary = tf.summary.scalar("test_accuracy", acc_pl)
            iu_summary = tf.summary.scalar("Mean_IU", iu_pl)
            for step in range(startstep, startstep + self.config.maxsteps):
                image_batch, label_batch = sess.run(next_train_element)
                # since we still use mini-batches in validation, still set bn-layer phase_train = True
                feed_dict = {
                    self.train_data_node: image_batch,
                    self.train_label_node: label_batch,
                    self.phase_train: True
                }
                start_time = time.time()
                _, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)
                duration = time.time() - start_time
                assert not np.isnan(loss_value), 'Model diverged with loss = NaN'
                if step % 50 == 0:
                    # Log loss and throughput for this step.
                    num_examples_per_step = batch_size
                    examples_per_sec = num_examples_per_step / duration
                    sec_per_batch = float(duration)
                    format_str = ('%s: step %d, loss = %.4f (%.1f examples/sec; %.3f '
                                  'sec/batch)')
                    print (format_str % (datetime.datetime.now(), step, loss_value,
                                         examples_per_sec, sec_per_batch))
                    # eval current training batch pre-class accuracy
                    pred = sess.run(eval_prediction, feed_dict=feed_dict)
                    util.per_class_acc(pred, label_batch)
                # if step % 100 == 0:
                #     print("start validating.....")
                #     total_val_loss = 0.0
                #     hist = np.zeros((self.config.NUM_CLASSES, self.config.NUM_CLASSES))
                #     for test_step in range(int(self.config.TEST_ITER)):
                #         val_images_batch, val_labels_batch = sess.run(next_val_element)
                #
                #         _val_loss, _val_pred = sess.run([loss, eval_prediction], feed_dict={
                #             self.train_data_node: val_images_batch,
                #             self.train_label_node: val_labels_batch,
                #             self.phase_train: True
                #         })
                #         total_val_loss += _val_loss
                #         hist += util.get_hist(_val_pred, val_labels_batch)
                #     print("val loss: ", total_val_loss / self.config.TEST_ITER)
                #     acc_total = np.diag(hist).sum() / hist.sum()
                #     iu = np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist))
                #     test_summary_str = sess.run(average_summary, feed_dict={average_pl: total_val_loss / self.config.TEST_ITER})
                #     acc_summary_str = sess.run(acc_summary, feed_dict={acc_pl: acc_total})
                #     iu_summary_str = sess.run(iu_summary, feed_dict={iu_pl: np.nanmean(iu)})
                #     util.print_hist_summery(hist)
                #     print(" end validating.... ")
                #
                #     summary_str = sess.run(summary_op, feed_dict=feed_dict)
                #     summary_writer.add_summary(summary_str, step)
                #     summary_writer.add_summary(test_summary_str, step)
                #     summary_writer.add_summary(acc_summary_str, step)
                #     summary_writer.add_summary(iu_summary_str, step)
                # Save the model checkpoint periodically.
                if step % 1000 == 0 or (step + 1) == self.config.maxsteps:
                    checkpoint_path = os.path.join(train_dir, 'model.ckpt')
                    saver.save(sess, checkpoint_path, global_step=step)
def visualize_prediction(self, meta_name = None, data_name = None):
    """Restore a checkpoint, run one batch of test images through the net,
    and write the predicted masks out as PNG files via util.writemask.

    Args:
        meta_name: unused here — the graph is rebuilt with add_prediction_op
            rather than imported from a .meta file.
        data_name: checkpoint file name under `self.config.test_ckpt`.

    Raises:
        Exception: if the checkpoint file does not exist.
    """
    with tf.Session() as sess:
        self.add_placeholders()
        # NOTE(review): `prediction` is never used after this — dead code
        # (astype also returns a copy that is discarded).
        prediction = np.random.randint(2, size=self.train_label_node.shape)
        prediction.astype(np.float32)
        loss, eval_prediction = self.add_prediction_op()
        saver = tf.train.Saver()
        data_file_path = os.path.join(self.config.test_ckpt, data_name)
        if os.path.isfile(data_file_path):
            saver.restore(sess, data_file_path)
        else:
            raise Exception('restore variable data fail')
        # chkp.print_tensors_in_checkpoint_file(data_file_path, tensor_name = '', all_tensors = True)
        image_filenames, label_filenames = readfile.get_filename_list("../data/test_prediction", prefix="../data/test_prediction",
                                                                      is_train=False)
        print "image length {}".format(len(image_filenames))
        # Build a one-shot tf.data pipeline over the test images.
        image_paths = tf.convert_to_tensor(image_filenames, dtype=tf.string)
        dataset = tf.data.Dataset.from_tensor_slices(image_paths)
        dataset = dataset.map(readfile.map_fn_test, num_parallel_calls=8)
        dataset = dataset.batch(self.config.BATCH_SIZE)
        test_iterator = dataset.make_one_shot_iterator()
        test_next_element = test_iterator.get_next()
        # Only the first batch is pulled and visualized.
        image_batch = sess.run(test_next_element)
        feed_dict = {
            self.train_data_node: image_batch,
            self.phase_train: True
        }
        result = sess.run([eval_prediction], feed_dict)[0]
        print "begin to write the result as image back to folder..."
        for i in range(self.config.BATCH_SIZE):
            util.writemask(result[i],'mask_'+str(i)+".png")
def get_submission_result(self, meta_name = None, data_name = None):
    """Restore a checkpoint, run the whole validation set through the net
    batch by batch, concatenate the per-batch predictions, and write a
    submission CSV via util.create_submission.

    Args:
        meta_name: unused — the graph-import path below is commented out.
        data_name: checkpoint file name under `self.config.test_ckpt`.

    Raises:
        Exception: if the checkpoint file does not exist.
    """
    is_first = True
    with tf.Session() as sess:
        self.add_placeholders()
        # NOTE(review): `prediction` is never used after this — dead code.
        prediction = np.random.randint(2, size=self.train_label_node.shape)
        prediction.astype(np.float32)
        loss, eval_prediction = self.add_prediction_op()
        # meta_file_path = os.path.join(self.config.test_ckpt, meta_name)
        # if os.path.isfile(meta_file_path):
        #     saver = tf.train.import_meta_graph(meta_file_path,clear_devices=True)
        # else:
        #     raise Exception('restore graph meta data fail')
        saver = tf.train.Saver()
        data_file_path = os.path.join(self.config.test_ckpt, data_name)
        if os.path.isfile(data_file_path):
            saver.restore(sess, data_file_path)
        else:
            raise Exception('restore variable data fail')
        #chkp.print_tensors_in_checkpoint_file(data_file_path, tensor_name = '', all_tensors = True)
        image_filenames, label_filenames = readfile.get_filename_list("../data/val", prefix="../data/val", is_train=False)
        # the length of validation set; 2169
        print "image length {}".format(len(image_filenames))
        # construct the image dataset
        image_paths = tf.convert_to_tensor(image_filenames, dtype=tf.string)
        dataset = tf.data.Dataset.from_tensor_slices(image_paths)
        dataset = dataset.map(readfile.map_fn_test, num_parallel_calls=8)
        dataset = dataset.batch(self.config.BATCH_SIZE)
        test_iterator = dataset.make_one_shot_iterator()
        test_next_element = test_iterator.get_next()
        # NOTE(review): Python 2 integer division — any trailing partial batch
        # (when len(image_filenames) % BATCH_SIZE != 0) is silently skipped.
        for i in range(len(image_filenames)/self.config.BATCH_SIZE):
            #for i in range(2):
            # for i in range(len(image_filenames))
            image_batch = sess.run(test_next_element)
            #print image_batch.shape
            feed_dict = {
                self.train_data_node: image_batch,
                self.phase_train: True
            }
            if is_first:
                # First batch seeds `result`; later batches are concatenated on.
                result = sess.run([eval_prediction],feed_dict)[0]
                # prediction = tf.stack([prediction, result])
                print "prediction shape : {}".format(result.shape)
                is_first = False
                continue
            # 5,512,512,2
            new_result = sess.run([eval_prediction],feed_dict)[0]
            #print "old result shape {}".format(np.asarray(result).shape)
            #print "new result shape {}".format(new_result.shape)
            result = np.concatenate([result, new_result],axis=0)
            #prediction = tf.stack([prediction, result])
            print "prediction shape : {}".format(result.shape)
        # preprocess the prediction and product submission, prediction is [numexample, 512, 512, 2]
        util.create_submission('../data/subid2_1.csv', result, image_filenames)
if __name__ == '__main__':
    # Entry point: build the model and dump predicted masks from the
    # step-38000 checkpoint to disk.
    segmodel = SegnetModel()
    # print all tensors in checkpoint file
    segmodel.visualize_prediction(meta_name="model.ckpt-38000.meta", data_name="model.ckpt-38000")
    #segmodel.get_submission_result()
| 48.196 | 146 | 0.586314 | 2,931 | 24,098 | 4.564995 | 0.148414 | 0.036622 | 0.015695 | 0.01704 | 0.393049 | 0.334604 | 0.293423 | 0.248655 | 0.224439 | 0.195217 | 0 | 0.026658 | 0.310399 | 24,098 | 499 | 147 | 48.292585 | 0.778493 | 0.189061 | 0 | 0.20339 | 0 | 0 | 0.038655 | 0.003476 | 0 | 0 | 0 | 0 | 0.00339 | 0 | null | null | 0.00678 | 0.057627 | null | null | 0.023729 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70474720aff7dcf7a5a4c61d68b746a8c74298dc | 51,753 | py | Python | pythonjs/runtime/builtins.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 319 | 2015-01-02T11:34:16.000Z | 2022-03-25T00:43:33.000Z | pythonjs/runtime/builtins.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 10 | 2015-02-03T02:33:09.000Z | 2021-11-09T21:41:00.000Z | pythonjs/runtime/builtins.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 61 | 2015-01-02T12:01:56.000Z | 2021-12-08T07:16:16.000Z | # PythonJS builtins
# by Amirouche Boubekki and Brett Hartshorn - copyright 2013
# License: "New BSD"
pythonjs.configure( runtime_exceptions=False )
pythonjs.configure( direct_operator='+' )
pythonjs.configure( direct_operator='*' )
pythonjs.configure( direct_keys=True )
_PythonJS_UID = 0
inline('IndexError = function(msg) {this.message = msg || "";}; IndexError.prototype = Object.create(Error.prototype); IndexError.prototype.name = "IndexError";')
inline('KeyError = function(msg) {this.message = msg || "";}; KeyError.prototype = Object.create(Error.prototype); KeyError.prototype.name = "KeyError";')
inline('ValueError = function(msg) {this.message = msg || "";}; ValueError.prototype = Object.create(Error.prototype); ValueError.prototype.name = "ValueError";')
inline('AttributeError = function(msg) {this.message = msg || "";}; AttributeError.prototype = Object.create(Error.prototype);AttributeError.prototype.name = "AttributeError";')
inline('RuntimeError = function(msg) {this.message = msg || "";}; RuntimeError.prototype = Object.create(Error.prototype);RuntimeError.prototype.name = "RuntimeError";')
with lowlevel:
    def __getfast__(ob, attr):
        # Fast attribute fetch: read the JS property directly and raise
        # AttributeError only when it is missing (undefined).
        v = ob[ attr ]
        if v is undefined:
            raise AttributeError(attr)
        else:
            return v
with javascript:
    def __wrap_function__(f):
        # Mark a JS function as a PythonJS wrapper so lookups can detect it.
        f.is_wrapper = True
        return f

    def __gpu_object(cls, struct_name, data_name):
        # Tag a class prototype with the GLSL struct name and the attribute
        # holding its packed data; consumed by the GLSL JIT below.
        cls.prototype.__struct_name__ = struct_name
        cls.prototype.__struct_data__ = data_name

with lowlevel:
    # Public entry table for the gpu helpers.
    gpu = {
        'object' : __gpu_object
    }

def glsljit_runtime(header):
    # Factory: create a GLSL JIT runtime seeded with shader header lines.
    return new( GLSLJITRuntime(header) )
class GLSLJITRuntime:
    """Builds GLSL shader source at runtime: packs JS objects/arrays into
    GLSL structs and arrays, and unpacks flat result buffers back into
    Python-side lists. Used by the PythonJS GPU backend (WebCLGL)."""

    def __init__(self, header):
        # header: list of GLSL source lines placed before main().
        self.header = header
        # shader: statements accumulated for the body of main().
        self.shader = []
        self.object_packagers = []
        # struct_types: struct name -> {'arrays','floats','integers','structs','code'}
        self.struct_types = {}
        # Built-in GLSL types that must not be re-declared as structs.
        self.glsltypes = ['vec2', 'vec3', 'vec4', 'mat4']
        self.matrices = []

    def compile_header(self):
        # Return the full header source: struct declarations, matrix index
        # helpers, then the user-supplied header lines.
        a = []  ## insert structs at top of header
        for sname in self.struct_types:
            if sname in self.glsltypes:
                pass
            else:
                a.push( self.struct_types[sname]['code'] )
        ## calls get_global_id, see WebCLGL API docs. ##
        a.push('int matrix_index() { return int(get_global_id().y*%s.0); }' %self.matrices.length)
        a.push('int matrix_row() { return int(get_global_id().x*4.0); }')  ## returns: 0, 1, 2, 3
        ## first class array error, can not return an array, even when the size is known ##
        #a.push('float[3] floatN( float a, float b, float c) { float f[3]; f[0]=a; f[1]=b; f[2]=b; return f; }')
        ## these could be generated for each array size to reduce the mess in main,
        ## TODO it would be better to upload them as uniforms.
        #a.push('void floatN( float f[3], float a, float b, float c) { f[0]=a; f[1]=b; f[2]=b; }')
        ## the array can be declared in the header, but not filled with data here.
        #a.push('float XXX[3];')
        #a.push('floatN( XXX, 1.1, 2.2, 3.3 );')
        #a.push('XXX[0]=1.1;')
        a = '\n'.join(a)
        ## code in header could be methods that reference the struct types above.
        b = "\n".join(self.header)
        return '\n'.join([a,b])

    def compile_main(self):
        # The accumulated main() body statements.
        return '\n'.join(self.shader)

    def push(self, s):
        # Append one raw GLSL statement to the main() body.
        self.shader.push(s)

    def define_structure(self, ob):
        # Derive (and cache) a GLSL struct declaration from the runtime shape
        # of `ob`: numbers become floats, numeric arrays become float arrays,
        # single-element Int16Arrays become ints, nested tagged objects become
        # nested structs. Returns the struct name.
        struct_name = None
        #if Object.hasOwnProperty.call(ob,'__struct_name__'):
        if ob.__struct_name__:
            struct_name = ob.__struct_name__
            if struct_name in self.struct_types:
                return struct_name
        arrays = []
        floats = []
        integers = []
        structs = []
        struct_type = []  ## fallback for javascript objects
        if struct_name and struct_name in self.glsltypes:
            return struct_name
        #for key in ob.keys():
        for key in dir( ob ):
            # Single digit keys would collide with GLSL identifiers.
            if key.length==1 and key in '0123456789':
                raise RuntimeError(key)
            t = typeof( ob[key] )
            if t=='object' and instanceof(ob[key], Array) and ob[key].length and typeof(ob[key][0])=='number':
                struct_type.push( 'ARY_'+key )
                arrays.push(key)
            elif t=='number':
                struct_type.push( 'NUM_'+key)
                floats.push(key)
            elif instanceof(ob[key], Int16Array):
                struct_type.push( 'INT_'+key)
                if ob[key].length == 1:
                    integers.push(key)
                else:
                    pass  ## TODO int16array
            elif t=='object' and ob[key].__struct_name__:
                struct_type.push( 'S_'+key)
                structs.push( key )
                if ob[key].__struct_name__ not in self.struct_types:
                    if ob[key].__struct_name__ in self.glsltypes:
                        pass
                    else:
                        # Recursively declare nested struct types first.
                        self.define_structure( ob[key] )
        if struct_name is None:
            # Anonymous JS object: synthesize a name from its member layout.
            #print('DEGUG: new struct name', ob.__struct_name__)
            #print(ob)
            struct_name = ''.join( struct_type )
            ob.__struct_name__ = struct_name
        if struct_name not in self.struct_types:
            member_list = []
            for key in integers:
                member_list.append('int '+key+';')
            for key in floats:
                member_list.append('float '+key+';')
            for key in arrays:
                arr = ob[key]
                member_list.append('float '+key+'['+arr.length+'];')
            for key in structs:
                subtype = ob[key].__struct_name__
                member_list.append( subtype+' '+key+';')
            if len(member_list)==0:
                raise RuntimeError(struct_name)
            members = ''.join(member_list)
            code = 'struct ' +struct_name+ ' {' +members+ '};'
            #print('-------struct glsl code-------')
            #print(code)
            #print('------------------------------')
            self.struct_types[ struct_name ] = {
                'arrays' : arrays,
                'floats' : floats,
                'integers': integers,
                'structs' : structs,
                'code'    : code
            }
        return struct_name

    def structure(self, ob, name):
        # Emit GLSL that constructs an instance of ob's struct bound to
        # `name`. Structs with array members must be built in main() (arrays
        # need local variables); others become const declarations in the header.
        wrapper = None
        if instanceof(ob, Object):
            pass
        elif ob.__class__ is dict:
            # Unwrap a PythonJS dict to its raw JS object.
            wrapper = ob
            ob = ob[...]
        sname = self.define_structure(ob)
        if wrapper:
            wrapper.__struct_name__ = sname
        args = []
        stype = self.struct_types[ sname ]
        # if stype is None: ## TODO fix me
        if sname not in self.struct_types:
            if sname in self.glsltypes:
                if sname == 'mat4':
                    # Pack a 16-float matrix; ensure each literal is a float.
                    if ob.__struct_data__:
                        o = ob[ ob.__struct_data__ ]
                    else:
                        o = ob
                    for i in range(o.length):
                        value = o[i] +''
                        if '.' not in value: value += '.0'
                        args.push( value )
            else:
                raise RuntimeError('no method to pack structure: ' +sname)
        has_arrays = False
        if stype:
            if stype['arrays'].length > 0:
                has_arrays = True
            for key in stype['integers']:
                args.push( ob[key][0]+'' )
            for key in stype['floats']:
                value = ob[key] + ''
                if '.' not in value:
                    value += '.0'
                args.push( value )
            for key in stype['arrays']:
                #args.push( '{'+ob[key].toString()+ '}') ## this will not work
                ## arrays need to be assigned to a local variable before passing
                ## it to the struct constructor.
                aname = '_'+key+name
                self.array(ob[key], aname)
                args.push( aname )
            for key in stype['structs']:
                aname = '_'+key+name
                self.structure(ob[key], aname)
                args.push( aname )
        args = ','.join(args)
        if has_arrays:
            self.shader.push( sname + ' ' +name+ '=' +sname+ '(' +args+ ');' )
        else:
            self.header.push( 'const ' + sname + ' ' +name+ '=' +sname+ '(' +args+ ');' )
        return stype

    def int16array(self, ob, name):
        # Emit an int array declaration plus element-wise assignments.
        a = ['int ' + name + '[' + ob.length + ']']
        i = 0
        while i < ob.length:
            a.push(';'+name+'['+i+']='+ob[i])
            i += 1
        self.shader.push( ''.join(a) )

    def array(self, ob, name):
        # Emit GLSL for a JS array: nested numeric arrays become one flat
        # float array per row; arrays of objects recurse into structure();
        # flat numeric arrays become a single float array.
        if instanceof(ob[0], Array):
            a = []  #'float ' + name + '[' + ob.length + ']']
            i = 0
            while i < ob.length:
                subarr = ob[i]
                subname = '%s_%s'%(name,i)
                if a.length==0:
                    a.append('float ' + subname + '[' + subarr.length + ']')
                else:
                    a.append(';float ' + subname + '[' + subarr.length + ']')
                j = 0
                while j < subarr.length:
                    v = subarr[j] + ''
                    if '.' not in v:
                        v += '.0'
                    a.push(';'+subname+'['+j+']='+v)
                    j += 1
                i += 1
            self.shader.push( ''.join(a) )
        elif instanceof(ob[0], Object) or ob[0].__class__ is dict:
            i = 0
            while i < ob.length:
                self.structure( ob[i], name+'_'+i)
                i += 1
        else:
            a = ['float ' + name + '[' + ob.length + '];']
            i = 0
            while i < ob.length:
                a.push(name+'['+i+']='+ob[i] + ';')
                i += 1
            self.shader.push( ''.join(a) )

    def object(self, ob, name):
        # Dispatch to the first registered packager whose class matches.
        for p in self.object_packagers:
            cls, func = p
            if instanceof(ob, cls):
                return func(ob)

    def unpack_array2d(self, arr, dims):
        # Reshape a flat result buffer into rows of width w (dims=(w,h)).
        # A numeric dims means the result is already 1-D.
        if typeof(dims)=='number':
            return arr
        w,h = dims
        row = []
        rows = [row]
        for value in arr:
            row.append(value)
            if row.length >= w:
                row = []
                rows.append(row)
        # The loop always appends one trailing empty row; drop it.
        rows.pop()
        if rows.length != h:
            print('ERROR: __unpack_array2d, invalid height.')
        return rows

    def unpack_vec4(self, arr, dims):
        # Reshape a flat buffer into h rows of w vec4s (groups of 4 floats).
        if typeof(dims)=='number':
            w = dims
            h = 1
        else:
            w,h = dims
        rows = []
        i=0
        for y in range(h):
            row = []
            rows.append( row )
            for x in range(w):
                vec = []
                for j in range(4):
                    vec.append( arr[i])
                    i += 1
                row.append( vec )
        if rows.length != h:
            print('ERROR: __unpack_vec4, invalid height.')
        return rows

    def unpack_mat4(self, arr):
        # Copy a flat buffer of 16-float groups back into the registered
        # matrices, in place, and return them.
        i = 0
        for mat in self.matrices:
            for j in range(16):
                mat[j] = arr[i]
                i += 1
        return self.matrices
with lowlevel:
    def __getattr__(ob, a ):
        # Route attribute access through a user-defined __getattr__ hook.
        if ob.__getattr__:
            return JS("ob.__getattr__(a)")
        #else:
        #    raise AttributeError(a)

    def __test_if_true__( ob ):
        # Python truthiness on top of JS values: empty strings/arrays/dicts
        # and falsy JS values are False; functions and other objects are True.
        if ob is True:
            return True
        elif ob is False:
            return False
        elif typeof(ob) == 'string':
            return ob.length != 0
        elif not ob:
            return False
        elif instanceof(ob, Array):
            return ob.length != 0
        elif typeof(ob) == 'function':
            return True
        elif ob.__class__ and ob.__class__ is dict:  #isinstance(ob, dict):
            # PythonJS dict: truthy when its wrapped JS object has keys.
            return Object.keys( ob[...] ).length != 0
        elif instanceof(ob, Object):
            return Object.keys(ob).length != 0
        else:
            return True

    def __replace_method(ob, a, b):
        ## this is required because string.replace in javascript only replaces the first occurrence
        if typeof(ob) == 'string':
            return ob.split(a).join(b)
        else:
            return ob.replace(a,b)

    def __split_method( ob, delim ):
        ## special case because calling string.split() without args its not the same as python,
        ## and we do not want to touch the default string.split implementation.
        if typeof(ob) == 'string':
            if delim is undefined:
                return ob.split(' ')
            else:
                return ob.split( delim )
        else:
            if delim is undefined:
                return ob.split()
            else:
                return ob.split( delim )
with javascript:
    # Collect the browser's array-like DOM collection types, feature-detected
    # one by one since availability differs per engine.
    __dom_array_types__ = []
    if typeof(NodeList) == 'function':  ## NodeList is only available in browsers
        ## minimal dom array types common to allow browsers ##
        __dom_array_types__ = [ NodeList, FileList, DOMStringList, HTMLCollection, SVGNumberList, SVGTransformList]
        ## extra dom array types ##
        if typeof(DataTransferItemList) == 'function':  ## missing in NodeWebkit
            __dom_array_types__.push( DataTransferItemList )
        if typeof(HTMLAllCollection) == 'function':  ## missing in Firefox
            __dom_array_types__.push( HTMLAllCollection )
        if typeof(SVGElementInstanceList) == 'function':  ## missing in Firefox
            __dom_array_types__.push( SVGElementInstanceList )
        if typeof(ClientRectList) == 'function':  ## missing in Firefox-trunk
            __dom_array_types__.push( ClientRectList )

    def __is_some_array( ob ):
        # True when ob is one of the detected DOM collection types above.
        if __dom_array_types__.length > 0:
            for t in __dom_array_types__:
                if instanceof(ob, t):
                    return True
        return False
    def __is_typed_array( ob ):
        # True for any JS TypedArray of 8/16/32-bit ints or 32/64-bit floats.
        if instanceof( ob, Int8Array ) or instanceof( ob, Uint8Array ):
            return True
        elif instanceof( ob, Int16Array ) or instanceof( ob, Uint16Array ):
            return True
        elif instanceof( ob, Int32Array ) or instanceof( ob, Uint32Array ):
            return True
        elif instanceof( ob, Float32Array ) or instanceof( ob, Float64Array ):
            return True
        else:
            return False

    def __js_typed_array( t, a ):
        # Build a typed array from a plain array; only 'i' (Int32) handled.
        if t == 'i':
            arr = new( Int32Array(a.length) )
            arr.set( a )
            return arr

    def __contains__( ob, a ):
        # Implements the Python `in` operator: substring test for strings,
        # element scan for typed arrays, a __contains__ hook when present,
        # own-property test for plain JS objects.
        t = typeof(ob)
        if t == 'string':
            if ob.indexOf(a) == -1: return False
            else: return True
        elif t == 'number':
            raise TypeError
        elif __is_typed_array(ob):
            for x in ob:
                if x == a:
                    return True
            return False
        elif ob.__contains__:
            return ob.__contains__(a)
        elif instanceof(ob, Object) and Object.hasOwnProperty.call(ob, a):
            return True
        else:
            return False
    def __add_op(a, b):
        ## 'number' is already checked before this gets called (ternary op)
        ## but it can still appear here when called from an inlined lambda
        # Python `+`: native for strings/numbers, concatenation for arrays,
        # __add__ hook otherwise.
        t = typeof(a)
        if t == 'string' or t == 'number':
            return JS("a+b")
        elif instanceof(a, Array):
            c = []
            c.extend(a)
            c.extend(b)
            return c
        elif a.__add__:
            return a.__add__(b)
        else:
            raise TypeError('invalid objects for addition')

    def __mul_op(a, b):
        # Python `*`: native for numbers, repetition for strings and arrays
        # (b copies), __mul__ hook otherwise.
        t = typeof(a)
        if t == 'number':
            return JS("a * b")
        elif t == 'string':
            arr = []
            for i in range(b):
                arr.append(a)
            return ''.join(arr)
        elif instanceof(a, Array):
            c = []
            for i in range(b):
                c.extend(a)
            return c
        elif a.__mul__:
            return a.__mul__(b)
        else:
            raise TypeError('invalid objects for multiplication')
    def __jsdict( items ):
        # Build a plain JS object from (key, value) pairs. Array keys are
        # stringified as JSON; PythonJS objects key on their unique id.
        d = JS("{}")
        for item in items:
            key = item[0]
            if instanceof(key, Array):
                key = JSON.stringify(key)
            elif key.__uid__:
                key = key.__uid__
            d[ key ] = item[1]
        return d

    def __jsdict_get(ob, key, default_value):
        # dict.get: works on both plain JS objects and PythonJS dict instances.
        if instanceof(ob, Object):
            if instanceof(key, Array):
                key = JSON.stringify(key)
            if JS("key in ob"): return ob[key]
            return default_value
        else:  ## PythonJS object instance ##
            ## this works because instances from PythonJS are created using Object.create(null) ##
            if default_value is not undefined:
                return JS("ob.get(key, default_value)")
            else:
                return JS("ob.get(key)")

    def __jsdict_set(ob, key, value):
        # dict item assignment for plain JS objects and PythonJS instances.
        if instanceof(ob, Object):
            if instanceof(key, Array):
                key = JSON.stringify(key)
            ob[ key ] = value
        else:  ## PythonJS object instance ##
            ## this works because instances from PythonJS are created using Object.create(null) ##
            JS("ob.set(key,value)")

    def __jsdict_keys(ob):
        # dict.keys for plain JS objects and PythonJS instances.
        if instanceof(ob, Object):
            ## in the case of tuple keys this would return stringified JSON instead of the original arrays,
            ## TODO, should this loop over the keys and convert the json strings back to objects?
            ## but then how would we know if a given string was json... special prefix character?
            return JS("Object.keys( ob )")
        else:  ## PythonJS object instance ##
            ## this works because instances from PythonJS are created using Object.create(null) ##
            return JS("ob.keys()")

    def __jsdict_values(ob):
        # dict.values: own-property values for plain JS objects.
        if instanceof(ob, Object):
            arr = []
            for key in ob:
                if ob.hasOwnProperty(key):
                    value = ob[key]
                    arr.push( value )
            return arr
        else:  ## PythonJS object instance ##
            ## this works because instances from PythonJS are created using Object.create(null) ##
            return JS("ob.values()")

    def __jsdict_items(ob):
        ## `ob.items is None` is for: "self.__dict__.items()" because self.__dict__ is not actually a dict
        if instanceof(ob, Object) or ob.items is undefined:  ## in javascript-mode missing attributes do not raise AttributeError
            arr = []
            for key in ob:
                if Object.hasOwnProperty.call(ob, key):
                    value = ob[key]
                    arr.push( [key,value] )
            return arr
        else:  ## PythonJS object instance ##
            return JS("ob.items()")

    def __jsdict_pop(ob, key, _default=None):
        # list.pop / dict.pop combined: splice for arrays, delete-and-return
        # for plain JS objects, delegate for PythonJS instances.
        if instanceof(ob, Array):
            if ob.length:
                ## note: javascript array.pop only pops the end of an array
                if key is undefined:
                    return inline("ob.pop()")
                else:
                    return ob.splice( key, 1 )[0]
            else:
                raise IndexError(key)
        elif instanceof(ob, Object):
            if JS("key in ob"):
                v = ob[key]
                JS("delete ob[key]")
                return v
            elif _default is undefined:
                raise KeyError(key)
            else:
                return _default
        else:  ## PythonJS object instance ##
            ## this works because instances from PythonJS are created using Object.create(null) ##
            return JS("ob.pop(key, _default)")
    def dir(ob):
        # Python dir(): own keys for plain JS objects, full prototype-chain
        # keys for PythonJS instances.
        if instanceof(ob, Object):
            return JS("Object.keys( ob )")
        else:
            return __object_keys__(ob)

    def __object_keys__(ob):
        '''
        notes:
        . Object.keys(ob) will not work because we create PythonJS objects using `Object.create(null)`
        . this is different from Object.keys because it traverses the prototype chain.
        '''
        arr = []
        JS('for (var key in ob) { arr.push(key) }')
        return arr
    def __bind_property_descriptors__(o, klass):
        # Install JS property descriptors on instance `o` for every Python
        # property declared on `klass`, then recurse over its base classes.
        for name in klass.__properties__:
            desc = {"enumerable":True}
            prop = klass.__properties__[ name ]
            if prop['get']:
                desc['get'] = __generate_getter__(klass, o, name)
            if prop['set']:
                desc['set'] = __generate_setter__(klass, o, name)
            Object.defineProperty( o, name, desc )
        for base in klass.__bases__:
            __bind_property_descriptors__(o, base)

    def __generate_getter__(klass, o, n):
        # Closure capturing (klass, o, n) so the descriptor calls the
        # property's getter with the instance as args.
        return lambda : klass.__properties__[ n ]['get']([o],{})

    def __generate_setter__(klass, o, n):
        return lambda v: klass.__properties__[ n ]['set']([o,v],{})
    def __sprintf(fmt, args):
        ## note: '%sXXX%s'.split().length != args.length
        ## because `%s` at the start or end will split to empty chunks ##
        # Minimal printf: substitute each '%s' in fmt with the matching
        # element of args (array form) or with args itself (single value).
        if instanceof(args, Array):
            chunks = fmt.split('%s')
            arr = []
            for i,txt in enumerate(chunks):
                arr.append( txt )
                if i >= args.length:
                    break
                item = args[i]
                if typeof(item) == 'string':
                    arr.append( item )
                elif typeof(item) == 'number':
                    arr.append( ''+item )
                else:
                    # Fallback: JS default stringification, e.g. "[object Object]".
                    arr.append( Object.prototype.toString.call(item) )
            return ''.join(arr)
        else:
            return fmt.replace('%s', args)
    def __create_class__(class_name, parents, attrs, props):
        """Create a PythonScript class: a bare JS object carrying the class
        metadata (__bases__, methods, properties) plus a __call__ wrapper
        that constructs instances."""
        #if attrs.__metaclass__:
        #    metaclass = attrs.__metaclass__
        #    attrs.__metaclass__ = None
        #    return metaclass([class_name, parents, attrs])
        klass = Object.create(null)
        klass.__bases__ = parents
        klass.__name__ = class_name
        #klass.__dict__ = attrs
        klass.__unbound_methods__ = Object.create(null)
        klass.__all_method_names__ = []
        klass.__properties__ = props
        klass.__attributes__ = attrs
        for key in attrs:
            if typeof( attrs[key] ) == 'function':
                klass.__all_method_names__.push( key )
                f = attrs[key]
                # class/static methods are not kept as unbound methods.
                if hasattr(f, 'is_classmethod') and f.is_classmethod:
                    pass
                elif hasattr(f, 'is_staticmethod') and f.is_staticmethod:
                    pass
                else:
                    klass.__unbound_methods__[key] = attrs[key]
            # __getattribute__ is looked up specially at instance creation.
            if key == '__getattribute__': continue
            klass[key] = attrs[key]
        ## this is needed for fast lookup of property names in __set__ ##
        klass.__setters__ = []
        klass.__getters__ = []
        for name in klass.__properties__:
            prop = klass.__properties__[name]
            klass.__getters__.push( name )
            if prop['set']:
                klass.__setters__.push( name )
        # Inherit getters/setters/method names from all base classes.
        for base in klass.__bases__:
            Array.prototype.push.apply( klass.__getters__, base.__getters__ )
            Array.prototype.push.apply( klass.__setters__, base.__setters__ )
            Array.prototype.push.apply( klass.__all_method_names__, base.__all_method_names__ )

        def __call__():
            """Create a PythonJS object"""
            object = Object.create(null)  ## this makes pythonjs object not compatible with things like: Object.hasOwnProperty
            object.__class__ = klass
            object.__dict__ = object
            ## we need __dict__ so that __setattr__ can still set attributes using `old-style`: self.__dict__[n]=x
            #Object.defineProperty(
            #    object,
            #    '__dict__',
            #    {enumerable:False, value:object, writeable:False, configurable:False}
            #)
            has_getattribute = False
            has_getattr = False
            # Pre-cache bound method wrappers on the instance.
            for name in klass.__all_method_names__:
                if name == '__getattribute__':
                    has_getattribute = True
                elif name == '__getattr__':
                    has_getattr = True
                else:
                    wrapper = __get__(object, name)
                    if not wrapper.is_wrapper:
                        print 'RUNTIME ERROR: failed to get wrapper for:',name
            ## to be safe the getters come after other methods are cached ##
            if has_getattr:
                __get__(object, '__getattr__')
            if has_getattribute:
                __get__(object, '__getattribute__')
            __bind_property_descriptors__(object, klass)
            if object.__init__:
                object.__init__.apply(this, arguments)
                #object.__init__.call(this,args, kwargs)
            return object

        __call__.is_wrapper = True
        klass.__call__ = __call__
        return klass
def type(ob_or_class_name, bases=None, class_dict=None):
    '''
    type(object) -> the object's type
    type(name, bases, dict) -> a new type ## broken? - TODO test
    '''
    with javascript:
        if bases is None and class_dict is None:
            return ob_or_class_name.__class__
        else:
            return create_class(ob_or_class_name, bases, class_dict)  ## TODO rename create_class to _pyjs_create_class

def hasattr(ob, attr):
    ## TODO check parent classes for attr
    # Own-property test only — inherited attributes are not seen.
    with javascript:
        return Object.hasOwnProperty.call(ob, attr)
def getattr(ob, attr, property=False):
    # Python getattr; with property=True, invoke the property getter found
    # on the class (or its bases) instead of a plain attribute lookup.
    with javascript:
        if property:
            prop = _get_upstream_property( ob.__class__, attr )
            if prop and prop['get']:
                return prop['get']( [ob], {} )
            else:
                print "ERROR: getattr property error", prop
        else:
            return __get__(ob, attr)

def setattr(ob, attr, value, property=False):
    # Python setattr; with property=True, invoke the property setter found
    # on the class (or its bases) instead of a plain attribute store.
    with javascript:
        if property:
            prop = _get_upstream_property( ob.__class__, attr )
            if prop and prop['set']:
                prop['set']( [ob, value], {} )
            else:
                print "ERROR: setattr property error", prop
        else:
            __set__(ob, attr, value)
def issubclass(C, B):
    # Recursive walk over __bases__; identity counts as subclass.
    if C is B:
        return True
    with javascript: bases = C.__bases__  ## js-array
    i = 0
    while i < bases.length:
        if issubclass( bases[i], B ):
            return True
        i += 1
    return False

def isinstance( ob, klass):
    # Python isinstance on PythonJS objects: JS arrays match `list`,
    # everything else requires an own `__class__` and an issubclass check.
    with javascript:
        if ob is undefined or ob is null:
            return False
        elif instanceof(ob, Array) and klass is list:
            return True
        #elif klass is dict and instanceof(ob, Object): ## this is safe because instances created with Object.create(null) are not instances-of Object
        #    if instanceof(ob, Array):
        #        return False
        #    elif ob.__class__:
        #        return False
        #    else:
        #        return True
        elif not Object.hasOwnProperty.call(ob, '__class__'):
            return False
        ob_class = ob.__class__
        if ob_class is undefined:
            return False
        else:
            return issubclass( ob_class, klass )

def int(a):
    # Python int(): round to nearest and reject NaN.
    with javascript:
        a = Math.round(a)
        if isNaN(a):
            raise ValueError('not a number')
        return a
with javascript:
    def int16(a):  ## used by glsljit when packing structs.
        # Wrap a number in a single-element Int16Array (tags it as an int
        # member for define_structure).
        arr = new(Int16Array(1))
        arr[0]=a
        return arr

def float(a):
    # Python float(): accepts 'nan'/'inf' strings, rejects anything else
    # that does not convert to a number.
    with javascript:
        if typeof(a)=='string':
            if a.lower()=='nan':
                return NaN
            elif a.lower()=='inf':
                return Infinity
        b = Number(a)
        if isNaN(b):
            ## invalid strings also convert to NaN, throw error ##
            raise ValueError('can not convert to float: '+a)
        return b

def round(a, places=0):
    # Python round() to `places` decimal digits; integers pass through.
    with javascript:
        b = '' + a
        if b.indexOf('.') == -1:
            return a
        else:
            ## this could return NaN with large numbers and large places,
            ## TODO check for NaN and instead fallback to `a.toFixed(places)`
            p = Math.pow(10, places)
            return Math.round(a * p) / p

def str(s):
    # Python str(): JS string coercion.
    return ''+s
def _setup_str_prototype():
    '''
    Extend JavaScript String.prototype with methods that implement the Python str API.
    The decorator @String.prototype.[name] assigns the function to the prototype,
    and ensures that the special 'this' variable will work.
    '''
    with javascript:
        @String.prototype.__contains__
        def func(a):
            # Python `in` for strings: substring test.
            if this.indexOf(a) == -1: return False
            else: return True

        @String.prototype.get
        def func(index):
            # Indexing with negative-index support.
            if index < 0:
                return this[ this.length + index ]
            else:
                return this[ index ]

        @String.prototype.__iter__
        def func(self):
            with python:
                return Iterator(this, 0)

        @String.prototype.__getitem__
        def func(idx):
            if idx < 0:
                return this[ this.length + idx ]
            else:
                return this[ idx ]

        @String.prototype.__len__
        def func():
            return this.length

        @String.prototype.__getslice__
        def func(start, stop, step):
            # [::-1] special case reverses the string; otherwise substring
            # with negative-stop support.
            if start is undefined and stop is undefined and step == -1:
                return this.split('').reverse().join('')
            else:
                if stop < 0:
                    stop = this.length + stop
                return this.substring(start, stop)

        @String.prototype.splitlines
        def func():
            return this.split('\n')

        @String.prototype.strip
        def func():
            return this.trim()  ## missing in IE8

        @String.prototype.startswith
        def func(a):
            if this.substring(0, a.length) == a:
                return True
            else:
                return False

        @String.prototype.endswith
        def func(a):
            if this.substring(this.length-a.length, this.length) == a:
                return True
            else:
                return False

        @String.prototype.join
        def func(a):
            # Python str.join over a JS array or a PythonJS list (unwrapped
            # via a[...]); the separator is `this`.
            out = ''
            if instanceof(a, Array):
                arr = a
            else:
                arr = a[...]
            i = 0
            for value in arr:
                out += value
                i += 1
                if i < arr.length:
                    out += this
            return out

        @String.prototype.upper
        def func():
            return this.toUpperCase()

        @String.prototype.lower
        def func():
            return this.toLowerCase()

        @String.prototype.index
        def func(a):
            # Like find(), but raises ValueError when not found (Python semantics).
            i = this.indexOf(a)
            if i == -1:
                raise ValueError(a + ' - not in string')
            return i

        @String.prototype.find
        def func(a):
            return this.indexOf(a)

        @String.prototype.isdigit
        def func():
            digits = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
            for char in this:
                if char in digits: pass
                else: return False
            return True

        @String.prototype.isnumber
        def func():
            # Like isdigit but also accepts the decimal point.
            digits = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.']
            for char in this:
                if char in digits: pass
                else: return False
            return True

        ## TODO - for now these are just dummy functions.
        @String.prototype.decode
        def func(encoding):
            return this

        @String.prototype.encode
        def func(encoding):
            return this

        @String.prototype.format
        def func(fmt):
            # Minimal str.format: replace each key of `fmt` (a mapping) in
            # the template, then strip any remaining braces.
            r = this
            keys = Object.keys(fmt)
            for key in keys:
                r = r.split(key).join(fmt[key])
            r = r.split('{').join('').split('}').join('')
            return r

_setup_str_prototype()
## note Arrays in javascript by default sort by string order, even if the elements are numbers.
with javascript:
    def __sort_method(ob):
        ## numeric-aware sort for real JS Arrays; any other object falls back
        ## to whatever `sort` method it provides
        if instanceof(ob, Array):
            def f(a,b):
                ## three-way comparator so numbers sort numerically
                if a < b:
                    return -1
                elif a > b:
                    return 1
                else:
                    return 0
            return JS("ob.sort( f )")
        else:
            return JS("ob.sort()")
def _setup_array_prototype():
    '''
    Extend JavaScript Array.prototype with methods that implement the Python
    list API (plus a few set-like helpers used by `set`), in the same style
    as _setup_str_prototype.
    '''
    with javascript:

        @Array.prototype.jsify
        def func():
            ## in-place: convert wrapped pythonjs objects held by the array into plain JS objects
            i = 0
            while i < this.length:
                item = this[ i ]
                if typeof(item) == 'object':
                    if item.jsify:
                        this[ i ] = item.jsify()
                i += 1
            return this

        @Array.prototype.__contains__
        def func(a):
            if this.indexOf(a) == -1: return False
            else: return True

        @Array.prototype.__len__
        def func():
            return this.length

        @Array.prototype.get
        def func(index):
            ## raw lookup used by Iterator; no negative-index handling here
            return this[ index ]

        @Array.prototype.__getitem__
        def __getitem__(index):
            if index < 0: index = this.length + index
            return this[index]

        @Array.prototype.__setitem__
        def __setitem__(index, value):
            if index < 0: index = this.length + index
            this[ index ] = value

        @Array.prototype.__iter__
        def func():
            with python:
                return Iterator(this, 0)

        @Array.prototype.__getslice__
        def func(start, stop, step):
            arr = []
            start = start | 0  ## coerces an undefined/null start to 0
            if stop is undefined:
                stop = this.length
            if start < 0:
                start = this.length + start
            if stop < 0:
                stop = this.length + stop
            #reverse = step < 0 ## in javascript `null<0` and `undefined<0` are false
            #reverse = False
            if typeof(step)=='number':
                #reverse = step < 0
                #if reverse:
                if step < 0:
                    ## negative step: walk from `start` down to index 0;
                    ## NOTE(review): `stop` is ignored on this path -- confirm intended
                    #step = Math.abs(step)
                    i = start
                    while i >= 0:
                        arr.push( this[i] )
                        i += step
                    return arr
                else:
                    i = start
                    n = stop
                    while i < n:
                        arr.push( this[i] )
                        i += step
                    return arr
            else:
                ## no numeric step: plain copy of the half-open range [start, stop)
                i = start
                n = stop
                while i < n:
                    #arr[ i ] = this[i] ## slower in chrome
                    arr.push( this[i] )
                    i += 1 ## this gets optimized to i++
                return arr

            ## old implementation kept below for reference ##
            #if reverse:
            #    arr.reverse()
            #if step == 1:
            #    arr = new(Array(this.length))
            #    i = 0
            #    while i < this.length:
            #        arr[ i ] = this[i]
            #        i += 1 ## this gets optimized to i++
            #else:
            #    arr = []
            #    i = 0
            #    while i < this.length:
            #        arr.push( this[i] )
            #        i += step
            #if start is undefined and stop is undefined:
            #    if reverse: arr.reverse()
            #elif reverse:
            #    arr = arr.slice(stop, start+1)
            #    arr.reverse()
            #else:
            #    #if stop < 0: ## mozilla spec says negative indices are supported
            #    #    stop = arr.length + stop
            #    arr = arr.slice(start, stop)
            #return arr

        @Array.prototype.__setslice__
        def func(start, stop, step, items):
            ## builds [start, deleteCount, item...] and hands it to splice via apply
            if start is undefined: start = 0
            if stop is undefined: stop = this.length
            arr = [start, stop-start]
            for item in items: arr.push( item )
            this.splice.apply(this, arr )

        @Array.prototype.append
        def func(item):
            this.push( item )
            return this

        @Array.prototype.extend
        def extend(other):
            for obj in other:
                this.push(obj)
            return this

        @Array.prototype.remove
        def func(item):
            ## NOTE(review): when `item` is absent indexOf yields -1 and
            ## splice(-1, 1) removes the LAST element -- confirm intended
            index = this.indexOf( item )
            this.splice(index, 1)

        @Array.prototype.insert
        def insert(index, obj):
            if index < 0: index = this.length + index
            this.splice(index, 0, obj)

        @Array.prototype.index
        def index(obj):
            return this.indexOf(obj)

        @Array.prototype.count
        def count(obj):
            a = 0
            for item in this:
                if item is obj: ## note that `==` will not work here, `===` is required for objects
                    a += 1
            return a

        ## set-like features ##

        @Array.prototype.bisect
        def func(x, low, high):
            ## binary search: insertion point for x in a sorted array (bisect_right style)
            if low is undefined: low = 0
            if high is undefined: high = this.length
            while low < high:
                a = low+high
                mid = Math.floor(a/2)
                if x < this[mid]:
                    high = mid
                else:
                    low = mid + 1
            return low

        ## `-` operator
        @Array.prototype.difference
        def func(other):
            f = lambda i: other.indexOf(i)==-1
            return this.filter( f )

        ## `&` operator
        @Array.prototype.intersection
        def func(other):
            f = lambda i: other.indexOf(i)!=-1
            return this.filter( f )

        ## `<=` operator
        @Array.prototype.issubset
        def func(other):
            for item in this:
                if other.indexOf(item) == -1:
                    return False
            return True

        ## non-standard utils ##
        @Array.prototype.copy
        def func():
            ## shallow copy
            arr = []
            i = 0
            while i < this.length:
                arr.push( this[i] )
                i += 1
            return arr

_setup_array_prototype()
def _setup_nodelist_prototype():
    '''
    Give DOM NodeList the same minimal Python sequence API as Array.
    Browser main-thread only: NodeList does not exist under NodeJS or
    inside WebWorkers, hence the guard around the call below.
    '''
    with javascript:

        @NodeList.prototype.__contains__
        def func(a):
            ## NOTE(review): relies on NodeList.indexOf, which plain DOM
            ## NodeLists do not define -- confirm the runtime patches it
            if this.indexOf(a) == -1: return False
            else: return True

        @NodeList.prototype.__len__
        def func():
            return this.length

        @NodeList.prototype.get
        def func(index):
            return this[ index ]

        @NodeList.prototype.__getitem__
        def __getitem__(index):
            if index < 0: index = this.length + index
            return this[index]

        @NodeList.prototype.__setitem__
        def __setitem__(index, value):
            if index < 0: index = this.length + index
            this[ index ] = value

        @NodeList.prototype.__iter__
        def func():
            with python:
                return Iterator(this, 0)

        @NodeList.prototype.index
        def index(obj):
            return this.indexOf(obj)

if __NODEJS__ == False and __WEBWORKER__ == False:
    _setup_nodelist_prototype()
def bisect(a, x, low=None, high=None):
    ## bisect function from bisect module of the stdlib
    ## delegates to Array.prototype.bisect installed by _setup_array_prototype
    with javascript:
        return a.bisect(x, low, high)
def range(num, stop, step):
    """Emulates Python's range function"""
    ## one-arg form: range(n); two/three-arg form: range(start, stop[, step]);
    ## NOTE(review): negative steps never terminate the `while i < num` loop's
    ## condition usefully and yield an empty array -- confirm acceptable
    if stop is not undefined:
        i = num
        num = stop
    else:
        i = 0
    if step is undefined:
        step = 1
    with javascript:
        arr = []
        while i < num:
            arr.push(i)
            i += step
        return arr
def xrange(num, stop, step):
    ## alias of range: this runtime has no lazy range objects
    return range(num, stop, step)
def sum( arr ):
    """Return the total of the items in *arr*, starting from 0."""
    total = 0
    for item in arr:
        total = total + item
    return total
class StopIteration: ## DEPRECATED
    ## kept only so old translated code that references StopIteration still loads
    pass
def len(ob):
    ## Python len() for: JS arrays, typed arrays, ArrayBuffers (byte length),
    ## objects exposing __len__, and plain objects (number of own keys)
    with javascript:
        if instanceof(ob, Array):
            return ob.length
        elif __is_typed_array(ob):
            return ob.length
        elif instanceof(ob, ArrayBuffer):
            return ob.byteLength
        elif ob.__len__:
            return ob.__len__()
        else: #elif instanceof(ob, Object):
            return Object.keys(ob).length
def next(obj):
    """Advance *obj* one step by delegating to its ``next`` method."""
    return obj.next()
def map(func, objs):
    ## eager map: applies `func` to each item and returns a JS array
    ## (not a lazy iterator as in Python 3)
    with javascript: arr = []
    for ob in objs:
        v = func(ob)
        with javascript:
            arr.push( v )
    return arr
def filter(func, objs):
    ## eager filter: keeps items for which `func` is truthy, returns a JS array
    with javascript: arr = []
    for ob in objs:
        if func( ob ):
            with javascript:
                arr.push( ob )
    return arr
def min( lst ):
    """Return the smallest item of *lst*, or None for an empty sequence."""
    smallest = None
    for item in lst:
        if smallest is None or item < smallest:
            smallest = item
    return smallest
def max( lst ):
    """Return the largest item of *lst*, or None for an empty sequence."""
    largest = None
    for item in lst:
        if largest is None or item > largest:
            largest = item
    return largest
def abs( num ):
    ## thin wrapper over Math.abs
    return JS('Math.abs(num)')

def ord( char ):
    ## UTF-16 code unit of the first character
    return JS('char.charCodeAt(0)')

def chr( num ):
    ## character for a UTF-16 code unit
    return JS('String.fromCharCode(num)')
with javascript:
    class __ArrayIterator:
        ## low-level iterator over a JS array; performs no bounds checks --
        ## the consumer is expected to stop after `length` items
        def __init__(self, arr, index):
            self.arr = arr
            self.index = index
            self.length = arr.length

        def next(self):
            index = self.index
            self.index += 1
            arr = self.arr
            return JS('arr[index]')

class Iterator:
    ## rather than throwing an exception, it could be more optimized to have the iterator set a done flag,
    ## and another downside is having the try/catch around this makes errors in the loop go silent.
    def __init__(self, obj, index):
        self.obj = obj
        self.index = index
        self.length = len(obj)
        self.obj_get = obj.get ## cache this for speed

    def next(self):
        with javascript:
            index = self.index
            self.index += 1
            ## pythonjs calling convention: (positional-args-array, keyword-args-object)
            return self.obj_get( [index], {} )
def tuple(a):
    ## TODO tuple needs a solution for dict keys
    ## tuples are represented as plain JS arrays, so this behaves exactly like `list`
    with javascript:
        if Object.keys(arguments).length == 0: #arguments.length == 0:
            return []
        elif instanceof(a, Array):
            return a.slice()
        elif typeof(a) == 'string':
            return a.split('')
        else:
            print a
            print arguments
            raise TypeError
def list(a):
    ## build a JS array from nothing, another array (shallow copy), or a string
    with javascript:
        if Object.keys(arguments).length == 0: #arguments.length == 0:
            return []
        elif instanceof(a, Array):
            return a.slice()
        elif typeof(a) == 'string':
            return a.split('')
        else:
            print a
            print arguments
            raise TypeError
with javascript:
    def __tuple_key__(arr):
        ## build a stable string key so a tuple (JS array) can index a dict:
        ## strings are quoted, nested arrays recurse, objects use their __uid__
        r = []
        i = 0
        while i < arr.length:
            item = arr[i]
            t = typeof(item)
            if t=='string':
                r.append( "'"+item+"'")
            elif instanceof(item, Array):
                r.append( __tuple_key__(item) )
            elif t=='object':
                if item.__uid__ is undefined:
                    ## only objects that were already given a uid are hashable here
                    raise KeyError(item)
                r.append( item.__uid__ )
            else:
                r.append( item )
            i += 1
        return r.join(',')
class dict:
    # http://stackoverflow.com/questions/10892322/javascript-hashtable-use-object-key
    # using a function as a key is allowed, but would waste memory because it gets converted to a string
    # http://stackoverflow.com/questions/10858632/are-functions-valid-keys-for-javascript-object-properties
    # Python-style dict backed by a plain javascript object stored in `self[...]`.
    # Non-string keys are stringified: tuples (JS arrays) via __tuple_key__,
    # objects/functions via their generated `__uid__`.

    def __init__(self, js_object=None, pointer=None):
        ## `pointer` adopts an existing low-level JS object without copying;
        ## `js_object` may be an Array of [key, value] pairs (or {key, value}
        ## records), or another dict to copy entries from.
        with javascript:
            self[...] = {}
            if pointer is not None:
                self[...] = pointer
            elif js_object:
                ob = js_object
                if instanceof(ob, Array):
                    for o in ob:
                        with lowlevel:
                            if instanceof(o, Array):
                                k= o[0]; v= o[1]
                            else:
                                k= o['key']; v= o['value']
                        try:
                            self.__setitem__( k,v )
                        except KeyError:
                            raise KeyError('error in dict init, bad key')
                elif isinstance(ob, dict):
                    for key in ob.keys():
                        value = ob[ key ]
                        self.__setitem__( key, value )
                else:
                    print 'ERROR init dict from:', js_object
                    raise TypeError

    def jsify(self):
        #keys = Object.keys( self[...] ) ## TODO check how this got broken, this should always be a low-level object?
        ## recursively convert contained wrapped objects, then hand back the raw JS object
        keys = __object_keys__( self[...] )
        for key in keys:
            value = self[...][key]
            if typeof(value) == 'object':
                if hasattr(value, 'jsify'):
                    self[...][key] = value.jsify()
            elif typeof(value) == 'function':
                raise RuntimeError("can not jsify function")
        return self[...]

    def copy(self):
        ## shallow copy
        return dict( self )

    def clear(self):
        ## drop all entries by replacing the backing object
        with javascript:
            self[...] = {}

    def has_key(self, key):
        ## Python2-style membership test; objects/functions are looked up by __uid__
        __dict = self[...]
        if JS("typeof(key) === 'object' || typeof(key) === 'function'"):
            # Test undefined because it can be in the dict
            key = key.__uid__
        if JS("key in __dict"):
            return True
        else:
            return False

    def update(self, other):
        for key in other:
            self.__setitem__( key, other[key] )

    def items(self):
        arr = []
        for key in self.keys():
            arr.append( [key, self[key]] )
        return arr

    def get(self, key, _default=None):
        try:
            return self[key]
        except:
            return _default

    def set(self, key, value):
        ## non-standard alias of __setitem__
        self.__setitem__(key, value)

    def __len__(self):
        __dict = self[...]
        return JS('Object.keys(__dict).length')

    def __getitem__(self, key):
        '''
        note: `"4"` and `4` are the same key in javascript, is there a sane way to workaround this,
        that can remain compatible with external javascript?
        '''
        with javascript:
            __dict = self[...]
            err = False
            if instanceof(key, Array):
                #key = JSON.stringify( key ) ## fails on objects with circular references ##
                key = __tuple_key__(key)
            elif JS("typeof(key) === 'object' || typeof(key) === 'function'"):
                # Test undefined because it can be in the dict
                if JS("key.__uid__ && key.__uid__ in __dict"):
                    return JS('__dict[key.__uid__]')
                else:
                    err = True
            if __dict and JS("key in __dict"):
                return JS('__dict[key]')
            else:
                err = True
            if err:
                ## NOTE(review): `msg` is built but never used in the raised error,
                ## and KeyError gets the key list rather than the missing key
                msg = "missing key: %s -\n" %key
                raise KeyError(__dict.keys())

    def __setitem__(self, key, value):
        with javascript:
            if key is undefined:
                raise KeyError('undefined is invalid key type')
            if key is null:
                raise KeyError('null is invalid key type')
            __dict = self[...]
            if instanceof(key, Array):
                #key = JSON.stringify( key ) ## fails on objects with circular references ##
                key = __tuple_key__(key)
                if key is undefined:
                    raise KeyError('undefined is invalid key type (tuple)')
                inline( '__dict[key] = value')
            elif JS("typeof(key) === 'object' || typeof(key) === 'function'"):
                if JS("key.__uid__ === undefined"):
                    # "" is needed so that integers can also be used as keys #
                    JS(u"key.__uid__ = '' + _PythonJS_UID++")
                JS('__dict[key.__uid__] = value')
            else:
                JS('__dict[key] = value')

    def keys(self):
        with lowlevel:
            return Object.keys( self[...] )

    def pop(self, key, d=None):
        ## NOTE(review): a stored value of None is indistinguishable from a
        ## missing key here, so popping it returns `d` without deleting
        v = self.get(key, None)
        if v is None:
            return d
        else:
            js_object = self[...]
            JS("delete js_object[key]")
            return v

    def values(self):
        with javascript:
            keys = Object.keys( self[...] )
            out = []
            for key in keys:
                out.push( self[...][key] )
            return out

    def __contains__(self, value):
        try:
            self[value]
            return True
        except:
            return False

    def __iter__(self):
        ## iterates over keys, like Python
        return Iterator(self.keys(), 0)
def set(a):
    '''
    This returns an array that is a minimal implementation of set.
    Often sets are used simply to remove duplicate entries from a list,
    and then it gets converted back to a list, it is safe to use fastset for this.

    The array prototype is overloaded with basic set functions:
        difference
        intersection
        issubset

    Note: sets in Python are not subscriptable, but can be iterated over.

    Python docs say that sets are unordered, some programs may rely on this disorder
    for randomness; for sets of integers we emulate the disorder only upon initialization
    of the set, by masking the value by bits-1.  Python implements sets starting with an
    array of length 8, and mask of 7, if set length grows to 6 (3/4th), then it allocates
    a new array of length 32 and mask of 31.  This is only emulated for arrays of
    integers up to an array length of 1536.
    '''
    with javascript:
        hashtable = null
        if a.length <= 1536:
            hashtable = {}
            keys = []
            if a.length < 6:      ## hash array length 8
                mask = 7
            elif a.length < 22:   ## 32
                mask = 31
            elif a.length < 86:   ## 128
                mask = 127
            elif a.length < 342:  ## 512
                mask = 511
            else:                 ## 2048
                mask = 2047

        fallback = False
        if hashtable:
            for b in a:
                if typeof(b)=='number' and b is (b|0):  ## only integers use the hashtable
                    key = b & mask
                    if hashtable[key] is undefined:
                        hashtable[ key ] = b
                        keys.push( key )
                    elif hashtable[key] is not b:
                        ## bugfix: two distinct integers collided under the mask
                        ## (e.g. 1 and 9 with mask 7); the old code silently
                        ## overwrote one member and pushed a duplicate key,
                        ## dropping/duplicating members -- fall back instead
                        fallback = True
                        break
                    ## else: exact duplicate member, already recorded once
                else:
                    fallback = True
                    break
        else:
            fallback = True

        s = []
        if fallback:
            ## order-preserving O(n^2) dedup, used for non-integers, large
            ## inputs, and masked-key collisions
            for item in a:
                if s.indexOf(item) == -1:
                    s.push( item )
        else:
            keys.sort()
            for key in keys:
                s.push( hashtable[key] )
        return s
def frozenset(a):
    ## frozensets are immutable in Python; here it is just an alias for `set`
    return set(a)
class array:
    ## note that class-level dicts can only be used after the dict class has been defined above,
    ## however, we can still not rely on using a dict here because dict creation relies on get_attribute,
    ## and get_attribute relies on __NODEJS__ global variable to be set to False when inside NodeJS,
    ## to be safe this is changed to use JSObjects
    ##
    ## Typed-array emulation of Python's array.array on top of ArrayBuffer/DataView.
    ## The non-standard 'float8'/'float16' typecodes store values as scaled integers;
    ## the scale is fixed at init time from the largest magnitude in the initializer.
    with javascript:
        ## bytes per element, by typecode
        typecodes = {
            'c': 1, # char
            'b': 1, # signed char
            'B': 1, # unsigned char
            'u': 2, # unicode
            'h': 2, # signed short
            'H': 2, # unsigned short
            'i': 4, # signed int
            'I': 4, # unsigned int
            'l': 4, # signed long
            'L': 4, # unsigned long
            'f': 4, # float
            'd': 8, # double
            'float32':4,
            'float16':2,
            'float8' :1,
            'int32'  :4,
            'uint32' :4,
            'int16'  :2,
            'uint16' :2,
            'int8'   :1,
            'uint8'  :1,
        }
        ## DataView accessor name suffix, by typecode (get/set + name)
        typecode_names = {
            'c': 'Int8',
            'b': 'Int8',
            'B': 'Uint8',
            'u': 'Uint16',
            'h': 'Int16',
            'H': 'Uint16',
            'i': 'Int32',
            'I': 'Uint32',
            #'l': 'TODO',
            #'L': 'TODO',
            'f': 'Float32',
            'd': 'Float64',
            'float32': 'Float32',
            'float16': 'Int16',
            'float8' : 'Int8',
            'int32'  : 'Int32',
            'uint32' : 'Uint32',
            'int16'  : 'Int16',
            'uint16' : 'Uint16',
            'int8'   : 'Int8',
            'uint8'  : 'Uint8',
        }

    def __init__(self, typecode, initializer=None, little_endian=False):
        ## NOTE(review): `little_endian` is stored but never passed to the
        ## DataView get/set calls -- confirm endianness support was finished.
        ## NOTE(review): fromlist() is called even when `initializer` is None.
        self.typecode = typecode
        self.itemsize = self.typecodes[ typecode ]
        self.little_endian = little_endian

        if initializer:
            self.length = len(initializer)
            self.bytes = self.length * self.itemsize

            if self.typecode == 'float8':
                self._scale = max( [abs(min(initializer)), max(initializer)] )
                self._norm_get = self._scale / 127 ## half 8bits-1
                self._norm_set = 1.0 / self._norm_get
            elif self.typecode == 'float16':
                self._scale = max( [abs(min(initializer)), max(initializer)] )
                self._norm_get = self._scale / 32767 ## half 16bits-1
                self._norm_set = 1.0 / self._norm_get
        else:
            self.length = 0
            self.bytes = 0

        size = self.bytes
        buff = JS('new ArrayBuffer(size)')
        self.dataview = JS('new DataView(buff)')
        self.buffer = buff
        self.fromlist( initializer )

    def __len__(self):
        return self.length

    def __contains__(self, value):
        #lst = self.to_list()
        #return value in lst ## this old style is deprecated
        arr = self.to_array()
        with javascript:
            if arr.indexOf(value) == -1: return False
            else: return True

    def __getitem__(self, index):
        ## decode one element straight out of the DataView
        step = self.itemsize
        offset = step * index
        dataview = self.dataview
        func_name = 'get'+self.typecode_names[ self.typecode ]
        func = JS('dataview[func_name].bind(dataview)')
        if offset < self.bytes:
            value = JS('func(offset)')
            if self.typecode == 'float8':
                value = value * self._norm_get
            elif self.typecode == 'float16':
                value = value * self._norm_get
            return value
        else:
            raise IndexError(index)

    def __setitem__(self, index, value):
        step = self.itemsize
        if index < 0: index = self.length + index -1 ## TODO fixme
        offset = step * index
        dataview = self.dataview
        func_name = 'set'+self.typecode_names[ self.typecode ]
        func = JS('dataview[func_name].bind(dataview)')
        if offset < self.bytes:
            if self.typecode == 'float8':
                value = value * self._norm_set
            elif self.typecode == 'float16':
                value = value * self._norm_set
            JS('func(offset, value)')
        else:
            raise IndexError(index)

    def __iter__(self):
        return Iterator(self, 0)

    def get(self, index):
        return self[ index ]

    def fromlist(self, lst):
        ## bulk-write `lst` into the buffer; raises TypeError when it does not fit
        length = len(lst)
        step = self.itemsize
        typecode = self.typecode
        size = length * step
        dataview = self.dataview
        func_name = 'set'+self.typecode_names[ typecode ]
        func = JS('dataview[func_name].bind(dataview)')
        if size <= self.bytes:
            i = 0; offset = 0
            while i < length:
                item = lst[i]
                if typecode == 'float8':
                    item *= self._norm_set
                elif typecode == 'float16':
                    item *= self._norm_set
                JS('func(offset,item)')
                offset += step
                i += 1
        else:
            raise TypeError

    def resize(self, length):
        ## reallocate a new buffer of `length` elements and copy the raw bytes over
        buff = self.buffer
        source = JS('new Uint8Array(buff)')
        new_size = length * self.itemsize
        new_buff = JS('new ArrayBuffer(new_size)')
        target = JS('new Uint8Array(new_buff)')
        JS('target.set(source)')
        self.length = length
        self.bytes = new_size
        self.buffer = new_buff
        self.dataview = JS('new DataView(new_buff)')

    def append(self, value):
        ## NOTE(review): reallocates the whole buffer on every append -- O(n) each
        length = self.length
        self.resize( self.length + 1 )
        self[ length ] = value

    def extend(self, lst): ## TODO optimize
        for value in lst:
            self.append( value )

    def to_array(self):
        ## decode every element into a plain JS array
        arr = JSArray()
        i = 0
        while i < self.length:
            item = self[i]
            JS('arr.push( item )')
            i += 1
        return arr

    def to_list(self):
        return self.to_array()

    def to_ascii(self):
        ## interpret the elements as character codes and build a string
        string = ''
        arr = self.to_array()
        i = 0; length = arr.length
        while i < length:
            JS('var num = arr[i]')
            JS('var char = String.fromCharCode(num)')
            string += char
            i += 1
        return string
## file IO ##
class file:
'''
TODO, support multiple read/writes. Currently this just reads all data,
and writes all data.
'''
def __init__(self, path, flags):
self.path = path
if flags == 'rb':
self.flags = 'r'
self.binary = True
elif flags == 'wb':
self.flags = 'w'
self.binary = True
else:
self.flags = flags
self.binary = False
self.flags = flags
def read(self, binary=False):
_fs = require('fs')
path = self.path
with javascript:
if binary or self.binary:
return _fs.readFileSync( path, encoding=None )
else:
return _fs.readFileSync( path, {'encoding':'utf8'} )
def write(self, data, binary=False):
_fs = require('fs')
path = self.path
with javascript:
if binary or self.binary:
binary = binary or self.binary
if binary == 'base64': ## TODO: fixme, something bad in this if test
#print('write base64 data')
buff = new Buffer(data, 'base64')
_fs.writeFileSync( path, buff, {'encoding':None})
else:
#print('write binary data')
#print(binary)
_fs.writeFileSync( path, data, {'encoding':None})
else:
#print('write utf8 data')
_fs.writeFileSync( path, data, {'encoding':'utf8'} )
def close(self):
pass
def __open__( path, mode=None): ## this can not be named `open` because it replaces `window.open`
    ## minimal stand-in for Python's builtin open()
    return file( path, mode )
with javascript:
    ## mini json library ##
    json = {
        'loads': lambda s: JSON.parse(s),
        'dumps': lambda o: JSON.stringify(o)
    }

    def __get_other_workers_with_shared_arg( worker, ob ):
        ## return every OTHER webworker that was started with `ob` in its
        ## argument list, so mutations of a shared array/object can be relayed
        a = []
        for b in threading.workers:
            other = b['worker']
            args = b['args']
            if other is not worker:
                for arg in args:
                    if arg is ob:
                        if other not in a:
                            a.append( other )
        return a

    ## registry of live workers and their shared arguments
    threading = {'workers': [], '_blocking_callback':None }

    def __start_new_thread(f, args):
        ## spawn a webworker running script `f`, wiring message handlers that
        ## keep shared array/object arguments in sync across threads
        worker = new(Worker(f))
        worker.__uid__ = len( threading.workers )
        threading.workers.append( {'worker':worker,'args':args} )

        def func(event):
            #print('got signal from thread')
            #print(event.data)
            if event.data.type == 'terminate':
                worker.terminate()
            elif event.data.type == 'call':
                ## worker asks the main thread to call a module-level function
                res = __module__[ event.data.function ].apply(null, event.data.args)
                if res is not None and res is not undefined:
                    worker.postMessage({'type':'return_to_blocking_callback', 'result':res})
            elif event.data.type == 'append':
                #print('got append event')
                a = args[ event.data.argindex ]
                a.push( event.data.value )
                for other in __get_other_workers_with_shared_arg(worker, a):
                    other.postMessage( {'type':'append', 'argindex':event.data.argindex, 'value':event.data.value} )
            elif event.data.type == '__setitem__':
                #print('got __setitem__ event')
                a = args[ event.data.argindex ]
                value = event.data.value
                if a.__setitem__:
                    a.__setitem__(event.data.index, value)
                else:
                    a[event.data.index] = value
                for other in __get_other_workers_with_shared_arg(worker, a):
                    #print('relay __setitem__')
                    other.postMessage( {'type':'__setitem__', 'argindex':event.data.argindex, 'key':event.data.index, 'value':event.data.value} )
            else:
                raise RuntimeError('unknown event')
        worker.onmessage = func

        jsargs = []
        for i,arg in enumerate(args):
            if arg.jsify:
                jsargs.append( arg.jsify() )
            else:
                jsargs.append( arg )
            if instanceof(arg, Array):
                __gen_worker_append(worker, arg, i)

        worker.postMessage( {'type':'execute', 'args':jsargs} )
        return worker

    def __gen_worker_append(worker, ob, index):
        ## patch `ob.append` so appends on the main thread are also posted to the worker
        def append(item):
            #print('posting to thread - append')
            worker.postMessage( {'type':'append', 'argindex':index, 'value':item} )
            ob.push( item )
        ## bugfix: the property-descriptor key is 'writable'; the previous
        ## 'writeable' spelling was silently ignored by Object.defineProperty
        Object.defineProperty(ob, "append", {'enumerable':False, 'value':append, 'writable':True, 'configurable':True})

    ######## webworker client #########
    def __webworker_wrap(ob, argindex):
        ## inside a worker: patch `ob` so __setitem__/append are echoed back to the parent
        if instanceof(ob, Array):
            #ob.__argindex__ = argindex
            def func(index, item):
                #print('posting to parent setitem')
                postMessage({'type':'__setitem__', 'index':index, 'value':item, 'argindex':argindex})
                Array.prototype.__setitem__.call(ob, index, item)
            ## this can raise RangeError recursive overflow if the worker entry point is a recursive function
            ## bugfix: 'writable' (was misspelled 'writeable', which JS ignores)
            Object.defineProperty(ob, "__setitem__", {"enumerable":False, "value":func, "writable":True, "configurable":True})
            #ob.__setitem__ =func

            def func(item):
                #print('posting to parent append')
                postMessage({'type':'append', 'value':item, 'argindex':argindex})
                Array.prototype.push.call(ob, item)
            Object.defineProperty(ob, "append", {"enumerable":False, "value":func, "writable":True, "configurable":True})
            #ob.append = func

        elif typeof(ob) == 'object':
            def func(key, item):
                #print('posting to parent setitem object')
                postMessage({'type':'__setitem__', 'index':key, 'value':item, 'argindex':argindex})
                ob[ key ] = item
            #ob.__setitem__ = func
            Object.defineProperty(ob, "__setitem__", {"enumerable":False, "value":func, "writable":True, "configurable":True})
        return ob

    ######### simple RPC API #########
    def __rpc__( url, func, args):
        ## synchronous JSON call to the server; blocks until the response arrives
        req = new( XMLHttpRequest() )
        req.open('POST', url, False) ## false is sync
        req.setRequestHeader("Content-Type", "application/json;charset=UTF-8")
        req.send( JSON.stringify({'call':func, 'args':args}) )
        return JSON.parse( req.responseText )

    def __rpc_iter__( url, attr):
        req = new( XMLHttpRequest() )
        req.open('POST', url, False) ## false is sync
        req.setRequestHeader("Content-Type", "application/json;charset=UTF-8")
        req.send( JSON.stringify({'iter':attr}) )
        return JSON.parse( req.responseText )

    def __rpc_set__( url, attr, value):
        req = new( XMLHttpRequest() )
        req.open('POST', url, False) ## false is sync
        req.setRequestHeader("Content-Type", "application/json;charset=UTF-8")
        req.send( JSON.stringify({'set':attr, 'value':value}) )

    def __rpc_get__( url, attr):
        req = new( XMLHttpRequest() )
        req.open('POST', url, False) ## false is sync
        req.setRequestHeader("Content-Type", "application/json;charset=UTF-8")
        req.send( JSON.stringify({'get':attr}) )
        return JSON.parse( req.responseText )
| 25.14723 | 177 | 0.638591 | 7,240 | 51,753 | 4.397376 | 0.104696 | 0.009235 | 0.005026 | 0.002764 | 0.309012 | 0.252097 | 0.212363 | 0.181581 | 0.162515 | 0.132864 | 0 | 0.01067 | 0.224934 | 51,753 | 2,057 | 178 | 25.159456 | 0.782983 | 0.148668 | 0 | 0.385219 | 0 | 0.00327 | 0.101019 | 0.017401 | 0 | 0 | 0 | 0.003403 | 0 | 0 | null | null | 0.00654 | 0 | null | null | 0.007194 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
704898ebecbbd9155f7d6b166429c2459741b79f | 1,714 | py | Python | feedmapper/migrations/0001_initial.py | benwhalley/django-feedmapper | 23eeabeda4d7ab6c3404f96348905c2ca12e964e | [
"BSD-3-Clause"
] | null | null | null | feedmapper/migrations/0001_initial.py | benwhalley/django-feedmapper | 23eeabeda4d7ab6c3404f96348905c2ca12e964e | [
"BSD-3-Clause"
] | null | null | null | feedmapper/migrations/0001_initial.py | benwhalley/django-feedmapper | 23eeabeda4d7ab6c3404f96348905c2ca12e964e | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-26 10:42
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
    """Initial migration: creates the feedmapper ``Mapping`` model."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Mapping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('label', models.CharField(help_text='Label for your reference', max_length=255, verbose_name='label')),
                ('source', models.CharField(help_text='The source feed for your data', max_length=255, verbose_name='source')),
                # parser choices use bytes literals: generated under Python 2
                ('parser', models.CharField(choices=[(b'feedmapper.parsers.AtomParser', b'Atom'), (b'feedmapper.parsers.XMLParser', b'XML')], help_text='Which parser to use when synchronizing', max_length=255, verbose_name='parser')),
                ('purge', models.BooleanField(default=False, help_text='Purge existing items on sync?', verbose_name='purge')),
                ('data_map', jsonfield.fields.JSONField(default=dict, verbose_name='data map')),
                ('notification_recipients', models.TextField(blank=True, help_text='Specify one email address per line to be notified of parsing errors.', verbose_name='notification recipients')),
                ('parse_attempted', models.DateTimeField(blank=True, null=True, verbose_name='parse attempted')),
                ('parse_succeeded', models.BooleanField(verbose_name='parse succeeded')),
                ('parse_log', models.TextField(blank=True, verbose_name='parse log')),
            ],
        ),
    ]
| 51.939394 | 234 | 0.658693 | 198 | 1,714 | 5.550505 | 0.5 | 0.100091 | 0.032757 | 0.051865 | 0.062784 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019853 | 0.206534 | 1,714 | 32 | 235 | 53.5625 | 0.788235 | 0.040257 | 0 | 0 | 1 | 0 | 0.272229 | 0.048721 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.291667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7048c4e22df4e28d615e6cb3c66edb19918a9647 | 1,264 | py | Python | static/_source.py | Manazius/blacksmith-bot | c31ebdc8f8df1ab79ea1cc833e2c5c2266a231ea | [
"Apache-2.0"
] | 3 | 2015-10-15T15:40:17.000Z | 2021-06-08T05:39:21.000Z | static/_source.py | Manazius/blacksmith-bot | c31ebdc8f8df1ab79ea1cc833e2c5c2266a231ea | [
"Apache-2.0"
] | 1 | 2019-04-06T11:54:56.000Z | 2019-04-07T00:57:49.000Z | static/_source.py | Manazius/blacksmith-bot | c31ebdc8f8df1ab79ea1cc833e2c5c2266a231ea | [
"Apache-2.0"
] | 3 | 2015-10-26T14:49:57.000Z | 2018-03-04T15:34:11.000Z | # coding: utf-8
# BlackSmith general configuration file
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Jabber server to connect
SERVER = 'example.com'
# Connecting Port
PORT = 5222
# Jabber server`s connecting Host
HOST = 'example.com'
# Using TLS (True - to enable, False - to disable)
SECURE = True
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# User`s account
USERNAME = 'username'
# Jabber ID`s Password
PASSWORD = 'password'
# Resourse (please don`t touch it)
RESOURCE = u'simpleApps' # You can write unicode symbols here
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Default chatroom nick
DEFAULT_NICK = u'BlackSmith-m.1' # You can write unicode symbols here
# Groupchat message size limit
CHAT_MSG_LIMIT = 1024
# Private/Roster message size limit
PRIV_MSG_LIMIT = 2024
# Incoming message size limit
INC_MSG_LIMIT = 8960
# Working without rights of moder (True - to enable, False - to disable)
MSERVE = False
# Jabber account of bot`s owner
BOSS = 'boss@example.com'
# Memory usage limit (size in kilobytes, 0 - not limited)
MEMORY_LIMIT = 49152
# Admin password, used as a key to command "login"
BOSS_PASS = ''
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
| 22.175439 | 72 | 0.58307 | 148 | 1,264 | 4.918919 | 0.601351 | 0.041209 | 0.065934 | 0.046703 | 0.151099 | 0.151099 | 0 | 0 | 0 | 0 | 0 | 0.022388 | 0.151899 | 1,264 | 56 | 73 | 22.571429 | 0.656716 | 0.698576 | 0 | 0 | 0 | 0 | 0.218487 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
704c2153fa8d91e12d420a68efc08384b40b0410 | 2,025 | py | Python | src/ndn/app_support/light_versec/grammar.py | tianyuan129/python-ndn | f390b3122d2a233a9a22a1ee9468b1241c46ef86 | [
"Apache-2.0"
] | null | null | null | src/ndn/app_support/light_versec/grammar.py | tianyuan129/python-ndn | f390b3122d2a233a9a22a1ee9468b1241c46ef86 | [
"Apache-2.0"
] | null | null | null | src/ndn/app_support/light_versec/grammar.py | tianyuan129/python-ndn | f390b3122d2a233a9a22a1ee9468b1241c46ef86 | [
"Apache-2.0"
] | 1 | 2020-09-25T18:38:23.000Z | 2020-09-25T18:38:23.000Z | # -----------------------------------------------------------------------------
# This piece of work is inspired by Pollere' VerSec:
# https://github.com/pollere/DCT
# But this code is implemented independently without using any line of the
# original one, and released under Apache License.
#
# Copyright (C) 2019-2022 The python-ndn authors
#
# This file is part of python-ndn.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
# Lark grammar for the LVS (light VerSec) trust-schema language:
# names are '/'-separated components; '#rule' identifiers name patterns,
# '$fn' identifiers invoke user functions inside component constraints.
# NOTE(review): consumed by a parser elsewhere in the package -- presumably
# lark, given the `%import common` / `%ignore` directives; confirm.
lvs_grammar = r'''
?start: file_input
TAG_IDENT: CNAME
RULE_IDENT: "#" CNAME
FN_IDENT: "$" CNAME
name: "/"? component ("/" component)*
component: STR -> component_from_str
| TAG_IDENT -> tag_id
| RULE_IDENT -> rule_id
definition: RULE_IDENT ":" def_expr
def_expr: name ("&" comp_constraints)? ("<=" sign_constraints)?
sign_constraints: RULE_IDENT ("|" RULE_IDENT)*
comp_constraints: cons_set ("|" cons_set)*
cons_set: "{" cons_term ("," cons_term)* "}"
cons_term: TAG_IDENT ":" cons_expr
cons_expr: cons_option ("|" cons_option)*
cons_option: STR -> component_from_str
| TAG_IDENT -> tag_id
| FN_IDENT "(" fn_args ")" -> fn_call
fn_args: (STR | TAG_IDENT)? ("," (STR | TAG_IDENT))*
file_input: definition*
%import common (DIGIT, LETTER, WS, CNAME, CPP_COMMENT)
%import common.ESCAPED_STRING -> STR
%ignore WS
%ignore CPP_COMMENT
'''
| 36.818182 | 79 | 0.609877 | 252 | 2,025 | 4.718254 | 0.484127 | 0.050463 | 0.037006 | 0.026913 | 0.074853 | 0.053827 | 0.053827 | 0.053827 | 0 | 0 | 0 | 0.007552 | 0.215309 | 2,025 | 54 | 80 | 37.5 | 0.740717 | 0.474568 | 0 | 0.076923 | 0 | 0 | 0.957733 | 0.020173 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7052230dd2fbe62419de26e704bc3a6fc7f8c7d8 | 335 | py | Python | service/models.py | robscarvalho8/currency_converter | 21ac36583da15271bfbae9ee511093948af37534 | [
"MIT"
] | null | null | null | service/models.py | robscarvalho8/currency_converter | 21ac36583da15271bfbae9ee511093948af37534 | [
"MIT"
] | null | null | null | service/models.py | robscarvalho8/currency_converter | 21ac36583da15271bfbae9ee511093948af37534 | [
"MIT"
] | null | null | null | from sqlalchemy import Column, Date, Integer, String, Float
from database import Base
class Currency(Base):
    """ORM model for a currency and its quotation against the US dollar."""
    __tablename__ = "currency"
    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Full currency name.
    name = Column(String)
    # Short code -- presumably an ISO 4217 code (e.g. "EUR"); TODO confirm.
    abbreviated = Column(String)
    # Quotation relative to the dollar -- direction (USD->currency or
    # currency->USD) is not visible here; verify against the loader.
    dolar_quotation = Column(Float)
    # Date the quotation was taken.
    date_quotation = Column(Date)
| 23.928571 | 62 | 0.728358 | 39 | 335 | 6.076923 | 0.564103 | 0.084388 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.185075 | 335 | 13 | 63 | 25.769231 | 0.868132 | 0 | 0 | 0 | 0 | 0 | 0.023881 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
7055e9bdeeea00b88731d10f9f645b5f2d10ded1 | 3,640 | py | Python | stanCode_Projects/boggle_game_solver/boggle.py | yjchang-tw/sc-projects | 474f10ec740b8ce9dedcdd01f8c285f58d642ec8 | [
"MIT"
] | null | null | null | stanCode_Projects/boggle_game_solver/boggle.py | yjchang-tw/sc-projects | 474f10ec740b8ce9dedcdd01f8c285f58d642ec8 | [
"MIT"
] | null | null | null | stanCode_Projects/boggle_game_solver/boggle.py | yjchang-tw/sc-projects | 474f10ec740b8ce9dedcdd01f8c285f58d642ec8 | [
"MIT"
] | null | null | null | """
File: boggle.py
Name:
----------------------------------------
TODO:
"""
# This is the file name of the dictionary txt file
# we will be checking if a word exists by searching through it
FILE = 'dictionary.txt'
dict_list = []  # all dictionary words, filled by read_dictionary()
final = []      # unique words found on the board so far (filled by find_word)
def main():
    """Read a 4x4 letter grid from stdin, search it for dictionary words,
    and print each word found plus a final count.
    """
    read_dictionary()
    temp = ''
    word_lst = []
    for i in range(4):
        while True:
            row = input(str(i + 1) + ' row of letters: ')
            ch = row.split()
            # Each row must be exactly 4 single-character tokens.
            if len(ch)!= 4 or len(ch[0])!=1 or len(ch[1])!=1 or len(ch[2])!=1 or len(ch[3])!=1:
                print('illegal input')
            else:
                break
        for j in range(4):
            word_lst.append(ch[j])
    # Start a recursive search from every cell; the starting letter is
    # blanked while in use so the cell cannot be revisited, then restored.
    for start in range(16):
        temp = word_lst[start]
        word_lst[start] = ''
        find_word(word_lst,[temp],start)
        word_lst[start] = temp
    print(f'There are {len(final)} words in total.')
def find_word(word_lst,current,now):
    """Recursively extend `current` (letters used so far) from flat grid
    index `now` (0..15), appending new dictionary words of length >= 4 to
    the module-level `final` list.

    `nums` holds neighbor offsets on the 4x4 grid, chosen per column so the
    search never wraps across a row edge; 1000 is a sentinel terminator.
    """
    if now == 0 or now ==4 or now == 8 or now == 12:
        # Left-edge cells: no up-left (-5), left (-1) or down-left (+3) moves.
        nums = [-4,-3,1,4,5,1000]
    elif now ==1 or now ==5 or now ==9 or now ==13 or now ==2 or now ==6 or now ==10 or now ==14 :
        # Interior-column cells: all 8 neighbors are reachable.
        nums = [-5, -4, -3, -1, 1, 3, 4, 5,1000]
    elif now == 3 or now == 7 or now == 11 or now ==15:
        # Right-edge cells: no up-right (-3), right (+1) or down-right (+5) moves.
        nums =[-5,-4,-1,3,4,1000]
    # print(current)
    temp = ''
    temp2 = ''
    a = ''
    # Join the letters collected so far into the candidate word `a`.
    for word in current:
        a+=word
    # Record `a` if it is a dictionary word of length >= 4 not seen before.
    # (Linear scan of the whole dictionary on every call.)
    for dict in dict_list:
        if len(dict)>=4:
            if dict == a:
                if a not in final:
                    final.append(a)
                    print('Found "'+a+'"')
    for num in nums:
        # credential = 0
        # if 16 > (now+num) > 0:
        #     if word_lst[now+num] != '':
        #         credential = 1
        # if credential == 0:
        #     pass
        # print(num)
        if num > 100:
            # Sentinel 1000 reached: no more neighbor offsets.
            break
        else:
            # for num in nums:
            # print('now:',now)
            # print(num)
            # print(word_lst,current)
            if 0 <= now + num < 16:
                if word_lst[now+num] != '':
                    temp=''
                    for ch in current:
                        temp += ch
                    # print(word_lst)
                    # print(temp+word_lst[now+num])
                    # Only recurse if some dictionary word starts with the
                    # extended prefix (prunes the search tree).
                    if has_prefix(temp+word_lst[now+num]):
                        current.append(word_lst[now+num])
                        temp2 = word_lst[now+num]
                        word_lst[now+num] = ''
                        find_word(word_lst,current,now+num)
                        word_lst[now+num] = temp2
                        current.pop()
    # print('--')
    # print(word_lst)
    # print(current)
    # print('--')
'roof coif hoof '
# nums = [-5,-1,-1,-1,1,3,4,5]
# a = 0
# temp=''
# sub=''
# a=''
# for ch in word_lst:
# if ch != '':
# a =1
# if a == 0:
# print(current)
# else:
# for i in range(16):
# if word_lst[i] != '':
# current.append(word_lst[i])
# a = word_lst[i]
# word_lst[i] = ''
# for num in nums:
# if 0 < i+num < 16:
# if word_lst[i+num] != '':
# for ch in current:
# temp += ch
# if has_prefix(temp+word_lst[i+num]):
# print('-----------')
# print(temp+word_lst[i+num])
# print('-----------')
# current.append(word_lst[i+num])
# sub = word_lst[i+num]
# word_lst[i+num] = ''
# find_word(word_lst,current)
# else:
# word_lst[i+num] = ''
# if len(current)!=0:
# current.pop()
# word_lst[i+num] = sub
# word_lst[i] = a
# if len(current) != 0:
# current.pop()
# print(i)
def read_dictionary():
    """Load every word from the file named by FILE into the module-level
    dict_list (one word per line, surrounding whitespace stripped)."""
    with open(FILE, 'r') as handle:
        dict_list.extend(raw.strip() for raw in handle)
def has_prefix(sub_s):
    """Return True iff at least one word in dict_list starts with sub_s.

    :param sub_s: (str) prefix built from neighboring letters on the grid
    :return: (bool) whether any dictionary word begins with sub_s
    """
    return any(entry.startswith(sub_s) for entry in dict_list)
# Run the interactive solver only when executed as a script.
if __name__ == '__main__':
    main()
| 21.162791 | 96 | 0.545055 | 583 | 3,640 | 3.301887 | 0.210978 | 0.123636 | 0.054026 | 0.054026 | 0.254026 | 0.162597 | 0.022857 | 0.022857 | 0 | 0 | 0 | 0.038288 | 0.268132 | 3,640 | 171 | 97 | 21.28655 | 0.684309 | 0.432692 | 0 | 0.1 | 0 | 0 | 0.057839 | 0 | 0 | 0 | 0 | 0.011696 | 0 | 1 | 0.057143 | false | 0 | 0 | 0 | 0.085714 | 0.042857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7056d18658c61757b2d33f0ad939e77a9975faf5 | 207 | py | Python | tags/urls.py | jrdbnntt/FaceTags | 0d2e9a78521d90f19f3feb6440421a42f0327472 | [
"Apache-2.0"
] | null | null | null | tags/urls.py | jrdbnntt/FaceTags | 0d2e9a78521d90f19f3feb6440421a42f0327472 | [
"Apache-2.0"
] | null | null | null | tags/urls.py | jrdbnntt/FaceTags | 0d2e9a78521d90f19f3feb6440421a42f0327472 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import url
import views
# Route table: each pattern maps a path prefix to its view function.
urlpatterns = [
    url(r'^user/', views.get_user, name='user'),
    url(r'^all/', views.get_all, name='all'),
    # Note the '$' anchor: this one only matches 'fix/' exactly.
    url(r'^fix/$', views.get_fix, name='fix'),
]
| 23 | 48 | 0.628019 | 33 | 207 | 3.848485 | 0.424242 | 0.094488 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154589 | 207 | 8 | 49 | 25.875 | 0.725714 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
705c42c02eee24ad63d65a1412a711fc1a3dbabb | 377 | py | Python | code/utils.py | Riccorl/ml-malware-classifier | f07d40f395bf11169d6eb57b9975760e625eb092 | [
"MIT"
] | 5 | 2018-11-26T13:34:20.000Z | 2019-10-22T03:25:24.000Z | code/utils.py | Riccorl/ml-malware-classificator | f07d40f395bf11169d6eb57b9975760e625eb092 | [
"MIT"
] | 2 | 2019-06-01T05:28:14.000Z | 2019-06-02T10:55:42.000Z | code/utils.py | Riccorl/ml-malware-classificator | f07d40f395bf11169d6eb57b9975760e625eb092 | [
"MIT"
] | 2 | 2020-09-20T21:22:59.000Z | 2021-05-26T02:57:41.000Z | def timer(start: float, end: float) -> str:
"""
Timer function. Compute execution time from strart to end (end - start).
:param start: start time
:param end: end time
:return: end - start
"""
hours, rem = divmod(end - start, 3600)
minutes, seconds = divmod(rem, 60)
return "{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), seconds)
| 31.416667 | 77 | 0.607427 | 53 | 377 | 4.320755 | 0.528302 | 0.104803 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044674 | 0.228117 | 377 | 11 | 78 | 34.272727 | 0.742268 | 0.3687 | 0 | 0 | 0 | 0 | 0.105263 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7060539b2e45ee14195f9314ca889a6ddf2387b8 | 695 | py | Python | malib/envs/gr_football/__init__.py | ReinholdM/play_football_with_human | 9ac2f0a8783aede56f4ac1f6074db7daa41b6b6c | [
"MIT"
] | 5 | 2021-11-17T03:11:13.000Z | 2021-12-23T09:04:21.000Z | malib/envs/gr_football/__init__.py | ReinholdM/play_football_with_human | 9ac2f0a8783aede56f4ac1f6074db7daa41b6b6c | [
"MIT"
] | null | null | null | malib/envs/gr_football/__init__.py | ReinholdM/play_football_with_human | 9ac2f0a8783aede56f4ac1f6074db7daa41b6b6c | [
"MIT"
] | null | null | null | from .grf_env import BaseGFootBall as base_env, ParameterSharingWrapper
from .encoders import encoder_basic, encoder_highpass, rewarder_basic
# Default Google Research Football settings: 5-vs-5 scenario, raw
# observations, 4 controlled players per side, engine-controlled keepers,
# and all dump/render options disabled.
default_config = {
    # env building config
    "use_built_in_GK": True,
    "scenario_config": {
        "env_name": "5_vs_5",
        "number_of_left_players_agent_controls": 4,
        "number_of_right_players_agent_controls": 4,
        "representation": "raw",
        "logdir": "",
        "write_goal_dumps": False,
        "write_full_episode_dumps": False,
        "render": False,
        "stacked": False,
    },
}


def env(**kwargs):
    # Wrap the base env so controlled players share parameters; the lambda
    # keeps only the first 6 entries of each observation -- presumably the
    # per-player feature groups; confirm against the encoder layout.
    return ParameterSharingWrapper(base_env(**kwargs), lambda x: x[:6])
    # return base_env(**kwargs)
| 28.958333 | 71 | 0.661871 | 81 | 695 | 5.308642 | 0.617284 | 0.048837 | 0.093023 | 0.097674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009225 | 0.220144 | 695 | 23 | 72 | 30.217391 | 0.784133 | 0.064748 | 0 | 0 | 0 | 0 | 0.301391 | 0.153014 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.055556 | 0.111111 | 0.055556 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
706338e7331b1562584249f362b42d600f79a095 | 1,215 | py | Python | tools/perf/contrib/cluster_telemetry/rasterize_and_record_micro_ct.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | tools/perf/contrib/cluster_telemetry/rasterize_and_record_micro_ct.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 395 | 2020-04-18T08:22:18.000Z | 2021-12-08T13:04:49.000Z | tools/perf/contrib/cluster_telemetry/rasterize_and_record_micro_ct.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from contrib.cluster_telemetry import ct_benchmarks_util
from contrib.cluster_telemetry import page_set
from contrib.cluster_telemetry import repaint_helpers
from benchmarks import rasterize_and_record_micro
# pylint: disable=protected-access
class RasterizeAndRecordMicroCT(
    rasterize_and_record_micro._RasterizeAndRecordMicro):
  """Measures rasterize and record performance for Cluster Telemetry.

  Reuses the upstream _RasterizeAndRecordMicro benchmark, but runs it over a
  Cluster Telemetry page set built from a user-supplied URL list.
  """

  @classmethod
  def Name(cls):
    # Benchmark name used on the command line / in results.
    return 'rasterize_and_record_micro_ct'

  @classmethod
  def AddBenchmarkCommandLineArgs(cls, parser):
    # Keep the base benchmark's flags and add the Cluster Telemetry ones.
    (rasterize_and_record_micro._RasterizeAndRecordMicro.
        AddBenchmarkCommandLineArgs(parser))
    ct_benchmarks_util.AddBenchmarkCommandLineArgs(parser)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    ct_benchmarks_util.ValidateCommandLineArgs(parser, args)

  def CreateStorySet(self, options):
    # Build the CT page set from the user-supplied URL list and archive.
    return page_set.CTPageSet(
        options.urls_list, options.user_agent, options.archive_data_file,
        run_page_interaction_callback=repaint_helpers.WaitThenRepaint)
| 34.714286 | 73 | 0.8107 | 140 | 1,215 | 6.785714 | 0.542857 | 0.063158 | 0.094737 | 0.096842 | 0.201053 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003791 | 0.131687 | 1,215 | 34 | 74 | 35.735294 | 0.896682 | 0.209054 | 0 | 0.142857 | 0 | 0 | 0.030462 | 0.030462 | 0 | 0 | 0 | 0 | 0 | 1 | 0.190476 | false | 0 | 0.190476 | 0.095238 | 0.52381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
706e04d6ec9e8da6c51a9f6c58f84eb9714001e8 | 691 | py | Python | 2016/python/day20.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | [
"MIT"
] | null | null | null | 2016/python/day20.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | [
"MIT"
] | null | null | null | 2016/python/day20.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | [
"MIT"
# Highest valid 32-bit IP value; intended upper bound for the allowed-IP
# count (NOTE(review): defined but never referenced by the code below).
MAX = 4294967295

# Each input line is "low-high"; store as sorted [low, high] integer pairs
# so part1/part2 can sweep the ranges left to right.
blacklist = []
with open("inputs/day20.txt") as f:
    for line in f:
        line = line.strip().split('-')
        blacklist.append([int(x) for x in line])

blacklist.sort()
def part1():
    """Print the lowest IP (as an integer) not covered by any blacklist
    range. Assumes `blacklist` is sorted by range start.
    """
    ip = 0
    for bl in blacklist:
        if ip < bl[0]:
            # Gap before this range starts: `ip` is unblocked.
            break
        if bl[1] >= ip:
            # BUG FIX: the original used '>' here, so a range ending exactly
            # at `ip` never advanced it and a blocked IP could be reported.
            ip = bl[1] + 1
    # Converted from a Python-2 print statement for Python 3.
    print("(part1):", ip)
def part2():
    """Print how many IPs in [0, MAX] are not covered by any blacklist
    range. Assumes `blacklist` is sorted by range start.
    """
    ip = 0
    good_ips = 0
    for bl in blacklist:
        if ip < bl[0]:
            # The gap [ip, bl[0]-1] is entirely allowed.
            good_ips += bl[0] - ip
            ip = bl[1] + 1
        elif bl[1] >= ip:
            # BUG FIX: was '>', which failed to skip a range ending exactly
            # at `ip` and later counted blocked IPs as allowed.
            ip = bl[1] + 1
    if ip <= MAX:
        # BUG FIX: the original never counted the allowed IPs above the
        # last blacklisted range (MAX was defined but unused).
        good_ips += MAX - ip + 1
    # Converted from a Python-2 print statement for Python 3.
    print("(part2):", good_ips)
# Solve and print both parts.
part1()
part2()
| 16.452381 | 48 | 0.47178 | 102 | 691 | 3.166667 | 0.343137 | 0.06192 | 0.055728 | 0.065015 | 0.396285 | 0.371517 | 0.371517 | 0.371517 | 0.272446 | 0.272446 | 0 | 0.078341 | 0.371925 | 691 | 41 | 49 | 16.853659 | 0.665899 | 0 | 0 | 0.37931 | 0 | 0 | 0.047757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.068966 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
707f2e96048822ebc61af551711d5b289dab5523 | 4,550 | py | Python | model.py | Riksi/nerf | 4cbc0a19f2abd80465b4fba9ba2d6dbfd4ab08ea | [
"MIT"
] | null | null | null | model.py | Riksi/nerf | 4cbc0a19f2abd80465b4fba9ba2d6dbfd4ab08ea | [
"MIT"
] | null | null | null | model.py | Riksi/nerf | 4cbc0a19f2abd80465b4fba9ba2d6dbfd4ab08ea | [
"MIT"
] | null | null | null | import tensorflow as tf
import math
from helpers import get_rays, ndc_rays
def get_sample_bounds(near, far, num_samples):
    """Split [near, far) into `num_samples` equal bins.

    Returns (starts, ends): the lower and upper bound of each bin.
    """
    bin_width = (far - near) / num_samples
    bin_idx = tf.range(num_samples)
    lower = near + bin_idx * bin_width
    upper = lower + bin_width
    return lower, upper
def get_embedding(data, num_dims):
    """Positional (Fourier-style) embedding of `data` with `num_dims`
    frequency bands: sin/cos of 2^l * pi * data for l in [0, num_dims).

    NOTE(review): relies on tf.dynamic_stitch to interleave the sin/cos
    terms (sin at even indices, cos at odd); verify the resulting layout
    matches what the downstream network expects.
    """
    # [L]
    rng = tf.range(num_dims)
    # [P, Z, L] -- each trailing value scaled by the band frequency 2^l * pi
    embed_term = 2 ** rng * math.pi * data[..., None]
    # [P, Z, 2 * L]
    embed = tf.dynamic_stitch(
        [tf.sin(embed_term), tf.cos(embed_term)],
        [(rng * 2), (rng * 2 + 1)]
    )
    return embed
class NeRF(tf.keras.models.Model):
    """NeRF model: `part1` maps embedded positions to [density, features],
    `part2` maps [features, view direction] to color, and volume-rendering
    weights are accumulated from the densities along each ray.
    """

    def __init__(self, units1=256, num_layers1=8, units2=128, num_layers2=2):
        super(NeRF, self).__init__()
        # part1: (num_layers1 - 1) hidden ReLU layers, then a linear layer
        # emitting [sigma (1), features (units1)].
        self.part1 = tf.keras.models.Sequential(
            [
                *(tf.keras.layers.Dense(
                    units=units1,
                    activation='relu'
                )
                  for _ in range(num_layers1 - 1)
                  ),
                tf.keras.layers.Dense(
                    units=units1 + 1,
                )
            ]
        )
        # part2: color head.
        # NOTE(review): the final layer emits 256 * 3 units rather than 3 --
        # this looks inconsistent with the 3-channel color use below;
        # confirm the intended output width.
        self.part2 = tf.keras.models.Sequential(
            [
                *(tf.keras.layers.Dense(
                    units=units2,
                    activation='relu'
                )
                  for _ in range(num_layers2 - 1)
                  ),
                tf.keras.layers.Dense(
                    units=256 * 3,
                )
            ]
        )
        self.loss_fn = tf.losses.MeanSquaredError()

    def estimate_color(self, samples, t_far, origin, direction, mask=None):
        """Volume-render a color estimate along each ray.

        samples: [P, N] sample depths; t_far: far bounds; origin/direction:
        embedded ray origins/directions; mask: optional [P, N, 1] validity
        mask. Returns (clr_est [P, 3], weights [P, N, 1]).
        """
        # [P, N] spacing between consecutive samples (last one up to t_far).
        delta = tf.concat([samples[1:], t_far[None]], axis=-1) - samples
        # [P, N, 3] sample coordinates along each ray.
        coords = samples[..., None] * direction + origin
        # [P, N, 1 + Z]
        y1 = self.part1(coords)
        # [P, Nc, 1], [P, Nc, Z]
        sigma, features = tf.split(y1, [1, -1], axis=-1)
        # [P, N, 3 + Z]
        x2 = tf.concat([features, direction], axis=-1)
        # [P, N, 3]
        clr = self.part2(x2)
        # [P, N, 1]
        neg_sig_times_delta = - sigma * delta[..., None]
        # [P, N, 1] transmittance T_i = exp(-sum_{j<i} sigma_j * delta_j).
        # BUG FIX: the original applied exp(-cumsum(-sigma*delta)), i.e. a
        # double negative giving exp(+sum sigma*delta); the cumulative sum of
        # the already-negated terms must be exponentiated directly.
        transmittance = tf.exp(tf.cumsum(
            neg_sig_times_delta,
            axis=1,
            exclusive=True
        ))
        # [P, N, 1] weights w_i = T_i * (1 - exp(-sigma_i * delta_i)).
        weights = transmittance * (1 - tf.exp(neg_sig_times_delta))
        if mask is not None:
            # Zero out padding samples so they contribute nothing.
            weights = weights * mask
        # [P, 3] weighted sum of sample colors.
        clr_est = tf.reduce_sum(
            clr * weights,
            axis=1
        )
        return clr_est, weights

    def call(self, inputs, num_samples=None, training=True):
        """Hierarchical sampling pass: uniform coarse samples first, then
        fine samples drawn where the coarse weights are large."""
        origin = get_embedding(inputs.origin, inputs.num_embed_dims)
        direction = get_embedding(inputs.direction, inputs.num_embed_dims)
        # [Nc], [Nc] stratified bin bounds between the near and far planes.
        starts, ends = get_sample_bounds(inputs.t_near, inputs.t_far, num_samples.coarse)
        t_coarse = tf.random.uniform(
            [tf.shape(inputs.direction)[0], num_samples.coarse],
            starts,
            ends
        )
        # [P, 3], [P, Nc]
        # BUG FIX: the far bound was referenced as the undefined bare name
        # `t_far` (NameError at runtime); it lives on `inputs`.
        clr_coarse, coarse_weights = self.estimate_color(t_coarse, inputs.t_far, origin, direction)
        coarse_weights = tf.stop_gradient(coarse_weights)
        # [P, Nc] bin indices sampled proportionally to the coarse weights
        # (tf.random.categorical takes log-probabilities).
        # BUG FIX: tf.log was removed in TF 2.x; use tf.math.log.
        regions = tf.random.categorical(
            tf.squeeze(tf.math.log(coarse_weights) - tf.math.log(tf.reduce_sum(coarse_weights)), axis=-1),
            num_samples=num_samples.fine
        )
        # [P, Nf], [P, Nf] bounds of each sampled bin.
        starts_for_regions = tf.gather(starts, regions)
        ends_for_regions = tf.gather(ends, regions)
        t_fine = tf.random.uniform(
            [tf.shape(direction)[0], num_samples.fine],
            starts_for_regions,
            ends_for_regions
        )
        # Merge coarse and fine samples per ray (variable counts -> ragged).
        t_union_ragged = tf.RaggedTensor.from_sparse(
            tf.sets.union(t_fine, t_coarse)
        )
        # [P, N'] dense, zero-padded version.
        t_union = t_union_ragged.to_tensor()
        # [P, N'] True for real (non-padding) samples.
        mask = tf.sequence_mask(
            t_union_ragged.row_lengths(),
            tf.shape(t_union)[-1]
        )
        # NOTE(review): the ragged tensor (not the dense `t_union`) is passed
        # here, and estimate_color returns a (color, weights) tuple -- both
        # look inconsistent with inference_step's reshape; confirm intent.
        clr_fine = self.estimate_color(t_union_ragged, inputs.t_far, origin, direction, mask)
        return clr_coarse, clr_fine

    def train_step(self, data):
        # NOTE(review): incomplete -- this runs the forward pass but never
        # computes self.loss_fn or applies gradients. (A stray no-op literal
        # `4` in the original body was removed.)
        clr_coarse, clr_fine = self.call(data.inputs, data.num_samples, training=True)

    def inference_step(self, data):
        # Forward pass without training; only the fine estimate is used.
        _, clr_fine = self.call(data.inputs, data.num_samples, False)
        pred = tf.reshape(clr_fine, [data.grid_shape, 3])
        return pred
| 25.561798 | 96 | 0.526154 | 556 | 4,550 | 4.098921 | 0.251799 | 0.048267 | 0.022817 | 0.031593 | 0.171128 | 0.124616 | 0.074594 | 0.074594 | 0.074594 | 0 | 0 | 0.021024 | 0.351868 | 4,550 | 177 | 97 | 25.706215 | 0.75178 | 0.046154 | 0 | 0.074074 | 0 | 0 | 0.001862 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027778 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70835d3043da389a933c6f4fb7d3c80b37959050 | 3,056 | py | Python | diagnostics/diagnostic_analysis/scripts/export_csv.py | zhj-buffer/ROS2-driver-for-Realsense | 936cf27be4e7dc3d699ff99499e72ea8638cc622 | [
"Apache-2.0"
] | 2 | 2021-07-14T12:33:55.000Z | 2021-11-21T07:14:13.000Z | melodic/src/diagnostics/diagnostic_analysis/scripts/export_csv.py | disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA | 3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0 | [
"BSD-3-Clause"
] | 1 | 2021-07-08T10:26:06.000Z | 2021-07-08T10:31:11.000Z | melodic/src/diagnostics/diagnostic_analysis/scripts/export_csv.py | disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA | 3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
#
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
##\author Eric Berger, Kevin Watts
##\brief Converts diagnostics log files into CSV's for analysis
PKG = 'diagnostic_analysis'  # ROS package name (manifest + default output dir)
import roslib; roslib.load_manifest(PKG)
import diagnostic_msgs.msg
import time, sys, os
import operator, tempfile, subprocess
from optparse import OptionParser
from diagnostic_analysis.exporter import LogExporter
if __name__ == '__main__':
    # Allow user to set output directory (defaults to the package dir).
    # Converted from Python-2 print statements for Python 3; the bare
    # `except:` was narrowed so SystemExit/KeyboardInterrupt still propagate.
    parser = OptionParser()
    parser.add_option("-d", "--directory", dest="directory",
                      help="Write output to DIR/output. Default: %s" % PKG, metavar="DIR",
                      default=roslib.packages.get_pkg_dir(PKG), action="store")

    options, args = parser.parse_args()

    exporters = []

    print('Output directory: %s/output' % options.directory)

    try:
        for i, f in enumerate(args):
            # output/<basename-without-extension>_csv
            filepath = 'output/%s_csv' % os.path.basename(f)[0:os.path.basename(f).find('.')]
            output_dir = os.path.join(options.directory, filepath)

            print("Processing file %s. File %d of %d." % (os.path.basename(f), i + 1, len(args)))
            exp = LogExporter(output_dir, f)
            exp.process_log()
            exp.finish_logfile()
            exporters.append(exp)
        print('Finished processing files.')
    except Exception:
        import traceback
        print("Caught exception processing log file")
        traceback.print_exc()
| 39.179487 | 96 | 0.708115 | 403 | 3,056 | 5.315136 | 0.526055 | 0.011204 | 0.019608 | 0.021008 | 0.085901 | 0.063492 | 0.063492 | 0.063492 | 0.063492 | 0.063492 | 0 | 0.002494 | 0.212696 | 3,056 | 77 | 97 | 39.688312 | 0.887781 | 0.551047 | 0 | 0 | 0 | 0 | 0.174663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.241379 | null | null | 0.172414 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
708a5b2019c15309a0d19d434e0322b1d3a4cda7 | 325 | py | Python | unit_06/car7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | null | null | null | unit_06/car7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | null | null | null | unit_06/car7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | 2 | 2019-11-14T15:04:22.000Z | 2021-10-31T07:34:46.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sqlite3 as lite
import sys
# Update one row's price by primary key and report how many rows changed.
uId = 1
uPrice = 62300

con = lite.connect('test.db')

# `with con` commits on success / rolls back on error, but does NOT close
# the connection -- close it explicitly below.
with con:
    cur = con.cursor()
    # Parameterized query: sqlite3 handles quoting and prevents injection.
    cur.execute("UPDATE Cars SET Price=? WHERE Id=?", (uPrice, uId))
    con.commit()
    # BUG FIX: converted from a Python-2 print statement for Python 3.
    print("Number of rows updated: %d" % cur.rowcount)

con.close()
print "Number of rows updated: %d" % cur.rowcount | 17.105263 | 76 | 0.593846 | 46 | 325 | 4.195652 | 0.804348 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032922 | 0.252308 | 325 | 19 | 77 | 17.105263 | 0.761317 | 0.116923 | 0 | 0 | 0 | 0 | 0.234266 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.2 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
708ddb951ae7cdc4d261f043827396558a075007 | 1,470 | py | Python | packages/pyright-internal/src/tests/samples/properties3.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 3,934 | 2019-03-22T09:26:41.000Z | 2019-05-06T21:03:08.000Z | packages/pyright-internal/src/tests/samples/properties3.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 107 | 2019-03-24T04:09:37.000Z | 2019-05-06T17:00:04.000Z | packages/pyright-internal/src/tests/samples/properties3.py | Jasha10/pyright | 0ce0cfa10fe7faa41071a2cc417bb449cf8276fe | [
"MIT"
] | 119 | 2019-03-23T10:48:04.000Z | 2019-05-06T08:57:56.000Z | # This sample tests the type checker's ability to handle
# custom subclasses of property.
from typing import Any, Callable
class custom_property1(property):
    # Plain subclass of `property` with no overrides; used to verify that
    # the type checker treats custom property subclasses like `property`.
    pass
class Custom1(object):
    """Test fixture: `x` is a read-only custom property; `y` also has a
    setter and deleter."""
    @custom_property1
    def x(self) -> int:
        return 3

    @custom_property1
    def y(self) -> float:
        return 3.5

    @y.setter
    def y(self, val: float):
        pass

    @y.deleter
    def y(self):
        pass
# Exercise Custom1's properties. The "should generate an error" comments
# mark intentional negative cases for the type checker -- do not "fix" them.
m1 = Custom1()
a1: int = m1.x

# This should generate an error because m.x is
# an int and cannot be assigned to str.
b1: str = m1.x

c1: float = m1.y

# This should generate an error because m.y is
# a float and cannot be assigned to int.
d1: int = m1.y

# This should generate an error because there
# is no setter for x.
m1.x = 4

m1.y = 4

# This should generate an error because there is
# no deleter for x.
del m1.x

del m1.y
class custom_property2(property):
    # Property subclass adding a fluent `custom_function` decorator slot,
    # mirroring property's getter/setter/deleter chaining pattern.
    _custom_func: Callable[..., Any] | None

    def custom_function(self, _custom_func: Callable[..., Any]):
        # Store the decorated callable and return self so decorator
        # chaining (e.g. @y.custom_function) keeps the property object.
        self._custom_func = _custom_func
        return self
class Custom2(object):
    """Test fixture: like Custom1, but `y` additionally uses the
    custom_function decorator defined by custom_property2."""
    @custom_property2
    def x(self) -> int:
        return 3

    @custom_property2
    def y(self) -> float:
        return 3.5

    @y.setter
    def y(self, val: float):
        pass

    @y.deleter
    def y(self):
        pass

    @y.custom_function
    def y(self):
        pass
# The custom property subclass must still expose the getter's return type.
m2 = Custom2()
a2 = m2.y
reveal_type(a2, expected_text="float")

m2.y = 4
del m2.y
| 15.638298 | 64 | 0.627211 | 228 | 1,470 | 3.964912 | 0.302632 | 0.030973 | 0.061947 | 0.088496 | 0.420354 | 0.373894 | 0.373894 | 0.283186 | 0.24115 | 0.150442 | 0 | 0.035714 | 0.27619 | 1,470 | 93 | 65 | 15.806452 | 0.81391 | 0.259184 | 0 | 0.529412 | 0 | 0 | 0.004638 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.196078 | false | 0.117647 | 0.019608 | 0.078431 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
70a1684fb88e63e0907be1bba1399d11eb060a74 | 150 | py | Python | Project_Euler/01_mults_3_or_5/find_sum.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | 2 | 2015-06-05T15:40:06.000Z | 2020-03-19T17:08:37.000Z | Project_Euler/01_mults_3_or_5/find_sum.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | null | null | null | Project_Euler/01_mults_3_or_5/find_sum.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | null | null | null | #!/opt/local/bin/python
# Project Euler 1: print each multiple of 3 or 5 below 1000, then their sum.
multiples_of_3_or_5 = [n for n in range(1, 1000) if n % 3 == 0 or n % 5 == 0]
for n in multiples_of_3_or_5:
    print(n)
sum_3_5 = sum(multiples_of_3_or_5)
print(sum_3_5)
| 12.5 | 32 | 0.513333 | 32 | 150 | 2.21875 | 0.53125 | 0.169014 | 0.211268 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.158416 | 0.326667 | 150 | 11 | 33 | 13.636364 | 0.544554 | 0.146667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70a9bb288ab4678c73dcaaed29cf5fcda9867f4f | 1,115 | py | Python | playlist_creation/migrations/0002_create_privacy_policy_flatpage.py | theLionWar/time_capsule | 64a5c8f430e6815dae8ee2f93f55d9b406911377 | [
"MIT"
] | null | null | null | playlist_creation/migrations/0002_create_privacy_policy_flatpage.py | theLionWar/time_capsule | 64a5c8f430e6815dae8ee2f93f55d9b406911377 | [
"MIT"
] | null | null | null | playlist_creation/migrations/0002_create_privacy_policy_flatpage.py | theLionWar/time_capsule | 64a5c8f430e6815dae8ee2f93f55d9b406911377 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.4 on 2021-08-03 15:02
from django.contrib.sites.management import create_default_site
from django.db import migrations
def create_privacy_policy_flatpage(apps, schema_editor):
    """Create a '/privacy-policy/' FlatPage and attach it to the first Site.

    Uses historical models via `apps` (not direct imports) so the migration
    stays valid as the models evolve.
    """
    Site = apps.get_model("sites", "Site")
    site = Site.objects.first()
    if not site:
        # Fresh database: ensure the default Site exists before linking.
        create_default_site(None)
        site = Site.objects.first()
    FlatPage = apps.get_model("flatpages", "FlatPage")
    # get_or_create keeps the migration idempotent on re-runs.
    page, created = \
        FlatPage.objects.get_or_create(url='/privacy-policy/',
                                       defaults={'title': 'Privacy Policy'})
    if created:
        page.sites.add(site)
def delete_privacy_policy_flatpage(apps, schema_editor):
    """Reverse migration: remove the '/privacy-policy/' flat page."""
    flatpage_model = apps.get_model("flatpages", "FlatPage")
    flatpage_model.objects.filter(url='/privacy-policy/').delete()
class Migration(migrations.Migration):

    # Requires the app's initial migration plus the Site model's
    # unique-domain migration so the Site/FlatPage lookups above are valid.
    dependencies = [
        ('playlist_creation', '0001_initial'),
        ("sites", "0002_alter_domain_unique")
    ]

    operations = [
        # Forward: create the flat page; reverse: delete it.
        migrations.RunPython(create_privacy_policy_flatpage,
                             delete_privacy_policy_flatpage),
    ]
| 30.135135 | 76 | 0.658296 | 127 | 1,115 | 5.559055 | 0.464567 | 0.128895 | 0.11898 | 0.076487 | 0.209632 | 0.209632 | 0 | 0 | 0 | 0 | 0 | 0.026682 | 0.226906 | 1,115 | 36 | 77 | 30.972222 | 0.792343 | 0.040359 | 0 | 0.153846 | 1 | 0 | 0.142322 | 0.022472 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.269231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
70aa7222dc1e2e9cae7903ea030f5828a1ff715a | 3,607 | py | Python | profiles_api/views.py | singhmonika10/profiles-rest-api | 69b403aaa24ca16a265db06cdbc99c970eb146c1 | [
"MIT"
] | null | null | null | profiles_api/views.py | singhmonika10/profiles-rest-api | 69b403aaa24ca16a265db06cdbc99c970eb146c1 | [
"MIT"
] | 6 | 2020-06-06T01:42:09.000Z | 2021-06-10T20:01:52.000Z | profiles_api/views.py | singhmonika10/profiles-rest-api | 69b403aaa24ca16a265db06cdbc99c970eb146c1 | [
"MIT"
] | null | null | null | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions
class HelloApiView(APIView):
    """Test API View demonstrating the APIView HTTP-method handlers."""

    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features."""
        an_apiview = [
            'uses HTTTP mothod as function(get, post, patch, put, delete) ',
            'Is simmilar to Traditional Django View',
            'Gives you the most control over your application logic',
            'Is mapped manually to URLs',
        ]
        payload = {'message': 'Hello!', 'an_apiview': an_apiview}
        return Response(payload)

    def post(self, request):
        """Create a hello message with our name."""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            # Invalid payload: report the serializer's field errors.
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST,
            )
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}'})

    def put(self, request, pk=None):
        """Handle updating an object."""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handle partial update of object."""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Delete an object."""
        return Response({'method': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet demonstrating the standard viewset actions."""
    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a hello message."""
        a_viewset = [
            'Uses actions (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLS using Routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': a_viewset})

    def create(self, request):
        """Create a new hello message from the validated 'name' field."""
        serializer= self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            message = f'hello{name}!'
            return Response({'message':message})
        else:
            # Invalid payload: return the serializer's field errors.
            return Response(
                serializer.errors,
                status = status.HTTP_400_BAD_REQUEST
            )

    def retrieve(self, request,pk=None):
        """Handle getting an object by its id."""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object."""
        return Response({'http_method':'PUT'})

    def partial_update(self, request, pk=None):
        """Handle updating part of an object."""
        return Response({'http_method':'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object."""
        return Response({'http_method':'DELETE'})
class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating, reading and updating user profiles.

    Requests authenticate with a token; the UpdateOwnProfile permission
    limits edits to the profile's owner. Profiles can be searched by
    name or email via the SearchFilter backend.
    """
    serializer_class = serializers.UserProfileSerializer
    queryset = models.UserProfile.objects.all()
    authentication_classes = (TokenAuthentication,)
    # BUG FIX: DRF reads `permission_classes` (plural). The original
    # `permission_class` attribute was silently ignored, so the
    # UpdateOwnProfile permission was never actually enforced.
    permission_classes = (permissions.UpdateOwnProfile,)
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)
| 30.567797 | 76 | 0.640144 | 392 | 3,607 | 5.788265 | 0.32398 | 0.080212 | 0.040106 | 0.052446 | 0.352138 | 0.2684 | 0.183341 | 0.183341 | 0.183341 | 0.183341 | 0 | 0.002217 | 0.249792 | 3,607 | 117 | 77 | 30.82906 | 0.83629 | 0.107291 | 0 | 0.253521 | 0 | 0 | 0.163544 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15493 | false | 0 | 0.126761 | 0 | 0.619718 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
5606244a09399ed959432761133bc9ec94e45b68 | 5,831 | py | Python | mprocessing/client.py | ftranschel/evoMPS | b9e8d13066d12ee779376fdfd24ac2b34ac73ba2 | [
"BSD-3-Clause"
] | 1 | 2017-02-26T12:36:40.000Z | 2017-02-26T12:36:40.000Z | mprocessing/client.py | ftranschel/evoMPS | b9e8d13066d12ee779376fdfd24ac2b34ac73ba2 | [
"BSD-3-Clause"
] | null | null | null | mprocessing/client.py | ftranschel/evoMPS | b9e8d13066d12ee779376fdfd24ac2b34ac73ba2 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module is part of
an extension of evoMPS by adding
dissipative dynamics based on
Monte-Carlo methods.
This part is the client file
for the distributed computing
framework that utilizes parallel
processing to speed up dissipative
dynamics.
@author: F.W.G. Transchel
"""
import Queue as qu
import multiprocessing as mp
import multiprocessing.managers as mpm
import time
import socket
import sys
import traceback
import io
import multiprocessing.sharedctypes
from ast import literal_eval
import string as STR
import pickle as pic
import numpy as np
global np
import scipy as sp
global sp
import scipy.linalg as la
import nullspace as ns
import matmul as m
import tdvp_common_diss as tm
import matmul as mm
import tdvp_gen as TDVP
import scipy.sparse as spp
# Connection settings for the job server; authkey must match the server side.
authkey = "SECRET"
port = 5678
ip = '127.0.0.1'  # server address -- presumably adjusted for real deployments; confirm
internal_call = True
# This is used to tell tdvp_gen_diss.py
#(and other dynamically loaded modules)
# to not execute code on its own.
def worker(job_q, result_q,codebase):
    """ A worker function to be launched in a separate process.

        First executes *codebase* (a string of Python source shipped by the
        server) to set up the environment, then repeatedly pulls job strings
        from job_q, exec()s each one, and puts the resulting `result` dict
        into result_q. Returns once job_q is empty.

        NOTE(review): an earlier docstring described factorizing numbers;
        the code actually executes arbitrary serialized code strings.
    """
    try:
        #print codebase
        # Run the server-provided setup code in this process's namespaces.
        exec(codebase,globals(),locals())
    except:
        print "Unexpected error:", sys.exc_info()[0]
        traceback.print_exc()
        raise
    while True:
        try:
            job = job_q.get_nowait()
            # job = data for executing something...
            # idea: serialize the code and just have it executed distributively
            # The executed job is expected to fill this dict in-place.
            result = {}
            try:
                exec(job)
            except:
                print "Unexpected error:", sys.exc_info()[0]
                traceback.print_exc()
                raise
            #print result
            result_q.put(result)
            time.sleep(2)
        except qu.Empty:
            # No jobs left: this worker is done.
            return
        except:
            print "Unexpected error:", sys.exc_info()[0]
            return
            #raise
def scheduler(shared_job_q, shared_result_q, codebase, nprocs):
    """Fan the shared job queue out to *nprocs* worker processes.

    Every process runs worker() with the same queues and the
    server-supplied *codebase* string; this call blocks until all of
    them have finished.
    """
    spawned = []
    for _ in range(nprocs):
        proc = mp.Process(
            target=worker,
            args=(shared_job_q, shared_result_q, codebase))
        spawned.append(proc)
        proc.start()
    for proc in spawned:
        proc.join()
def runclient():
    """ This is the __main__ function of client.py that connects to a server
        and distributes jobs to the scheduler().
    """
    max_tries = 0
    max_tries_limit = 5  # give up after this many failed connection attempts
    # NOTE(review): max_tries is never reset after a successful connection,
    # so intermittent failures accumulate toward the limit -- confirm intent.
    while True:
        try:
            # Connect, then fetch the shared queues and the serialized codebase.
            manager = make_client_manager(ip,port,authkey)
            job_q = manager.get_job_q()
            result_q = manager.get_result_q()
            cdbs = manager.codebase().__str__() + "\n"
            code = cdbs[12:-3] #workaround to get rid of control chars
            # The proxy string carries escaped newlines; restore them so the
            # text is valid Python source again.
            decoded = code.replace("\\n","\n")
            decoded = decoded.replace("\\r","\r")
            #exec(decoded,globals(),locals())
            #tdvp_obj = tdvp_diss()
            #print code
            # Run the fetched code with 2 local worker processes.
            scheduler(job_q, result_q, decoded, (2))
            print "All available jobs finished."
            print "==="
            time.sleep(5)  # pause before polling the server for more work
        except socket.error:
            print "No answer from server. Trying again...."
            max_tries += 1
            time.sleep(5)
            if max_tries >= max_tries_limit:
                break
            else:
                continue
    print "Process aborted from too many failed connection tries. Exiting."
#from multiprocessing.managers import BaseManager
class ServerQueueManager(mpm.SyncManager):
    """Client-side SyncManager subclass.

    Proxy methods (get_job_q, get_result_q, codebase) are registered on
    this class inside make_client_manager() before connecting.
    """
    pass
def make_client_manager(ip, port, authkey):
    """ Create a manager for a client. This manager connects to a server on the
        given address and exposes the get_job_q and get_result_q methods for
        accessing the shared queues from the server.
        Return a manager object.

        Note: the parameters shadow the module-level ip/port/authkey globals.
    """
    # Register the server-exposed proxies by name (no local callables needed).
    ServerQueueManager.register('get_job_q')
    ServerQueueManager.register('get_result_q')
    ServerQueueManager.register('codebase')
    manager = ServerQueueManager(address=(ip, port), authkey=authkey)
    manager.connect()
    print 'Client connected to %s:%s' % (ip, port)
    return manager
def dist_process(n):
    """Identity helper: return *n* unchanged (placeholder for distributed work)."""
    result = n
    return result
def importCode(code,name,add_to_sys_modules=0):
    """
    Import dynamically generated code as a module. code is the
    object containing the code (a string, a file handle or an
    actual compiled code object, same types as accepted by an
    exec statement). The name is the name to give to the module,
    and the final argument says whether to add it to sys.modules
    or not. If it is added, a subsequent import statement using
    name will return this module. If it is not added to sys.modules
    import will try to load it in the normal fashion.
    import foo
    is equivalent to
    foofile = open("/path/to/foo.py")
    foo = importCode(foofile,"foo",1)
    Returns a newly generated module.
    """
    import sys,imp
    module = imp.new_module(name)
    # Python 2 exec statement: run the code with the new module's dict
    # as its global namespace, so definitions land on the module object.
    exec code in module.__dict__
    if add_to_sys_modules:
        sys.modules[name] = module
    return module
# Script entry point: print a banner and start the client's connect/work loop.
# NOTE(review): the banner reports cpu_count(), but runclient() hands the
# scheduler a hard-coded 2 worker processes -- confirm which is intended.
if __name__ == '__main__':
    print "==="
    print "This is the mpsampling distributed computation CLIENT."
    print "==="
    print "Using " + str(mp.cpu_count()) + " cores."
    print "==="
    runclient()
| 28.724138 | 79 | 0.629395 | 776 | 5,831 | 4.612113 | 0.367268 | 0.013412 | 0.013412 | 0.00922 | 0.077396 | 0.077396 | 0.060631 | 0.043308 | 0.03297 | 0.03297 | 0 | 0.006286 | 0.290688 | 5,831 | 202 | 80 | 28.866337 | 0.859043 | 0.079403 | 0 | 0.214286 | 0 | 0 | 0.099232 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.008929 | 0.205357 | null | null | 0.133929 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
560a9dc22e71652afba1f2f2605eeeb97cccd37c | 2,268 | py | Python | auctions/models.py | adamiantorno/CS50W-commerce | 017d690b8b344158f0bdcff165f60702f173618b | [
"MIT"
] | null | null | null | auctions/models.py | adamiantorno/CS50W-commerce | 017d690b8b344158f0bdcff165f60702f173618b | [
"MIT"
] | null | null | null | auctions/models.py | adamiantorno/CS50W-commerce | 017d690b8b344158f0bdcff165f60702f173618b | [
"MIT"
] | null | null | null | from django.contrib.auth.models import AbstractUser
from django.urls import reverse
from django.db import models
from datetime import date
class User(AbstractUser):
    """Project user model; currently identical to Django's AbstractUser."""
    pass
class Listing(models.Model):
    """An item put up for auction by a user."""
    # (stored value, display label) choices for the category field.
    CATEGORIES = (
        ('ART', 'Art'),
        ('CLT', 'Clothing & Accessories'),
        ('ELE', 'Electronics'),
        ('HME', 'Home'),
        ('KIT', 'Kitchen'),
        ('ENT', 'Entertainment'),
        ('TOY', 'Toys & Games'),
        ('SPT', 'Sports & Outdoors')
    )
    # User who created the listing; fixed after creation (editable=False).
    creator = models.ForeignKey(User, on_delete=models.CASCADE, related_name='listings', editable=False)
    title = models.CharField(max_length=100)
    description = models.TextField()
    date_created = models.DateTimeField(auto_now_add=True)
    # Optional image URL for the listing.
    image = models.URLField(max_length=264, blank=True, null=True)
    # Opening bid amount.
    start_bid = models.DecimalField(max_digits=10, decimal_places=2)
    category = models.CharField(max_length=50, choices=CATEGORIES)
    # presumably cleared when the auction is closed -- confirm in the views.
    is_active = models.BooleanField(default=True)
    # Empty until a winner is assigned (null=True, blank=True).
    winner = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    def __str__(self):
        return f"${self.start_bid} {self.title} - {self.creator}"
    def get_absolute_url(self):
        # Canonical URL for this listing (used by generic views/redirects).
        return reverse('listing', kwargs={'pk': self.pk})
class Bid(models.Model):
    """A monetary offer a user places on a listing."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='bids')
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, related_name='bids')
    # Offered amount, two decimal places.
    bid = models.DecimalField(max_digits=10, decimal_places=2)
    def __str__(self):
        return f"${self.bid} for {self.listing} from {self.user}"
class Comment(models.Model):
    """A user's comment on a listing."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='comments')
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, related_name='comments')
    comment = models.CharField(max_length=500)
    # Set automatically on creation; nullable for pre-existing rows.
    timestamp = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    def __str__(self):
        return f"{self.comment} - {self.user}"
class Watchlist(models.Model):
    """Per-user collection of watched listings (one watchlist per user)."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    listing = models.ManyToManyField(Listing, blank=True, related_name='watchlists')
    def __str__(self):
        return f"{self.user}'s Watchlist"
| 32.4 | 104 | 0.689594 | 279 | 2,268 | 5.437276 | 0.362007 | 0.036915 | 0.064601 | 0.096902 | 0.428477 | 0.411997 | 0.340804 | 0.264997 | 0.264997 | 0.170073 | 0 | 0.009081 | 0.174603 | 2,268 | 69 | 105 | 32.869565 | 0.801282 | 0 | 0 | 0.083333 | 0 | 0 | 0.136544 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.104167 | false | 0.020833 | 0.083333 | 0.104167 | 0.791667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
5611625ebacc0d47640b097555b0e9f837492d3b | 3,109 | py | Python | CameraToServer/python/client/SendDataWithSSL.py | yabuta/CameraSender | 99b84ae941782924ecb677a1f38a18dd082459d5 | [
"MIT"
] | null | null | null | CameraToServer/python/client/SendDataWithSSL.py | yabuta/CameraSender | 99b84ae941782924ecb677a1f38a18dd082459d5 | [
"MIT"
] | null | null | null | CameraToServer/python/client/SendDataWithSSL.py | yabuta/CameraSender | 99b84ae941782924ecb677a1f38a18dd082459d5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import socket
import cv2
import time
import threading
import datetime
import ssl
import readSettings as RS
"""
Sends a picture every 5 seconds.
When stop() is called, the thread finishes.
Pictures are converted from OpenCV Mat to JPEG before sending.
"""
class SendThread(threading.Thread):
    """Thread that periodically sends the current camera frame to a server
    over an SSL socket (roughly one picture every 5 seconds)."""
    def __init__(self,HOST,PORT,encrypt):
        """Store the server address and encryptor, and load the CA cert path.

        Setup failures are recorded in self.isError instead of raising,
        so they can be reported from run().
        """
        super(SendThread,self).__init__()
        self.e = threading.Event()  # set by stop() to end the loop in run()
        self.HOST,self.PORT,self.encrypt = HOST,PORT,encrypt
        self.frame = None  # latest frame, shared with the capture thread
        self.lock = threading.RLock()  # guards access to self.frame
        self.ca_path = RS.getSettings([["settings","ca_cert_path"]])[0]
        #self.ca_path = RS.get_ca_path()
        error_flag = False
        error_message = []
        if self.ca_path == None:
            error_flag = True
            error_message.append("fail to get ca_cert_path.")
        self.isError = [error_flag,error_message]
    def run(self):
        """Main loop: send one image about every 5 seconds until stop()."""
        time.sleep(1)
        # (translated) Treat failure to obtain ca_path as a fatal error.
        if self.isError[0]:
            for e in self.isError[1]:
                print e
            return
        while not self.e.is_set():
            start = time.time()
            self.sendImageToServer()
            elapse_time = time.time() - start
            print elapse_time * 1000 , "(ms)"
            time.sleep(5)
    def sendImageToServer(self):
        """Open an SSL connection and send the current frame as an
        encrypted JPEG, prefixed with a timestamp and a tab."""
        try:
            sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            # (translated) No client certificate is used; server verification
            # relies on ca_certs, which CERT_REQUIRED makes mandatory.
            # (translated) TLSv1 was chosen because it was considered safe.
            # NOTE(review): ssl.wrap_socket and PROTOCOL_TLSv1 are deprecated
            # in modern Python; an SSLContext would be preferable.
            ssl_sock = ssl.wrap_socket(sock,
                ca_certs = self.ca_path,
                cert_reqs = ssl.CERT_REQUIRED,
                ssl_version = ssl.PROTOCOL_TLSv1)
            ssl_sock.connect((self.HOST,self.PORT))
            #test
            print "test:",ssl_sock.cipher()
            #lock while processing picture data
            #because it is shared to Capture thread
            self.lock.acquire()
            if self.frame != None:
                #picture is sent after convert .jpeg from mat
                encode_param = [int(cv2.IMWRITE_JPEG_QUALITY),90]
                jpegstring = cv2.imencode('.jpeg',self.frame,encode_param)[1].tostring()
                jpegstring = self.encrypt.encrypt(jpegstring)
                #add date information
                tm = datetime.datetime.today()
                senddata = str(tm) + '\t' + jpegstring
                #test
                print "test:",len(senddata)
                #send
                ssl_sock.write(senddata)
            self.lock.release()
            ssl_sock.close()
        except Exception as e:
            # Best-effort: report and continue; run() retries next cycle.
            print "In SendDataWithSSL.py"
            print e
    def stop(self):
        """Signal the send loop to exit and wait for the thread to finish."""
        self.e.set()
        self.join()
    #get date
    def getDate(self):
        """Return the current local time formatted with %-codes
        (note: %2d pads with spaces, not zeros)."""
        d = datetime.datetime.today()
        return '%4d-%2d-%2d %2d:%2d:%2d' % (d.year,d.month,d.day,d.hour,d.minute,d.second)
    #set picture data with lock
    def setFrame(self,frame):
        """Store the latest captured frame under the shared lock."""
        with self.lock:
            self.frame = frame
| 29.330189 | 90 | 0.5468 | 354 | 3,109 | 4.672316 | 0.412429 | 0.027207 | 0.024184 | 0.019347 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012407 | 0.351882 | 3,109 | 105 | 91 | 29.609524 | 0.808437 | 0.108717 | 0 | 0.030769 | 0 | 0 | 0.041463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.107692 | null | null | 0.092308 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
561789e6a59c1c859ca74d0814d0ff171e8cafa1 | 6,112 | py | Python | PyFunceble/checker/syntax/domain_base.py | Centaurioun/PyFunceble | 59b809f3322118f7824195752c6015220738d4a0 | [
"Apache-2.0"
] | 213 | 2017-11-19T16:00:29.000Z | 2022-03-30T20:51:35.000Z | PyFunceble/checker/syntax/domain_base.py | Centaurioun/PyFunceble | 59b809f3322118f7824195752c6015220738d4a0 | [
"Apache-2.0"
] | 270 | 2018-01-10T12:42:41.000Z | 2022-03-22T00:03:23.000Z | PyFunceble/checker/syntax/domain_base.py | Centaurioun/PyFunceble | 59b809f3322118f7824195752c6015220738d4a0 | [
"Apache-2.0"
] | 48 | 2017-12-09T22:53:49.000Z | 2022-01-29T15:50:52.000Z | """
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Provides the base of all domain syntax checker.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/#/special-thanks
Contributors:
https://pyfunceble.github.io/#/contributors
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021 Nissar Chababy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
from typing import Optional, Tuple
from PyFunceble.checker.base import CheckerBase
from PyFunceble.dataset.iana import IanaDataset
from PyFunceble.dataset.public_suffix import PublicSuffixDataset
class DomainSyntaxCheckerBase(CheckerBase):
"""
Provides an interface to check the syntax of a second domain.
:param str subject:
Optional, The subject to work with.
"""
# pylint: disable=line-too-long
SPECIAL_USE_DOMAIN_NAMES_EXTENSIONS = ["onion"]
"""
Specifies the extension which are specified as "Special-Use Domain Names"
and supported by our project.
:type: list
.. seealso::
* `RFC6761`_
* `IANA Special-Use Domain Names`_ assignments.
* `RFC7686`_
.. _RFC6761: https://tools.ietf.org/html/rfc6761
.. _RFC7686: https://tools.ietf.org/html/rfc6761
.. _IANA Special-Use Domain Names: https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt
"""
last_point_index: Optional[int] = None
"""
Saves the index of the last point.
"""
iana_dataset: Optional[IanaDataset] = None
public_suffix_dataset: Optional[PublicSuffixDataset] = None
def __init__(self, subject: Optional[str] = None) -> None:
self.iana_dataset = IanaDataset()
self.public_suffix_dataset = PublicSuffixDataset()
super().__init__(subject)
def reset_last_point_index(func): # pylint: disable=no-self-argument
"""
Resets the last point index before executing the decorated method.
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
self.last_point_index = None
return func(self, *args, **kwargs) # pylint: disable=not-callable
return wrapper
def find_last_point_index(func): # pylint: disable=no-self-argument
"""
Try to find the index of the last point after the execution of the
decorated method.
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
result = func(self, *args, **kwargs) # pylint: disable=not-callable
self.last_point_index = self.get_last_point_index(self.idna_subject)
return result
return wrapper
@CheckerBase.subject.setter
@reset_last_point_index
@find_last_point_index
def subject(self, value: str):
"""
Sets the subject to work with.
:param value:
The subject to set.
:raise TypeError:
When the given :code:`value` is not a :py:class:`str`.
:raise ValueError:
When the given :code:`value` is empty.
"""
# pylint: disable=no-member
super(DomainSyntaxCheckerBase, self.__class__).subject.fset(self, value)
@staticmethod
def get_last_point_index(subject: str) -> Optional[int]:
"""
Provides the index of the last point of the given subject.
"""
try:
if subject.endswith("."):
return subject[:-1].rfind(".")
return subject.rindex(".")
except ValueError:
return None
def get_subject_without_suffix(
self, subject: str, extension: str
) -> Optional[Tuple[Optional[int], Optional[str]]]:
"""
Provides the given subject without the suffix.
:param subject:
The subject to work with.
:param extension:
The extension previously extracted.
"""
if extension in self.public_suffix_dataset:
for suffix in self.public_suffix_dataset.get_available_suffix(extension):
try:
return subject[: subject.rindex(f".{suffix}")], suffix
except ValueError:
continue
return None, None
@CheckerBase.ensure_subject_is_given
def get_extension(self) -> Optional[str]:
"""
Provides the extension to work with (if exists).
"""
if self.last_point_index is None:
return None
# Plus one is for the leading point.
extension = self.idna_subject[self.last_point_index + 1 :]
if extension.endswith("."):
extension = extension[:-1]
return extension
def is_valid(self) -> bool:
"""
Validate the given subject.
"""
raise NotImplementedError()
| 29.52657 | 125 | 0.586224 | 697 | 6,112 | 5.596844 | 0.341463 | 0.034607 | 0.043066 | 0.032299 | 0.163804 | 0.14586 | 0.075365 | 0.075365 | 0.053832 | 0.030761 | 0 | 0.011366 | 0.26587 | 6,112 | 206 | 126 | 29.669903 | 0.770894 | 0.434228 | 0 | 0.2 | 0 | 0 | 0.006829 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.083333 | 0 | 0.516667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
562235d95b74c7a9bd19a09f477eb9ad2191d459 | 8,183 | py | Python | Computer_science/B05_Python/01_Basics/S12_Debugging.py | Polirecyliente/SGConocimiento | 560b08984236d7a10f50c6b5e6fb28844193d81b | [
"CC-BY-4.0"
] | null | null | null | Computer_science/B05_Python/01_Basics/S12_Debugging.py | Polirecyliente/SGConocimiento | 560b08984236d7a10f50c6b5e6fb28844193d81b | [
"CC-BY-4.0"
] | null | null | null | Computer_science/B05_Python/01_Basics/S12_Debugging.py | Polirecyliente/SGConocimiento | 560b08984236d7a10f50c6b5e6fb28844193d81b | [
"CC-BY-4.0"
] | null | null | null |
# Debugging
#T# Table of contents
#C# Python debugger (pdb)
#T# Beginning of content
#C# Python debugger (pdb)
# |-------------------------------------------------------------
#T# pdb is the builtin Python debugger, it has breakpoints, stepping through the code, printing the values of variables, post-mortem debugging, debugging of modules, functions, scripts, among other features
#T# pdb can be executed with an script argument to debug said script, the following syntax is done in the operating system shell
# SYNTAX python3 -m pdb script1.py
#T# python3 is the Python executable, -m pdb script1.py runs pdb to debug script1.py (see the file titled Interpreter), this automatically enters post-mortem if script1 crashes
#T# the pdb module is imported to use the pdb debugger as part of a script
import pdb
#T# the run function of the pdb module allows debugging the execution of a Python string
# SYNTAX pdb.run('string1')
#T# the pdb debugger is started right before the execution of string1, and is used to debug whatever string1 executes
pdb.run('import S01_Basic_syntax') # this debugs the S01_Basic_syntax.py file, because the import statement executes the imported module
#T# the following code is used to show the syntax of the pdb debugger in its interactive mode
# Module-level fixtures used throughout the pdb demonstration below.
output_var1 = "help variable to show the different output of the pdb debugger"
var1 = [5, 2, 3]  # sample list; func2 passes its elements to func1
var2 = 7          # used in the conditional-breakpoint examples below
def func1(num1, num2):
    """Return num1 + num2, printing a marker line (helper for the pdb demo)."""
    total = num1 + num2
    print("func1_string1")
    return total
def func2():
    """Call func1 with the first and third elements of the global var1."""
    first, third = var1[0], var1[2]
    func1(first, third)
loc1 = 72  # presumably just a value to inspect while stepping -- unused here
for i1 in [1, 2, 3]:
    print("i1 is", i1)
func2()
#T# create a breakpoint with the breakpoint function, this starts the (Pdb) interpreter to do interactive debugging
breakpoint()
# |--------------------------------------------------\
#T# the following syntaxes are written in the pdb debugger language, so they can't be written outside of a comment because they are not valid Python syntax and this .py file would show errors in an IDE (IDE stands for Integrated Development Environment)
#T# the 'output_var1' variable used in the following is used as a helper to signal and display the output of the pdb debugger commands
#T# the pdb debugger prompt is (Pdb), so anything shown after a (Pdb) means that it was typed in said prompt, e.g. '(Pdb) prompt_typings1' prompt_typings1 was typed directly in the pdb debugger prompt
#T# when source code is printed, the current line is shown with '->' after the line number
# SYNTAX next
#T# the next command executes code up to the next line of code (not entering functions)
output_var1 # (Pdb) next # this shows output of the script or program under debugging
# SYNTAX step
#T# the step command steps into functions or the next line
output_var1 # (Pdb) step # this shows output of the script or program under debugging, possibly inside a function
# SYNTAX continue
#T# the continue command continues execution until a breakpoint is found
output_var1 # (Pdb) continue # this shows output of the script or program under debugging, up to the next breakpoint or the end of the file
# SYNTAX until int1
#T# the until command continues execution until a line of number int1 or greater is reached, without int1 it continues until the next bigger line number
output_var1 # (Pdb) until # this shows output of the script or program under debugging, up to the next bigger line number
# SYNTAX return
#T# the return command continues execution until arriving at the return keyword of the current function, so this is used inside functions
output_var1 # (Pdb) return # inside a function, this shows output of the script or program under debugging, up to the return keyword of the current function
# SYNTAX run
# SYNTAX restart
#T# the run command and its alias the restart command restart the script or program under debugging, preserving the options and created breakpoints
output_var1 # (Pdb) restart # the script or program restarts
# SYNTAX p var1
#T# the p command (for print) prints the value of var1, if the name var1 is defined, this syntax is an alias for print(var1)
output_var1 # (Pdb) p var1 # [5, 2, 3]
output_var1 # (Pdb) print(var1) # [5, 2, 3]
# SYNTAX p func1
#T# same as before, but when used with a function, its address is printed
output_var1 # (Pdb) p func1 # <function func1 at 0x7f35210401f0> # or similar
# SYNTAX p func1(arg1, arg2)
#T# same as before, but this prints any output from func1 and its return value using arg1, arg2 as arguments, and any other arguments present
output_var1 # (Pdb) p func1(2, 3)
#T# the former prints
# func1_string1
# 5
# SYNTAX args
#T# the args command displays the arguments passed to a function
output_var1 # (Pdb) args # inside func1(5, 3)
#T# the former prints
# num1 = 5
# num2 = 3
# SYNTAX display var1
#T# the display command prints a variable each time it changes
# SYNTAX undisplay var1
#T# stop displaying a variable var1 with the undisplay command
# SYNTAX l int1
#T# the l command lists 11 source code lines, this is done around line int1, 5 lines above and 5 lines below it, this syntax is an alias for list int1
output_var1 # (Pdb) l 7 # (Pdb) list 7
#T# the former prints
# 2 # Debugging
# 3
# 4 #T# Table of contents
# 5
# 6 #C# Python debugger (pdb)
# 7
# 8 #T# Beginning of content
# 9
# 10 #C# Python debugger (pdb)
# 11
# 12 # |-------------------------------------------------------------
# SYNTAX l .
#T# same as before, but list source code lines around the current line
output_var1 # (Pdb) l . # this prints similar as before
# SYNTAX ll
#T# the ll commands does a long list of the source code local to the current line
output_var1 # (Pdb) ll # the output is too large to put here, more than 150 lines
# SYNTAX break
#T# the break command alone displays all breakpoints
output_var1 # (Pdb) break # with two breakpoints already created
#T# the former prints
# Num Type Disp Enb Where
# 1 breakpoint keep yes at /path/to/S13_Debugging.py:20
# 2 breakpoint keep yes at /path/to/S13_Debugging.py:35
# stop only if var2 < 10
# SYNTAX break file1:int1
#T# the break command is used to create breakpoints in file1 (the current python script name without the .py extension), in line int1
output_var1 # (Pdb) break S13_Debugging:20 # Breakpoint 1 at /path/to/S13_Debugging.py:20
# SYNTAX break file1.func1, condition1
#T# same as before, but the breakpoint is created in the first line of func1 (its def line), and the breakpoint only activates if condition1 evaluates to True using Python boolean syntax
output_var1 # (Pdb) break S13_Debugging.func1, var2 < 10 # Breakpoint 2 at /path/to/S13_Debugging.py:35
# SYNTAX disable int1
#T# the disable command disables the breakpoint numbered with the number int1
output_var1 # (Pdb) disable 1 # Disabled breakpoint 1 at /path/to/S13_Debugging.py:20
# SYNTAX enable int1
#T# the enable command enables the breakpoint numbered with the number int1
output_var1 # (Pdb) enable 1 # Enabled breakpoint 1 at /path/to/S13_Debugging.py:20
# SYNTAX clear int1
#T# the clear command completely deletes a breakpoint
output_var1 # (Pdb) clear 1 # Deleted breakpoint 1 at /path/to/S13_Debugging.py:20
# SYNTAX where
#T# the where command prints the stack_frame trace
output_var1 # (Pdb) where
#T# the former prints
# /path/to/S13_Debugging.py(46)<module>()
#-> func2()
# /path/to/S13_Debugging.py(42)func2()
#-> func1(var1[0], var1[2])
#> /path/to/S13_Debugging.py(40)func1()->8
#-> return num3
# SYNTAX up int1
#T# the up command goes up to an older frame in the stack trace, the amount of frames that go up is int1
output_var1 # (Pdb) up 1
#T# the former prints
#> /path/to/S13_Debugging.py(42)func2()
#-> func1(var1[0], var1[2])
# SYNTAX down int1
#T# the down command goes down to a newer frame in the stack trace, the amount of frames that go down is int1
output_var1 # (Pdb) down 1 # *** Newest frame # this is the output at the lowest frame
# SYNTAX help
#T# print the debugger pdb help with the help command
# SYNTAX quit
#T# quit the debugger with the quit command
# |--------------------------------------------------/
# |------------------------------------------------------------- | 42.619792 | 253 | 0.717096 | 1,333 | 8,183 | 4.366842 | 0.221305 | 0.021989 | 0.051366 | 0.034015 | 0.267308 | 0.197045 | 0.160969 | 0.144305 | 0.144305 | 0.121972 | 0 | 0.037463 | 0.181229 | 8,183 | 192 | 254 | 42.619792 | 0.831343 | 0.873396 | 0 | 0.589744 | 0 | 0 | 0.12561 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051282 | false | 0 | 0.051282 | 0 | 0.128205 | 0.051282 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5624f94b924fed45d90c5a7120d1219ba2228c0e | 543 | py | Python | python3/numpy/numpy_absolute.py | Nahid-Hassan/code-snippets | 24bd4b81564887822a0801a696001fcbeb6a7a75 | [
"MIT"
] | 2 | 2020-09-29T04:09:41.000Z | 2020-10-18T13:33:36.000Z | python3/numpy/numpy_absolute.py | Nahid-Hassan/code-snippets | 24bd4b81564887822a0801a696001fcbeb6a7a75 | [
"MIT"
] | null | null | null | python3/numpy/numpy_absolute.py | Nahid-Hassan/code-snippets | 24bd4b81564887822a0801a696001fcbeb6a7a75 | [
"MIT"
] | 1 | 2021-12-26T04:55:55.000Z | 2021-12-26T04:55:55.000Z | """
Created on Sat Mar 23 00:23:27 2019
@author: nahid
"""
#https://docs.scipy.org/doc/numpy/reference/generated/numpy.absolute.html
import numpy as np
import matplotlib.pyplot as plt
# Element-wise absolute value of a small array.
x = np.array([-1.2, 1.2])
x = np.absolute(x)
print(x)
# For complex input, absolute() returns the magnitude sqrt(re**2 + im**2).
print(np.absolute(1 + 2j))
#Plot the function over [-10, 10]:
x = np.linspace(-10, 10, 101); #start, end, totalElements you want to create
plt.plot(np.absolute(x))
plt.show()
# Plot the raw (signed) values for comparison.
plt.plot(x)
plt.show()
# Build a complex grid via broadcasting and show |z| as a grayscale image.
xx = x + 1j * x[:, np.newaxis]
plt.imshow(np.abs(xx), extent=[-10, 10, -10, 10], cmap='gray')
plt.show() | 23.608696 | 76 | 0.672192 | 100 | 543 | 3.65 | 0.55 | 0.054795 | 0.060274 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080338 | 0.128913 | 543 | 23 | 77 | 23.608696 | 0.691332 | 0.368324 | 0 | 0.214286 | 1 | 0 | 0.011976 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
562f521bc543c349ce98082080bb5aacbf7c8b15 | 1,467 | py | Python | problems/324.Wiggle_Sort_II/try.py | subramp-prep/leetcode | d125201d9021ab9b1eea5e5393c2db4edd84e740 | [
"Unlicense"
] | null | null | null | problems/324.Wiggle_Sort_II/try.py | subramp-prep/leetcode | d125201d9021ab9b1eea5e5393c2db4edd84e740 | [
"Unlicense"
] | null | null | null | problems/324.Wiggle_Sort_II/try.py | subramp-prep/leetcode | d125201d9021ab9b1eea5e5393c2db4edd84e740 | [
"Unlicense"
] | null | null | null | # coding=utf-8
# Author: Jianghan LI
# Question: 324.Wiggle_Sort_II
# Date:
# Complexity: O(N)
import random
class Solution(object):
    # In-place, single-pass attempt at "wiggle sort": rearrange nums so
    # that nums[0] < nums[1] > nums[2] < nums[3] ...
    def wiggleSort(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # i is the position being fixed: odd i must hold a local maximum,
        # even i a local minimum. j scans ahead for a usable element.
        i = 1
        for j in range(2, len(nums)):
            print i, j,
            if i & 1:
                # Position i should be a peak: needs nums[i-1] < nums[i].
                if nums[i - 1] < nums[j]:
                    if i < j:
                        nums[i], nums[j] = nums[j], nums[i]
                        i += 2
                    else:
                        i += 1
                elif nums[i - 1] > nums[j]:
                    nums[i - 1], nums[j] = nums[j], nums[i - 1]
                    i += 1
            else:
                # Position i should be a valley: needs nums[i-1] > nums[i].
                if nums[i - 1] > nums[j]:
                    if i < j:
                        nums[i], nums[j] = nums[j], nums[i]
                        i += 2
                    else:
                        i += 1
                elif nums[i - 1] < nums[j]:
                    nums[i - 1], nums[j] = nums[j], nums[i - 1]
                    i += 1
            print nums
        # True when every position up to the end was fixed.
        # NOTE(review): returning a value contradicts the ":rtype: void" doc.
        return i == len(nums)
############ test case ###########
s = Solution()
# Only the LAST assignment to nums is actually exercised; the earlier
# lists are alternative inputs left in place for quick manual testing.
nums = [1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2]
nums = [2, 1, 1, 2, 2, 3]
nums = [1, 3, 2, 2, 3, 1]
nums = [5, 5, 4, 6]
nums = [1, 2, 9, 5, 5, 5, 5, 5, 8, 2]
s.wiggleSort(nums)
print nums
############ comments ############
# NOTE: this in-place approach only handles inputs without duplicate values
| 24.864407 | 74 | 0.362645 | 195 | 1,467 | 2.717949 | 0.297436 | 0.05283 | 0.169811 | 0.113208 | 0.339623 | 0.316981 | 0.316981 | 0.316981 | 0.316981 | 0.316981 | 0 | 0.075641 | 0.468303 | 1,467 | 58 | 75 | 25.293103 | 0.603846 | 0.080436 | 0 | 0.472222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027778 | null | null | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
563378f9927ab2c4212b302cc8dcfd23528fc8a2 | 1,667 | py | Python | mount_point.py | janglapuk/smstools-bot | e1bc4ee486b1425326028b68ce6d075cdcd6726d | [
"MIT"
] | null | null | null | mount_point.py | janglapuk/smstools-bot | e1bc4ee486b1425326028b68ce6d075cdcd6726d | [
"MIT"
] | null | null | null | mount_point.py | janglapuk/smstools-bot | e1bc4ee486b1425326028b68ce6d075cdcd6726d | [
"MIT"
] | 1 | 2019-02-06T08:56:11.000Z | 2019-02-06T08:56:11.000Z | import spf, os
from email.parser import Parser
__author__ = "TRA"
__doc__ = '''Modified mount point module'''
# SMS message lifecycle states; bots compare these against the program event.
RECEIVED = 'RECEIVED'
SENT = 'SENT'
FAILED = 'FAILED'
REPORT = 'REPORT'
class Bot(object, metaclass=spf.MountPoint):
    """Base class for SMS bots collected by the spf.MountPoint metaclass.

    On construction the incoming message file is read and parsed into
    headers/body, then run() is invoked if this bot's event matches the
    program's event.
    """
    _runnable = False
    _program = None
    _headers = None
    _body = None
    trim = False  # when True, whitespace is stripped from non-binary bodies

    def __init__(self, program):
        self._program = program
        # bot_event is expected to be defined by the concrete subclass.
        self._runnable = self._program.event == self.bot_event
        self.__init()

    def __str__(self):
        return self.__class__.__name__

    def __init(self):
        self.__read_message()
        self.__run()

    def __read_message(self):
        """Read the message file, if present, and parse it."""
        if os.path.isfile(self._program.fn):
            try:
                # 'with' guarantees the handle is closed even when read()
                # or parsing raises; the original only closed the file on
                # the success path and leaked it on errors.
                with open(self._program.fn, 'r') as f:
                    raw = f.read()
                self.__parse_message(raw)
            except Exception as e:
                print(self, 'Exception:\n', e)

    def __run(self):
        if self.is_runnable():
            self.run()

    def __parse_message(self, raw):
        """Split a raw RFC 2822-style message into headers and body."""
        parser = Parser()
        msg = parser.parsestr(raw)
        self._headers = {}
        for key in msg.keys():
            # Force all keys to lowercase
            k = key.lower()
            self._headers[k] = msg.get(key)
        self._body = msg.get_payload()
        # Check if trim enabled and not binary body
        if self.trim and 'binary' not in self._headers.keys():
            self._body = self._body.strip()

    def valid_event(self):
        """Return True if this bot's event matches the program event."""
        return self.bot_event == self._program.event

    def is_runnable(self):
        return self._runnable

    def get_event(self):
        return self._program.event

    def get_headers(self):
        return self._headers

    def get_body(self):
        return self._body
| 20.084337 | 58 | 0.627475 | 221 | 1,667 | 4.402715 | 0.357466 | 0.079137 | 0.086331 | 0.032888 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.257349 | 1,667 | 82 | 59 | 20.329268 | 0.785945 | 0.041392 | 0 | 0 | 0 | 0 | 0.045768 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.189655 | false | 0 | 0.034483 | 0.103448 | 0.431034 | 0.017241 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
565396c20414a45a7b60faed824ce5d1bc750a09 | 621 | py | Python | kubernetes_typed/client/models/v1_iscsi_volume_source.py | nikhiljha/kubernetes-typed | 4f4b969aa400c88306f92560e56bda6d19b2a895 | [
"Apache-2.0"
] | 22 | 2020-12-10T13:06:02.000Z | 2022-02-13T21:58:15.000Z | kubernetes_typed/client/models/v1_iscsi_volume_source.py | nikhiljha/kubernetes-typed | 4f4b969aa400c88306f92560e56bda6d19b2a895 | [
"Apache-2.0"
] | 4 | 2021-03-08T07:06:12.000Z | 2022-03-29T23:41:45.000Z | kubernetes_typed/client/models/v1_iscsi_volume_source.py | nikhiljha/kubernetes-typed | 4f4b969aa400c88306f92560e56bda6d19b2a895 | [
"Apache-2.0"
] | 2 | 2021-09-05T19:18:28.000Z | 2022-03-14T02:56:17.000Z | # Code generated by `typeddictgen`. DO NOT EDIT.
"""V1ISCSIVolumeSourceDict generated type."""
from typing import TypedDict, List
from kubernetes_typed.client import V1LocalObjectReferenceDict
# TypedDict mirror of the Kubernetes V1ISCSIVolumeSource API object.
# total=False makes every key optional.
V1ISCSIVolumeSourceDict = TypedDict(
    "V1ISCSIVolumeSourceDict",
    {
        "chapAuthDiscovery": bool,
        "chapAuthSession": bool,
        "fsType": str,
        "initiatorName": str,
        "iqn": str,
        "iscsiInterface": str,
        "lun": int,
        "portals": List[str],
        "readOnly": bool,
        "secretRef": V1LocalObjectReferenceDict,
        "targetPortal": str,
    },
    total=False,
)
| 25.875 | 62 | 0.634461 | 49 | 621 | 8.020408 | 0.693878 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010753 | 0.251208 | 621 | 23 | 63 | 27 | 0.834409 | 0.140097 | 0 | 0 | 1 | 0 | 0.246212 | 0.043561 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.105263 | 0 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
56700e57818953003d07b25e398460473fe1926b | 525 | py | Python | src/web_homepage/jinja.py | Mattan-Qwer/test1 | 16bc7642a18d632181480644d1f188c9fb3785bc | [
"Apache-2.0"
] | 1 | 2021-07-25T12:46:08.000Z | 2021-07-25T12:46:08.000Z | src/web_homepage/jinja.py | Mattan-Qwer/test1 | 16bc7642a18d632181480644d1f188c9fb3785bc | [
"Apache-2.0"
] | 3 | 2021-03-04T21:15:32.000Z | 2021-05-15T22:01:11.000Z | src/web_homepage/jinja.py | Mattan-Qwer/test1 | 16bc7642a18d632181480644d1f188c9fb3785bc | [
"Apache-2.0"
] | 2 | 2021-03-17T18:02:58.000Z | 2021-07-15T17:58:28.000Z | from django.templatetags.static import static
from django.urls import reverse
from jinja2 import Environment
from fontawesome_5.templatetags import fontawesome_5
from wissenslandkarte.settings import DEBUG, ENABLE_LIVE_JS
def environment(**options):
    """Create the Jinja2 environment Django uses for templates.

    Exposes Django's static-file and URL-reversing helpers, Font Awesome's
    static helper, and two settings flags as template globals.
    """
    env = Environment(**options)
    env.globals['static'] = static
    env.globals['url'] = reverse
    env.globals['fontawesome_5_static'] = fontawesome_5.fontawesome_5_static
    env.globals['debug'] = DEBUG
    env.globals['setting_enable_livejs'] = ENABLE_LIVE_JS
    return env
| 26.25 | 67 | 0.727619 | 61 | 525 | 6.04918 | 0.42623 | 0.162602 | 0.065041 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014151 | 0.192381 | 525 | 19 | 68 | 27.631579 | 0.856132 | 0 | 0 | 0 | 0 | 0 | 0.104762 | 0.04 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.333333 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
5671326c276d2e23216bc6e77ae1e2508c743236 | 273 | py | Python | app.py | EGeorge2021r/nft-marketplace | cc6a31f67e4657da72e59d5106a85b4d7dd0eb7f | [
"MIT"
] | null | null | null | app.py | EGeorge2021r/nft-marketplace | cc6a31f67e4657da72e59d5106a85b4d7dd0eb7f | [
"MIT"
] | 5 | 2022-02-20T00:49:34.000Z | 2022-02-25T21:29:49.000Z | app.py | EGeorge2021r/nft-marketplace | cc6a31f67e4657da72e59d5106a85b4d7dd0eb7f | [
"MIT"
] | 2 | 2022-02-21T03:29:34.000Z | 2022-03-04T00:46:46.000Z | import streamlit as st
from multiapp import MultiApp
from apps import buyer, home,creator # import your app modules here
app = MultiApp()
# Register each page: display name plus the callable that renders it.
app.add_app("Home", home.home)
app.add_app("Creator", creator.creator)
app.add_app("Buyer", buyer.buyer)
# The main app
app.run()
| 18.2 | 67 | 0.747253 | 44 | 273 | 4.568182 | 0.409091 | 0.089552 | 0.134328 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139194 | 273 | 14 | 68 | 19.5 | 0.855319 | 0.150183 | 0 | 0 | 0 | 0 | 0.070175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
56751bf83e23ee1f59d7cb7602bc4e9a2548390d | 587 | py | Python | library/tests/test_setup.py | edalatpour/unicornhatmini-python | d1bbfe8d4fdabad5a14489505ba3751386e2f990 | [
"MIT"
] | 32 | 2020-05-01T16:07:22.000Z | 2022-03-18T13:02:54.000Z | library/tests/test_setup.py | edalatpour/unicornhatmini-python | d1bbfe8d4fdabad5a14489505ba3751386e2f990 | [
"MIT"
] | 10 | 2020-04-26T13:40:50.000Z | 2022-01-06T14:22:03.000Z | library/tests/test_setup.py | edalatpour/unicornhatmini-python | d1bbfe8d4fdabad5a14489505ba3751386e2f990 | [
"MIT"
] | 19 | 2020-05-05T15:12:18.000Z | 2022-03-31T09:18:20.000Z | import mock
def test_setup(GPIO, spidev):
    """Constructing UnicornHATMini opens both SPI devices and configures GPIO."""
    from unicornhatmini import UnicornHATMini

    unicornhatmini = UnicornHATMini()

    expected_spi_calls = [mock.call(0, 0), mock.call(0, 1)]
    spidev.SpiDev.assert_has_calls(expected_spi_calls, any_order=True)

    GPIO.setwarnings.assert_called_once_with(False)
    GPIO.setmode.assert_called_once_with(GPIO.BCM)

    del unicornhatmini
def test_shutdown(GPIO, spidev, atexit):
    """Constructing UnicornHATMini registers its _exit handler, which runs cleanly."""
    from unicornhatmini import UnicornHATMini

    hat = UnicornHATMini()
    atexit.register.assert_called_once_with(hat._exit)
    hat._exit()
| 23.48 | 65 | 0.739353 | 68 | 587 | 6.147059 | 0.441176 | 0.267943 | 0.114833 | 0.143541 | 0.315789 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0.008282 | 0.177172 | 587 | 24 | 66 | 24.458333 | 0.857143 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.125 | false | 0 | 0.1875 | 0 | 0.3125 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
567bcfdb694290f895bf395e9cbebe9b725dbb36 | 4,192 | py | Python | namebench/appengine/models.py | chicks-net/namebench | 8dd5e67ba3077650d49aa47a75a257e3286c8044 | [
"Apache-2.0"
] | 2 | 2017-12-13T00:39:46.000Z | 2018-05-02T14:35:48.000Z | namebench/appengine/models.py | chicks-net/namebench | 8dd5e67ba3077650d49aa47a75a257e3286c8044 | [
"Apache-2.0"
] | null | null | null | namebench/appengine/models.py | chicks-net/namebench | 8dd5e67ba3077650d49aa47a75a257e3286c8044 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
class IndexHost(db.Model):
    """A DNS record benchmarked as part of the shared index of hosts."""
    record_type = db.StringProperty()
    record_name = db.StringProperty()
    listed = db.BooleanProperty()
class NameServer(db.Model):
    """A known DNS server, with location data and classification flags."""
    ip = db.StringProperty()
    hostname = db.StringProperty()
    name = db.StringProperty()
    listed = db.BooleanProperty()
    city = db.StringProperty()
    region = db.StringProperty()
    country = db.StringProperty()
    country_code = db.StringProperty()
    coordinates = db.GeoPtProperty()
    is_global = db.BooleanProperty()
    is_regional = db.BooleanProperty()
    is_custom = db.BooleanProperty()
    url = db.LinkProperty()
    timestamp = db.DateTimeProperty(auto_now_add=True)
class Submission(db.Model):
    """One benchmark-result upload from a client, with location metadata."""
    client_id = db.IntegerProperty()
    submit_id = db.IntegerProperty()
    class_c = db.StringProperty()
    timestamp = db.DateTimeProperty(auto_now_add=True)
    listed = db.BooleanProperty()
    hidden = db.BooleanProperty()
    city = db.StringProperty()
    region = db.StringProperty()
    country = db.StringProperty()
    country_code = db.StringProperty()
    coordinates = db.GeoPtProperty()
    # de-normalized data, also duplicated in RunResults (though much slower)
    best_nameserver = db.ReferenceProperty(NameServer, collection_name='best_submissions')
    best_improvement = db.FloatProperty()
    primary_nameserver = db.ReferenceProperty(NameServer, collection_name="primary_submissions")
class SubmissionConfig(db.Model):
    """Benchmark configuration (thread counts, timeouts) for one Submission."""
    submission = db.ReferenceProperty(Submission, collection_name='config')
    input_source = db.StringProperty()
    benchmark_thread_count = db.IntegerProperty()
    health_thread_count = db.IntegerProperty()
    health_timeout = db.FloatProperty()
    timeout = db.FloatProperty()
    query_count = db.IntegerProperty()
    run_count = db.IntegerProperty()
    platform = db.StringProperty()
    version = db.StringProperty()
class SubmissionNameServer(db.Model):
    """Per-nameserver aggregate results within a Submission."""
    nameserver = db.ReferenceProperty(NameServer, collection_name='submissions')
    submission = db.ReferenceProperty(Submission, collection_name='nameservers')
    is_error_prone = db.BooleanProperty()
    is_disabled = db.BooleanProperty()
    is_reference = db.BooleanProperty()
    overall_average = db.FloatProperty()
    check_average = db.FloatProperty()
    averages = db.ListProperty(float)
    duration_min = db.FloatProperty()
    duration_max = db.FloatProperty()
    error_count = db.IntegerProperty()
    timeout_count = db.IntegerProperty()
    nx_count = db.IntegerProperty()
    position = db.IntegerProperty()
    sys_position = db.IntegerProperty()
    version = db.StringProperty()
    node_ids = db.ListProperty(str)
    # TODO(tstromberg): Remove obsoleted improvement variable
    improvement = db.FloatProperty()
    diff = db.FloatProperty()
    notes = db.ListProperty(str)
    port_behavior = db.StringProperty()
# Store one row per run for run_results, since we do not need to do much with them.
class RunResult(db.Model):
    """Raw per-run timing data for one SubmissionNameServer."""
    submission_nameserver = db.ReferenceProperty(SubmissionNameServer, collection_name='results')
    run_number = db.IntegerProperty()
    durations = db.ListProperty(float)
    answer_counts = db.ListProperty(int)
# We may want to compare index results, so we will store one row per record
class IndexResult(db.Model):
    """Result of querying a single IndexHost record via one nameserver."""
    submission_nameserver = db.ReferenceProperty(SubmissionNameServer, collection_name='index_results')
    index_host = db.ReferenceProperty(IndexHost, collection_name='results')
    duration = db.FloatProperty()
    answer_count = db.IntegerProperty()
    ttl = db.IntegerProperty()
    response = db.StringProperty()
| 37.428571 | 101 | 0.76479 | 495 | 4,192 | 6.361616 | 0.389899 | 0.10162 | 0.055891 | 0.020959 | 0.308034 | 0.268657 | 0.162591 | 0.136551 | 0.136551 | 0.085742 | 0 | 0.002209 | 0.135973 | 4,192 | 111 | 102 | 37.765766 | 0.8672 | 0.203244 | 0 | 0.207317 | 0 | 0 | 0.027108 | 0 | 0 | 0 | 0 | 0.009009 | 0 | 1 | 0 | false | 0 | 0.036585 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
567e66953f9087009e3135f6fcd09362a85c10a2 | 416 | py | Python | PE/PE77.py | bristy/codemania | ceaacce07cb1b66202e17ad313a3467bd591bdc1 | [
"MIT"
] | null | null | null | PE/PE77.py | bristy/codemania | ceaacce07cb1b66202e17ad313a3467bd591bdc1 | [
"MIT"
] | null | null | null | PE/PE77.py | bristy/codemania | ceaacce07cb1b66202e17ad313a3467bd591bdc1 | [
"MIT"
] | null | null | null | # https://projecteuler.net/problem=77
from prime_util import sieve
# Search bound and the target count (first value writable as a sum of
# primes in more than 5000 ways). NOTE: Python 2 source (print statement).
MAX = 5000
INF = 1 << 31  # defined but unused

def pe77():
    primes, s = sieve(MAX)
    # Coin-change style DP: dp[w] = number of ways to write w as a sum of
    # primes. Iterating one prime at a time makes order irrelevant.
    dp = [0] * MAX
    dp[0] = 1
    for p in primes:
        w = p
        while w < MAX:
            dp[w] = dp[w] + dp[w - p]
            w += 1
    # Report the first value with more than 5000 prime-sum partitions.
    for i, p in enumerate(dp):
        if p > 5000:
            print i, p
            break

if __name__ == '__main__':
    pe77()
| 16 | 37 | 0.463942 | 62 | 416 | 2.967742 | 0.532258 | 0.081522 | 0.065217 | 0.065217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08502 | 0.40625 | 416 | 25 | 38 | 16.64 | 0.659919 | 0.084135 | 0 | 0 | 0 | 0 | 0.021108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.055556 | null | null | 0.055556 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5691317d3c567d014aec439954667396b59f6a2b | 735 | py | Python | src/my_pytube/__init__.py | mjmartinson/intravideo_search | 7c123f515d0e9fb0934cae5894088a0dabcb166f | [
"MIT"
] | null | null | null | src/my_pytube/__init__.py | mjmartinson/intravideo_search | 7c123f515d0e9fb0934cae5894088a0dabcb166f | [
"MIT"
] | null | null | null | src/my_pytube/__init__.py | mjmartinson/intravideo_search | 7c123f515d0e9fb0934cae5894088a0dabcb166f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# flake8: noqa
# noreorder
"""
Pytube: a very serious Python library for downloading YouTube Videos.
"""
# Package metadata used by the banner log line below.
__title__ = 'my_pytube'
__version__ = '9.5.2'
__author__ = 'Nick Ficano'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2019 Nick Ficano'
#import logging
#import query
#import streams
#import captions
#import contrib
#import __main__
from my_pytube.logging import create_logger
from my_pytube.query import CaptionQuery
from my_pytube.query import StreamQuery
from my_pytube.streams import Stream
from my_pytube.captions import Caption
from my_pytube.contrib.playlist import Playlist
from my_pytube.__main__ import YouTube
logger = create_logger()
# Emit the package name and version once at import time, for debugging.
logger.info('%s v%s', __title__, __version__)
| 22.96875 | 69 | 0.786395 | 100 | 735 | 5.32 | 0.47 | 0.120301 | 0.157895 | 0.06391 | 0.086466 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014041 | 0.127891 | 735 | 31 | 70 | 23.709677 | 0.815913 | 0.270748 | 0 | 0 | 0 | 0 | 0.130769 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
56960d66f7d4e91cd7177d74b8b5d014ec97d10d | 634 | py | Python | setup.py | AjayMT/emitter | 8b8c1aaab39ca858a59ad45a36f22f2737a0d46a | [
"MIT"
] | 1 | 2021-01-04T05:29:49.000Z | 2021-01-04T05:29:49.000Z | setup.py | AjayMT/emitter | 8b8c1aaab39ca858a59ad45a36f22f2737a0d46a | [
"MIT"
] | null | null | null | setup.py | AjayMT/emitter | 8b8c1aaab39ca858a59ad45a36f22f2737a0d46a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from distutils.core import setup
# Package definition for the 'emitter' event-emitter library.
# NOTE(review): distutils is deprecated (removed in Python 3.12);
# consider migrating to setuptools.
setup(
    name='emitter',
    version='0.0.7',
    description='simple event emitter',
    author='Ajay MT',
    author_email='ajaymt@icloud.com',
    url='http://github.com/ajaymt/emitter',
    download_url='https://github.com/AjayMT/emitter/tarball/v0.0.7',
    py_modules=['emitter'],
    keywords='emitter event eventemitter node',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Programming Language :: Python',
    ]
)
| 27.565217 | 68 | 0.632492 | 72 | 634 | 5.527778 | 0.736111 | 0.01005 | 0.075377 | 0.110553 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014056 | 0.214511 | 634 | 22 | 69 | 28.818182 | 0.785141 | 0.031546 | 0 | 0 | 0 | 0 | 0.535073 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.052632 | 0 | 0.052632 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5697543911acf8f5ff9fcad51bfbbe032aa5d07b | 358 | py | Python | preprocessing/extract_hashtags.py | acvander/kaggle_real_or_not | 737d949b1f8446e734ed5113b84b5b199a7aee3c | [
"MIT"
] | null | null | null | preprocessing/extract_hashtags.py | acvander/kaggle_real_or_not | 737d949b1f8446e734ed5113b84b5b199a7aee3c | [
"MIT"
] | 10 | 2020-02-11T19:07:36.000Z | 2022-02-09T23:35:13.000Z | preprocessing/extract_hashtags.py | acvander/kaggle_real_or_not | 737d949b1f8446e734ed5113b84b5b199a7aee3c | [
"MIT"
] | null | null | null | import re
import pandas as pd
def extract_hashtags(df: pd.DataFrame) -> pd.DataFrame:
    """Return a copy of *df* with an added 'hashtags' column.

    Each entry of 'hashtags' is the list of hashtag names (without the
    leading '#') found in that row's 'text' field.
    """
    # Vectorized str.findall replaces the original row-wise apply(axis=1):
    # one C-level pass instead of a Python call per row. assign() returns
    # a new frame, so the caller's df is left untouched, and the column
    # is present even for an empty frame (the old apply() dropped it).
    return df.assign(hashtags=df['text'].str.findall(r'#(\w+)'))
56978a1f08405d1e9c9e366fa78cdc804de04d55 | 1,472 | py | Python | app/app.py | escofresco/makeschool_fsp2_realtweets | a1df7c54e5ef3d5a91282141389200248b40f695 | [
"MIT"
] | null | null | null | app/app.py | escofresco/makeschool_fsp2_realtweets | a1df7c54e5ef3d5a91282141389200248b40f695 | [
"MIT"
] | 9 | 2019-11-17T22:33:15.000Z | 2021-06-02T00:37:50.000Z | app/app.py | escofresco/makeschool_fsp2_realtweets | a1df7c54e5ef3d5a91282141389200248b40f695 | [
"MIT"
] | null | null | null | import marshal
from multiprocessing import Condition, Process, Queue, Pipe
import os
from threading import Timer
from types import FunctionType
import pickle
from celery import Celery
from flask import Flask, url_for
from grams.grams import Histogram
from grams.markov import MC
import time
def make_app():
    """Build the Flask app and spawn a background process that keeps a
    freshly generated Markov-chain text ready on a pipe."""
    def make_model():
        def _make_model(corpus, n_sentences=10):
            # Runs in the child process: build the chain, then keep the
            # pipe primed with generated text.
            # global cv
            def _generate():
                markovchain = MC(corpus)
                return markovchain.generate
            generate = _generate()
            child_conn.send(generate(n_sentences))
            # NOTE(review): poll() with no timeout returns immediately,
            # so this loop busy-spins a CPU core — confirm intent.
            while True:
                if parent_conn.poll():
                    ## previously sent message got consumed
                    # send another
                    child_conn.send(generate(n_sentences))
        parent_conn, child_conn = Pipe(duplex=True)
        with open("res/the_adventures_of_sherlock_holmes.txt", "r") as f:
            f_out = f.read()
        # NOTE(review): a nested function as a Process target relies on
        # fork-based start (won't pickle on spawn platforms) — verify.
        make_process = Process(target=_make_model, args=(f_out,))
        make_process.start()
        return parent_conn, make_process
    # init app
    flask_app = Flask(__name__)
    parent_conn, make_process = make_model()
    @flask_app.route("/")
    def home():
        # Serve the pre-generated text if the worker has produced one.
        if parent_conn.poll():
            return parent_conn.recv()
        return "loading..."
    return flask_app
# Built at import time: spawns the background model process immediately.
flask_app = make_app()
if __name__ == "__main__":
    flask_app.run(debug=True, port=8080)
| 23.365079 | 73 | 0.623641 | 177 | 1,472 | 4.903955 | 0.446328 | 0.069124 | 0.02765 | 0.048387 | 0.071429 | 0.071429 | 0 | 0 | 0 | 0 | 0 | 0.005769 | 0.293478 | 1,472 | 62 | 74 | 23.741935 | 0.828846 | 0.046196 | 0 | 0.102564 | 0 | 0 | 0.043634 | 0.029328 | 0 | 0 | 0 | 0 | 0 | 1 | 0.128205 | false | 0 | 0.282051 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
56a4c4a9da8e522b22406ce899d916afef096ab3 | 427 | py | Python | apps/blog/template.py | Bean-jun/PersonBlogSystemFlask | 7935dfa8e8f1a385a296267045f6f26e03fd6b18 | [
"MIT"
] | null | null | null | apps/blog/template.py | Bean-jun/PersonBlogSystemFlask | 7935dfa8e8f1a385a296267045f6f26e03fd6b18 | [
"MIT"
] | null | null | null | apps/blog/template.py | Bean-jun/PersonBlogSystemFlask | 7935dfa8e8f1a385a296267045f6f26e03fd6b18 | [
"MIT"
] | null | null | null | from flask import request
from apps.blog import home_blueprint
from apps.models import Category
@home_blueprint.app_template_global("category_navigate")
def category_navigate():
    """Navigation bar: expose all categories to templates."""
    category_obj = Category.query.all()
    return category_obj
# Front-end templates can use this to fetch the user, or use request.user directly
@home_blueprint.app_template_global("userinfo_navigate")
def userinfo_navigate():
    """Navigation bar avatar: expose the current request's user to templates."""
    return request.user
| 23.722222 | 56 | 0.779859 | 51 | 427 | 6.27451 | 0.509804 | 0.121875 | 0.1 | 0.15 | 0.1875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.124122 | 427 | 17 | 57 | 25.117647 | 0.855615 | 0.124122 | 0 | 0 | 0 | 0 | 0.093664 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.3 | 0 | 0.7 | 0.3 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3b0964e54ac0934e525bad71399782f8aff00bb9 | 377 | py | Python | alshamelah_api/apps/authors/models.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | alshamelah_api/apps/authors/models.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | alshamelah_api/apps/authors/models.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..core.models import BaseModel
class Author(BaseModel):
    """A book author, identified by a required display name."""
    name = models.CharField(max_length=100, verbose_name=_(u'name'), null=False, blank=False)

    class Meta:
        verbose_name_plural = "Authors"
        ordering = ['name']

    def __str__(self):
        return self.name
| 23.5625 | 93 | 0.702918 | 49 | 377 | 5.183673 | 0.653061 | 0.07874 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009934 | 0.198939 | 377 | 15 | 94 | 25.133333 | 0.831126 | 0 | 0 | 0 | 0 | 0 | 0.039788 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.3 | 0.1 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3b13ffb0d8a69d11829542e3e7415101aea54667 | 394 | py | Python | 2020/2/2-1.py | jonathonball/adventofcode | 041f3bb2b1ebe3ddcb21341bf52f29512e17a326 | [
"MIT"
] | 1 | 2020-01-17T18:59:59.000Z | 2020-01-17T18:59:59.000Z | 2020/2/2-1.py | jonathonball/adventofcode | 041f3bb2b1ebe3ddcb21341bf52f29512e17a326 | [
"MIT"
] | null | null | null | 2020/2/2-1.py | jonathonball/adventofcode | 041f3bb2b1ebe3ddcb21341bf52f29512e17a326 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import sys
# Count passwords whose letter occurs within the policy's min-max range.
total_valid = 0
for entry in sys.stdin:
    policy, password = entry.strip().split(":")
    bounds, letter = policy.split(" ")
    lo, hi = map(int, bounds.split("-"))
    if lo <= password.count(letter) <= hi:
        total_valid += 1
print(total_valid)
| 23.176471 | 64 | 0.598985 | 53 | 394 | 4.320755 | 0.54717 | 0.131004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010417 | 0.269036 | 394 | 16 | 65 | 24.625 | 0.784722 | 0.043147 | 0 | 0 | 0 | 0 | 0.007979 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.181818 | 0.090909 | 0 | 0.090909 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
3b1fa76083519e9576fd893b3ec3da51308b56e5 | 2,644 | py | Python | test_auth_app/backend/db_set_users.py | MalyshevValery/testweb | fec105a62b0ef4488e523e1bf3a9bde16e82bffe | [
"MIT"
] | null | null | null | test_auth_app/backend/db_set_users.py | MalyshevValery/testweb | fec105a62b0ef4488e523e1bf3a9bde16e82bffe | [
"MIT"
] | null | null | null | test_auth_app/backend/db_set_users.py | MalyshevValery/testweb | fec105a62b0ef4488e523e1bf3a9bde16e82bffe | [
"MIT"
] | null | null | null | import os
# Silence TensorFlow's C++ logging before anything imports TF.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import warnings
# Hide noisy FutureWarnings emitted by third-party libraries.
warnings.simplefilter(action='ignore', category=FutureWarning)
__author__ = "eduard.snezhko@gmail.com"
from test_auth_app.backend import test_auth_app_db
from test_auth_app.backend.db_models import User, Role, Application
from test_auth_app.backend.db_models import get_default_roles
from uuid import uuid1
def clear_database():
    """Delete every User, Role and Application row, then commit once."""
    for model in (User, Role, Application):
        for record in model.query.all():
            test_auth_app_db.session.delete(record)
    test_auth_app_db.session.commit()
def list_users():
    """Print id, credentials, roles, apps and validation flag for each user."""
    template = '\n{}:\n\t{}\n\t{}\n{}\n{}\n{}\nvalid = {}'
    for user in User.query.all():
        print(template.format(user.id, user.email, user.user_uid, user.roles,
                              user.applications, user.password_hash,
                              user.validated))
def list_roles():
    """Print id, name and description for every Role."""
    template = '{}:\n\t{}\n\t{}\n'
    for role in Role.query.all():
        print(template.format(role.id, role.role, role.description))
def list_applications():
    """Print id, name and description for every Application."""
    template = '{}:\n\t{}\n\t{}\n'
    for application in Application.query.all():
        print(template.format(application.id, application.name,
                              application.description))
def init_prod_database():
    """Seed the production database with the default anonymous user,
    the standard roles and the two applications."""
    user_1 = User(email='empty@email.org', user_uid=str(uuid1()))
    # NOTE(review): role_read_all / role_edit_all are created but never
    # attached to a user or added to the session below — confirm whether
    # reference cascades persist them or they are silently dropped.
    role_read = Role(role='read', description='Can list, view and download cases related to a user')
    role_read_all = Role(role='read_all', description='Can list, view and download all cases related to all user (as admin)')
    role_edit = Role(role='edit', description='Can add new case, run case processing, clear processing results and case delete. Case is relevant to a particular user')
    role_edit_all = Role(role='edit_all', description='Can run case processing, clear processing results and case delete. Case may be relevant to any user (as admin)')
    app_lungs = Application(name='lungs', description='Lungs segmentation on CT images using CNNs')
    app_lesions = Application(name='lesions', description='Lesions segmentation in lungs on CT images using CNNs')
    # Default Anonymous user has access only to the example cases, to list them, preview and download.
    # All apps are available
    user_1.set_password('empty')
    user_1.roles.append(role_read)
    user_1.roles.append(role_edit)
    user_1.set_validated(True)
    user_1.applications.append(app_lungs)
    user_1.applications.append(app_lesions)
    test_auth_app_db.session.add(user_1)
    test_auth_app_db.session.commit()
if __name__ == '__main__':
    # One-off maintenance entry point; uncomment the call you need.
    # clear_database()
    # init_prod_database()
    # list_users()
    pass
| 34.789474 | 167 | 0.708396 | 411 | 2,644 | 4.352798 | 0.287105 | 0.044718 | 0.061487 | 0.050866 | 0.428731 | 0.329793 | 0.220235 | 0.202348 | 0.062605 | 0.062605 | 0 | 0.005005 | 0.168684 | 2,644 | 75 | 168 | 35.253333 | 0.808917 | 0.064297 | 0 | 0.28 | 0 | 0.04 | 0.256078 | 0.024311 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0.06 | 0.12 | 0 | 0.22 | 0.06 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
3b212170dc8dc576e182a858e68aeef56e1e1acd | 1,232 | py | Python | activecollab_digger/views.py | kingsdigitallab/django-activecollab-digger | 508c31eb4a3fe9887aa9d3a86ea160f3bc1e60b0 | [
"MIT"
] | null | null | null | activecollab_digger/views.py | kingsdigitallab/django-activecollab-digger | 508c31eb4a3fe9887aa9d3a86ea160f3bc1e60b0 | [
"MIT"
] | null | null | null | activecollab_digger/views.py | kingsdigitallab/django-activecollab-digger | 508c31eb4a3fe9887aa9d3a86ea160f3bc1e60b0 | [
"MIT"
] | null | null | null | from django.conf import settings
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
from django.views.generic.base import TemplateView
from .activecollab import get_activecollab, post_activecollab
class IndexPageView(LoginRequiredMixin, TemplateView):
    """Digger landing page; restricted to authenticated users."""
    template_name = 'activecollab_digger/index.html'
def tasks(request):
    """Route task requests: POST creates a task; any other method lists them."""
    if request.method != 'POST':
        return _get_tasks(request)
    return _post_task(request)
def _get_tasks(request):
    """Fetch the project's tasks from ActiveCollab and relay them as JSON."""
    endpoint = 'projects/{}/tasks'.format(settings.AC_PROJECT_ID)
    response = get_activecollab(endpoint)
    if response.status_code == 200:
        return JsonResponse(response.json())
    # Relay the upstream error status and message to the client.
    return JsonResponse({'error': response.status_code,
                         'message': response.json()['message']})
def _post_task(request):
    """Create a task in ActiveCollab from the POSTed name and body."""
    payload = {
        'name': request.POST.get('name'),
        'body': request.POST.get('body'),
        'created_by_id': settings.AC_USER,
    }
    response = post_activecollab('projects/{}/tasks'.format(settings.AC_PROJECT_ID),
                                 params=payload)
    if response.status_code == 200:
        return JsonResponse(response.json())
    # Relay the upstream error status and message to the client.
    return JsonResponse({'error': response.status_code,
                         'message': response.json()['message']})
| 27.377778 | 77 | 0.659091 | 141 | 1,232 | 5.58156 | 0.347518 | 0.050826 | 0.055909 | 0.07878 | 0.360864 | 0.360864 | 0.360864 | 0.360864 | 0.233799 | 0.233799 | 0 | 0.006244 | 0.219968 | 1,232 | 44 | 78 | 28 | 0.812695 | 0 | 0 | 0.275862 | 0 | 0 | 0.109578 | 0.024351 | 0 | 0 | 0 | 0 | 0 | 1 | 0.103448 | false | 0 | 0.172414 | 0 | 0.551724 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3b29c188db2f818925355ab781e7bf18b7f0fea1 | 933 | py | Python | pool.py | Reno-Greenleaf/tomb | 6a76f640e523903f32c5fa178295435a24289559 | [
"MIT"
] | null | null | null | pool.py | Reno-Greenleaf/tomb | 6a76f640e523903f32c5fa178295435a24289559 | [
"MIT"
] | null | null | null | pool.py | Reno-Greenleaf/tomb | 6a76f640e523903f32c5fa178295435a24289559 | [
"MIT"
] | null | null | null | from actor import Actor, Location, Passage, Switch, Ghost
from json import load
class Pool(dict):
    """Registry of in-game actors keyed by name, plus the room layout."""

    def fill(self):
        """Load actors from data/actors.json and rooms from data/space.json."""
        with open('data/actors.json', 'r') as handle:
            definitions = load(handle)
        for name, spec in definitions.items():
            self._build(spec, name)
        with open('data/space.json', 'r') as handle:
            self.space = load(handle)

    def get_rooms(self):
        """Return the room/space description loaded by fill()."""
        return self.space

    def _build(self, spec, name):
        """Create an Actor from *spec*, layering decorators for its traits."""
        base = Actor()
        base.load(spec)
        # Plain actors (no 'io' section) are registered as-is.
        if 'io' not in spec:
            self[name] = base
            return
        if 'labyrinth' in spec:
            base = Location(base)
            # A labyrinth entry with a 'right' side is a passage.
            if 'right' in spec['labyrinth']:
                base = Passage(base)
        if 'access' in spec:
            base = Switch(base) if 'used' in spec['access'] else Ghost(base)
        self[name] = base
3b2f210471d573b08554a8de2790e7a4843a9bc9 | 1,072 | py | Python | tests/test_equity.py | the-Arki/portfolio-tracker | eed07936876d720d29383d315003a11e9f6a2ee9 | [
"MIT"
] | null | null | null | tests/test_equity.py | the-Arki/portfolio-tracker | eed07936876d720d29383d315003a11e9f6a2ee9 | [
"MIT"
] | null | null | null | tests/test_equity.py | the-Arki/portfolio-tracker | eed07936876d720d29383d315003a11e9f6a2ee9 | [
"MIT"
] | null | null | null | from defer import return_value
from src.stock import Equity
from src.io_manager import read_json
import pytest
# Module-level fixtures shared by the tests below.
# NOTE(review): this Equity('MSFT') is constructed BEFORE _get_info is
# patched, so the real implementation runs once at import time — confirm
# this is intended.
equity_info = read_json('./tests/equity_info.json')
equity = Equity('MSFT')
def test__get_info(mocker):
    """Patched _get_info hands back the recorded payload."""
    mocker.patch('src.stock.Equity._get_info', return_value=equity_info)
    payload = equity._get_info()
    assert payload == equity_info
def test_instance_info(mocker):
    """A freshly built Equity stores the patched payload as .info."""
    mocker.patch('src.stock.Equity._get_info', return_value=equity_info)
    instrument = Equity('MSFT')
    assert instrument.info == equity_info
def test_instance_currency(mocker):
    """.currency mirrors the 'currency' key of the payload."""
    mocker.patch('src.stock.Equity._get_info', return_value=equity_info)
    instrument = Equity('MSFT')
    assert instrument.currency == equity_info['currency']
def test_instance_tradeable(mocker):
    """.tradeable mirrors the 'tradeable' key of the payload."""
    mocker.patch('src.stock.Equity._get_info', return_value=equity_info)
    instrument = Equity('MSFT')
    assert instrument.tradeable == equity_info['tradeable']
def test_tradeable():
    """is_tradeable() reports the fixture's 'tradeable' flag."""
    result = equity.is_tradeable()
    assert result == equity_info['tradeable']
def test_tradeable_returns_boolean():
    """is_tradeable() yields a real bool, not a truthy surrogate."""
    result = equity.is_tradeable()
    assert isinstance(result, bool)
3b35bf097dce90dcfad194835db3744ff6739f05 | 326 | py | Python | mindefuse/strategy/swaszek/agent/agent.py | sinistro14/mindefuse | c7371a81731d0b9a03d3ef18f91c336e4135c17d | [
"MIT"
] | null | null | null | mindefuse/strategy/swaszek/agent/agent.py | sinistro14/mindefuse | c7371a81731d0b9a03d3ef18f91c336e4135c17d | [
"MIT"
] | 1 | 2019-08-22T19:51:12.000Z | 2019-08-22T19:51:12.000Z | mindefuse/strategy/swaszek/agent/agent.py | sinistro14/mindefuse | c7371a81731d0b9a03d3ef18f91c336e4135c17d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.7
from abc import ABC, abstractmethod
class Agent(ABC):
    """Abstract base for agents that pick one solution from a candidate pool."""

    @abstractmethod
    def agent_choice(self, possibilities):
        """
        Returns the choice of the specific agent.

        :param possibilities: list of all possible solutions of which the
            agent will pick one
        """
| 21.733333 | 93 | 0.656442 | 41 | 326 | 5.195122 | 0.707317 | 0.159624 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008439 | 0.273006 | 326 | 14 | 94 | 23.285714 | 0.890295 | 0.460123 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.2 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
3b36c4ed7d21b51f5c92a0c2c42d4ae3822cf137 | 4,497 | py | Python | Python/klampt/robotcspace.py | bbgw/Klampt | 3c022da372c81646ec9f7492fad499740431d38b | [
"BSD-3-Clause"
] | null | null | null | Python/klampt/robotcspace.py | bbgw/Klampt | 3c022da372c81646ec9f7492fad499740431d38b | [
"BSD-3-Clause"
] | null | null | null | Python/klampt/robotcspace.py | bbgw/Klampt | 3c022da372c81646ec9f7492fad499740431d38b | [
"BSD-3-Clause"
] | null | null | null | import cspace
import robotsim
import robotcollide
from cspaceutils import AdaptiveCSpace
class RobotCSpace(AdaptiveCSpace):
    """A basic robot cspace that allows collision free motion.

    Warning: if your robot has non-standard joints, like a free-
    floating base or continuously rotating (spin) joints, you will need to
    overload the sample() method."""
    def __init__(self,robot,collider=None):
        # Initialize the adaptive feasibility machinery before registering tests.
        AdaptiveCSpace.__init__(self)
        self.robot = robot
        # One (min, max) pair per DOF, from the robot's joint limits.
        self.bound = zip(*robot.getJointLimits())
        # Optional collision checker; collision tests are skipped when None.
        self.collider = collider
        #set this to false to turn off the adaptive tester, which may
        #have some overhead
        self.adaptive = True
        #adaptive tests
        self.addFeasibleTest(lambda(x): self.inJointLimits(x),"joint limits")
        #TODO explode these into individual self collision / env collision
        #tests
        # NOTE(review): the collision lambdas ignore x — they assume the
        # configuration was already applied via robot.setConfig() in feasible().
        self.addFeasibleTest(lambda(x): not self.selfCollision(),"self collision")
        self.addFeasibleTest(lambda(x): not self.envCollision(),"env collision")
    def sample(self):
        """Overload this to implement custom sampling strategies or to handle
        non-standard joints"""
        return AdaptiveCSpace.sample(self)
    def feasible(self,x):
        """Feasibility test. If self.adaptive=True, uses the adaptive
        feasibility tester which may speed up collision testing."""
        if self.adaptive:
            #Use the adaptive tester
            # Apply the configuration first; the registered collision
            # lambdas read the robot's current state.
            self.robot.setConfig(x)
            return AdaptiveCSpace.feasible(self,x)
        #Use the regular tester
        if not self.inJointLimits(x): return False
        #check collisions
        if self.collider:
            self.robot.setConfig(x)
            if self.selfCollision(): return False
            if self.envCollision(): return False
        return True
    def inJointLimits(self,x):
        """Checks joint limits of the configuration x"""
        for (xi,bi) in zip(x,self.bound):
            if xi < bi[0] or xi > bi[1]:
                return False
        return True
    def selfCollision(self):
        """Checks whether the robot at its current configuration is in
        self collision"""
        #This should be faster than going through the collider...
        return self.robot.selfCollides()
        #if not self.collider: return False
        #return any(self.collider.robotSelfCollisions(self.robot.index))
    def envCollision(self):
        """Checks whether the robot at its current configuration is in
        collision with the environment (rigid objects and terrains)."""
        if not self.collider: return False
        for o in xrange(self.collider.world.numRigidObjects()):
            if any(self.collider.robotObjectCollisions(self.robot.index,o)):
                return True;
        for o in xrange(self.collider.world.numTerrains()):
            if any(self.collider.robotTerrainCollisions(self.robot.index,o)):
                return True;
        return False
class ClosedLoopRobotCSpace(RobotCSpace):
"""A closed loop cspace. Allows one or more IK constraints to be
maintained during the robot's motion."""
def __init__(self,robot,iks,collider=None):
RobotCSpace.__init__self(robot,collider)
self.solver = robotsim.IKSolver(robot)
if hasattr(iks,'__iter__'):
for ik in iks:
self.solver.add(ik)
else:
self.solver.add(ik)
#IK solve iterations
self.maxIters = 100
self.tol = 1e-3
#adaptive checker
self.addFeasibleTest(lambda(x): self.closedLoop())
def sample(self):
"""Samples directly on the contact manifold"""
self.solver.sampleInitial()
(res,iters) = self.solver.solve(self.maxIters,self.tol)
return self.robot.getConfig()
def feasible(self,x):
if self.adaptive:
#Use the adaptive tester
self.robot.setConfig(x)
return AdaptiveCSpace.feasible(self,x)
if not self.inJointLimits(x): return False
self.robot.setConfig(x)
if not self.closedLoop(): return False;
if self.selfCollision(): return False
if self.envCollision(): return False
return True
def closedLoop(self,tol=None):
"""Returns true if the closed loop constraint has been met at the
robot's current configuration."""
e = self.solver.getError()
if tol==None: tol = self.tol
return max(abs(ei) for ei in e) <= tol
| 35.976 | 82 | 0.632866 | 537 | 4,497 | 5.26257 | 0.318436 | 0.041401 | 0.015924 | 0.036801 | 0.302902 | 0.255485 | 0.194621 | 0.150035 | 0.150035 | 0.150035 | 0 | 0.002155 | 0.277741 | 4,497 | 124 | 83 | 36.266129 | 0.867919 | 0.096731 | 0 | 0.375 | 0 | 0 | 0.01528 | 0 | 0 | 0 | 0 | 0.008065 | 0 | 0 | null | null | 0 | 0.055556 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b3d82864cc0776c4af3325f627956c088809c05 | 673 | py | Python | footy/test/clubs/test_club_gateway.py | bryce-klinker/hello-python | c62ac61f40c1d9fcb77dbde49161da399787d96d | [
"MIT"
] | null | null | null | footy/test/clubs/test_club_gateway.py | bryce-klinker/hello-python | c62ac61f40c1d9fcb77dbde49161da399787d96d | [
"MIT"
] | null | null | null | footy/test/clubs/test_club_gateway.py | bryce-klinker/hello-python | c62ac61f40c1d9fcb77dbde49161da399787d96d | [
"MIT"
] | null | null | null | import unittest
from nose.tools import *
from footy.test_data.test_data_paths import premier_league_2015_2016_path
from footy.src.clubs.club_gateway import ClubGateway
class ClubGatewayTest(unittest.TestCase):
    """Tests for ClubGateway against the 2015/2016 Premier League data."""

    def setUp(self):
        self.gateway = ClubGateway(premier_league_2015_2016_path)

    def test_get_all_clubs(self):
        clubs = self.gateway.get_all()
        # A Premier League season has 20 clubs.
        # Fixed deprecated assertEquals alias -> assertEqual.
        self.assertEqual(20, len(clubs))

    def test_get_all_clubs_includes_club_name(self):
        clubs = self.gateway.get_all()
        self.assertEqual("Arsenal", clubs[0].name)
        self.assertEqual("Aston Villa", clubs[1].name)
        self.assertEqual("Bournemouth", clubs[2].name)
| 32.047619 | 73 | 0.732541 | 91 | 673 | 5.164835 | 0.428571 | 0.051064 | 0.07234 | 0.089362 | 0.361702 | 0.178723 | 0.178723 | 0.178723 | 0 | 0 | 0 | 0.0375 | 0.167905 | 673 | 20 | 74 | 33.65 | 0.801786 | 0 | 0 | 0.133333 | 0 | 0 | 0.043091 | 0 | 0 | 0 | 0 | 0 | 0.266667 | 1 | 0.2 | false | 0 | 0.266667 | 0 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3b466b48be2d2a162d6c9c6df33ff4e475906464 | 735 | py | Python | k-way_merge/k_smallest_number.py | mridulpant2010/algorithms | 1234b8a0232d6fce647e868e1057b6ec87e0b3bb | [
"Unlicense"
] | null | null | null | k-way_merge/k_smallest_number.py | mridulpant2010/algorithms | 1234b8a0232d6fce647e868e1057b6ec87e0b3bb | [
"Unlicense"
] | null | null | null | k-way_merge/k_smallest_number.py | mridulpant2010/algorithms | 1234b8a0232d6fce647e868e1057b6ec87e0b3bb | [
"Unlicense"
] | null | null | null | import heapq
def find_k_closest_numbers(lis, k, n):
    """Return the k-th smallest value across the sorted rows of ``lis``.

    Classic k-way merge: seed a min-heap with the head of every row, then
    repeatedly pop the smallest value and push its successor from the same
    row, stopping after k pops.

    :param lis: list of sorted lists (rows may now be ragged or empty)
    :param k: 1-based rank of the element to return
    :param n: maximum number of elements consumed from each row
    :return: the k-th smallest value, or the last value popped if fewer
             than k elements are available
    """
    he = []
    # Seed with (value, (row, col)); the position tuple breaks ties
    # deterministically. Empty rows are skipped (previously IndexError).
    for i, row in enumerate(lis):
        if row:
            heapq.heappush(he, (row[0], (i, 0)))
    numberCount = 0
    top = 0
    while he:
        top, (x, y) = heapq.heappop(he)
        numberCount += 1
        if numberCount == k:
            break
        # Advance within row x, bounded by both the caller's limit n and the
        # actual row length (guards against ragged input, which previously
        # raised IndexError).
        if y + 1 < n and y + 1 < len(lis[x]):
            heapq.heappush(he, (lis[x][y + 1], (x, y + 1)))
    return top
if __name__ == '__main__':
    # Demo: k-th smallest element of two 3x3 sets of sorted rows.
    sample_a = [[2, 6, 8], [3, 6, 7], [1, 3, 4]]
    rows = len(sample_a)
    answer_a = find_k_closest_numbers(sample_a, 5, rows)
    sample_b = [[2, 6, 8], [3, 7, 10], [5, 8, 11]]
    answer_b = find_k_closest_numbers(sample_b, 5, rows)
    print(answer_a, answer_b)
3b49afd725f055d1a828c47230e22c3488471dc9 | 6,617 | py | Python | abintb/spin.py | abyellow/abin-tight-binding | 538aef632937b1840d5ffd184f162858637b01f5 | [
"MIT"
] | 1 | 2018-02-22T19:13:24.000Z | 2018-02-22T19:13:24.000Z | abintb/spin.py | abyellow/abin-tight-binding | 538aef632937b1840d5ffd184f162858637b01f5 | [
"MIT"
] | null | null | null | abintb/spin.py | abyellow/abin-tight-binding | 538aef632937b1840d5ffd184f162858637b01f5 | [
"MIT"
] | 1 | 2017-09-14T17:25:09.000Z | 2017-09-14T17:25:09.000Z | import numpy as np
from time import time
#from itertools import product
import sys
import matplotlib.pyplot as plt
#from matplotlib.colors import LogNorm
from sshPES import PES
from sshIniData import SSHIniData
from sshHf import SSHHf
#from mpl_toolkits.mplot3d import Axes3D
class pseudoSpin(PES):
    """Pseudo-spin analysis on top of photoemission (PES) maps.

    NOTE(review): relies on the PES base class providing self.w_num (number
    of energy/frequency rows of a PES map) and a plot() method — confirm in
    sshPES."""
    def integral_PES(self,PESx,PESy,lb,ub,rang,form,line = True):
        """Integrate two PES maps column-wise over an energy window.

        form='box' integrates the fixed row window [pu, pl) derived from
        (lb, ub); form='wave' integrates a window of half-width `rang`
        centred on each column's |PESy| maximum. When `line` is True the
        window borders are marked in the returned maps (pixels set to 100).
        Returns (intPESx, intPESy, PESx, PESy); the integrated arrays repeat
        their first entry at the end to close the loop."""
        w_num = self.w_num
        PESy = np.array(PESy)
        PESx = np.array(PESx)
        # Convert energy bounds (in units of pi) into row indices.
        pl = int((np.pi-lb)/(2*np.pi) * w_num)
        pu = int((np.pi-ub)/(2*np.pi) * w_num)
        pr = int((rang)/(2*np.pi) * w_num)+1
        absPESy = abs(PESy)
        intPESy = []
        intPESx = []
        if form == 'box':
            for i in range(len(PESy[0,:])):
                intPESy.append(np.sum(PESy[pu:pl,i],axis=0))
                intPESx.append(np.sum(PESx[pu:pl,i],axis=0))
                if line:
                    # Mark the integration window borders for visualization.
                    PESx[pu,i] = 100
                    PESy[pl,i] = 100
                    PESx[pl,i] = 100
                    PESy[pu,i] = 100
        elif form == 'wave':
            for i in range(len(PESy[0,:])):
                # Center the window on the strongest |PESy| row in [pu, pl).
                ind = pu + np.argmax(absPESy[pu:pl,i])
                intPESy.append(np.sum(PESy[ind-pr:ind+pr,i],axis=0))
                intPESx.append(np.sum(PESx[ind-pr:ind+pr,i],axis=0))
                if line:
                    PESx[ind-pr,i] = 100
                    PESy[ind-pr,i] = 100
                    PESx[ind+pr,i] = 100
                    PESy[ind+pr,i] = 100
        # Close the curve by repeating the first sample.
        intPESx.append(intPESx[0])
        intPESy.append(intPESy[0])
        return np.array(intPESx), np.array(intPESy), PESx, PESy
    def phase(self,x,y):
        """Return two winding-phase estimates of the (x, y) pseudo-spin curve.

        phb is the net polar-angle difference between last and first sample;
        pha is a discrete line integral of the tangent field along the
        normalized curve."""
        phi = np.sign(y)*np.arccos(x/np.sqrt(x**2+y**2))
        phb = phi[-1]-phi[0]
        x,y = self.norm_spin(x,y,factor=False)
        # Segment normals (ox, oy) dotted with segment midpoints (drx, dry).
        ox = y[1:]-y[:-1]
        oy = -(x[1:]-x[:-1])
        drx = (x[1:]+x[:-1])/2.
        dry = (y[1:]+y[:-1])/2.
        pha = np.sum(ox*drx + oy*dry)
        return pha, phb
    def norm_spin(self,x,y,factor=True):
        """Normalize (x, y) to unit length; with factor=True also scale the
        radius linearly from 1 to 2 along the curve (for plotting)."""
        fac = 1
        if factor:
            fac = np.linspace(1,2,len(x))
        r = np.sqrt(x**2+y**2)
        x, y = fac*x/r, fac*y/r
        #x=np.insert(x,0,1)
        #y=np.insert(y,0,0)
        #x=np.insert(x,len(x),2)
        #y=np.insert(y,len(y),0)
        return x, y
    def plot_spin(self,x,y, norm = True):
        """Quiver plot of the pseudo-spin curve in the (Px, Py) plane."""
        if norm:
            x, y = self.norm_spin(x,y)
        plt.figure()
        plt.quiver(x[:-1], y[:-1], x[1:]-x[:-1], y[1:]-y[:-1], scale_units='xy', angles='xy', scale=1)
        #plt.plot(x, y, 'o-',linewidth=1.5)
        plt.plot(0,0,'x',markersize = 20)
        plt.plot(np.linspace(-3,3,len(x)),np.zeros(len(x)),'g--')
        plt.xlabel('Px')
        plt.ylabel('Py')
        plt.xlim([-2.1,2.1])
        plt.ylim([-2.1,2.1])
        #plt.title('tp = %.1f, E0 = %.1f, Freq = %.2f, dt = %.1f' %(tp0,E0,freq,deltau))
        #plt.savefig('figure/sshspin_spin.png')
    def phase_PES(self,PESx,PESy):
        """Combine the two PES maps into a phase map weighted by intensity.

        The angle of (PESx, PESy) is shifted into [-pi, pi) and scaled by the
        local intensity; result is normalized by 4*pi."""
        PESint = np.sqrt(PESx**2 + PESy**2)
        PESphi = np.pi*(1-np.sign(PESy))/2 + np.arccos(PESx/PESint) - np.pi
        print np.amax(PESphi)
        return PESint*PESphi/(4*np.pi)
if __name__ == "__main__":
    # --- default simulation parameters (CLI-overridable below) -----------
    tp0 = 0.
    E0 = 1.
    freq = 3.
    tau = 1.
    deltau = .5
    paui = 'x'
    pauj = 'y'
    k_num = 12*6
    width = 50
    h_choose = 0
    band = 'mix'
    option = 0
    int_form = 'box'
    int_lb = -1.6#0#-3.#2.0
    int_ub = -.8#.8#0#-2.#3.0
    int_rang = .25
    int_line = False

    # --- command-line overrides ------------------------------------------
    import argparse
    pa = argparse.ArgumentParser()
    pa.add_argument('--f', type = float)
    pa.add_argument('--E', type = float)
    pa.add_argument('--dt', type = float)
    pa.add_argument('--tp', type = int)
    pa.add_argument('--model', type = int)
    pa.add_argument('--k', type = int)
    pa.add_argument('--opt', type = int)
    pa.add_argument('--wid', type = int)
    pa.add_argument('--lb', type = float)
    pa.add_argument('--ub', type = float)
    args = pa.parse_args()
    # NOTE(review): truthiness checks mean zero values cannot be set via CLI.
    if args.f:
        freq = args.f
    if args.E:
        E0 = args.E
    if args.dt:
        deltau = args.dt
    if args.tp:
        tp0 = args.tp
    if args.model:
        h_choose = args.model
    if args.k:
        k_num = args.k
    if args.opt:
        option = args.opt
    if args.wid:
        width = args.wid
    if args.lb:
        int_lb = args.lb
    if args.ub:
        int_ub = args.ub

    cond = 'Conditions: model = %d, deltau = %.2f, freq = %.2f, E0 = %.2f, tp0 = %d, knum = %d, p_width = %d'\
    %(h_choose, deltau, freq, E0, tp0, k_num,width)
    print cond

    # --- time grid and Gaussian-enveloped sinusoidal drive ----------------
    t_s = -200
    dt = .1
    std1 = 15
    t_in = t_s-std1*3
    #print 'width of pulse: ',width #/ np.sqrt(2)
    n_tot = int(-2*t_s/dt) + int(std1*6/dt)
    t_rel = (np.array(range(n_tot-1)))*dt + t_in
    ctrl = np.exp(-.5*(t_rel/width)**2) * E0 * np.sin(freq*t_rel)
    m_max = 10
    hf = SSHHf(deltau = (-1)**(h_choose) * deltau, m_max = m_max, freq = freq, E0 = E0, phase = 2, knum=k_num)

    def plot_hf():
        # Overlay the Floquet spectrum on the current axes.
        eps = hf.eps
        spec = hf.spec()
        nk = k_num
        for i in range(np.shape(spec)[1]):
            plt.plot(eps/2.,spec[:,i],'k--',linewidth=2.)
        plt.xlim([-3.15/2.,3.15/2.])
        plt.ylim([-3.15,3.15])

    timea = time()
    init = SSHIniData(tau, deltau, ctrl, knum=k_num, dt=dt, ham_choose = h_choose, iniband= band)
    cvec1 = init.clc_cvec()
    PES_spin = pseudoSpin(init, cvec1, tin = t_in, E0=E0, freq=freq, std = std1, width = 1)

    # Load cached PES maps if available, otherwise compute and cache nothing.
    try:
        load_i = 'data/PES2ssh_ham_%d_dt_%.2f_ktimes_%d_tp_%.1f_E0_%.1f_freq_%.2f_deltau_%.1f_paui_%s_std_%.1f_band_%s.txt'\
        %(h_choose, dt,k_num,tp0,E0,freq,deltau,paui,std1,band)
        PESloadi = np.loadtxt(load_i)[::-1]
        print "file1 exist, loading====>"
    except IOError:
        print load_i
        print "no such file, calculating====>"
        PESloadi = PES_spin.final_run(tp = tp0, pau = paui)[::-1]
    try:
        load_j = 'data/PES2ssh_ham_%d_dt_%.2f_ktimes_%d_tp_%.1f_E0_%.1f_freq_%.2f_deltau_%.1f_paui_%s_std_%.1f_band_%s.txt'\
        %(h_choose, dt,k_num,tp0,E0,freq,deltau,pauj,std1,band)
        PESloadj = np.loadtxt(load_j)[::-1]
        print "file2 exist, loading====>"
    except IOError:
        print load_j
        print "no such file, calculating====>"
        PESloadj = PES_spin.final_run(tp = tp0, pau = pauj)[::-1]
    #
    timea = time()
    PES2Dx = PESloadi
    PES2Dy = PESloadj
    print 'total time:', time()-timea

    # Option 1: integrate the maps, plot spin texture, compute winding phase.
    if option == 1:
        x, y, PES2Dx, PES2Dy = PES_spin.integral_PES(PES2Dx,PES2Dy,int_lb,int_ub,int_rang,form=int_form,line = True)
        PES_spin.plot_spin(x,y)
        plt.title(cond)
        plt.show()
        x1,y1 = PES_spin.norm_spin(x,y,factor=False)
        savename = 'data/spin_ham_%d_dt_%.2f_ktimes_%d_tp_%.1f_E0_%.1f_freq_%.2f_deltau_%.1f_std_%.1f_band_%s_lb_%.1f_ub_%.1f.txt'\
        %(h_choose, dt,k_num,tp0,E0,freq,deltau,std1,band,int_lb,int_ub)
        np.savetxt(savename,zip(x1,y1))
        pha, phb = PES_spin.phase(x,y)
        # Winding numbers modulo 2 (in units of 2*pi).
        ra = np.round(pha/(2*np.pi),1) %2
        rb = np.round(phb/(2*np.pi),1) %2
        print 'phase number: ',pha, phb, ra, rb
        fig = plt.figure()
        ax1 = fig.add_subplot(121)
        PES_spin.plot(PES2Dx,ax1)
        plot_hf()
        ax2 = fig.add_subplot(122)
        PES_spin.plot(PES2Dy,ax2)
        plot_hf()
        fig.suptitle(cond)
        plt.tight_layout()
        plt.savefig('figure/sshspin_pxpy.png')
        plt.show()

    # Option 2: intensity-weighted phase map.
    if option == 2:
        PESphase = PES_spin.phase_PES(PES2Dx,PES2Dy)
        fig = plt.figure()
        ax = fig.add_subplot(111)
        PES_spin.plot(PESphase,ax,color='hsv')
        plot_hf()
        plt.tight_layout()
        plt.savefig('figure/sshspin_phase.png')
        plt.show()
| 25.35249 | 125 | 0.626417 | 1,230 | 6,617 | 3.226829 | 0.196748 | 0.007055 | 0.032754 | 0.015117 | 0.272865 | 0.185437 | 0.146133 | 0.091711 | 0.077601 | 0.077601 | 0 | 0.047029 | 0.170923 | 6,617 | 260 | 126 | 25.45 | 0.676449 | 0.059997 | 0 | 0.108374 | 0 | 0.019704 | 0.109372 | 0.058719 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.039409 | null | null | 0.049261 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b588b6ce0959e11ca51f7007731ed436c1a00ca | 1,923 | py | Python | tests/test_types.py | juniorcarvalho/python-btr | 3c63fe939882d719a4ebbc07685c87429782c247 | [
"MIT"
] | null | null | null | tests/test_types.py | juniorcarvalho/python-btr | 3c63fe939882d719a4ebbc07685c87429782c247 | [
"MIT"
] | null | null | null | tests/test_types.py | juniorcarvalho/python-btr | 3c63fe939882d719a4ebbc07685c87429782c247 | [
"MIT"
] | 1 | 2018-12-03T00:32:55.000Z | 2018-12-03T00:32:55.000Z | from decimal import InvalidOperation
import pytest
from binance.types import Results, Trading, format_fee
def test_results_attributes():
    """Results exposes every field and rejects malformed assignments."""
    result = Results(
        datetime_value='2018-11-13 01:58:03',
        pair='STORMBTC',
        type_operation='SELL',
        order_price='0.0',
        order_amount='2200.0',
        avg_trading_price='0.00000126',
        filled='2200.0',
        total='0.002772',
        status='Filled'
    )
    assert result.datetime_value
    with pytest.raises(ValueError):
        result.datetime_value = 'a'
    for attr in ('pair', 'type_operation', 'order_price', 'order_amount',
                 'avg_trading_price', 'filled', 'total', 'status'):
        assert getattr(result, attr)
    for attr in ('order_price', 'order_amount', 'avg_trading_price',
                 'filled', 'total'):
        with pytest.raises(InvalidOperation):
            setattr(result, attr, 'a')
def test_trading_attributes():
    """Trading exposes every field and rejects malformed assignments."""
    trade = Trading(
        datetime_value='2018-11-13 01:58:03',
        filled='0.00000126',
        total='2200',
        fee='0.00277200',
        fee_coin='0.00141268BNB'
    )
    for attr in ('datetime_value', 'filled', 'total', 'fee', 'fee_coin'):
        assert getattr(trade, attr)
    with pytest.raises(ValueError):
        trade.datetime_value = 'a'
    for attr in ('filled', 'total', 'fee'):
        with pytest.raises(InvalidOperation):
            setattr(trade, attr, 'a')
def test_format_fee():
    """format_fee splits '<amount><coin>' strings into their two parts."""
    amount, coin = format_fee('0.01BTC')
    assert amount == '0.01'
    assert coin == 'BTC'
    amount, coin = format_fee('BTC')
    assert amount is None
    assert coin == 'BTC'
    amount, coin = format_fee('0.01')
    assert amount == '0.01'
    assert coin is None
| 23.740741 | 54 | 0.620905 | 249 | 1,923 | 4.666667 | 0.208835 | 0.096386 | 0.137694 | 0.22031 | 0.525818 | 0.51463 | 0.394148 | 0.319277 | 0.141136 | 0.141136 | 0 | 0.070014 | 0.264691 | 1,923 | 80 | 55 | 24.0375 | 0.751768 | 0 | 0 | 0.41791 | 0 | 0 | 0.085283 | 0 | 0 | 0 | 0 | 0 | 0.298507 | 1 | 0.044776 | false | 0 | 0.044776 | 0 | 0.089552 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b69aec95b3206db693240793ada22d1a026d313 | 1,417 | py | Python | tests/port_tests/polygon_tests/test_equals.py | skrat/martinez | 86db48324cb50ecb52be8ab2e4278a6d5cdd562b | [
"MIT"
] | 7 | 2020-05-07T08:13:44.000Z | 2021-12-17T07:33:51.000Z | tests/port_tests/polygon_tests/test_equals.py | skrat/martinez | 86db48324cb50ecb52be8ab2e4278a6d5cdd562b | [
"MIT"
] | 17 | 2019-11-29T23:17:26.000Z | 2020-12-20T15:47:17.000Z | tests/port_tests/polygon_tests/test_equals.py | skrat/martinez | 86db48324cb50ecb52be8ab2e4278a6d5cdd562b | [
"MIT"
] | 1 | 2020-12-17T22:44:21.000Z | 2020-12-17T22:44:21.000Z | from typing import Tuple
from hypothesis import given
from tests.port_tests.hints import PortedPolygon
from tests.utils import (equivalence,
implication)
from . import strategies
@given(strategies.polygons)
def test_reflexivity(polygon: PortedPolygon) -> None:
    """A polygon always equals itself."""
    assert polygon == polygon
@given(strategies.polygons_pairs)
def test_symmetry(polygons_pair: Tuple[PortedPolygon, PortedPolygon]) -> None:
    """Equality is symmetric."""
    left, right = polygons_pair
    assert equivalence(left == right, right == left)
@given(strategies.polygons_triplets)
def test_transitivity(polygons_triplet: Tuple[PortedPolygon, PortedPolygon,
                                              PortedPolygon]) -> None:
    """Equality is transitive."""
    left, middle, right = polygons_triplet
    assert implication(left == middle and middle == right,
                       left == right)
@given(strategies.polygons_pairs)
def test_connection_with_inequality(polygons_pair: Tuple[PortedPolygon,
                                                         PortedPolygon]
                                    ) -> None:
    """'!=' is exactly the negation of '=='."""
    left, right = polygons_pair
    assert equivalence(not left == right, left != right)
| 33.738095 | 78 | 0.661962 | 134 | 1,417 | 6.723881 | 0.261194 | 0.119867 | 0.17758 | 0.194229 | 0.449501 | 0.378468 | 0.378468 | 0.224195 | 0.224195 | 0.224195 | 0 | 0 | 0.275229 | 1,417 | 41 | 79 | 34.560976 | 0.877313 | 0 | 0 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 1 | 0.142857 | false | 0 | 0.178571 | 0 | 0.321429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b7429619e17d7df3d00811edac6228a84a03e82 | 12,649 | py | Python | rsvis/tools/canvas/imgcv.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | null | null | null | rsvis/tools/canvas/imgcv.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | 4 | 2020-04-28T11:53:24.000Z | 2022-03-12T00:15:30.000Z | rsvis/tools/canvas/imgcv.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | 2 | 2020-07-01T15:35:29.000Z | 2021-03-11T17:53:23.000Z | # ===========================================================================
# imgcv.py ----------------------------------------------------------------
# ===========================================================================
# import ------------------------------------------------------------------
# ---------------------------------------------------------------------------
import rsvis.utils.imgtools as imgtools
import rsvis.utils.logger
import logging
import numpy as np
from PIL import Image, ImageTk
from tkinter import Canvas, NW
# class -------------------------------------------------------------------
# ---------------------------------------------------------------------------
class ImgCanvas(Canvas):
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def __init__(
self,
parent,
shift=[4,4],
sensitivity = 4,
logger=None,
**kwargs
):
super(ImgCanvas, self).__init__(parent)
self.bind("<Configure>", self.resize_image)
self._mask = [None]
self._mask_alpha = [150]
self._mask_color = [[0,0,0]]
self._mask_invert = [False]
self._shift = shift
self._scale = [1.0, 1.0]
self.set_size([self.winfo_reqwidth(), self.winfo_reqheight()])
self._parent = parent
self._logger = rsvis.utils.logger.Logger(logger=logger)
# key bindings ----------------------------------------------------
self._mouse_sensitivity = 4
self._mouse_box = [0, 0, 0, 0]
self._mouse_point = [0, 0]
self._mouse_event = [0, 0]
self._mouse_img = [0, 0]
self._keys = dict()
self.bind("<Button-1>", self.mouse_button_1_pressed)
self.bind("<ButtonRelease-1>", self.mouse_button_1_released)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def clear(self, **kwargs):
        """Hook for clearing canvas state; the base canvas keeps none."""
        pass
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def get_keys(self, **kwargs):
        """Return the mapping of registered key bindings (empty by default)."""
        return self._keys
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def get_logger(self):
        """Return the logger wrapper attached to this canvas."""
        return self._logger
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def resize_image(self, event):
        """<Configure> handler: rescale canvas content to the new widget size.

        NOTE(review): assumes 'event' carries the new pixel width/height.
        """
        # determine the ratio of old width/height to new width/height
        event_size = [event.width, event.height]
        self._scale = [float(e)/s for e, s in zip(event_size, self._size)]
        self.set_size(event_size)
        # resize the canvas
        self.config(width=self._size[0], height=self._size[1])
        # rescale all the objects tagged with the "all" tag
        self.scale("all", 0, 0, self._scale[0], self._scale[1])
        self.create_image()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def resize_boxes(self, boxes, inversion=False):
scale = [float(s)/i for s, i in zip(self.get_size(), self._img_size)]
if inversion:
scale = [1/s for s in scale]
boxes = boxes if isinstance(boxes[0], list) and len(boxes[0]) !=2 else [boxes]
return [self.resize_bbox(box, scale) for box in boxes]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def resize_bbox(self, box, scale):
if len(box)==4:
return [
int(box[0]*scale[1]), int(box[1]*scale[1]),
int(box[2]*scale[0]), int(box[3]*scale[0])
]
else:
return [[int(n[0] *scale[0]), int(n[1]*scale[1])] for n in box ]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def resize_points(self, points, inversion=False):
scale = [float(s)/i for s, i in zip(self.get_size(), self._img_size)]
if inversion:
scale = [1/s for s in scale]
points = points if isinstance(points[0], list) else [points]
return [self.resize_point(point, scale) for point in points]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def resize_point(self, point, scale):
return [int(point[0]*scale[1]), int(point[1]*scale[0])]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def resize_event(self, event):
ev = [event.y, event.x]
ev[0] = ev[0] if ev[0] >= 0 else 0
ev[0] = ev[0] if ev[0] < self._img_draw.size[1] else self._img_draw.size[1]-1
ev[1] = ev[1] if ev[1] >= 0 else 0
ev[1] = ev[1] if ev[1] < self._img_draw.size[0] else self._img_draw.size[0]-1
return ev
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_event_box(self, event):
return [
min([self._mouse_point[0], self._mouse_event[0]]),
max([self._mouse_point[0], self._mouse_event[0]]),
min([self._mouse_point[1], self._mouse_event[1]]),
max([self._mouse_point[1], self._mouse_event[1]])
]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_img(self, img, clear_mask=True):
if not isinstance(img, np.ndarray):
return
self._img_size = [img.shape[1], img.shape[0]]
self._data_img = imgtools.expand_image_dim(img)
if not isinstance(img.dtype, np.uint8):
img = imgtools.project_and_stack(img, dtype=np.uint8, factor=255)
self._img = Image.fromarray(img)
if clear_mask:
self.set_mask(show=False)
self.create_image()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_mask(self, mask=None, show=True, alpha=150, color=[0,0,0], invert=False):
self._mask = mask if isinstance(mask, list) else [mask]
self._mask_alpha = alpha if isinstance(alpha, list) else [alpha]
self._mask_color = color if isinstance(color[0], list) else [color]
self._mask_invert= invert if isinstance(invert, list) else [invert]
if show:
self.create_image()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def get_mask(self, index=None, resize=False):
        """Return one overlay mask, or the logical AND of all masks.

        :param index: mask index; None combines all array masks via AND
        :param resize: resize the selected mask to the canvas size
            (only honored when 'index' is given)
        """
        if index is None:
            # Fold all array masks together: a pixel survives only where
            # every mask is set.
            # NOTE(review): assumes self._mask[0] is an ndarray here.
            mask = self._mask[0]
            for idx in range(1, len(self._mask)):
                if isinstance(self._mask[idx], np.ndarray):
                    mask = np.where(np.logical_and(mask, self._mask[idx]), 1, 0).astype(np.uint8)
            return mask
        else:
            if isinstance(self._mask[index], np.ndarray):
                return np.asarray(Image.fromarray(self._mask[index]).resize(self.get_size())) if resize else self._mask[index]
            else:
                return self._mask[index]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def is_mouse_event(self, bbox):
if not (bbox[1]-bbox[0] > self._mouse_sensitivity and bbox[3]-bbox[2] > self._mouse_sensitivity):
return False
return True
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_img(self, show=False):
if show:
return np.asarray(self._img).copy()
return self._data_img.copy()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_size(self, size):
self._size = [s - sh for s, sh in zip(size, self._shift)]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_size(self):
return [s + sh for s, sh in zip(self._size, self._shift)]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_shape(self):
size = self.get_size()
return (size[1], size[0], 3)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_intial_draw_image(self):
return np.zeros(self.get_shape(), dtype=np.int16) - 1
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def create_image(self, **kwargs):
        """Rebuild the composited canvas image and (re)create the Tk image item.

        Layers, bottom to top: the base image resized to the current canvas
        size, one semi-transparent colored layer per mask, then the user draw
        layer at fixed alpha 200.

        Side effects: replaces self._img_draw and self._img_canvas, and sets
        self._img_on_canvas to the new Tk canvas item id.
        """
        self._img_draw = self._img.resize(self.get_size())
        if isinstance(self._mask[0], np.ndarray):
            # NOTE(review): the `mask` unpacked from zip() is immediately
            # shadowed by get_mask(); only color/alpha/invert from the zip
            # are actually used.
            for idx, (mask, color, alpha, invert) in enumerate(zip(self._mask, self._mask_color, self._mask_alpha, self._mask_invert)):
                mask = self.get_mask(index=idx, resize=True)
                mask = mask if not invert else imgtools.invert_bool_img(mask)
                # Convert the boolean mask into a colored RGBA overlay.
                mask = Image.fromarray(
                    imgtools.get_transparent_image(
                        imgtools.bool_to_img(mask, value=-1, dtype=np.int16, color=color, factor=255),
                        value=alpha
                    )
                )
                # Paste with itself as the alpha mask so transparency is kept.
                self._img_draw.paste(mask, (0, 0), mask)
        image = Image.fromarray(
            imgtools.get_transparent_image(self.draw_image(), value=200))
        self._img_draw.paste(image, (0, 0), image)
        # Keep the PhotoImage referenced on self so Tk does not garbage-collect it.
        self._img_canvas = ImageTk.PhotoImage(image=self._img_draw)
        self._img_on_canvas = super(ImgCanvas, self).create_image(0, 0, image=self._img_canvas, anchor=NW)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def draw_image(self, **kwargs):
img_assembly = self.get_intial_draw_image()
return img_assembly
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def mouse_button_1_pressed(self, event):
        """Record the canvas-space coordinates where the left mouse button went down.

        Side effects: grabs keyboard focus, stores the resized event in
        self._mouse_event and its (x, y) in self._mouse_point.
        """
        self.focus_set()
        self._mouse_event = self.resize_event(event)
        self._mouse_point = [self._mouse_event[0], self._mouse_event[1]]
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
    def get_roi(self):
        """Return the current region of interest in original-image coordinates.

        Uses the last mouse-selected box when one exists (sum of its entries is
        non-zero); otherwise falls back to the full image extent.
        NOTE(review): result looks like [x, y, width, height] -- confirm
        against resize_boxes()' box-component ordering.
        """
        if sum(self._mouse_box):
            # Map the on-screen selection back to data-image resolution.
            roi_xy = self.resize_boxes(self._mouse_box, inversion=True)[0]
            roi = [roi_xy[2], roi_xy[0], roi_xy[3]-roi_xy[2], roi_xy[1]-roi_xy[0]]
        else:
            # Whole image; numpy shape is (rows, cols), hence the index swap.
            roi = [0, 0, self._data_img.shape[1]-1, self._data_img.shape[0]-1]
        return roi
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def mouse_button_1_released(self, event):
self.focus_set()
self._mouse_event = self.resize_event(event)
self._mouse_box = self.get_event_box(event)
self._mouse_img = self.resize_points(self._mouse_event, inversion=True)[0]
self._logger("[MOUSE] Pixel: {}, Value: {}".format(self._mouse_img,
self._data_img[self._mouse_img[0], self._mouse_img[1], :]
)
) | 43.920139 | 135 | 0.386908 | 1,164 | 12,649 | 3.99055 | 0.131443 | 0.058127 | 0.03014 | 0.012917 | 0.227987 | 0.152637 | 0.117115 | 0.09085 | 0.065016 | 0.065016 | 0 | 0.015315 | 0.215353 | 12,649 | 288 | 136 | 43.920139 | 0.452695 | 0.342161 | 0 | 0.122807 | 0 | 0 | 0.00853 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.146199 | false | 0.005848 | 0.035088 | 0.035088 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b7cf7fe88491f39d41118c5372b2efff2e8da58 | 12,343 | py | Python | doclabel/core/models.py | sondh0127/doclabel | 2cadea9fc925435aea49ac0b56c29474664ade4e | [
"MIT"
] | null | null | null | doclabel/core/models.py | sondh0127/doclabel | 2cadea9fc925435aea49ac0b56c29474664ade4e | [
"MIT"
] | null | null | null | doclabel/core/models.py | sondh0127/doclabel | 2cadea9fc925435aea49ac0b56c29474664ade4e | [
"MIT"
] | null | null | null | import string
import os
from django.conf import settings
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_delete, post_delete
from django.db import models
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import JSONField
from django.core.files.storage import FileSystemStorage
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ValidationError
from polymorphic.models import PolymorphicModel
from .managers import AnnotationManager, Seq2seqAnnotationManager
User = get_user_model()
# Project-type discriminators; each value is also the name of the matching
# polymorphic Project subclass defined below.
DOCUMENT_CLASSIFICATION = "TextClassificationProject"
SEQUENCE_LABELING = "SequenceLabelingProject"
SEQ2SEQ = "Seq2seqProject"
PDF_LABELING = "PdfLabelingProject"
# Choices for Project.project_type: (stored value, human-readable label).
PROJECT_CHOICES = (
    (DOCUMENT_CLASSIFICATION, "Document Classification"),
    (SEQUENCE_LABELING, "Sequence Labeling"),
    (SEQ2SEQ, "Sequence to Sequence"),
    (PDF_LABELING, "PDF Labeling Project"),
)
# Project
class Project(PolymorphicModel):
    """Base annotation project; type-specific behavior lives in the polymorphic subclasses."""
    name = models.CharField(max_length=100, unique=True)
    description = models.TextField(default="")
    guideline = models.TextField(default="")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    users = models.ManyToManyField(User, related_name="projects")
    project_type = models.CharField(max_length=30, choices=PROJECT_CHOICES, null=False)
    # How many annotators should label each example.
    annotator_per_example = models.IntegerField(default=3)
    randomize_document_order = models.BooleanField(default=False)
    # Allow see annotation from other user
    collaborative_annotation = models.BooleanField(default=False)
    public = models.BooleanField(default=False)
    def get_absolute_url(self):
        """URL of this project's upload page."""
        return reverse("upload", args=[self.id])
    @property
    def image(self):
        # Subclass hook: URL of the thumbnail image for this project type.
        raise NotImplementedError()
    def get_annotation_serializer(self):
        # Subclass hook: serializer class for this project's annotations.
        raise NotImplementedError()
    def get_annotation_class(self):
        # Subclass hook: model class storing this project's annotations.
        raise NotImplementedError()
    def get_storage(self, data):
        # Subclass hook: storage helper used when importing uploaded data.
        raise NotImplementedError()
    def __str__(self):
        return self.name
class TextClassificationProject(Project):
    """Project type for assigning labels to whole documents."""
    @property
    def image(self):
        return staticfiles_storage.url("images/cats/text_classification.jpg")
    def get_annotation_serializer(self):
        # Imported lazily to avoid a circular import with serializers.
        from doclabel.core.serializers import DocumentAnnotationSerializer
        return DocumentAnnotationSerializer
    def get_annotation_class(self):
        return DocumentAnnotation
    def get_storage(self, data):
        from .utils import ClassificationStorage
        return ClassificationStorage(data, self)
class SequenceLabelingProject(Project):
    """Project type for span/token-level labeling (e.g. NER)."""
    @property
    def image(self):
        return staticfiles_storage.url("images/cats/sequence_labeling.jpg")
    def get_annotation_serializer(self):
        # Lazy import to avoid a circular import with serializers.
        from .serializers import SequenceAnnotationSerializer
        return SequenceAnnotationSerializer
    def get_annotation_class(self):
        return SequenceAnnotation
    def get_storage(self, data):
        from .utils import SequenceLabelingStorage
        return SequenceLabelingStorage(data, self)
class Seq2seqProject(Project):
    """Project type for free-text target annotations (e.g. translation)."""
    @property
    def image(self):
        return staticfiles_storage.url("images/cats/seq2seq.jpg")
    def get_annotation_serializer(self):
        # Lazy import to avoid a circular import with serializers.
        from .serializers import Seq2seqAnnotationSerializer
        return Seq2seqAnnotationSerializer
    def get_annotation_class(self):
        return Seq2seqAnnotation
    def get_storage(self, data):
        from .utils import Seq2seqStorage
        return Seq2seqStorage(data, self)
class PdfLabelingProject(Project):
    """Project type for annotating regions of PDF documents."""
    @property
    def image(self):
        return staticfiles_storage.url("images/cats/pdf_labeling.jpg")
    def get_annotation_serializer(self):
        # Lazy import to avoid a circular import with serializers.
        from .serializers import PdfAnnotationSerializer
        return PdfAnnotationSerializer
    def get_annotation_class(self):
        return PdfAnnotation
    def get_storage(self, data):
        from .utils import PdfLabelingStorage
        return PdfLabelingStorage(data, self)
# Label
class Label(models.Model):
    """Annotation label within a project, with an optional keyboard shortcut (prefix + suffix key)."""
    PREFIX_KEYS = (("ctrl", "ctrl"), ("shift", "shift"), ("ctrl shift", "ctrl shift"))
    # One choice per lowercase ASCII letter.
    SUFFIX_KEYS = tuple((c, c) for c in string.ascii_lowercase)
    text = models.CharField(max_length=100)
    prefix_key = models.CharField(
        max_length=10, blank=True, null=True, choices=PREFIX_KEYS
    )
    suffix_key = models.CharField(
        max_length=1, blank=True, null=True, choices=SUFFIX_KEYS
    )
    project = models.ForeignKey(
        Project, related_name="labels", on_delete=models.CASCADE
    )
    background_color = models.CharField(max_length=7, default="#209cee")
    text_color = models.CharField(max_length=7, default="#ffffff")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.text
    def clean(self):
        """Validate shortcut consistency (runs when full_clean() is called)."""
        # Don't allow shortcut key not to have a suffix key.
        if self.prefix_key and not self.suffix_key:
            raise ValidationError("Shortcut key may not have a suffix key.")
        # each shortcut (prefix key + suffix key) can only be assigned to one label
        if self.suffix_key or self.prefix_key:
            other_labels = self.project.labels.exclude(id=self.id)
            if other_labels.filter(
                suffix_key=self.suffix_key, prefix_key=self.prefix_key
            ).exists():
                raise ValidationError(
                    "A label with this shortcut already exists in the project"
                )
        super().clean()
    class Meta:
        unique_together = (("project", "text"),)
# Dataset
class Document(models.Model):
    """A single annotatable example belonging to a project."""
    # text content or pdf content
    text = models.TextField()
    project = models.ForeignKey(
        Project, related_name="documents", on_delete=models.CASCADE
    )
    # JSON-encoded metadata stored as text.
    meta = models.TextField(default="{}")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # Who approved the annotations; SET_NULL keeps the document if the user is deleted.
    annotations_approved_by = models.ForeignKey(
        User, on_delete=models.SET_NULL, null=True
    )
    def __str__(self):
        # Truncate to keep admin listings readable.
        return self.text[:50]
# Annotation
class Annotation(models.Model):
    """Abstract base for all annotation types; concrete subclasses add document/label fields."""
    objects = AnnotationManager()
    # Model confidence when the annotation was auto-generated.
    prob = models.FloatField(default=0.0)
    # True when a human created/edited the annotation.
    manual = models.BooleanField(default=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # user confirm finish task set
    finished = models.BooleanField(default=False)
    class Meta:
        abstract = True
class DocumentAnnotation(Annotation):
    """Whole-document label for text classification projects."""
    document = models.ForeignKey(
        Document, related_name="doc_annotations", on_delete=models.CASCADE
    )
    label = models.ForeignKey(Label, on_delete=models.CASCADE)
    class Meta:
        # One label per user per document.
        unique_together = (("document", "user", "label"),)
class SequenceAnnotation(Annotation):
    """Character-offset span label for sequence labeling projects."""
    document = models.ForeignKey(
        Document, related_name="seq_annotations", on_delete=models.CASCADE
    )
    label = models.ForeignKey(Label, on_delete=models.CASCADE)
    start_offset = models.IntegerField()
    end_offset = models.IntegerField()
    tokens = JSONField()
    def clean(self):
        # Spans must be non-empty and ordered.
        if self.start_offset >= self.end_offset:
            raise ValidationError("start_offset is after end_offset")
    class Meta:
        unique_together = (("document", "user", "label", "start_offset", "end_offset"),)
class Seq2seqAnnotation(Annotation):
    """Free-text target annotation for seq2seq projects."""
    # Override AnnotationManager for custom functionality
    objects = Seq2seqAnnotationManager()
    document = models.ForeignKey(
        Document, related_name="seq2seq_annotations", on_delete=models.CASCADE
    )
    text = models.CharField(max_length=500)
    class Meta:
        unique_together = (("document", "user", "text"),)
class PdfAnnotation(Annotation):
    """Region annotation on a PDF document; content/position are arbitrary JSON payloads."""
    document = models.ForeignKey(
        Document, related_name="pdf_annotations", on_delete=models.CASCADE
    )
    label = models.ForeignKey(Label, on_delete=models.CASCADE)
    content = JSONField()
    position = JSONField()
    class Meta:
        unique_together = (("document", "user", "label", "content", "position"),)
class Role(models.Model):
    """Named role (e.g. project admin, annotator) assignable via RoleMapping."""
    name = models.CharField(max_length=100, unique=True)
    description = models.TextField(default="")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name
class RoleMapping(models.Model):
    """Assignment of one Role to one user within one project."""
    user = models.ForeignKey(
        User, related_name="role_mappings", on_delete=models.CASCADE
    )
    project = models.ForeignKey(
        Project, related_name="role_mappings", on_delete=models.CASCADE
    )
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def clean(self):
        # A user may hold only one role per project; exclude self so updates pass.
        other_rolemappings = self.project.role_mappings.exclude(id=self.id)
        if other_rolemappings.filter(user=self.user, project=self.project).exists():
            raise ValidationError(
                "This user is already assigned to a role in this project."
            )
    class Meta:
        unique_together = ("user", "project", "role")
@receiver(post_save, sender=RoleMapping)
def add_linked_project(sender, instance, created, **kwargs):
    """On creation of an annotator RoleMapping, add the user to the project's member list."""
    if not created:
        return
    mapped_user = instance.user
    mapped_project = instance.project
    is_annotator = instance.role.name == settings.ROLE_ANNOTATOR
    if mapped_user and mapped_project and is_annotator:
        # Re-fetch fresh rows by pk before linking them.
        user = User.objects.get(pk=mapped_user.pk)
        project = Project.objects.get(pk=mapped_project.pk)
        user.projects.add(project)
        user.save()
@receiver(post_save)
def add_superusers_to_project(sender, instance, created, **kwargs):
    """Grant every existing superuser the project-admin role on each newly created project."""
    if not created:
        return
    # Registered without a sender, so explicitly limit to Project subclasses.
    if sender not in Project.__subclasses__():
        return
    superusers = User.objects.filter(is_superuser=True)
    admin_role = Role.objects.filter(name=settings.ROLE_PROJECT_ADMIN).first()
    if superusers and admin_role:
        # bulk_create skips per-object save()/signals; acceptable for mappings.
        RoleMapping.objects.bulk_create(
            [
                RoleMapping(
                    role_id=admin_role.id, user_id=superuser.id, project_id=instance.id
                )
                for superuser in superusers
            ]
        )
@receiver(post_save, sender=User)
def add_new_superuser_to_projects(sender, instance, created, **kwargs):
    """Give a newly created superuser the project-admin role on all existing projects."""
    if created and instance.is_superuser:
        admin_role = Role.objects.filter(name=settings.ROLE_PROJECT_ADMIN).first()
        projects = Project.objects.all()
        if admin_role and projects:
            RoleMapping.objects.bulk_create(
                [
                    RoleMapping(
                        role_id=admin_role.id,
                        user_id=instance.id,
                        project_id=project.id,
                    )
                    for project in projects
                ]
            )
@receiver(pre_delete, sender=RoleMapping)
def delete_linked_project(sender, instance, using, **kwargs):
    """Mirror of add_linked_project: detach the user from the project when an annotator mapping is removed."""
    mapped_user = instance.user
    mapped_project = instance.project
    is_annotator = instance.role.name == settings.ROLE_ANNOTATOR
    if mapped_user and mapped_project and is_annotator:
        # Re-fetch fresh rows by pk before unlinking them.
        user = User.objects.get(pk=mapped_user.pk)
        project = Project.objects.get(pk=mapped_project.pk)
        user.projects.remove(project)
        user.save()
@receiver(post_delete, sender=Document)
def delete_file_on_remove(sender, instance, **kwargs):
    """Remove a deleted Document's backing file from the pdf_documents media directory, if present."""
    # For PDF documents, `text` holds the stored filename.
    storage = FileSystemStorage(location=settings.MEDIA_ROOT + "/pdf_documents/")
    filename = instance.text
    if storage.exists(filename):
        storage.delete(filename)
@receiver(post_delete, sender=PdfAnnotation)
def delete_anno_on_remove(sender, instance, **kwargs):
    """Delete the image file attached to a PdfAnnotation when the annotation row is removed."""
    content = instance.content
    # Only annotations whose JSON content references an image own a file.
    if "image" in content:
        doc = instance.document
        fs = FileSystemStorage(
            location=settings.MEDIA_ROOT + "/pdf_annotations/doc_" + str(doc.id) + "/"
        )
        if fs.exists(content["image"]):
            fs.delete(content["image"])
| 31.894057 | 88 | 0.694969 | 1,369 | 12,343 | 6.09569 | 0.17385 | 0.011504 | 0.023487 | 0.032714 | 0.47166 | 0.424206 | 0.36429 | 0.297903 | 0.270102 | 0.251168 | 0 | 0.004217 | 0.212347 | 12,343 | 386 | 89 | 31.976684 | 0.854145 | 0.024548 | 0 | 0.341463 | 0 | 0 | 0.067919 | 0.015629 | 0 | 0 | 0 | 0 | 0 | 1 | 0.118467 | false | 0 | 0.076655 | 0.045296 | 0.550523 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3b979d74f6e8f31c561e6e8ceb34cc6c5fafe859 | 3,197 | py | Python | config/HttpdParser.py | automicus/OpenAlarm | 68bcc4aa75338e8d5a2ff423029f7c4064d2b9c8 | [
"Apache-2.0"
] | 1 | 2016-03-30T05:41:19.000Z | 2016-03-30T05:41:19.000Z | config/HttpdParser.py | automicus/OpenAlarm | 68bcc4aa75338e8d5a2ff423029f7c4064d2b9c8 | [
"Apache-2.0"
] | null | null | null | config/HttpdParser.py | automicus/OpenAlarm | 68bcc4aa75338e8d5a2ff423029f7c4064d2b9c8 | [
"Apache-2.0"
] | null | null | null | from HTMLParser import HTMLParser
class HttpdParser(HTMLParser):
    """Parser for a minimal ``<httpd><ip>...</ip><port>...</port></httpd>`` config markup.

    Any tag, nesting, or stray data outside this exact structure raises
    FormatError carrying the parser position.
    NOTE(review): the module imports ``from HTMLParser import HTMLParser``,
    which is Python 2 only; Python 3 needs ``from html.parser import HTMLParser``.
    """
    def __init_var__(self):
        # create status variables
        # _inHttpd: partial result dict while inside <httpd>;
        # _inKey: sub-tag currently open ('ip' or 'port');
        # httpd: completed result set by httpd_end().
        self._inHttpd = None
        self._inKey = None
        self.httpd = None
    def read(self, fname):
        """Parse *fname* and return the extracted {'ip': ..., 'port': ...} dict."""
        self.__init_var__()
        cache = open(fname).read()
        self.feed(cache)
        out = self.httpd
        # Reset state so the parser instance can be reused.
        self.__init_var__()
        return out
    # Default Syntax Handlers
    def handle_starttag(self, tag, attrs):
        """Dispatch opening tags; anything unexpected is a format error."""
        #print 'START: ' + str(tag)
        if tag == 'httpd':
            self.httpd_start()
        elif tag == 'ip':
            self.ip_start()
        elif tag == 'port':
            self.port_start()
        else:
            raise FormatError(self.getpos())
    def handle_endtag(self, tag):
        """Dispatch closing tags; anything unexpected is a format error."""
        #print 'END: ' + str(tag)
        if tag == 'httpd':
            self.httpd_end()
        elif tag == 'ip':
            self.ip_end()
        elif tag == 'port':
            self.port_end()
        else:
            raise FormatError(self.getpos())
    def handle_data(self, data):
        """Store non-whitespace text into the currently open key."""
        data = data.strip()
        if len(data) > 0:
            #print 'DATA: ' + str(data)
            if self._inHttpd:
                self.httpd_data(data)
            else:
                # Text outside <httpd> is not allowed.
                raise FormatError(self.getpos())
    # HTTPD tag handlers
    def httpd_start(self):
        # Only one (non-nested) <httpd> element is permitted.
        if self._inHttpd is None:
            self._inHttpd = {'ip': None, 'port': None}
        else:
            raise FormatError(self.getpos())
    def httpd_end(self):
        # Valid only when both sub-keys were filled and no sub-tag is open.
        if self._inHttpd is not None and self._inKey is None:
            if self._inHttpd['ip'] is not None \
            and self._inHttpd['port'] is not None:
                self.httpd = self._inHttpd
                self._inHttpd = None
            else:
                raise FormatError(self.getpos())
        else:
            raise FormatError(self.getpos())
    def httpd_data(self, data):
        # Text must occur inside an open <ip> or <port> tag.
        if self._inKey is not None:
            self._inHttpd[self._inKey] = data
        else:
            raise FormatError(self.getpos())
    # IP tag handlers
    def ip_start(self):
        # <ip> must be directly inside <httpd>, not inside another sub-tag.
        if self._inHttpd is not None and self._inKey is None:
            self._inKey = 'ip'
        else:
            raise FormatError(self.getpos())
    def ip_end(self):
        # Closing </ip> requires a value to have been stored.
        if self._inKey == 'ip' and self._inHttpd['ip'] is not None:
            self._inKey = None
        else:
            raise FormatError(self.getpos())
    # PORT tag handlers
    def port_start(self):
        # <port> must be directly inside <httpd>, not inside another sub-tag.
        if self._inHttpd is not None and self._inKey is None:
            self._inKey = 'port'
        else:
            raise FormatError(self.getpos())
    def port_end(self):
        # Closing </port> requires a value to have been stored.
        if self._inKey == 'port' and self._inHttpd['port'] is not None:
            self._inKey = None
        else:
            raise FormatError(self.getpos())
class FormatError(Exception):
    """Signals malformed httpd config markup; carries the parser position."""
    def __init__(self, pos):
        super(FormatError, self).__init__()
        # pos is the (line, offset) tuple from HTMLParser.getpos().
        self.pos = pos
    def __str__(self):
        return "Formatting error at: {0}".format(self.pos)
def readHttpd(fname):
    """Parse the config file *fname* and return its httpd settings dict."""
    return HttpdParser().read(fname)
if __name__ == "__main__":
    # Use the print() call form: the original Python 2 print statement is a
    # SyntaxError on Python 3, while print(...) with one argument behaves the
    # same on both versions.
    print(readHttpd('../httpd.conf'))
| 26.865546 | 71 | 0.541758 | 369 | 3,197 | 4.482385 | 0.165312 | 0.093108 | 0.133011 | 0.159613 | 0.515115 | 0.452237 | 0.312576 | 0.189238 | 0.159613 | 0.159613 | 0 | 0.000481 | 0.34939 | 3,197 | 118 | 72 | 27.09322 | 0.794712 | 0.055052 | 0 | 0.426966 | 0 | 0 | 0.031209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.011236 | null | null | 0.011236 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3b9fb3dac066072f489c406ee52505f3cf651095 | 14,898 | py | Python | Script_Kali_Machine/DownloadTools_Old.py | manesec/tools4me | d6f0e41aabc3dc4fdc1ff8ad8bebaf578fdbe69e | [
"MIT"
] | null | null | null | Script_Kali_Machine/DownloadTools_Old.py | manesec/tools4me | d6f0e41aabc3dc4fdc1ff8ad8bebaf578fdbe69e | [
"MIT"
] | null | null | null | Script_Kali_Machine/DownloadTools_Old.py | manesec/tools4me | d6f0e41aabc3dc4fdc1ff8ad8bebaf578fdbe69e | [
"MIT"
] | 2 | 2022-02-09T07:30:12.000Z | 2022-03-06T08:00:22.000Z | # List slow update ..
# Feature toggles for the optional / slow installation steps further below.
Optional_Installation = {
    # Update ExploitDB
    "EXPLOITDB" : True,
    # Install ZAP
    # It has some bugs on old Kali Linux; if you are running an old Kali Linux, please disable it.
    "ZAP" : False,
    # Install DBeaver
    "DBEAVER" : True,
    # Install Big Webshell Collection ~ 0.84 GByte
    # The Kali built-in webshells are usually enough.
    # URL: https://github.com/tennc/webshell.git
    "BIG_WEBSHELL" : False,
    # Install Dictionary-Of-Pentesting (like seclist) ~ 1.2 GByte
    "DOP" : False,
}
#######################################################################
################################# END #################################
#######################################################################
print("""
▄▀▀▄ ▄▀▄ ▄▀▀█▄ ▄▀▀▄ ▀▄ ▄▀▀█▄▄▄▄ ▄▀▀▀▀▄ ▄▀▀█▄▄▄▄ ▄▀▄▄▄▄
█ █ ▀ █ ▐ ▄▀ ▀▄ █ █ █ █ ▐ ▄▀ ▐ █ █ ▐ ▐ ▄▀ ▐ █ █ ▌
▐ █ █ █▄▄▄█ ▐ █ ▀█ █▄▄▄▄▄ ▀▄ █▄▄▄▄▄ ▐ █
█ █ ▄▀ █ █ █ █ ▌ ▀▄ █ █ ▌ █
▄▀ ▄▀ █ ▄▀ ▄▀ █ ▄▀▄▄▄▄ █▀▀▀ ▄▀▄▄▄▄ ▄▀▄▄▄▄▀
█ █ ▐ ▐ █ ▐ █ ▐ ▐ █ ▐ █ ▐
▐ ▐ ▐ ▐ ▐ ▐
Download Tools on AMD64 - Tools4me by Mane.
Version: 20220301
https://github.com/manesec/tools4me
---------------------------------------------------------------""")
import os
# Mkdir
# Start from a clean slate: remove any previous run's output directories.
os.system("rm -rf Linux")
os.system("rm -rf Windows")
os.system("rm -rf Tools")
os.system("rm -rf Additions")
os.system("rm -rf Wordlists")
os.system("rm -rf Tools4mane")
os.mkdir("Linux")
os.mkdir("Windows")
os.mkdir("Tools")
os.mkdir("Additions")
os.mkdir("Wordlists")
# Tools4mane is recreated later by git clone, hence no mkdir for it here.
print(" :: Apt pre-install ::")
# Base packages required by the tools fetched below.
os.system("sudo apt update && sudo apt -y install python3-pip neo4j gobuster zaproxy hashcat nikto feroxbuster")
print("---------------------------------------------------------------")
# Optional: refresh the local exploit databases.
if Optional_Installation["EXPLOITDB"]:
    print(" :: Updating ExploitDB And MSF ::")
    os.system("sudo apt update && sudo apt -y install exploitdb metasploit-framework && sudo searchsploit -u")
    # Re-initialize the Metasploit database after the update.
    os.system("sudo msfdb reinit")
    print("---------------------------------------------------------------")
# Optional: pre-install/update ZAP add-ons by driving a headless ZAP instance.
if Optional_Installation["ZAP"] :
    print(" :: Setting up zaproxy ::")
    print("[!] Set up for zaproxy it need to take a long time.")
    # pexpect is third-party; imported lazily so it is only needed when enabled.
    import pexpect,time
    # Each step: spawn headless ZAP, wait until it listens, let it work, kill it.
    print("[>] Installing all additions ...")
    zap = pexpect.spawn('zaproxy -addoninstallall -daemon -port 12345',timeout=60*10)
    zap.expect("ZAP is now listening")
    print(" - Waiting for 5 second ...")
    time.sleep(5)
    zap.kill(9)
    print("[>] Updating all additions ...")
    zap = pexpect.spawn('zaproxy -addonupdate -daemon -port 12345',timeout=60*10)
    zap.expect("ZAP is now listening")
    print(" - Waiting for 5 second ...")
    time.sleep(5)
    zap.kill(9)
    print("[>] Uninstall Non-compatible additions ...")
    zap = pexpect.spawn('zaproxy -addonuninstall browserView -daemon -port 12345',timeout=60*10)
    zap.expect("ZAP is now listening")
    print(" - Waiting for 5 second ...")
    time.sleep(5)
    zap.kill(9)
# Optional: install DBeaver from its .deb release into the system.
if Optional_Installation["DBEAVER"]:
    print(" :: Setting up DBeaver ::")
    # Download into a scratch dir and clean it up afterwards.
    os.mkdir("tmp")
    os.chdir("tmp")
    os.system("wget https://github.com/dbeaver/dbeaver/releases/download/21.3.5/dbeaver-ce_21.3.5_amd64.deb -O dbeaver.deb")
    os.system("sudo dpkg -i dbeaver.deb")
    os.chdir("..")
    os.system("rm -rf tmp")
    print("---------------------------------------------------------------")
print(" :: pip pre install ::")
print("[>] Getting pwncat-cs ...")
# pwncat-cs: post-exploitation shell handler, installed system-wide.
os.system("sudo pip3 install pwncat-cs")
print("---------------------------------------------------------------")
# General-purpose tools downloaded into Tools/ (plus the Tools4mane clone).
print(" :: Installing Tools ::")
print("[>] Getting tools4mane ...")
os.system("git clone https://github.com/manesec/tools4mane.git Tools4mane")
print("[>] Getting nmapAutomator ...")
os.system("wget https://raw.githubusercontent.com/21y4d/nmapAutomator/master/nmapAutomator.sh --quiet -O Tools/nmapAutomator.sh")
print("[>] Getting Godzilla ...")
os.system("wget https://github.com/BeichenDream/Godzilla/releases/latest/download/godzilla.jar --quiet -O Tools/godzilla.jar")
print("[>] Getting Chisel ...")
# Fetch all four chisel builds (linux/windows x 386/amd64) and unpack them.
os.chdir("Tools")
os.mkdir("Chisel")
os.chdir("Chisel")
os.system("wget https://github.com/jpillora/chisel/releases/latest/download/chisel_1.7.7_linux_386.gz --quiet -O chisel_linux_386.gz")
os.system("wget https://github.com/jpillora/chisel/releases/latest/download/chisel_1.7.7_linux_amd64.gz --quiet -O chisel_linux_amd64.gz")
os.system("wget https://github.com/jpillora/chisel/releases/latest/download/chisel_1.7.7_windows_386.gz --quiet -O chisel_windows_386.gz")
os.system("wget https://github.com/jpillora/chisel/releases/latest/download/chisel_1.7.7_windows_amd64.gz --quiet -O chisel_windows_amd64.gz")
os.system("gzip -d *.gz")
os.chdir("..")
os.chdir("..")
print("---------------------------------------------------------------")
# Windows-targeted tooling downloaded into Windows/.
print(" :: Installing For Windows Tools ::")
print("[>] Getting Beroot ...")
os.chdir("Windows")
os.system("wget https://github.com/AlessandroZ/BeRoot/releases/latest/download/beRoot.zip -O beRoot.zip --quiet")
os.system("unzip beRoot.zip")
os.system("rm -r beRoot.zip")
os.chdir("..")
print("[>] Getting BloodHound ...")
os.chdir("Windows")
os.system("wget https://github.com/BloodHoundAD/BloodHound/releases/latest/download/BloodHound-linux-x64.zip -O BloodHound-linux-x64.zip")
os.system("unzip BloodHound-linux-x64.zip")
os.system("rm -rf BloodHound-linux-x64.zip")
os.chdir("..")
print("[>] Getting PowerSploit ...")
os.chdir("Windows")
# NOTE(review): target dirs are spelled "PowerSoloit"; kept as-is since other
# tooling may reference the existing names.
os.system("git clone https://github.com/PowerShellMafia/PowerSploit.git PowerSoloit_dev -b dev")
os.system("git clone https://github.com/PowerShellMafia/PowerSploit.git PowerSoloit_master -b master")
os.chdir("..")
print("[>] Getting Evil-winrm ...")
os.chdir("Windows")
os.system("git clone https://github.com/Hackplayers/evil-winrm.git Evil-winrm")
# Also install the gem so `evil-winrm` is on PATH.
os.system("sudo gem install evil-winrm")
os.chdir("..")
print("[>] Getting Nishang ...")
os.chdir("Windows")
os.system("git clone https://github.com/samratashok/nishang.git Nishang")
os.chdir("..")
print("[>] Getting RedTeamPowershellScripts ...")
os.chdir("Windows")
os.system("git clone https://github.com/Mr-Un1k0d3r/RedTeamPowershellScripts.git RedTeamPowershellScripts")
os.chdir("..")
print("[>] Getting gosecretsdump ...")
os.chdir("Windows")
os.system("wget https://github.com/C-Sto/gosecretsdump/releases/download/v0.3.1/gosecretsdump_win_v0.3.1.exe --quiet")
os.chdir("..")
print("[>] Getting python Impacket ...")
os.chdir("Windows")
os.system("git clone https://github.com/SecureAuthCorp/impacket.git Impacket")
os.chdir("Impacket")
# Install Impacket into the local Python environment.
os.system("pip3 install .")
os.chdir("..")
os.chdir("..")
print("[>] Getting WinPEAS ...")
os.chdir("Windows")
os.mkdir("WinPEAS")
os.chdir("WinPEAS")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEAS.bat --quiet -O winPEAS.bat")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASany.exe --quiet -O winPEASany.exe")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASany_ofs.exe --quiet -O winPEASany_ofs.exe")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASx64.exe --quiet -O winPEASx64.exe")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASx64_ofs.exe --quiet -O winPEASx64_ofs.exe")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASx86.exe --quiet -O winPEASx86.exe")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/winPEASx86_ofs.exe --quiet -O winPEASx86_ofs.exe")
os.chdir("..")
os.chdir("..")
print("[>] Getting Mimikatz ...")
# Unzip in a scratch dir, then move it into place as Windows/Mimikatz.
os.mkdir("tmp")
os.chdir("tmp")
os.system("wget https://github.com/gentilkiwi/mimikatz/releases/latest/download/mimikatz_trunk.zip --quiet -O mimikatz.zip")
os.system("unzip mimikatz.zip")
os.system("rm mimikatz.zip")
os.chdir("..")
os.system("mv tmp Windows/Mimikatz")
print("[>] Getting AD Collector ...")
os.chdir("Windows")
os.system("wget https://github.com/dev-2null/ADCollector/releases/download/Release/ADCollector.exe --quiet")
os.chdir("..")
print("[>] Getting ADACLScanner ...")
os.chdir("Windows")
os.system("wget https://github.com/canix1/ADACLScanner/releases/latest/download/ADACLScan.ps1 --quiet")
os.chdir("..")
print("[>] Getting WinPwn ...")
os.chdir("Windows")
os.system("wget https://github.com/S3cur3Th1sSh1t/WinPwn/releases/latest/download/WinPwn.exe --quiet")
os.system("wget https://github.com/S3cur3Th1sSh1t/WinPwn/releases/latest/download/WinPwn.ps1 --quiet")
os.chdir("..")
print("[>] Getting juicy-potato ...")
os.chdir("Windows")
os.system("wget https://github.com/ohpe/juicy-potato/releases/latest/download/JuicyPotato.exe --quiet")
os.chdir("..")
print("[>] Getting Lovely-Potato ...")
os.chdir("Windows")
os.system("git clone https://github.com/TsukiCTF/Lovely-Potato.git")
os.chdir("..")
print("[>] Getting PowerUpSQL ...")
os.chdir("Windows")
os.system("wget https://raw.githubusercontent.com/NetSPI/PowerUpSQL/master/PowerUpSQL.ps1 -O PowerUpSQL.ps1 --quiet")
os.chdir("..")
print("[>] Getting kerbrute ...")
os.chdir("Windows")
os.system("git clone https://github.com/TarlogicSecurity/kerbrute")
os.chdir("kerbrute")
os.system("pip3 install -r requirements.txt")
os.chdir("..")
os.chdir("..")
print("[>] Getting Sharp Collection ...")
os.chdir("Windows")
os.system("git clone https://github.com/Flangvik/SharpCollection.git SharpCollection")
os.chdir("..")
print("[>] Getting Sharp ADModule ...")
os.chdir("Windows")
os.system("git clone https://github.com/samratashok/ADModule.git ADModule")
os.chdir("..")
print("[>] Getting ADCS.ps1 ...")
os.chdir("Windows")
os.system("wget https://raw.githubusercontent.com/cfalta/PoshADCS/master/ADCS.ps1 -O ADCS.ps1 --quiet")
os.chdir("..")
print("[>] Getting Privesc.ps1 ...")
os.chdir("Windows")
os.system("wget https://raw.githubusercontent.com/enjoiz/Privesc/master/privesc.ps1 -O Privesc.ps1 --quiet")
os.chdir("..")
print("[>] Getting SharpView ...")
os.chdir("Windows")
os.system("wget https://github.com/tevora-threat/SharpView/raw/master/Compiled/SharpView.exe -O SharpView.exe --quiet")
os.chdir("..")
print("[>] Getting NetSPI PowerShell Scripts ...")
os.chdir("Windows")
os.system("git clone https://github.com/NetSPI/PowerShell.git NetSPIPowerShell")
os.chdir("..")
print("---------------------------------------------------------------")
# Linux privilege-escalation / enumeration tooling downloaded into Linux/.
print(" :: Installing For Linux Tools ::")
print("[>] Getting pspy ...")
os.chdir("Linux")
os.mkdir("Pspy")
os.chdir("Pspy")
os.system("wget https://github.com/DominicBreuker/pspy/releases/latest/download/pspy32 --quiet -O pspy32")
os.system("wget https://github.com/DominicBreuker/pspy/releases/latest/download/pspy64 --quiet -O pspy64")
os.system("wget https://github.com/DominicBreuker/pspy/releases/latest/download/pspy32s --quiet -O pspy32s")
os.system("wget https://github.com/DominicBreuker/pspy/releases/latest/download/pspy64s --quiet -O pspy64s")
os.chdir("..")
os.chdir("..")
print("[>] Getting LinPEAS ...")
os.system("wget https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh --quiet -O Linux/linpeas.sh")
print("[>] Getting LinuxSmartEnumeration ...")
os.system("wget https://raw.githubusercontent.com/diego-treitos/linux-smart-enumeration/master/lse.sh --quiet -O Linux/lse.sh")
print("[>] Getting LinEnum ...")
os.system("wget https://raw.githubusercontent.com/rebootuser/LinEnum/master/LinEnum.sh --quiet -O Linux/LinEnum.sh")
print("[>] Getting unix-privesc-check ...")
os.chdir("Linux")
# Clone, archive to a tarball for easy transfer, then drop the working tree.
os.system("git clone https://github.com/pentestmonkey/unix-privesc-check.git Unix-privesc-check")
os.system("tar -cf Unix-privesc-check.tar.gz Unix-privesc-check")
os.system("rm -rf Unix-privesc-check")
os.chdir("..")
print("[>] Getting SUDO_KILLER ...")
os.chdir("Linux")
os.system("git clone https://github.com/TH3xACE/SUDO_KILLER.git Sudo_Killer")
os.system("tar -cf Sudo_Killer.tar.gz Sudo_Killer")
os.system("rm -rf Sudo_Killer")
os.chdir("..")
print("---------------------------------------------------------------")
# Miscellaneous extras downloaded into Additions/.
print(" :: Installing Additions Tools ::")
if Optional_Installation["BIG_WEBSHELL"] :
    print("[>] Getting Big Webshell Collection ...")
    os.chdir("Additions")
    os.system("git clone https://github.com/tennc/webshell.git")
    os.chdir("webshell")
    # The collection references nested repos; pull those too.
    os.system("git submodule update --init --recursive")
    os.chdir("..")
    os.chdir("..")
print("[>] Getting Hack-browser ...")
os.chdir("Additions")
os.mkdir("Hack-browser")
os.chdir("Hack-browser")
# Fetch and unpack all four builds, then drop the archives.
os.system("wget https://github.com/moonD4rk/HackBrowserData/releases/latest/download/hack-browser-data--linux-amd64.zip --quiet ")
os.system("wget https://github.com/moonD4rk/HackBrowserData/releases/latest/download/hack-browser-data--linux-386.zip --quiet ")
os.system("wget https://github.com/moonD4rk/HackBrowserData/releases/latest/download/hack-browser-data--windows-32bit.zip --quiet ")
os.system("wget https://github.com/moonD4rk/HackBrowserData/releases/latest/download/hack-browser-data--windows-64bit.zip --quiet ")
os.system("unzip hack-browser-data--linux-amd64.zip")
os.system("unzip hack-browser-data--linux-386.zip")
os.system("unzip hack-browser-data--windows-32bit.zip")
os.system("unzip hack-browser-data--windows-64bit.zip")
os.remove("hack-browser-data--linux-amd64.zip")
os.remove("hack-browser-data--linux-386.zip")
os.remove("hack-browser-data--windows-32bit.zip")
os.remove("hack-browser-data--windows-64bit.zip")
os.chdir("..")
os.chdir("..")
print("---------------------------------------------------------------")
# Wordlists downloaded into Wordlists/, plus the self-update helper and a summary.
print(" :: Installing Wordlists ::")
print("[>] Getting secLists ...")
os.chdir("Wordlists")
os.system("git clone https://github.com/danielmiessler/SecLists.git")
os.chdir("..")
print("[>] Getting Auto_Wordlists ...")
os.chdir("Wordlists")
os.system("git clone https://github.com/carlospolop/Auto_Wordlists.git")
os.chdir("..")
print("[>] Getting rockyou.txt ...")
os.system("wget https://github.com/brannondorsey/naive-hashcat/releases/download/data/rockyou.txt -O Wordlists/rockyou.txt")
if Optional_Installation["DOP"]:
    print("[>] Getting Dictionary-Of-Pentesting ...")
    os.chdir("Wordlists")
    os.system("git clone https://github.com/insightglacier/Dictionary-Of-Pentesting.git")
    os.chdir("..")
print("[>] Getting Update.sh")
os.system("wget https://raw.githubusercontent.com/manesec/tools4me/main/Script_Kali_Machine/Update.sh --quiet")
os.system("chmod u+x Update.sh")
print("If you want to update \"DownloadTools.py\" just run ./Update.sh ")
print("-------------------------- Total ------------------------------")
# Per-directory disk usage summary of everything fetched above.
os.system("du -h --max-depth=1 .")
print("\nDone! -- by manesec.")
| 39.941019 | 142 | 0.647872 | 1,964 | 14,898 | 4.974033 | 0.161914 | 0.07534 | 0.075955 | 0.069608 | 0.530556 | 0.483673 | 0.415293 | 0.361142 | 0.352032 | 0.314566 | 0 | 0.015756 | 0.105383 | 14,898 | 372 | 143 | 40.048387 | 0.704607 | 0.024097 | 0 | 0.319728 | 0 | 0.142857 | 0.716461 | 0.083595 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.006803 | 0 | 0.006803 | 0.238095 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3ba11498a776ef72e094f9860944bc017a359568 | 7,375 | py | Python | metrics/recall/recall.py | leondz/datasets | 4110fb6034f79c5fb470cf1043ff52180e9c63b7 | [
"Apache-2.0"
] | 3,395 | 2020-05-13T21:16:50.000Z | 2020-09-10T14:36:50.000Z | metrics/recall/recall.py | leondz/datasets | 4110fb6034f79c5fb470cf1043ff52180e9c63b7 | [
"Apache-2.0"
] | 370 | 2020-05-13T21:28:57.000Z | 2020-09-10T11:03:38.000Z | metrics/recall/recall.py | leondz/datasets | 4110fb6034f79c5fb470cf1043ff52180e9c63b7 | [
"Apache-2.0"
] | 258 | 2020-05-15T01:17:09.000Z | 2020-09-10T12:41:43.000Z | # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Recall metric."""
from sklearn.metrics import recall_score
import datasets
# Human-readable summary of the metric; prepended to the `Recall` class
# docstring by `add_start_docstrings` below.
_DESCRIPTION = """
Recall is the fraction of the positive examples that were correctly labeled by the model as positive. It can be computed with the equation:
Recall = TP / (TP + FN)
Where TP is the true positives and FN is the false negatives.
"""
# Argument / return / example reference, also injected into the class
# docstring by `add_start_docstrings` below.
_KWARGS_DESCRIPTION = """
Args:
- **predictions** (`list` of `int`): The predicted labels.
- **references** (`list` of `int`): The ground truth labels.
- **labels** (`list` of `int`): The set of labels to include when `average` is not set to `binary`, and their order when average is `None`. Labels present in the data can be excluded in this input, for example to calculate a multiclass average ignoring a majority negative class, while labels not present in the data will result in 0 components in a macro average. For multilabel targets, labels are column indices. By default, all labels in y_true and y_pred are used in sorted order. Defaults to None.
- **pos_label** (`int`): The class label to use as the 'positive class' when calculating the recall. Defaults to `1`.
- **average** (`string`): This parameter is required for multiclass/multilabel targets. If None, the scores for each class are returned. Otherwise, this determines the type of averaging performed on the data. Defaults to `'binary'`.
- `'binary'`: Only report results for the class specified by `pos_label`. This is applicable only if the target labels and predictions are binary.
- `'micro'`: Calculate metrics globally by counting the total true positives, false negatives, and false positives.
- `'macro'`: Calculate metrics for each label, and find their unweighted mean. This does not take label imbalance into account.
- `'weighted'`: Calculate metrics for each label, and find their average weighted by support (the number of true instances for each label). This alters `'macro'` to account for label imbalance. Note that it can result in an F-score that is not between precision and recall.
- `'samples'`: Calculate metrics for each instance, and find their average (only meaningful for multilabel classification).
- **sample_weight** (`list` of `float`): Sample weights Defaults to `None`.
- **zero_division** (): Sets the value to return when there is a zero division. Defaults to .
- `'warn'`: If there is a zero division, the return value is `0`, but warnings are also raised.
- `0`: If there is a zero division, the return value is `0`.
- `1`: If there is a zero division, the return value is `1`.
Returns:
- **recall** (`float`, or `array` of `float`): Either the general recall score, or the recall scores for individual classes, depending on the values input to `labels` and `average`. Minimum possible value is 0. Maximum possible value is 1. A higher recall means that more of the positive examples have been labeled correctly. Therefore, a higher recall is generally considered better.
Examples:
Example 1-A simple example with some errors
>>> recall_metric = datasets.load_metric('recall')
>>> results = recall_metric.compute(references=[0, 0, 1, 1, 1], predictions=[0, 1, 0, 1, 1])
>>> print(results)
{'recall': 0.6666666666666666}
Example 2-The same example as Example 1, but with `pos_label=0` instead of the default `pos_label=1`.
>>> recall_metric = datasets.load_metric('recall')
>>> results = recall_metric.compute(references=[0, 0, 1, 1, 1], predictions=[0, 1, 0, 1, 1], pos_label=0)
>>> print(results)
{'recall': 0.5}
Example 3-The same example as Example 1, but with `sample_weight` included.
>>> recall_metric = datasets.load_metric('recall')
>>> sample_weight = [0.9, 0.2, 0.9, 0.3, 0.8]
>>> results = recall_metric.compute(references=[0, 0, 1, 1, 1], predictions=[0, 1, 0, 1, 1], sample_weight=sample_weight)
>>> print(results)
{'recall': 0.55}
Example 4-A multiclass example, using different averages.
>>> recall_metric = datasets.load_metric('recall')
>>> predictions = [0, 2, 1, 0, 0, 1]
>>> references = [0, 1, 2, 0, 1, 2]
>>> results = recall_metric.compute(predictions=predictions, references=references, average='macro')
>>> print(results)
{'recall': 0.3333333333333333}
>>> results = recall_metric.compute(predictions=predictions, references=references, average='micro')
>>> print(results)
{'recall': 0.3333333333333333}
>>> results = recall_metric.compute(predictions=predictions, references=references, average='weighted')
>>> print(results)
{'recall': 0.3333333333333333}
>>> results = recall_metric.compute(predictions=predictions, references=references, average=None)
>>> print(results)
{'recall': array([1., 0., 0.])}
"""
# BibTeX citation for scikit-learn, whose `recall_score` implements the
# computation. Fix: the original entry was missing its closing brace.
_CITATION = """
@article{scikit-learn, title={Scikit-learn: Machine Learning in {P}ython}, author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.}, journal={Journal of Machine Learning Research}, volume={12}, pages={2825--2830}, year={2011}}
"""
# Thin wrapper exposing `sklearn.metrics.recall_score` as a `datasets` metric.
@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class Recall(datasets.Metric):
    def _info(self):
        # Multilabel configs take a sequence of labels per example; every
        # other config takes a single label per example.
        if self.config_name == "multilabel":
            features = {
                "predictions": datasets.Sequence(datasets.Value("int32")),
                "references": datasets.Sequence(datasets.Value("int32")),
            }
        else:
            features = {
                "predictions": datasets.Value("int32"),
                "references": datasets.Value("int32"),
            }
        return datasets.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=datasets.Features(features),
            reference_urls=["https://scikit-learn.org/stable/modules/generated/sklearn.metrics.recall_score.html"],
        )

    def _compute(
        self,
        predictions,
        references,
        labels=None,
        pos_label=1,
        average="binary",
        sample_weight=None,
        zero_division="warn",
    ):
        # sklearn expects (y_true, y_pred) order.
        score = recall_score(
            references,
            predictions,
            labels=labels,
            pos_label=pos_label,
            average=average,
            sample_weight=sample_weight,
            zero_division=zero_division,
        )
        # `average=None` yields a per-class array; otherwise unwrap the
        # single aggregate value to a plain float.
        if score.size == 1:
            score = float(score)
        return {"recall": score}
| 54.62963 | 503 | 0.672949 | 975 | 7,375 | 5.029744 | 0.315897 | 0.037113 | 0.027121 | 0.037113 | 0.226754 | 0.199429 | 0.184747 | 0.184747 | 0.155791 | 0.140498 | 0 | 0.030833 | 0.21722 | 7,375 | 134 | 504 | 55.037313 | 0.818638 | 0.085288 | 0 | 0.203884 | 0 | 0.174757 | 0.792509 | 0.084869 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019417 | false | 0.009709 | 0.019417 | 0.009709 | 0.067961 | 0.067961 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e605be6a5e8d416273e08bffcd14fcff9ddc548 | 848 | py | Python | blog/home/migrations/0001_initial.py | iflyBird/blog | 33dbf6345ae4ae64f726c7ce5353b7c7864351af | [
"MIT"
] | null | null | null | blog/home/migrations/0001_initial.py | iflyBird/blog | 33dbf6345ae4ae64f726c7ce5353b7c7864351af | [
"MIT"
] | null | null | null | blog/home/migrations/0001_initial.py | iflyBird/blog | 33dbf6345ae4ae64f726c7ce5353b7c7864351af | [
"MIT"
] | null | null | null | # Generated by Django 2.1.8 on 2020-05-27 15:24
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ArticleCategory table.
    initial = True
    # No prior migrations to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ArticleCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=100)),
                # NOTE(review): the default is frozen at the moment
                # `makemigrations` ran (a fixed datetime), not "now" —
                # common for auto-generated migrations; confirm intended.
                ('created', models.DateField(default=datetime.datetime(2020, 5, 27, 15, 24, 34, 743876, tzinfo=utc))),
            ],
            options={
                'verbose_name': '类别管理',
                'verbose_name_plural': '类别管理',
                'db_table': 'tb_category',
            },
        ),
    ]
| 28.266667 | 118 | 0.568396 | 88 | 848 | 5.375 | 0.659091 | 0.069767 | 0.02537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062925 | 0.306604 | 848 | 29 | 119 | 29.241379 | 0.741497 | 0.053066 | 0 | 0 | 1 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.136364 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e66f42aa9f3d76581d68b02d53d96cb65653cfe | 1,066 | py | Python | src/compas_ghpython/artists/polylineartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | src/compas_ghpython/artists/polylineartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | src/compas_ghpython/artists/polylineartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import compas_ghpython
from compas.artists import PrimitiveArtist
from .artist import GHArtist
class PolylineArtist(GHArtist, PrimitiveArtist):
    """Artist for drawing polylines.

    Parameters
    ----------
    polyline : :class:`compas.geometry.Polyline`
        A COMPAS polyline.
    **kwargs : dict, optional
        Additional keyword arguments.
        See :class:`compas_ghpython.artists.GHArtist` and :class:`compas.artists.PrimitiveArtist` for more info.
    """

    def __init__(self, polyline, **kwargs):
        super(PolylineArtist, self).__init__(primitive=polyline, **kwargs)

    def draw(self):
        """Draw the polyline.

        Returns
        -------
        :rhino:`Rhino.Geometry.Polyline`.
        """
        args = self._get_args(self.primitive)
        drawn = compas_ghpython.draw_polylines([args])
        return drawn[0]

    @staticmethod
    def _get_args(primitive):
        # Keep the lazy `map` object: downstream drawing consumes it once.
        return {'points': map(list, primitive.points)}
| 26.65 | 112 | 0.680113 | 110 | 1,066 | 6.318182 | 0.445455 | 0.043165 | 0.069065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00119 | 0.212008 | 1,066 | 39 | 113 | 27.333333 | 0.82619 | 0.336773 | 0 | 0 | 0 | 0 | 0.009554 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.4 | 0.066667 | 0.8 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
8e67cd9cfa819f5435d8aaa629ca0362047fd00b | 2,356 | py | Python | auctions/utils.py | zebadiahtaylor/cs50-Commerce | c8c56a0da861a32b1929053dd62e926e37f6b1b5 | [
"MIT"
] | null | null | null | auctions/utils.py | zebadiahtaylor/cs50-Commerce | c8c56a0da861a32b1929053dd62e926e37f6b1b5 | [
"MIT"
] | null | null | null | auctions/utils.py | zebadiahtaylor/cs50-Commerce | c8c56a0da861a32b1929053dd62e926e37f6b1b5 | [
"MIT"
] | null | null | null | from .models import Auction, Bid, Comment, User
from django.db.models import Max
def has_bids(auction):
    """Return True if at least one bid has been placed on *auction*.

    *auction* is only used to filter the Bid queryset.
    TODO: Throws TypeError if user not logged in (pre-existing note —
    confirm where this originates).
    """
    # ``aggregate`` always returns a dict (``Max`` yields None for an empty
    # queryset) and never raises ``Bid.DoesNotExist`` — that is a
    # ``.get()`` behavior — so the old try/except was dead code.
    # Compare against None explicitly: a max bid_amount of 0 is still a bid.
    current_bid = Bid.objects.filter(auction=auction).aggregate(Max('bid_amount'))
    return current_bid['bid_amount__max'] is not None
def has_high_bid(user, auction):
    """
    Returns True if User has highest bid.
    """
    try:
        overall = Bid.objects.filter(auction=auction).aggregate(max_bid=Max('bid_amount'))
        mine = Bid.objects.filter(auction=auction, user=user).aggregate(user_bid=Max('bid_amount'))
        # The user holds the high bid when they have bid at all and their
        # best bid equals the overall best.
        return mine['user_bid'] is not None and overall['max_bid'] == mine['user_bid']
    except Bid.DoesNotExist:
        return False
def is_users_auction(user, auction):
    """
    Returns True if the user is the owner of the auction.
    """
    # *auction* is an id; fetch the record and compare owners.
    record = Auction.objects.get(id=auction)
    return record.user == user
def is_watched(user, auction):
    """Return True when *auction* (an id) is on the user's watchlist."""
    account = User.objects.get(username=user)
    return any(item.id == auction for item in account.watchlist.all())
def return_active_auctions():
    """Return active auctions, each annotated with its current high bid."""
    active = [auction for auction in Auction.objects.all() if auction.active]
    for auction in active:
        # Attach the current high bid (or starting bid) for display.
        auction.current_bid = return_highest_bid(auction.id)
    return active
def return_highest_bid(auction):
    """Return the highest bid amount for *auction* (an id), falling back
    to the auction's starting bid when nobody has bid yet."""
    current = Auction.objects.get(id=auction).starting_bid
    try:
        aggregated = Bid.objects.filter(auction=auction).aggregate(max_bid=Max('bid_amount'))
        if aggregated['max_bid']:
            current = aggregated['max_bid']
    except Bid.DoesNotExist:
        pass
    return current
def return_all_comments(auction):
    """Return the queryset of comments attached to *auction*."""
    return Comment.objects.filter(auction=auction)
| 24.8 | 110 | 0.660866 | 311 | 2,356 | 4.807074 | 0.189711 | 0.056187 | 0.04214 | 0.090301 | 0.318395 | 0.190635 | 0.124415 | 0.124415 | 0.124415 | 0.092308 | 0 | 0 | 0.247029 | 2,356 | 94 | 111 | 25.06383 | 0.842728 | 0.095076 | 0 | 0.254545 | 0 | 0 | 0.044231 | 0 | 0 | 0 | 0 | 0.010638 | 0 | 1 | 0.127273 | false | 0.036364 | 0.036364 | 0 | 0.345455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e6c720ed11080cd24768da2e5b31afb17339d1c | 1,778 | py | Python | tests/test_urls.py | kverdecia/dj-mypypi2 | 4ca5f67901a6a41029b4e1b0eccf8d74d4958b58 | [
"MIT"
] | 1 | 2021-08-12T08:59:09.000Z | 2021-08-12T08:59:09.000Z | tests/test_urls.py | kverdecia/dj-mypypi2 | 4ca5f67901a6a41029b4e1b0eccf8d74d4958b58 | [
"MIT"
] | 8 | 2021-04-24T06:08:07.000Z | 2021-07-25T07:18:03.000Z | tests/test_urls.py | kverdecia/dj-mypypi2 | 4ca5f67901a6a41029b4e1b0eccf8d74d4958b58 | [
"MIT"
] | null | null | null | import cuid
from django.urls import reverse, resolve
from django.test import TestCase
from djmypypi2 import models
from djmypypi2 import factories
class TestUrls(TestCase):
    """Round-trip checks: each named URL reverses to the expected path and
    each path resolves back to the expected view name."""

    def test_package_list_reverse(self):
        self.assertEqual(reverse('djmypypi2:package-list'), '/mypypi2/')

    def test_package_list_resolve(self):
        self.assertEqual(resolve('/mypypi2/').view_name, 'djmypypi2:package-list')

    def test_package_detail_reverse(self):
        package: models.Package = factories.PackageFactory()
        self.assertEqual(
            reverse('djmypypi2:package-detail', kwargs={'package_name': package.name}),
            f'/mypypi2/{package.name}/')

    def test_package_detail_resolve(self):
        package: models.Package = factories.PackageFactory()
        self.assertEqual(
            resolve(f'/mypypi2/{package.name}/').view_name,
            'djmypypi2:package-detail')

    def test_download_reverse(self):
        version: models.Version = factories.VersionFactory()
        self.assertEqual(
            reverse('djmypypi2:download-package',
                    kwargs={'archive_name': version.archive_name}),
            f'/mypypi2/@download/{version.archive_name}')

    def test_download_resolve(self):
        version: models.Version = factories.VersionFactory()
        self.assertEqual(
            resolve(f'/mypypi2/@download/{version.archive_name}').view_name,
            'djmypypi2:download-package')

    def test_upload_reverse(self):
        self.assertEqual(reverse('djmypypi2:upload-package'), '/mypypi2/@upload/')

    def test_upload_resolve(self):
        self.assertEqual(resolve('/mypypi2/@upload/').view_name, 'djmypypi2:upload-package')
| 35.56 | 98 | 0.707537 | 207 | 1,778 | 5.898551 | 0.154589 | 0.078624 | 0.093366 | 0.075348 | 0.548731 | 0.398034 | 0.149058 | 0.116298 | 0 | 0 | 0 | 0.012212 | 0.170979 | 1,778 | 49 | 99 | 36.285714 | 0.816147 | 0 | 0 | 0.117647 | 0 | 0 | 0.223847 | 0.181102 | 0 | 0 | 0 | 0 | 0.235294 | 1 | 0.235294 | false | 0 | 0.147059 | 0 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e6f1d30087ea8e2e9784ed81ab2535019f36f7a | 2,263 | py | Python | google-datacatalog-tableau-connector/src/google/datacatalog_connectors/tableau/scrape/rest_api_helper.py | Parkman328/datacatalog-connectors-bi | 630de336617728713779d7224eeab140d5abaec2 | [
"Apache-2.0"
] | 27 | 2020-04-27T21:55:49.000Z | 2022-02-18T22:09:13.000Z | google-datacatalog-tableau-connector/src/google/datacatalog_connectors/tableau/scrape/rest_api_helper.py | Parkman328/datacatalog-connectors-bi | 630de336617728713779d7224eeab140d5abaec2 | [
"Apache-2.0"
] | 36 | 2020-05-01T15:26:14.000Z | 2022-03-26T00:09:19.000Z | google-datacatalog-tableau-connector/src/google/datacatalog_connectors/tableau/scrape/rest_api_helper.py | Parkman328/datacatalog-connectors-bi | 630de336617728713779d7224eeab140d5abaec2 | [
"Apache-2.0"
] | 18 | 2020-04-30T22:14:09.000Z | 2022-01-13T10:28:03.000Z | #!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
from google.datacatalog_connectors.tableau.scrape import \
authenticator, constants
class RestAPIHelper:
    """Small wrapper around the Tableau REST API."""

    def __init__(self,
                 server_address,
                 api_version,
                 username,
                 password,
                 site_content_url=None):

        self.__server_address = server_address
        self.__api_version = api_version
        self.__username = username
        self.__password = password
        self.__site_content_url = site_content_url

        self.__base_api_endpoint = f'{server_address}/api/{api_version}'
        # Sent with every request; the auth token header is added per call.
        self.__common_headers = {
            'Content-Type': constants.JSON_CONTENT_TYPE,
            'Accept': constants.JSON_CONTENT_TYPE
        }
        # Lazily populated on first API call.
        self.__auth_credentials = None

    def get_all_sites_for_server(self):
        """Return the list of sites visible to the signed-in user."""
        self.__set_up_auth_credentials()

        headers = self.__common_headers.copy()
        headers[constants.X_TABLEAU_AUTH_HEADER_NAME] = \
            self.__auth_credentials['token']

        response = requests.get(
            url=f'{self.__base_api_endpoint}/sites', headers=headers).json()

        sites_wrapper = response.get('sites') if response else None
        if sites_wrapper and 'site' in sites_wrapper:
            return sites_wrapper['site']
        return []

    def __set_up_auth_credentials(self):
        # Authenticate only once; reuse the cached token afterwards.
        if not self.__auth_credentials:
            self.__auth_credentials = \
                authenticator.Authenticator.authenticate(
                    self.__server_address,
                    self.__api_version,
                    self.__username,
                    self.__password,
                    self.__site_content_url)
8e90797c1a396d8d5df4b10107f151b6eb6a4b1d | 445 | py | Python | Symbol Patterns/symbolpattern143.py | vaidehisinha1/Python-PatternHouse | 49f71bcc5319a838592e69b0e49ef1edba32bf7c | [
"MIT"
] | null | null | null | Symbol Patterns/symbolpattern143.py | vaidehisinha1/Python-PatternHouse | 49f71bcc5319a838592e69b0e49ef1edba32bf7c | [
"MIT"
] | 471 | 2022-01-15T07:07:18.000Z | 2022-02-28T16:01:42.000Z | Symbol Patterns/symbolpattern143.py | vaidehisinha1/Python-PatternHouse | 49f71bcc5319a838592e69b0e49ef1edba32bf7c | [
"MIT"
] | 2 | 2022-01-17T09:43:16.000Z | 2022-01-29T15:15:47.000Z | height = int(input())
for i in range(1,height+1):
for j in range(1, height+1):
if(i == height//2 or i == height or j == 1 or j == height and i >= height//2 or (j%2==1 and i<= height//2)):
print("*",end=" ")
else:
print(end=" ")
print()
# Sample Input :- 7
# Output :-
# * * * *
# * * * *
# * * * * * * *
# * *
# * *
# * *
# * * * * * * *
| 18.541667 | 116 | 0.346067 | 52 | 445 | 2.961538 | 0.365385 | 0.181818 | 0.155844 | 0.181818 | 0.194805 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.431461 | 445 | 23 | 117 | 19.347826 | 0.565217 | 0.285393 | 0 | 0 | 0 | 0 | 0.012987 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.375 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e9a24be54fa83ecc2a85a73f853297e027edb0d | 4,001 | py | Python | ejercicios/alarma.py | proto-tools-docs/Soluciones | 7a619e00572a3496cb08a90702f152f52c6d5e56 | [
"MIT"
] | null | null | null | ejercicios/alarma.py | proto-tools-docs/Soluciones | 7a619e00572a3496cb08a90702f152f52c6d5e56 | [
"MIT"
] | null | null | null | ejercicios/alarma.py | proto-tools-docs/Soluciones | 7a619e00572a3496cb08a90702f152f52c6d5e56 | [
"MIT"
] | null | null | null | """AyudaEnPython: https://www.facebook.com/groups/ayudapython
Crear una aplicación de consola que permita al usuario programar alarmas
de tiempo. Para realizar esta aplicación deberá presentarle al usuario
las siguientes opciones: ver alarmas activas, agregar nueva alarma,
agregar nueva alarma con tiempo aleatorio, editar alarma existente y
quitar alarma.
Para este ejercicio debe crear una clase llamada Reloj que contenga los
atributos necesarios para almacenar el tiempo (horas, minutos y segundos),
guiarse de las siguientes restricciones y utilizar el diagrama de clase:
- Programe un método constructor vacío que cree objetos con un tiempo
(horas, minutos y segundos) aleatorio.
- Programe un método que reciba las horas, minutos y segundos para la nueva
alarma.
- Cree un método para modificar los segundos.
- Cree un método para modificar los minutos.
- Cree un método para modificar las horas.
- Programe un método que devuelva una cadena de texto que incluya la hora
actual de la variable en formato hh:mm:ss.
* Considere el valor actual y el valor máximo que puede contener cada uno
de los atributos al momento de añadir tiempo.
+----------------------------------------+
| Reloj |
+----------------------------------------+
| - horas: int |
| - minutos: int |
| - segundos: int |
+----------------------------------------+
| + agregar_horas(int horas): void |
| + agregar_minutos(int minutos): void |
| + agregar_segundos(int segundos): void |
| + visualizar(): string |
+----------------------------------------+
"""
from random import randint
from prototools.menu import EzMenu
from prototools.entradas import entrada_int
class Reloj:
    """Alarm time holder, initialized with a random time of day."""

    def __init__(self) -> None:
        # Random initial time; the hour range 0-24 matches the input
        # validation used by the menu handlers below.
        self._horas = randint(0, 24)
        self._minutos = randint(0, 59)
        self._segundos = randint(0, 59)

    def agregar_horas(self, horas):
        """Set the hours component."""
        self._horas = horas

    def agregar_minutos(self, minutos):
        """Set the minutes component."""
        self._minutos = minutos

    def agregar_segundos(self, segundos):
        """Set the seconds component."""
        self._segundos = segundos

    def visualizar(self):
        """Return the time formatted as hh:mm:ss (zero-padded)."""
        return "{:02}:{:02}:{:02}".format(
            self._horas, self._minutos, self._segundos)
# NOTE(review): `alarma` is never read again — every handler builds its own
# Reloj — so this looks dead; confirm before removing.
alarma = Reloj()
# Registered alarms, shared by the menu handlers below.
alarmas = []
def _entradas():
    """Prompt for hours, minutes and seconds; return them as a tuple."""
    campos = (
        ("Ingrese la hora: ", 24),
        ("Ingrese los minutos: ", 59),
        ("Ingrese los segundos: ", 59),
    )
    # tuple() consumes the generator left to right, preserving prompt order.
    return tuple(entrada_int(texto, min=0, max=tope) for texto, tope in campos)
def _agregar(alarma, horas, minutos, segundos):
    """Copy the given time components onto *alarma*."""
    for fijar, valor in (
        (alarma.agregar_horas, horas),
        (alarma.agregar_minutos, minutos),
        (alarma.agregar_segundos, segundos),
    ):
        fijar(valor)
def ver_alarmas():
    """Print active alarms, numbered from 1, or a notice when none exist."""
    if not alarmas:
        print("No hay alarmas por el momento")
    for numero, reloj in enumerate(alarmas, 1):
        print(f"{numero}. {reloj.visualizar()}")
def nueva_alarma():
    """Create an alarm from user-supplied time and register it."""
    reloj = Reloj()
    _agregar(reloj, *_entradas())
    alarmas.append(reloj)
def alarma_aleatorio():
    """Register an alarm with a random time (Reloj's default)."""
    alarmas.append(Reloj())
    print("Alarma aleatoria agregada")
def editar_alarma():
    """Let the user pick an alarm (1-based) and re-enter its time."""
    ver_alarmas()
    print("Seleccionar la alarma a ser editada")
    indice = int(input(">>> "))
    # Arguments evaluate left to right: the lookup (and any IndexError)
    # happens before the time prompts, as in the original flow.
    _agregar(alarmas[indice - 1], *_entradas())
def quitar_alarma():
    """Let the user pick an alarm (1-based) and remove it."""
    ver_alarmas()
    print("Seleccionar la alarma a ser removida")
    indice = int(input(">>> "))
    alarmas.pop(indice - 1)
if __name__ == "__main__":
    # Option labels and their handler functions are registered in
    # parallel order ("salir" has no handler — the menu exits).
    opciones = (
        "ver alarmas activas",
        "agregar nueva alarma",
        "agregar alarma aleatoria",
        "editar alarmas existente",
        "quitar alarma",
        "salir",
    )
    funciones = (
        ver_alarmas,
        nueva_alarma,
        alarma_aleatorio,
        editar_alarma,
        quitar_alarma,
    )
    menu = EzMenu(ancho=40)
    menu.titulo("Alarmas")
    menu.agregar_opciones(*opciones)
    menu.agregar_funciones(*funciones)
    menu.run()
8e9d82a4e05b9da4170cbe9911065b74ac889a13 | 2,976 | py | Python | climops/calculate_statistics.py | YakelynRJ/climops | 79384df69a499784e9b4be6ed4f81c1866e1b29d | [
"MIT"
] | null | null | null | climops/calculate_statistics.py | YakelynRJ/climops | 79384df69a499784e9b4be6ed4f81c1866e1b29d | [
"MIT"
] | 8 | 2018-12-06T22:30:26.000Z | 2018-12-12T02:32:12.000Z | climops/calculate_statistics.py | HamidPahlavan/project | 79384df69a499784e9b4be6ed4f81c1866e1b29d | [
"MIT"
] | null | null | null | """
This module is used to generate correlation (R) and regression (b)
coefficients for relationships between the 2015 Census,
2018 Yale Climate Opinion Maps (YCOM) and land area datasets,
as well as p values for these relationships.
"""
import numpy as np
import pandas as pd
from scipy.stats import linregress
def calculate_stats_outputs(n_ycom, n_census, ycom_county, census):
    """
    Run a linear regression between every YCOM variable and every census
    variable, dropping rows where the census value is null.

    Inputs: n_ycom, a full list of names for ycom variables,
            n_census, a full list of names for census variables
    Outputs: array of shape (len(n_ycom), len(n_census), 5) holding the
             linregress results (slope, intercept, r, p, stderr) per pair
    """
    outputs = np.zeros((len(n_ycom), len(n_census), 5))
    for yind, yvar in enumerate(n_ycom):
        for cind, cvar in enumerate(n_census):
            # Mask out counties with no census value for this variable.
            mask = census[cvar].notnull()
            outputs[yind, cind, :] = linregress(
                ycom_county[yvar][mask], census[cvar][mask])
    return outputs
def calculate_stats_outputs_standard(n_ycom, n_census, ycom_county, census):
    """
    Same pairwise linregress as calculate_stats_outputs, but with the
    census variable standardized first:
        standardized_column = (column - mean(column)) / std(column)

    Inputs: n_ycom, a full list of names for ycom variables,
            n_census, a full list of names for census variables
    Outputs: array of shape (len(n_ycom), len(n_census), 5) holding the
             linregress results per pair, on standardized census values
    """
    outputs = np.zeros((len(n_ycom), len(n_census), 5))
    for yind, yvar in enumerate(n_ycom):
        for cind, cvar in enumerate(n_census):
            mask = census[cvar].notnull()
            ycom_vals = ycom_county[yvar][mask]
            cens_vals = census[cvar][mask]
            # Population std (np.std default ddof=0), as before.
            standardized = (cens_vals - np.mean(cens_vals)) / np.std(cens_vals)
            outputs[yind, cind, :] = linregress(ycom_vals, standardized)
    return outputs
def get_regs_df(stats_outputs_standard, n_census, n_ycom):
    """
    Build a DataFrame of regression coefficients (slopes) from the
    standardized outputs — YCOM variables as rows, census variables as
    columns.
    """
    # Index 0 of the last axis is the linregress slope.
    return pd.DataFrame(
        stats_outputs_standard[:, :, 0], columns=n_census, index=n_ycom)
def get_cors_df(stats_outputs, n_census, n_ycom):
    """
    Build a DataFrame of correlation coefficients — YCOM variables as
    rows, census variables as columns.
    """
    # Index 2 of the last axis is the linregress rvalue.
    return pd.DataFrame(stats_outputs[:, :, 2], columns=n_census, index=n_ycom)
def get_pvalues_df(stats_outputs, n_census, n_ycom):
    """
    Build a DataFrame of p-values — YCOM variables as rows, census
    variables as columns.
    """
    # Index 3 of the last axis is the linregress pvalue.
    return pd.DataFrame(stats_outputs[:, :, 3], columns=n_census, index=n_ycom)
| 39.68 | 97 | 0.713374 | 412 | 2,976 | 4.975728 | 0.254854 | 0.081951 | 0.058537 | 0.021463 | 0.555122 | 0.555122 | 0.555122 | 0.491707 | 0.381463 | 0.381463 | 0 | 0.007595 | 0.203629 | 2,976 | 74 | 98 | 40.216216 | 0.857384 | 0.403898 | 0 | 0.275862 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.172414 | false | 0 | 0.103448 | 0 | 0.448276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e9dcbc8e5b65bb12dd6691d988f8f04cfc8ae26 | 275 | py | Python | vFXT/version.py | ekpgh/AvereSDK-1 | 1e3584f08d2fb519ea3870e7e440cded9aacacd2 | [
"MIT"
] | null | null | null | vFXT/version.py | ekpgh/AvereSDK-1 | 1e3584f08d2fb519ea3870e7e440cded9aacacd2 | [
"MIT"
] | 2 | 2019-03-07T23:59:08.000Z | 2019-03-20T21:47:25.000Z | vFXT/version.py | ekpgh/AvereSDK-1 | 1e3584f08d2fb519ea3870e7e440cded9aacacd2 | [
"MIT"
] | null | null | null | # Copyright (c) 2015-2020 Avere Systems, Inc. All Rights Reserved.
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root for license information.
__version__ = "0.5.4.3"
__version_info__ = (0, 5, 4, 3)
| 45.833333 | 90 | 0.745455 | 42 | 275 | 4.666667 | 0.690476 | 0.102041 | 0.173469 | 0.040816 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 0.156364 | 275 | 5 | 91 | 55 | 0.775862 | 0.770909 | 0 | 0 | 0 | 0 | 0.118644 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8e9e302f177c972e4caf914f4f0b636eed885e2d | 251 | py | Python | exercises/solution_F4.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | exercises/solution_F4.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | exercises/solution_F4.py | dataXcode/IPP | c9b94ad2d7dc14b01e6657a4fa555507bbc7e93b | [
"MIT"
] | null | null | null | house = [ ['hallway', 14.35],
['kitchen', 15.0],
['living room', 19.0],
['bedroom', 12.5],
['bathroom', 8.75] ]
# Code the for loop
for x in house:
print(str(x[0]) + ' area is ' + str(x[1]) + 'm')
| 25.1 | 50 | 0.426295 | 35 | 251 | 3.057143 | 0.8 | 0.074766 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.110429 | 0.350598 | 251 | 9 | 51 | 27.888889 | 0.546012 | 0.067729 | 0 | 0 | 0 | 0 | 0.215517 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8eaa9e9b44dd4cdbcc7d177b9fd352b66bc449f6 | 1,050 | py | Python | vuln_server/vulnerabilities/subprocess_vuln.py | denny00786/CASoftwareDevelopment | d03c82b6bb033a39b4270115ec464eca773e0814 | [
"Apache-2.0"
] | 1 | 2020-04-02T00:29:16.000Z | 2020-04-02T00:29:16.000Z | vuln_server/vulnerabilities/subprocess_vuln.py | denny00786/CASoftwareDevelopment | d03c82b6bb033a39b4270115ec464eca773e0814 | [
"Apache-2.0"
] | null | null | null | vuln_server/vulnerabilities/subprocess_vuln.py | denny00786/CASoftwareDevelopment | d03c82b6bb033a39b4270115ec464eca773e0814 | [
"Apache-2.0"
] | 4 | 2021-04-01T21:31:01.000Z | 2022-03-23T08:22:44.000Z | import subprocess
from vuln_server.outputgrabber import OutputGrabber
from flask import request, redirect, render_template
class SubprocessVuln():
def bypass(self):
if request.method == 'POST':
# Check if data is not empty, post forms has all params defined
# which may be empty and cause unexpected behaviour.
if request.form['input_data'] != '':
try:
# Instanciate a different stdout grabber for subprocess
output = OutputGrabber()
with output:
# Execute system command with an unsafe input parameter
subprocess.call("ping -c1 " +
request.form['input_data'], shell=True)
return output.capturedtext
except Exception as e:
return "Server Error: {}:".format(str(e))
else:
return redirect(request.url)
return render_template('subprocess.html')
| 38.888889 | 79 | 0.552381 | 102 | 1,050 | 5.637255 | 0.686275 | 0.048696 | 0.055652 | 0.069565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001534 | 0.379048 | 1,050 | 26 | 80 | 40.384615 | 0.880368 | 0.209524 | 0 | 0 | 0 | 0 | 0.078788 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.055556 | 0.166667 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8eab0af3b0aca18605cc36d406b8f68048ee7f83 | 2,050 | py | Python | tests/test_resource.py | luhn/pyramid-resource | 7de2f4d136ef39f8b223ef675def6a04d71311ae | [
"MIT"
] | 1 | 2019-02-05T03:06:24.000Z | 2019-02-05T03:06:24.000Z | tests/test_resource.py | luhn/pyramid-resource | 7de2f4d136ef39f8b223ef675def6a04d71311ae | [
"MIT"
] | null | null | null | tests/test_resource.py | luhn/pyramid-resource | 7de2f4d136ef39f8b223ef675def6a04d71311ae | [
"MIT"
] | null | null | null | import pytest
from pyramid_resource import Resource
def test_default_lookup():
    """A child declared in ``__children__`` is reachable by key lookup."""

    class Child(Resource):
        pass

    class Parent(Resource):
        __children__ = {"sub": Child}

    parent = Parent("request")
    child = parent["sub"]

    assert isinstance(child, Child)
    assert child.request == "request"
    assert child.__name__ == "sub"
    assert child.__parent__ is parent

    # Keys not declared in __children__ raise KeyError.
    with pytest.raises(KeyError):
        parent["sub2"]
def test_custom_lookup_subclass():
    """``get_child`` may return a Resource subclass used to build the child."""

    class Child(Resource):
        pass

    class Parent(Resource):
        def get_child(self, key):
            assert key == "sub"
            return Child

    parent = Parent("request")
    child = parent["sub"]

    assert isinstance(child, Child)
    assert child.request == "request"
    assert child.__name__ == "sub"
    assert child.__parent__ is parent
def test_custom_lookup_tuple():
    """``get_child`` may return (class, mapping); the mapping becomes attributes."""

    class Child(Resource):
        pass

    class Parent(Resource):
        def get_child(self, key):
            assert key == "sub"
            return Child, {"foo": "bar"}

    parent = Parent("request")
    child = parent["sub"]

    assert isinstance(child, Child)
    assert child.request == "request"
    assert child.__name__ == "sub"
    assert child.__parent__ is parent
    # The extra mapping is exposed as plain attributes on the child.
    assert child.foo == "bar"
def test_getattr():
    """Attribute lookup falls back through the resource lineage."""

    class PlainResource(Resource):
        pass

    class AttrResource(Resource):
        subfoo = "subbar"

        @property
        def prop(self):
            return "myprop"

    root = AttrResource("request")
    middle = PlainResource("request", "sub", root, foo="bar")
    leaf = PlainResource("request", "sub", middle)

    # The root has no "foo" anywhere in its lineage.
    with pytest.raises(AttributeError):
        assert root.foo

    # Class attributes and properties resolve on the resource itself...
    assert root.subfoo == "subbar"
    assert root.prop == "myprop"

    # ...keyword data plus ancestor attributes resolve on the child...
    assert middle.foo == "bar"
    assert middle.subfoo == "subbar"
    assert middle.prop == "myprop"

    # ...and grandchildren inherit through every ancestor.
    assert leaf.foo == "bar"
    assert leaf.subfoo == "subbar"
    assert leaf.prop == "myprop"
| 24.117647 | 60 | 0.621463 | 218 | 2,050 | 5.66055 | 0.201835 | 0.072934 | 0.077796 | 0.090762 | 0.544571 | 0.544571 | 0.544571 | 0.461912 | 0.461912 | 0.461912 | 0 | 0.000664 | 0.264878 | 2,050 | 84 | 61 | 24.404762 | 0.818182 | 0 | 0 | 0.523077 | 0 | 0 | 0.080976 | 0 | 0 | 0 | 0 | 0 | 0.369231 | 1 | 0.107692 | false | 0.061538 | 0.030769 | 0.015385 | 0.338462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
8eb957a2a53df1c654647771e4169777cc775e3d | 1,222 | py | Python | pile.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 3 | 2021-01-21T02:06:12.000Z | 2022-03-14T10:26:43.000Z | pile.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | null | null | null | pile.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 1 | 2021-08-29T07:47:12.000Z | 2021-08-29T07:47:12.000Z | import random
from card import Card, CardType
class Pile:
    """A pile (deck) of cards supporting draws, shuffles and removals."""

    def __init__(self, cards):
        """Adopt *cards* (a list) as the pile's contents; it is not copied."""
        self.cards = cards

    def __getitem__(self, key):
        """Allow ``pile[i]`` indexing straight into the card list."""
        return self.cards[key]

    def __str__(self):
        """Multi-line summary: a header with the count, then one card per line."""
        representation = f'Pile with {len(self.cards)} cards:\n'
        for card in self.cards:
            representation += f'{card}\n'
        return representation

    @staticmethod
    def from_config_file(path_to_file):
        """Build a Pile from a config file.

        The file starts with the card count on its own line, followed by
        alternating territory-name and card-type lines.  Two wildcard cards
        are always added on top of the configured cards.
        """
        new_cards = [
            Card(None, CardType.WILDCARD),
            Card(None, CardType.WILDCARD)
        ]
        with open(path_to_file) as f:
            card_count = int(f.readline().strip())
            for _ in range(card_count):
                territory = f.readline().strip()
                card_type = f.readline().strip()
                new_cards.append(Card(territory, CardType[card_type]))
        return Pile(new_cards)

    def shuffle(self):
        """Shuffle the pile in place."""
        random.shuffle(self.cards)

    def remove_card(self, card):
        """Remove the first card equal to *card* (raises ValueError if absent)."""
        self.cards.remove(card)

    def remove_card_with_index(self, index):
        """Remove the card at *index*.

        Bug fix: the previous implementation did
        ``self.cards.remove(self.cards[index])``, which removes the first
        card *equal* to the one at ``index`` — the wrong card whenever the
        pile contains duplicates.  Deleting by position is always correct.
        """
        del self.cards[index]

    def add_card(self, card):
        """Append *card* to the bottom of the pile."""
        self.cards.append(card)

    def draw_card(self):
        """Remove and return the top card (index 0)."""
        return self.cards.pop(0)
| 24.44 | 70 | 0.594926 | 151 | 1,222 | 4.602649 | 0.311258 | 0.142446 | 0.060432 | 0.069065 | 0.060432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001163 | 0.296236 | 1,222 | 49 | 71 | 24.938776 | 0.806977 | 0 | 0 | 0 | 0 | 0 | 0.036007 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.257143 | false | 0 | 0.057143 | 0.057143 | 0.457143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ec74f7148172e1125c6011ff0477dea31725ff0 | 1,038 | py | Python | CH02/2.5.py | MonoHaru/Deep-Learning-from-Scratch_2 | e7dd6e7c82a34fadfa17331c2934f5f9ae2c3ec3 | [
"MIT"
] | null | null | null | CH02/2.5.py | MonoHaru/Deep-Learning-from-Scratch_2 | e7dd6e7c82a34fadfa17331c2934f5f9ae2c3ec3 | [
"MIT"
] | null | null | null | CH02/2.5.py | MonoHaru/Deep-Learning-from-Scratch_2 | e7dd6e7c82a34fadfa17331c2934f5f9ae2c3ec3 | [
"MIT"
] | null | null | null | # 2.5 정리
# 이번 장에서는 자연어를 대상으로,
# 특히 컴퓨터에게 '단어의 의미'를 이해하기 위한 주제로 진행함
# 시소러스 기법
'''
단어들의 관련성을 사람이 수작업으로 하나씩 정의한다.
이 작업은 매우 힘들고 (느낌의 미세한 차이를 나타낼 수 없다 등) 표현력에도 한계가 있다.
'''
# 통계 기반 기법
'''
말뭉치로부터 단어의 의미를 자동으로 추출하고, 그 의미를 벡터로 표현한다.
구체적으로
1. 단어의 동시발생 행렬을 만든다.
2. PPMI 행렬로 변환한다.
3. 안정성을 높이기 위해 SVD를 이용해 차원을 감소시켜, 각 단어의 분산 표현을 만든다.
4. 분산 표현에 따르면 의미가 (그리고 문법적인 용법면에서) 비슷한 단어들이 벡터 공간에서도 서로 가까이 모여 있음을 확인한다.
'''
# 전처리 함수
'''
cos_similarity(): 벡터 간 유사도를 측정하는 함수
most_similar(): 유사 단어의 랭킹을 표시하는 함수
'''
# 이번 장에서 배운 내용
'''
1. WordNet 등의 시소러스를 이용하면 유의어를 얻거나 단어 사이의 유사도를 측정하는 등 유용한 작업을 할 수 있다.
2. 시소러스 기반 기법은 시소러스를 작성하는 데 엄청난 인적 자원이 든다거나 새로운 단어에 대응하기 어렵다는 문제가 있다.
3. 현재는 말뭉치를 이용해 단어를 백터화하는 방식이 주로 쓰인다.
4. 최근의 단어 벡터화 기법들은 대부분 '단어의 의미는 주변 단어에 의해 형성된다'는 분포 가설에 기초한다.
5. 통계 기반 기법은 말뭉치 안의 각 단어에 대해서 그 단어의 주변 단어의 빈도를 집계한다(동시발생 행렬).
6. 동시발생 행렬 PPMI 행렬로 변환하고 다시 차원을 감소시킴으로써, 거대한 '희소벡터'를 작은 '밀집벡터'로 변환할 수 있다.
7. 단어의 벡터 공간에서는 의미가 가까운 단어는 그 거리도 가까울 것으로 기대된다.
''' | 28.833333 | 78 | 0.60501 | 216 | 1,038 | 2.898148 | 0.75463 | 0.01278 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018336 | 0.316956 | 1,038 | 36 | 79 | 28.833333 | 0.864598 | 0.17341 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8eca58ddc6a8a4fc980e7ffec65c56c6b1e8dc5f | 343 | py | Python | data/bug_dataset.py | happygirlzt/soft_alignment_model_bug_deduplication | 9c529542749a52e377baeb99d1782920bc72df49 | [
"Unlicense"
] | 2 | 2020-11-11T00:26:25.000Z | 2020-12-21T16:17:28.000Z | data/bug_dataset.py | happygirlzt/soft_alignment_model_bug_deduplication | 9c529542749a52e377baeb99d1782920bc72df49 | [
"Unlicense"
] | 5 | 2020-12-22T10:59:38.000Z | 2021-07-13T15:00:46.000Z | data/bug_dataset.py | irving-muller/soft_alignment_model_bug_deduplication | abf786a17f526d965f1b6c303b06f26662d22f32 | [
"Unlicense"
] | 6 | 2020-09-25T01:01:37.000Z | 2022-02-20T19:29:31.000Z | """
Each dataset has bug report ids and the ids of duplicate bug reports.
"""
class BugDataset(object):
    """Reader for a dataset file: the first line is an info header, the
    second the bug report ids, the third the ids of duplicate bug reports.
    """

    def __init__(self, file):
        """Parse *file* (a path); previously the handle was never closed.

        Using a ``with`` block guarantees the file is closed even if a
        readline fails, fixing the resource leak in the original code.
        """
        with open(file, 'r') as f:
            # Free-form description of this dataset split.
            self.info = f.readline().strip()
            # Whitespace-separated ids, kept as strings.
            self.bugIds = f.readline().strip().split()
            self.duplicateIds = f.readline().strip().split()
| 28.583333 | 71 | 0.612245 | 49 | 343 | 4.204082 | 0.612245 | 0.131068 | 0.203884 | 0.087379 | 0.271845 | 0.271845 | 0.271845 | 0.271845 | 0 | 0 | 0 | 0 | 0.230321 | 343 | 11 | 72 | 31.181818 | 0.780303 | 0.201166 | 0 | 0 | 0 | 0 | 0.003759 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8eca5e2a175ef4b1ea3f4ed24d6fd9463dbced1b | 517 | py | Python | orm_sqlfan/libreria/migrations/0004_auto_20191125_0518.py | rulotr/djangorm_sqlfan | 4bcadd1459e5a39584bb5ad8bafaaf3993324f6a | [
"MIT"
] | 2 | 2021-09-29T01:08:56.000Z | 2022-02-14T03:34:37.000Z | orm_sqlfan/libreria/migrations/0004_auto_20191125_0518.py | rulotr/djangorm_sqlfan | 4bcadd1459e5a39584bb5ad8bafaaf3993324f6a | [
"MIT"
] | 4 | 2020-02-12T02:52:19.000Z | 2021-04-08T20:46:05.000Z | orm_sqlfan/libreria/migrations/0004_auto_20191125_0518.py | rulotr/djangorm_sqlfan | 4bcadd1459e5a39584bb5ad8bafaaf3993324f6a | [
"MIT"
] | null | null | null | # Generated by Django 2.2.7 on 2019-11-25 05:18
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.7; swaps the "libro" model's primary key
    # from the implicit integer "id" to the natural "isbn" column.

    dependencies = [
        ('libreria', '0003_auto_20191125_0515'),
    ]

    operations = [
        # Drop the auto-generated integer primary key first...
        migrations.RemoveField(
            model_name='libro',
            name='id',
        ),
        # ...then promote "isbn" (13 chars, i.e. ISBN-13 length) to primary
        # key; serialize=False because primary keys are not serialized.
        migrations.AlterField(
            model_name='libro',
            name='isbn',
            field=models.CharField(max_length=13, primary_key=True, serialize=False),
        ),
    ]
| 22.478261 | 85 | 0.580271 | 54 | 517 | 5.425926 | 0.777778 | 0.061433 | 0.095563 | 0.122867 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091667 | 0.303675 | 517 | 22 | 86 | 23.5 | 0.722222 | 0.087041 | 0 | 0.25 | 1 | 0 | 0.1 | 0.048936 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
8ece4878dfbf0ffee3c017f8812769aed1333588 | 4,582 | py | Python | tests/h/services/flag_test.py | pombredanne/h | 9c4c2dc0d53ed5bed5183936c24b4c27b23070b4 | [
"BSD-2-Clause"
] | 2,103 | 2015-01-07T12:47:49.000Z | 2022-03-29T02:38:25.000Z | tests/h/services/flag_test.py | pombredanne/h | 9c4c2dc0d53ed5bed5183936c24b4c27b23070b4 | [
"BSD-2-Clause"
] | 4,322 | 2015-01-04T17:18:01.000Z | 2022-03-31T17:06:02.000Z | tests/h/services/flag_test.py | admariner/h | 25ef1b8d94889df86ace5a084f1aa0effd9f4e25 | [
"BSD-2-Clause"
] | 389 | 2015-01-24T04:10:02.000Z | 2022-03-28T08:00:16.000Z | import pytest
from h import models
from h.services import flag
class TestFlagServiceFlagged:
    """Tests for FlagService.flagged() / all_flagged() and their cache."""

    def test_it_returns_true_when_flag_exists(self, svc, flag):
        assert svc.flagged(flag.user, flag.annotation) is True

    def test_it_returns_false_when_flag_does_not_exist(self, svc, user, annotation):
        assert not svc.flagged(user, annotation)

    def test_it_handles_missing_values(self, svc, user, annotation):
        # Either argument may be None; both cases count as "not flagged".
        assert not svc.flagged(None, annotation)
        assert not svc.flagged(user, None)

    def test_it_uses_the_cache_if_possible(self, svc, user, annotation):
        assert not svc.flagged(user, annotation)

        # Poison the private cache: if flagged() consults it, the (stale)
        # True value is returned instead of the real database answer.
        svc._flagged_cache[  # pylint:disable=protected-access
            (user.id, annotation.id)
        ] = True

        assert svc.flagged(user, annotation)

    def test_it_lists_flagged_ids(self, svc, user, flag, noise):
        annotation_ids = [flag.annotation_id for flag in noise]
        annotation_ids.append(flag.annotation_id)

        all_flagged = svc.all_flagged(user, annotation_ids)

        assert all_flagged == {flag.annotation_id}
        # all_flagged() also primes the per-annotation cache for each id asked.
        assert svc._flagged_cache == {  # pylint:disable=protected-access
            (user.id, noise[0].annotation_id): False,
            (user.id, noise[1].annotation_id): False,
            (user.id, flag.annotation_id): True,
        }

    def test_it_handles_all_flagged_with_no_ids(self, svc, user):
        assert svc.all_flagged(user, []) == set()

    def test_it_handles_all_flagged_with_no_user(self, svc, annotation):
        assert svc.all_flagged(None, [annotation.id]) == set()

    @pytest.fixture
    def flag(self, factories, user, annotation):
        # A flag by `user` on `annotation` — the one the tests expect to find.
        return factories.Flag(user=user, annotation=annotation)

    @pytest.fixture
    def user(self, factories):
        return factories.User()

    @pytest.fixture
    def annotation(self, factories):
        return factories.Annotation()

    @pytest.fixture(autouse=True)
    def noise(self, factories):
        # Unrelated flags that must never leak into the results above.
        return factories.Flag.create_batch(2)
class TestFlagServiceCreate:
    """Tests for FlagService.create()."""

    def test_it_creates_flag(self, svc, db_session, factories):
        user = factories.User()
        annotation = factories.Annotation(userid=user.userid)

        svc.create(user, annotation)

        # The flag row should now exist for this (user, annotation) pair.
        flag = (
            db_session.query(models.Flag)
            .filter_by(user_id=user.id, annotation_id=annotation.id)
            .first()
        )
        assert flag is not None

    def test_it_skips_creating_flag_when_already_exists(
        self, svc, db_session, factories
    ):
        existing = factories.Flag()

        svc.create(existing.user, existing.annotation)

        # create() is idempotent: still exactly one row for this pair.
        assert (
            db_session.query(models.Flag)
            .filter_by(user_id=existing.user.id, annotation_id=existing.annotation.id)
            .count()
            == 1
        )
class TestFlagServiceCount:
    """Tests for FlagService.flag_count() / flag_counts() and their cache."""

    def test_flag_count_returns_zero_for_unflagged_annotation(self, svc, unflagged):
        assert not svc.flag_count(unflagged)

    def test_flag_count_returns_zero_for_None(self, svc):
        assert not svc.flag_count(None)

    def test_flag_count_returns_flag_count_for_flagged_annotation(self, svc, flagged):
        assert svc.flag_count(flagged) == 2

    def test_flag_count_uses_the_cache(self, svc, flagged):
        # Poison the private cache: a cached count short-circuits the query.
        svc._flag_count_cache[flagged.id] = 99999  # pylint:disable=protected-access
        assert svc.flag_count(flagged) == 99999

    def test_flag_counts(self, svc, flagged, unflagged):
        ann_ids = [flagged.id, unflagged.id]

        flag_counts = svc.flag_counts(ann_ids)

        # flag_counts() both returns the counts and primes the cache with them.
        assert (  # pylint:disable=protected-access
            flag_counts == svc._flag_count_cache == {flagged.id: 2, unflagged.id: 0}
        )

    def test_flag_counts_returns_empty_dict_for_no_ids(self, svc):
        assert svc.flag_counts([]) == {}

    def test_flag_counts_returns_zero_for_unflagged_annotation(self, svc, unflagged):
        flag_counts = svc.flag_counts([unflagged.id])

        assert not flag_counts[unflagged.id]

    @pytest.fixture
    def unflagged(self, factories):
        # An annotation with no flags at all.
        return factories.Annotation()

    @pytest.fixture
    def flagged(self, factories):
        # An annotation carrying exactly two flags.
        annotation = factories.Annotation()
        factories.Flag.create_batch(2, annotation=annotation)
        return annotation
class TestFlagServiceFactory:
    """Tests for the flag_service_factory() service factory."""

    def test_it_returns_flag_service(self, pyramid_request):
        service = flag.flag_service_factory(None, pyramid_request)

        assert isinstance(service, flag.FlagService)
@pytest.fixture
def svc(db_session):
    """A FlagService wired to the test database session."""
    return flag.FlagService(db_session)
| 31.6 | 86 | 0.685072 | 576 | 4,582 | 5.182292 | 0.154514 | 0.039866 | 0.030151 | 0.029481 | 0.370854 | 0.254271 | 0.225796 | 0.162144 | 0.094472 | 0.036181 | 0 | 0.005045 | 0.221301 | 4,582 | 144 | 87 | 31.819444 | 0.831558 | 0.027717 | 0 | 0.138614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.19802 | 1 | 0.237624 | false | 0 | 0.029703 | 0.059406 | 0.376238 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.