hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e2bfcef93fc96b8dc91446c56c75aa9e0e7b89e2 | 5,253 | py | Python | hw12/myscript.py | ranstotz/ece_3822 | 0fad15070f9047a9eccdab9178e4a38cfc148987 | [
"MIT"
] | null | null | null | hw12/myscript.py | ranstotz/ece_3822 | 0fad15070f9047a9eccdab9178e4a38cfc148987 | [
"MIT"
] | null | null | null | hw12/myscript.py | ranstotz/ece_3822 | 0fad15070f9047a9eccdab9178e4a38cfc148987 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# import required modules:
#
import os
import sys
import string
import random
from random import shuffle
from pathlib2 import Path
import linecache
import time
# This class shuffles songs without repeating and keeps track of where
# it left off. See '-help' option for more details.
#
class shuffler:
# define constructor, take arguments as parameters
#
def __init__(self):
self.argv_a = []
# end of constructor
# Method to print arguments from command line provided
#
def printArgs(self):
print "Arguments provided are: ", self.argv_a
return
# Set command line arguments provided, do not include script name
#
def setter(self, commandArgs):
# Set data
#
self.argv_a = commandArgs[1:]
return
# Check for a '-help' option and print help information
#
def check_options(self):
for args in self.argv_a:
if args == '-help':
print "\nsynopsis: This class shuffles the files in the provided command line argument path, then plays each song unrepeated until all songs have been played. Then it will reshuffle the songs and continue the same process.\n"
print "desc: see above.\n"
print "example: provide a path /songs/. Will capture the length of files in that directory and begin the shuffle.\n"
print "options: supports a '-help' option as shown here.\n"
print "arguments: path to files to be shuffled and '-help'.\n"
print "man page: none.\n"
# Exit program if help argument provided
#
sys.exit()
return
# Method to play the shuffler
#
def play(self):
# Get file list from data path in command line argument
#
for root, dir, files in os.walk(self.argv_a[0]):
# store the files from the path as a list in 'mysongs'
#
mysongs = files
# Start an infinite loop
#
while True:
# Check if counter file exists, if not, generate one to hold the counter
# in a scratch file. Also check if the counter has surpassed the number
# of songs
#
my_file = Path("./counter.txt")
if not my_file.is_file() or open("./counter.txt").readline() >= str(len(mysongs)):
# Set counter to 1 for first line in a file
#
songcounter = 1
# Write (or overwrite) song counter to file. Open, write, close the file.
#
counterOut = open("./counter.txt", "w")
counterOut.write(str(songcounter))
counterOut.close()
# Shuffle songs and write (or overwrite them) to a file line by line for each song
#
# Shuffle the list of songs fromt the arguments
#
shuffledList = mysongs
random.shuffle(shuffledList)
shuffleOut = open("./shuffle.txt", "w")
# Write shuffled list into file
#
for i in shuffledList:
shuffleOut.write("%s\n" % i)
# Loop over songs in list
#
for j in range(0, len(mysongs)):
# Get counter for index from file, cast to int, then print counter
#
tempCounter = int(open("./counter.txt").readline())
print tempCounter
# Get random song from the shuffle.txt file according to
# the counter above
#
currentSong = linecache.getline("./shuffle.txt", tempCounter)
# Print the song
#
print currentSong
# Increment counter, overwrite scratch file, and close
#
songcounter = tempCounter
songcounter += 1
counterOut = open("./counter.txt", "w")
counterOut.write(str(songcounter))
counterOut.close()
# Sleep for 1 second as to print 1 song per second
#
time.sleep(1)
# Exit gracefully
return
# main: this is the main function of this Python script
#
def main(argv):
# Create instance of the shuffler class
#
myshuffle = shuffler()
# Set the command line arguments as the input for the class
#
myshuffle.setter(argv)
# Check if the help option is invoked
#
myshuffle.check_options()
# Print the arguments provided to the class from the setter method
#
myshuffle.printArgs()
# Play the shuffler
#
myshuffle.play()
# End gracefully
#
return
# begin gracefully
#
if __name__ == "__main__":
main(sys.argv[0:])
#
# end of file
| 30.540698 | 241 | 0.524843 | 575 | 5,253 | 4.756522 | 0.321739 | 0.014625 | 0.016453 | 0.016088 | 0.050457 | 0.050457 | 0.050457 | 0.050457 | 0.050457 | 0.050457 | 0 | 0.003532 | 0.407196 | 5,253 | 171 | 242 | 30.719298 | 0.874759 | 0.312393 | 0 | 0.171875 | 0 | 0.03125 | 0.169539 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.125 | null | null | 0.171875 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2c75223c441e931f761b5c72b816629ae0fb22c | 656 | py | Python | mbrl-tools/tests/small_acrobot/submissions/dummy_kit/generative_regressor.py | ramp-kits/rl_simulator | c651904b890c7e97cbb29ffae82e00a76788c88c | [
"BSD-3-Clause"
] | 11 | 2021-03-24T08:57:58.000Z | 2022-03-23T14:25:17.000Z | mbrl-tools/tests/small_acrobot/submissions/dummy_kit/generative_regressor.py | ramp-kits/rl_simulator | c651904b890c7e97cbb29ffae82e00a76788c88c | [
"BSD-3-Clause"
] | 1 | 2020-10-23T17:13:57.000Z | 2021-03-23T17:46:24.000Z | mbrl-tools/tests/small_acrobot/submissions/dummy_kit/generative_regressor.py | ramp-kits/rl_simulator | c651904b890c7e97cbb29ffae82e00a76788c88c | [
"BSD-3-Clause"
] | 1 | 2021-06-17T01:18:31.000Z | 2021-06-17T01:18:31.000Z | import numpy as np
from rampwf.utils import BaseGenerativeRegressor
class GenerativeRegressor(BaseGenerativeRegressor):
def __init__(self, max_dists, target_dim):
self.decomposition = 'autoregressive'
def fit(self, X_array, y_array):
pass
def predict(self, X_array):
# constant prediction with value equal to 10
n_samples = X_array.shape[0]
types = ['norm']
means = np.full(shape=(n_samples, 1), fill_value=10)
sigmas = np.zeros((n_samples, 1))
params = np.concatenate((means, sigmas), axis=1)
weights = np.ones((n_samples, 1))
return weights, types, params
| 28.521739 | 60 | 0.655488 | 83 | 656 | 5 | 0.60241 | 0.077108 | 0.06506 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018109 | 0.242378 | 656 | 22 | 61 | 29.818182 | 0.816901 | 0.064024 | 0 | 0 | 0 | 0 | 0.029412 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.066667 | 0.133333 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e2c7ac772ba67bc802ebf29dae748fc6d17103e6 | 29,982 | py | Python | fluxcompensator/image.py | koepferl/FluxCompensator | 751cac08971845069da8c962bc83459f091ba0f8 | [
"BSD-2-Clause"
] | 9 | 2017-06-22T15:29:01.000Z | 2021-03-24T11:55:41.000Z | fluxcompensator/image.py | koepferl/FluxCompensator | 751cac08971845069da8c962bc83459f091ba0f8 | [
"BSD-2-Clause"
] | 1 | 2020-06-16T21:01:51.000Z | 2020-06-16T21:01:51.000Z | fluxcompensator/image.py | koepferl/FluxCompensator | 751cac08971845069da8c962bc83459f091ba0f8 | [
"BSD-2-Clause"
] | 5 | 2017-06-22T14:57:24.000Z | 2020-06-14T16:46:44.000Z | from copy import deepcopy
import os
ROOT = os.path.dirname(os.path.abspath(__file__)) + '/'
import numpy as np
from numpy.random import normal
from astropy import log as logger
from astropy.io import fits
from astropy.wcs import WCS
from .psf import GaussianPSF, FilePSF, FunctionPSF
from .utils.plot import MakePlots
from .utils.resolution import ConservingZoom, central
from .utils.tools import properties, grid_units, get_slices, average_collapse, central_wav
from .utils.units import ConvertUnits
class SyntheticImage(object):
'''
SyntheticImage is part the FluxCompensator. It converts
input_arrays (e. g. HYPERION ModelOutput in 2D) to "realistic" synthetic observations
(e. g. by accounting for PSF and noise).
It contains attributes like ModelOutput (see Notes).
If input_array is already a SyntheticImage object, the attributes are
passed. If input_array is not a SyntheticImage object, SyntheticImage
specific attributes are defined and then passed.
Parameters
----------
input_array : SyntheticImage, ModelOutput, optional
input_array also reads arrays with ModelOutput like properties.
unit_out : str, optional
The output units for SyntheticImage val. Valid options are:
* ``'ergs/cm^2/s'``
* ``'ergs/cm^2/s/Hz'``
* ``'Jy'``
* ``'mJy'``
* ``'MJy/sr'``
The default is ``'ergs/cm^2/s'``.
name : str
The name of the FluxCompensator object until another
input_array is called. The default is ``None``.
Attributes
----------
wav : numpy.ndarray
The wavelength of the val image in microns.
val : numpy.ndarray
The 2D image with shape (x, y).
units : str
Current units of the val image.
distance : str
Distance to the observed object in cm.
x_min : float
Physical offset from axis origin in FOV in cm.
x_max : float
Physical offset from axis origin in FOV in cm.
y_min : float
Physical offset from axis origin in FOV in cm.
y_max : float
Physical offset from axis origin in FOV in cm.
lon_min : float
Minimal longitudinal angle.
lon_max : float
Maximal longitudinal angle.
lat_min : float
Minimal latitudinal angle.
lat_max : float
Maximal latitudinal angle.
pix_area_sr : float
Pixel area per sr.
Notes
-----
unit_in : str
Unit of val in input_array. Valid options are:
* ``'ergs/cm^2/s'``
* ``'ergs/cm^2/s/Hz'``
* ``'Jy'``
* ``'mJy'``
* ``'MJy/sr'``
grid_unit : float
Physical unit of FOV axis in cm. Valid options are:
* ``au`` in cm
* ``pc`` in cm
* ``kpc`` in cm
grid_unit_name
Astronomical unit of FOV axis. Valid options are:
* ``'au'``
* ``'pc'``
* ``'kpc'``
FOV : tuple
Tuple ``FOV(x,y)`` of Field of View pixel entries:
* pixel in x direction: ``FOV[0]``
* pixel in y direction: ``FOV[1]``
name : str
The name of the FluxCompensator object until another
input_array is called. The default is ``None``.
stage : str
Gives current operation stage of SyntheticImage.
E. g. ``'SyntheticImage: convolve_PSF'``
log : list
List of strings of the previous and current stages.
filter : dict
Dictionary filter = ``{name, waf_0, waf_min, waf_max}``
of the applied filter:
* name of filter: ``filter['name']``
* central wavelength: ``filter['waf_0']``
* minimal wavelength: ``filter['waf_min']``
* maximal wavelength: ``filter['waf_max']``
Returns
-------
image : SyntheticImage
2D val array with SyntheticImage properties.
flux : SyntheticFlux
0D val array (scalar) with SyntheticFlux properties.
'''
def __init__(self, input_array, unit_out='ergs/cm^2/s', name=None):
# Hyperion ModelOutput attributes
#print input_array.val.ndim, input_array.val.shape[2]
#if input_array.val.ndim == 3 and input_array.val.shape[2] == 1:
#self.val = np.array(deepcopy(input_array.val[:,:,0]))
#if input_array.val.ndim == 2:
self.val = np.array(deepcopy(input_array.val))
#else:
# raise Exception('input_array does not have the right dimensions. numpy array of (x, y) or (x, y, 1) is required.')
self.wav = np.array(deepcopy(input_array.wav))
self.units = input_array.units
self.distance = input_array.distance
self.x_max = input_array.x_max
self.x_min = input_array.x_min
self.y_max = input_array.y_max
self.y_min = input_array.y_min
self.lon_min = input_array.lon_min
self.lon_max = input_array.lon_max
self.lat_min = input_array.lat_min
self.lat_max = input_array.lat_max
self.pix_area_sr = input_array.pix_area_sr
##################
# new attributes #
##################
from .cube import SyntheticCube
if isinstance(input_array, SyntheticImage) or isinstance(input_array, SyntheticCube):
# attributes with are passed, since input_array is SyntheticCube or SyntheticImage
# physical values
self.unit_in = input_array.unit_in
self.unit_out = input_array.unit_out
self.grid_unit = input_array.grid_unit
self.grid_unit_name = input_array.grid_unit_name
# properties of image
self.FOV = deepcopy(input_array.FOV)
# name
self.name = input_array.name
self.stage = input_array.stage
self.log = deepcopy(input_array.log)
# filter
self.filter = deepcopy(input_array.filter)
else: # attributes are defined, since input_array is NOT SyntheticCube or Image
# physical values
self.unit_in = input_array.units
self.unit_out = unit_out
self.grid_unit = grid_units(self.x_max - self.x_min)['grid_unit']
self.grid_unit_name = grid_units(self.x_max - self.x_min)['grid_unit_name']
self.FOV = (self.x_max - self.x_min, self.y_max - self.y_min)
# name
self.name = name
self.stage = 'SyntheticImage: initial'
self.log = [self.stage]
# filter
self.filter = {'name': None, 'waf_0': None, 'waf_min': None, 'waf_max': None}
# convert into val units into unit_out
s = ConvertUnits(wav=self.wav, val=self.val)
self.val = s.get_unit(in_units=self.unit_in, out_units=self.unit_out, input_resolution=self.resolution['arcsec'])
self.units = self.unit_out
def extinction(self, A_v, input_opacities=None):
'''
Accounts for reddening.
Parameters
----------
A_v : Value of the visible extinction.
input_opacities : ``None``, str
If ``None`` standard extinction law is used.
Otherwise a e. g. input_opacities.txt file can be passed
as a str to read an opacity file with column #1 wav in microns
and column #2 in cm^2/g.
Default is ``None``.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: extinction'
# read own extinction law
if input_opacities is None:
t = np.loadtxt(ROOT + 'database/extinction/extinction_law.txt')
else:
t = np.loadtxt(input_opacities)
wav_ext = t[:, 0]
k_lam = t[:, 1]
# wav_ext monotonically increasing
if wav_ext[0] > wav_ext[1]:
wav_ext = wav_ext[::-1]
k_lam = k_lam[::-1]
k_v = np.interp(0.550, wav_ext, k_lam)
# interpolate to get A_int for a certain wavelength
k = np.interp(self.wav, wav_ext, k_lam)
A_int_lam = A_v * (k / k_v)
# apply extinction law
val_ext = self.val * 10 ** (-0.4 * A_int_lam)
# return SimulateImage
i = SyntheticImage(self)
i.val = val_ext
i.stage = stage
i.log.append(i.stage)
return i
def change_resolution(self, new_resolution, grid_plot=None):
'''
Changes the resolution of val image.
Parameters
----------
new_resolution : Resolution which the val array should get in
``arcsec/pixel.``
grid_plot : ``None``, ``True``
If ``True`` old and new resolution is visualized in a plot.
Default is ``None``.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: change_resolution'
# debugging comment
logger.debug('-' * 70)
logger.debug(stage)
logger.debug('-' * 70)
logger.debug('total value before zoom : ' + str('%1.4e' % np.sum(self.val)) + ' ' + str(self.units))
# match resolution of psf and val slice
f = ConservingZoom(array=self.val, initial_resolution=self.resolution['arcsec'], new_resolution=new_resolution)
zoomed_val = f.zoom()
# average after changing resolution for MJy/sr
if self.units == 'MJy/sr' or self.units == 'Jy/arcsec^2':
# size of new pixel in units of old pixel
size = new_resolution ** 2 / self.resolution['arcsec'] ** 2
zoomed_val = zoomed_val / size
if grid_plot is not None:
f.zoom_grid(self.name)
# debugging comment
logger.debug('total val after zoom : ' + str('%1.4e' % np.sum(zoomed_val)) + ' ' + str(self.units))
# return SimulateCube
i = SyntheticImage(self)
i.val = zoomed_val
i.stage = stage
i.log.append(i.stage)
i.FOV = (f.len_nx / f.len_nrx * self.FOV[0], f.len_ny / f.len_nry * self.FOV[1])
return i
def central_pixel(self, dx, dy):
'''
Move array right and up to create a central pixel.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: central_pixel'
# match resolution of psf and val slice
ce = central(array=self.val, dx=dx, dy=dy)
len_x_old = float(self.pixel[0])
len_x_new = float(len(ce[:,0]))
len_y_old = float(self.pixel[1])
len_y_new = float(len(ce[0,:]))
old_FOV = self.FOV
new_FOV = (len_x_new / len_x_old * old_FOV[0], len_y_new / len_y_old * old_FOV[1])
# return SimulateCube
i = SyntheticImage(self)
i.val = ce
i.stage = stage
i.log.append(i.stage)
i.FOV = new_FOV
return i
def convolve_psf(self, psf):
'''
Convolves the val image with a PSF of choice.
Parameters
----------
psf : GaussianPSF, FilePSF, database, FunctionPSF
* GaussianPSF(self, diameter): Convolves val with Gaussian PSF.
* FilePSF(self, psf_file, condensed) : Reads PSF from input file.
* database : object
If PSF ``name_PSF`` from FluxCompensator database is used.
* FunctionPSF(self, psf_function, width): Convolves defined PSF.
2D val image of SyntheticImage.val convolved with PSF.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: convolve_PSF'
# debugging comments
if isinstance(psf, GaussianPSF):
logger.debug('-' * 70)
logger.debug(stage + 'with GaussianPSF')
logger.debug('-' * 70)
# convolve val with classes GaussianPSF, FilePSF and FunctionPSF
val = psf.convolve(wav=self.wav, array=self.val, resolution=self.resolution)
# return SyntheticImage
i = SyntheticImage(self)
i.stage = stage
i.log.append(i.stage)
i.val = np.array(val)
return i
def add_noise(self, mu_noise, sigma_noise, seed=None, diagnostics=None):
'''
Adds normal distributed noise to the val image of SyntheticImage.
Parameters
----------
mu_noise : float
Mean of the normal distribution.
Good choice: mu_noise = 0.
sigma_noise : float
Standard deviation of the normal distribution.
Good choice arround:
* ``'ergs/cm^2/s'`` : sigma_noise = 10.**(-13)
* ``'ergs/cm^2/s/Hz'`` : sigma_noise = 10.**(-26)
* ``'Jy'`` : sigma_noise = 10.**(-3)
* ``'mJy'`` : sigma_noise = 10.**(-1)
* ``'MJy/sr'`` : sigma_noise = 10.**(-10)
seed : float, ``None``
When float seed fixes the random numbers to a certain sequence in order to create reproducible results.
Default is ``None``.
diagnostics : truetype
When ``True`` noise array is stored in a fits file.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: add_noise'
if sigma_noise != 0. and sigma_noise != 0:
if seed is not None:
np.random.seed(seed=seed)
noise = normal(mu_noise, sigma_noise, self.pixel)
if sigma_noise == 0. or sigma_noise == 0:
noise = np.zeros(self.pixel)
# Get noise.fits file
if diagnostics is True:
fits.writeto(self.name + '_' + 'process-output_SI-noise.fits', noise, clobber=True)
# add noise if val is already collapsed (x, y)
val = self.val.copy() + noise
# return SyntheticImage
i = SyntheticImage(self)
i.stage = stage
i.log.append(i.stage)
i.val = np.array(val)
return i
def get_total_val(self):
'''
Collapses the val image of SyntheticImage into a 0D val array.
Returns
-------
flux : SyntheticFlux
'''
stage = 'SyntheticImage: get_total_val'
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=self.wav, val=self.val)
val = s.get_unit(in_units=self.units, out_units='Jy', input_resolution=self.resolution['arcsec'])
else: val = self.val
# collapse 2D image to a single scalar val
total_val = np.sum(val)
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=self.wav, val=total_val)
total_val = s.get_unit(in_units='Jy', out_units=self.unit_out, input_resolution=self.resolution['arcsec'] * self.pixel[0])
# return SyntheticFlux
from .flux import SyntheticFlux
f = SyntheticFlux(self)
f.log.append(stage)
f.stage = 'SyntheticFlux: initial'
f.log.append(f.stage)
f.val = np.array(total_val)
return f
def plot_image(self, prefix=None, name=None, multi_cut=None, single_cut=None, set_cut=None, dpi=None):
'''
Plots the val image of SyntheticImage. The wavelength interval
around the central wavelength labels the plot.
Parameters
----------
prefix : str
Name of the image. Default naming chain is switched off.
name : str
Name of image within the default naming chain to distinguish the
plot files. E. g. 'PSF_gaussian'
mulit_cut : ``True``, ``None``
* ``True`` : plots chosen image slice at cuts of [100, 99, 95, 90]%.
* ``None`` : no mulit-plot is returned.
Default is ``None``.
single_cut : float [0,100], ``None``
* float : cut level for single plot of image slice.
* ``None`` : no single plot is returned.
set_cut : tuple, ``None``
* tuple : set_cut(v_min, v_max)
Minimal and maximal physical val presented in the colorbars.
* ``None`` : no plot with minimal and maximal cut is returned.
Default is ``None``.
dpi : ``None``, scalar > 0
The resolution in dots per inch.
``None`` is default and will use the val savefig.dpi
in the matplotlibrc file.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: plot_image'
if prefix is None and name is None:
raise Exception('If prefix name is not given, you need to give the a name to enable the default naming chain.')
if prefix is not None:
if multi_cut is True and (single_cut is not None or set_cut is not None):
raise Exception('If prefix naming is enabled only one plotting option can be chosen.')
elif multi_cut is None and (single_cut is not None and set_cut is not None):
raise Exception('If prefix naming is enabled only one plotting option can be chosen.')
plot = MakePlots(prefix=prefix, name=name, input_array=SyntheticImage(self), multi_cut=multi_cut, single_cut=single_cut, set_cut=set_cut, dpi=dpi)
# return SyntheticImage
i = SyntheticImage(self)
i.stage = stage
i.log.append(i.stage)
return i
def add_to_observation(self, fits_file, name, position_pix=None, position_world=None, zero_edges=None):
'''
Blends the modeled realistic synthetic observation to a real observation in a fits file.
Parameters
----------
fits_file : str
fits_file of the observation.
name : str
Name of the output fits file.
position_pix : list, ``None``
Center position of the model in observation pixel coordinates.
Default is ``None``.
position_world : list, ``None``
Center position of the model in observation world coordinates.
Default is ``None``.
zero_edges : ``True``, ``None``
If ``True`` edges of model are normalized to zero.
Default is ``None``.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: add_to_observation'
# world coordinates from fits_file
w = WCS(fits_file)
if position_world is None and position_pix is None:
raise Exception('WARNING: Position of model center needs to be given either in world or pixel coordinates.')
if position_pix is not None:
pos = position_pix
p_x_pos, p_y_pos = pos[0], pos[1]
else:
pos = position_world
p_x_pos, p_y_pos = w.wcs_world2pix(pos[0], pos[1], 1)
# center position in pixel and adjust position in current grid
x_round = np.round(p_x_pos, 0)
x_int = int(p_x_pos)
y_round = np.round(p_y_pos, 0)
y_int = int(p_y_pos)
# even or odd
if len(self.val[0]) % 2 == 0 and len(self.val[1]) % 2 == 0:
pos = np.array([x_round, y_round])
else:
if x_int == int(x_round):
if y_int == int(y_round):
pos = np.array([x_round + 0.5, y_round + 0.5])
else:
pos = np.array([x_round + 0.5, y_round - 0.5])
else:
if y_int == int(y_round):
pos = np.array([x_round - 0.5, y_round + 0.5])
else:
pos = np.array([x_round - 0.5, y_round - 0.5])
# limits of model in observation
start_x = pos[0] - len(self.val[0]) / 2.
stop_x = pos[0] + len(self.val[0]) / 2.
start_y = pos[1] - len(self.val[1]) / 2.
stop_y = pos[1] + len(self.val[1]) / 2.
# normalized that edges are zero
if zero_edges is True:
model = self.val.copy() - np.min(self.val)
else:
model = self.val.copy()
# open fits_file
hdulist = fits.open(fits_file)
hdu = hdulist[0]
header = hdu.header
if np.allclose(np.abs(header['CDELT1'] * 3600), self.resolution['arcsec']) is not True:
raise Exception('WARNING: make sure that resolution of observation and model are the same! E. g. change resolution of FC_object first.')
image = hdu.data
# add model to observation
image[start_y:stop_y, start_x:stop_x] = image[start_y:stop_y, start_x:stop_x] + model
# store to name.fits file
fits.writeto(name + '.fits', image, clobber=True)
# return SyntheticImage
i = SyntheticImage(self)
i.stage = stage
i.log.append(i.stage)
return i
def add_field_stars(self, extinction_map, database=None, star_file=None, seed=None, ISMextinction=None):
'''
Adds field stars to synthetic image.
Parameters
----------
extinction_map : object
Created with ``fluxcompensator.utils.fieldstars.extract_extinction_map``.
database : dict, ``None``
Dictionary sets the parameters for field stars loaded for the respective
band from the built-in database.
dict = {'number':200, 'distance_range':[3*kpc, 50*kpc], 'ground': 0.02}
The dictionary is structured as follows:
* ``'number'`` : int in [0,288]
* ``'distance_range'`` : list
Distance lower and upper limit in units of cm
* ``'ground'`` : str, float
Distribution of stars before (``'foreground'``) or behind (``'background'``) the synthetic object.
When ``'ground'`` is a ``float`` in the limits of [0,1] then this is the fraction of foreground stars.
Default is ``None``.
star_file : str, ``None``
To load individual file with field stars in the format of (distance[pc], mag[band]).
Default is ``None``.
seed : int, ``None``
To create reproducible results for the positions of field stars.
Default is ``None``.
ISMextinction : float, ``None``
Optical extinction A_V along the line of sight in units mag/kpc.
Default is ``None``.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticImage: add_field_stars'
# make sure resolution and PSF was not applied before
if 'SyntheticImage: convolve_PSF' in self.log or 'SyntheticCube: convolve_PSF' in self.log \
or 'SyntheticImage: change_resolution' in self.log \
or 'SyntheticCube: change_resolution' in self.log:
raise Exception('WARNING: Adding field stars should happen before changing resolution or convolution with PSF.')
# make sure that filter was applied before
if 'SyntheticCube: convolve_filter' not in self.log:
raise Exception('WARNING: Image must be convolved with the transmission of a detector.')
if extinction_map.shape != self.val.shape:
raise Exception('WARNING: Extinction map and val of SyntheticImage do not have the same dimension.')
# load file or give parameters to read from database
if database is None and star_file is None:
raise Exception('WARNING: Either database or star_file need to be different from None.')
# read from database
if database is not None:
from utils.fieldstars import get_stars_from_database
mag, star_distance = get_stars_from_database(band=self.filter['name'], number=database['number'],
distance_range=database['distance_range'], ground=database['ground'],
object_distance=self.distance, seed=seed)
# read field star data from file, distance in pc, mag in magnitudes
pc = 3.08568025e+18
if database is None and star_file is not None:
print 'CAUTION: only stars in the same band as the image should be loaded.'
print 'CAUTION: units of distance is in pc, stellar photometry in mag.'
f = np.loadtxt(star_file)
star_distance = f[:,0] * pc # pc>cm
mag = f[:,1]
# ensure that random numbers are the same every time
if seed is not None:
np.random.seed(seed)
#print star_distance - self.distance
# extinction from extinction map for objects
x = np.random.uniform(0, self.pixel[0], len(mag)).astype('int')
y = np.random.uniform(0, self.pixel[1], len(mag)).astype('int')
A_obj = extinction_map[x,y]
# convert to from A_v to A_filter
print 'CAUTION: Extinction law from Kim et al. is used.'
wav_ext, k_lam = np.loadtxt(ROOT + 'database/extinction/extinction_law.txt', unpack=True)
k_v = np.interp(0.550, wav_ext, k_lam)
k = np.interp(self.wav, wav_ext, k_lam)
A_filter = A_obj * (k / k_v)
MAG_ext = np.where([star_distance[i] >= self.distance for i in range(len(star_distance))], mag + A_filter, mag)
if ISMextinction is not None:
ISM_extinction_filter = ISMextinction * (k / k_v)
MAG_ext_ISM = np.where([star_distance[i] >= self.distance for i in range(len(star_distance))], MAG_ext + ISM_extinction_filter * star_distance/(1e3 * pc), MAG_ext)
#print mag[10], MAG_ext[10], MAG_ext_ISM[10]
MAG_ext = MAG_ext_ISM
# zero-point
import database.missions as filters
zero_point = getattr(filters, self.filter['name'] + '_ZERO')
wav_1D = np.ones(np.shape(MAG_ext))*self.wav
# converting mag to flux
flux = ConvertUnits(wav=wav_1D, val=MAG_ext)
if self.units == 'MJy/sr' or self.units == 'Jy/arcsec^2':
starflux = flux.get_unit(in_units='mag', out_units=self.units, zero_point=zero_point, input_resolution=self.resolution['arcsec'])
else:
starflux = flux.get_unit(in_units='mag', out_units=self.units, zero_point=zero_point)
# position of star on image
add_stellar_flux = self.val.copy()
for i in range(len(starflux)):
add_stellar_flux[x[i],y[i]] = add_stellar_flux[x[i],y[i]] + starflux[i]
# return SyntheticImage
i = SyntheticImage(self)
i.val = add_stellar_flux
i.stage = stage
i.log.append(i.stage)
return i
@property
def spacing_wav(self):
    '''
    Width of the logarithmically spaced ``wav`` grid: ``log10(wav[0] /
    wav[-1])`` divided by the number of intervals.  Returns ``None``
    when ``wav`` is a scalar (0-dim array).
    '''
    if self.wav.ndim == 0:
        return None
    return np.log10(self.wav[0] / self.wav[-1]) / (len(self.wav) - 1)
@property
def pixel(self):
    '''
    Tuple with the pixel dimensions of a ``val`` slice:
    ``x = pixel[0]``, ``y = pixel[1]``.

    Returns ``(None, None)`` for scalar or 1-D ``val`` (no pixel grid).
    Fix: the original non-exclusive ``if`` chain left ``pixel`` unbound
    (UnboundLocalError) for ``ndim > 3``; any array with at least two
    dimensions now yields its first two axes.
    '''
    if self.val.ndim >= 2:
        return (self.val.shape[0], self.val.shape[1])
    return (None, None)
@property
def shape(self):
    '''
    String describing the current shape of the ``val`` array:
    scalar: ``'()'``
    1D: ``'(wav)'``
    2D: ``'(x, y)'``
    3D: ``'(x, y, wav)'``

    Fix: the original chain of independent ``if`` statements left
    ``shape`` unbound (UnboundLocalError) for any other dimensionality;
    an explicit ValueError is raised instead.
    '''
    names = {0: '()', 1: '(wav)', 2: '(x, y)', 3: '(x, y, wav)'}
    ndim = self.val.ndim
    if ndim not in names:
        raise ValueError('val has unsupported dimensionality: %d' % ndim)
    return names[ndim]
@property
def resolution(self):
    '''
    Current resolution per pixel:
    resolution in arcsec per pixel : ``resolution['arcsec']``
    resolution in rad per pixel : ``resolution['rad']``
    When no pixel grid exists (SED/scalar ``val``), the whole field of
    view counts as one large pixel.
    '''
    n_pix = self.pixel[0]
    if n_pix is None:
        n_pix = 1.
    rad = self.FOV[0] / n_pix / self.distance
    return {'rad': rad, 'arcsec': np.degrees(rad) * 3600}
| 33.9547 | 175 | 0.550797 | 3,702 | 29,982 | 4.336845 | 0.132631 | 0.026783 | 0.011336 | 0.003986 | 0.285332 | 0.233074 | 0.189038 | 0.17085 | 0.143071 | 0.122641 | 0 | 0.011867 | 0.347942 | 29,982 | 882 | 176 | 33.993197 | 0.809361 | 0.069008 | 0 | 0.227723 | 0 | 0.006601 | 0.115414 | 0.006527 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.052805 | null | null | 0.009901 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2c813723c1876d28ad71bddb2cb82d4afe3cc1c | 435 | py | Python | generator.py | madkira/SCXML_to_FSM_for_Arduino | 2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201 | [
"MIT"
] | 1 | 2020-05-13T23:03:19.000Z | 2020-05-13T23:03:19.000Z | generator.py | madkira/SCXML_to_FSM_for_Arduino | 2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201 | [
"MIT"
] | null | null | null | generator.py | madkira/SCXML_to_FSM_for_Arduino | 2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201 | [
"MIT"
] | 1 | 2019-01-20T12:46:37.000Z | 2019-01-20T12:46:37.000Z | #!/usr/bin/python
# Command-line entry point: parse an SCXML file and generate an Arduino FSM.
import argparse
from src.SCXML_Parser.Scxml_parsor import Scxml_parsor
from src.arduino_helper.generate_fsm import generate_fsm
# -f: path to the input SCXML file (defaults to fsm.xml)
parser = argparse.ArgumentParser()
parser.add_argument('-f', action='store', dest='file', type=str, required=False, default="fsm.xml")
inargs = parser.parse_args()
print ("Beginning of the arduino fsm generator")
# NOTE(review): 'parser' is rebound here to the SCXML parser object,
# clobbering the argparse parser above — consider a distinct name.
parser = Scxml_parsor(inargs.file)
generate_fsm(parser)
print("End") | 22.894737 | 99 | 0.77931 | 62 | 435 | 5.306452 | 0.596774 | 0.100304 | 0.103343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096552 | 435 | 19 | 100 | 22.894737 | 0.83715 | 0.036782 | 0 | 0 | 1 | 0 | 0.140811 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.3 | 0 | 0.3 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2c87ee36a16287beb7f717f937785ecfe37b5d0 | 769 | py | Python | data/external/repositories/145085/kaggle_Microsoft_Malware-master/kaggle_Microsoft_malware_full/rebuild_code.py | Keesiu/meta-kaggle | 87de739aba2399fd31072ee81b391f9b7a63f540 | [
"MIT"
] | 1 | 2015-11-08T05:19:43.000Z | 2015-11-08T05:19:43.000Z | microsoft malware/Malware_Say_No_To_Overfitting/kaggle_Microsoft_malware_small/rebuild_code.py | bikash/kaggleCompetition | c168f5a713305f6cf6ef41db60d8b1f4cdceb2b1 | [
"Apache-2.0"
] | null | null | null | microsoft malware/Malware_Say_No_To_Overfitting/kaggle_Microsoft_malware_small/rebuild_code.py | bikash/kaggleCompetition | c168f5a713305f6cf6ef41db60d8b1f4cdceb2b1 | [
"Apache-2.0"
] | 1 | 2019-12-04T08:23:33.000Z | 2019-12-04T08:23:33.000Z | import os,array
# Python 2 script: build a fixed-length (2000) byte-signature vector per
# sample from .asm files and pickle the resulting matrix.
# (os and array are imported at the top of the file, outside this span.)
import pickle
import numpy as np
import sys
# sample ids, loaded from the pickle file given as argv[1]
xid=pickle.load(open(sys.argv[1]))
# directory holding the .asm files; argv[2]
asm_code_path=sys.argv[2]
# 'train' or 'test', taken from the directory-name suffix after the last '_'
train_or_test=asm_code_path.split('_')[-1]
X = np.zeros((len(xid),2000))
for cc,i in enumerate(xid):
    f=open(asm_code_path+'/'+i+'.asm')
    ln = os.path.getsize(asm_code_path+'/'+i+'.asm') # length of file in bytes
    # read a whole number of 'rows' of width sqrt(ln), dropping the remainder
    width = int(ln**0.5)
    rem = ln%width
    a = array.array("B") # uint8 array
    a.fromfile(f,ln-rem)
    f.close()
    a=np.array(a)
    #im = Image.open('asmimage/'+i+'.png')
    # coerce to a fixed-length signature of 2000 entries
    # NOTE(review): verify np.ndarray.resize semantics for files longer
    # than 2000 bytes — confirm truncation is intended.
    a.resize((2000,))
    #im1 = im.resize((64,64),Image.ANTIALIAS); # for faster computation
    #des = leargist.color_gist(im1)
    X[cc] = a#[0,:1000] #des[0:320]
    print cc*1.0/len(xid)  # Python 2 print: progress fraction
pickle.dump(X,open('Xcode_'+train_or_test+'.p','w'))
| 29.576923 | 78 | 0.63329 | 138 | 769 | 3.42029 | 0.492754 | 0.059322 | 0.09322 | 0.050847 | 0.063559 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047988 | 0.159948 | 769 | 25 | 79 | 30.76 | 0.682663 | 0.243173 | 0 | 0 | 0 | 0 | 0.036649 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.190476 | null | null | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2cd02aade65d0b6969f2b1d510da3c44e2f7198 | 563 | py | Python | rabbitmq/python/topic_producer.py | alovn/tutorials | 84f9c5fc563e042eeff9ffa4bce4eaae0fcc6e9a | [
"MIT"
] | 7 | 2019-12-20T12:37:37.000Z | 2021-12-15T08:42:10.000Z | rabbitmq/python/topic_producer.py | alovn/tutorials | 84f9c5fc563e042eeff9ffa4bce4eaae0fcc6e9a | [
"MIT"
] | null | null | null | rabbitmq/python/topic_producer.py | alovn/tutorials | 84f9c5fc563e042eeff9ffa4bce4eaae0fcc6e9a | [
"MIT"
] | 1 | 2021-12-15T08:44:55.000Z | 2021-12-15T08:44:55.000Z | # encoding:utf-8
# Python 2 RabbitMQ producer: publish one message to the 'topic_logs'
# topic exchange with routing key 'topic.logs.info'.
import pika
import time
# NOTE(review): 'time' is imported but unused in the visible code.
credentials = pika.PlainCredentials('guest', 'guest')
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='s1004.lab.org',
    port=5672,
    virtual_host='/',
    credentials=credentials))
channel = connection.channel()
# declare (idempotently) the exchange we publish to
channel.exchange_declare(exchange='topic_logs', type='topic')
message = 'Hello, World!'
channel.basic_publish(exchange='topic_logs',
                      routing_key='topic.logs.info',
                      body=message)
print " [x] Sent %r" % (message,)  # Python 2 print statement
connection.close() | 24.478261 | 63 | 0.680284 | 61 | 563 | 6.180328 | 0.639344 | 0.071618 | 0.090186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019523 | 0.181172 | 563 | 23 | 64 | 24.478261 | 0.798265 | 0.024867 | 0 | 0 | 0 | 0 | 0.162409 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.125 | null | null | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2cd268e8522aa01fa610bfaf6b0ddd0d937eb64 | 295 | py | Python | settings_default.py | iticus/photomap | 46ce664412bd44d5bcd6292b04191cacbee7c446 | [
"MIT"
] | null | null | null | settings_default.py | iticus/photomap | 46ce664412bd44d5bcd6292b04191cacbee7c446 | [
"MIT"
] | 2 | 2015-11-19T21:37:01.000Z | 2015-11-25T22:37:45.000Z | settings_default.py | iticus/photomap | 46ce664412bd44d5bcd6292b04191cacbee7c446 | [
"MIT"
] | null | null | null | """
Created on Nov 1, 2015
@author: ionut
"""
import logging
# Debug-mode switch for the application
DEBUG = False
# Default logging verbosity
LOG_LEVEL = logging.INFO
# PostgreSQL connection string (libpq key=value DSN format)
DSN = "dbname=photomap user=postgres password=pwd host=127.0.0.1 port=5432"
# Template and static-asset directories (relative to the app root)
TEMPLATE_PATH = "templates"
STATIC_PATH = "static"
# Absolute path where uploaded media is stored/served from
MEDIA_PATH = "/home/ionut/nginx/media"
# Placeholder secret — NOTE(review): replace before deployment
SECRET = "some_secret"
| 15.526316 | 75 | 0.725424 | 44 | 295 | 4.75 | 0.795455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059289 | 0.142373 | 295 | 18 | 76 | 16.388889 | 0.766798 | 0.128814 | 0 | 0 | 0 | 0.125 | 0.465863 | 0.092369 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.125 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e2cfc3393806f7bb4f40dc3f9cc091f1aa70db37 | 387 | py | Python | exercicios/ex 061 a 070/ex063.py | CarlosWillian/python | f863578245fbf402e5b46f844a247355afed0d62 | [
"MIT"
] | null | null | null | exercicios/ex 061 a 070/ex063.py | CarlosWillian/python | f863578245fbf402e5b46f844a247355afed0d62 | [
"MIT"
] | null | null | null | exercicios/ex 061 a 070/ex063.py | CarlosWillian/python | f863578245fbf402e5b46f844a247355afed0d62 | [
"MIT"
] | null | null | null | print('Sequência de Fibonacci')
print('='*24)
# number of Fibonacci terms requested by the user
t = int(input('Número de termos da sequência: '))
print('='*24)
# c counts the term being produced; the first two terms (0 and 1) are
# printed below, so counting starts at 3
c = 3
termo1 = 0
termo2 = 1
print('A sequência é ({}, {}, '.format(termo1, termo2), end='')
while c <= t:
    # next term is the sum of the previous two
    termo3 = termo1 + termo2
    print('{}'.format(termo3), end='')
    # comma-separate every term except the last
    print(', ' if c < t else '', end='')
    c += 1
    # slide the two-term window forward
    termo1 = termo2
    termo2 = termo3
print(')')
| 22.764706 | 63 | 0.563307 | 53 | 387 | 4.113208 | 0.471698 | 0.165138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066007 | 0.217054 | 387 | 16 | 64 | 24.1875 | 0.653465 | 0 | 0 | 0.125 | 0 | 0 | 0.21447 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.4375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
e2d0873043c167f2f68be47cd5ad16d9ad3d23a9 | 469 | py | Python | myadsp/emails.py | kelockhart/myADSPipeline | 21c453a6d7c35d7ce019a71854010fb80b1bc56f | [
"MIT"
] | null | null | null | myadsp/emails.py | kelockhart/myADSPipeline | 21c453a6d7c35d7ce019a71854010fb80b1bc56f | [
"MIT"
] | null | null | null | myadsp/emails.py | kelockhart/myADSPipeline | 21c453a6d7c35d7ce019a71854010fb80b1bc56f | [
"MIT"
] | null | null | null | """email templates"""
from builtins import object
class Email(object):
    """
    Base data holder for the pieces of an outgoing email message.
    """
    # Subject line (empty by default)
    subject = u''
    # Plain-text body
    msg_plain = ''
    # HTML body
    msg_html = ''
    # Salt value (empty by default)
    salt = ''
class myADSTemplate(Email):
    """
    myADS email template: concrete subject and bodies for the myADS
    notification service.  ``{payload}`` is presumably substituted by the
    sending code — TODO confirm against the caller.
    """
    # plain-text body template
    msg_plain = """
SAO/NASA ADS: myADS Personal Notification Service Results
{payload}
"""
    # HTML body is just the payload
    msg_html = """{payload}"""
    subject = u'myADS Notification'
| 18.038462 | 65 | 0.575693 | 47 | 469 | 5.659574 | 0.617021 | 0.06015 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.298507 | 469 | 25 | 66 | 18.76 | 0.808511 | 0.179104 | 0 | 0 | 0 | 0 | 0.358166 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.769231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e2d1181ba43764099ea9ef3959a87a0948ac70c3 | 2,645 | py | Python | ext/app/decorators.py | FNLF/fnlf-backend | 060d675d7cf8d0eff46af6eb4be7035b8cd68d36 | [
"MIT"
] | 1 | 2015-01-14T22:08:27.000Z | 2015-01-14T22:08:27.000Z | ext/app/decorators.py | FNLF/fnlf-backend | 060d675d7cf8d0eff46af6eb4be7035b8cd68d36 | [
"MIT"
] | 103 | 2015-01-08T13:45:38.000Z | 2022-01-13T00:38:39.000Z | ext/app/decorators.py | FNLF/fnlf-backend | 060d675d7cf8d0eff46af6eb4be7035b8cd68d36 | [
"MIT"
] | null | null | null | """
Custom decorators
=================
Custom decorators for various tasks and to bridge Flask with Eve
"""
from flask import current_app as app, request, Response, abort
from functools import wraps
from ext.auth.tokenauth import TokenAuth
from ext.auth.helpers import Helpers
# Because of circular import in context
from ext.app.eve_helper import eve_abort
class AuthenticationFailed(Exception):
    """Raised when token authentication fails (missing or rejected credentials)."""
class AuthenticationNoToken(Exception):
    """Custom error for requests carrying no token (not raised in this module's visible code)."""
def require_token(allowed_roles=None):
    """ Custom decorator for token auth
    Wraps the custom TokenAuth class used by Eve and sends it the required param

    :param allowed_roles: forwarded to ``TokenAuth.check_auth`` as the roles
        permitted to access the wrapped view.
    """
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            try:
                # print(request.headers.get('User-Agent'))
                # No authorization in request
                # Let it raise an exception
                try:
                    # The token travels in the basic-auth "username" field.
                    authorization_token = request.authorization.get('username', None)
                except Exception as e:
                    raise AuthenticationFailed
                # Do the authentication
                # Need to remove prefix + / for request.path
                auth = TokenAuth()
                auth_result = auth.check_auth(token=authorization_token, # Token
                                              method=request.method,
                                              resource=request.path[len(app.globals.get('prefix')) + 1:],
                                              allowed_roles=allowed_roles)
                if auth_result is not True:
                    raise AuthenticationFailed
            # Catch exceptions and handle correctly
            except AuthenticationFailed:
                # Bad/missing credentials -> 401
                eve_abort(401, 'Please provide proper credentials')
            except Exception as e:
                # Anything unexpected becomes a generic 500.
                eve_abort(500, 'Server error')
            return f(*args, **kwargs)
        return wrapped
    return decorator
def require_superadmin():
    """Restrict the wrapped view to superadmin users.

    The current user id (``app.globals['user_id']``) must appear in
    ``Helpers().get_superadmins()``; otherwise the request is aborted
    with 401.

    @TODO: use a switch for ref [superadmin, admin,..]?
    @TODO: in ext.auth.helpers define a get_users_in_roles_by_ref(ref)?
    """
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            helpers = Helpers()
            current_user = int(app.globals['user_id'])
            if current_user not in helpers.get_superadmins():
                eve_abort(401, 'You do not have sufficient privileges')
            return f(*args, **kwargs)
        return wrapper
    return decorator
| 30.402299 | 105 | 0.579206 | 288 | 2,645 | 5.239583 | 0.423611 | 0.021206 | 0.014579 | 0.033135 | 0.111332 | 0.111332 | 0.111332 | 0.111332 | 0.05169 | 0 | 0 | 0.008552 | 0.336862 | 2,645 | 86 | 106 | 30.755814 | 0.851767 | 0.270699 | 0 | 0.45 | 0 | 0 | 0.056009 | 0 | 0 | 0 | 0 | 0.023256 | 0 | 1 | 0.15 | false | 0 | 0.125 | 0 | 0.475 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2e63f122a8263e057c7c5f1b88e244fdf783447 | 5,948 | py | Python | test_action40.py | gmayday1997/pytorch_CAM | c51a0c7f7701005b8f031ed9a0f9b3b9680cf560 | [
"MIT"
] | 23 | 2018-02-13T00:50:11.000Z | 2021-02-04T01:49:34.000Z | test_action40.py | gmayday1997/pytorch-CAM | c51a0c7f7701005b8f031ed9a0f9b3b9680cf560 | [
"MIT"
] | null | null | null | test_action40.py | gmayday1997/pytorch-CAM | c51a0c7f7701005b8f031ed9a0f9b3b9680cf560 | [
"MIT"
] | 5 | 2017-12-19T10:48:22.000Z | 2021-02-04T01:49:35.000Z | import os
import numpy as np
import torch
import torch.nn as nn
import torchvision
import torch.utils.data as Data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
from torch.autograd import Variable
from torch.nn import functional as F
from action40_config import config
import vgg16_model as models
import utils as utils
import fold as imgfolder
import transforms as trans
import shutil
import cv2
import json
import matplotlib.pyplot as plt
import collections
configs = config()  # experiment configuration (data_dir, batch_size, class_info_dir, checkpoint/output paths)
resume = 1  # NOTE(review): not referenced in the visible code — confirm before removing
def parse_json(file_path):
    """Load and return the JSON document stored at ``file_path``.

    :param file_path: path to a JSON file (e.g. the class-info mapping)
    :return: the deserialized object

    Uses a context manager so the handle is always closed (the previous
    version leaked the handle and relied on the Python-2-only ``file``
    builtin); ``json`` comes from the module-level import.
    """
    with open(file_path) as json_file:
        return json.load(json_file)
######## source code from offical code ###############
######## source code from offical code ###############
def returnCAM(feature_conv, weight_softmax, class_idx, probs):
    """Generate class activation maps (CAMs) upsampled to 256x256.

    :param feature_conv: conv feature maps, shape (batch, channels, h, w)
    :param weight_softmax: classifier weights, one row per class
    :param class_idx: class indices to build CAMs for
    :param probs: per-class probabilities, iterated in lockstep with
        ``class_idx`` (the shorter sequence limits the output, as before)
    :return: list of uint8 CAM images, one per (idx, prob) pair

    Cleanup: the original also filled ``output_cam``/``output_cam_prob``
    dicts and computed ``top_number`` without ever using them; those dead
    locals are removed. The returned list is unchanged.
    """
    size_upsample = (256, 256)
    bz, nc, h, w = feature_conv.shape
    output_cam_imgs = []
    # probs stays in the zip purely to preserve the historical truncation
    # behaviour when len(probs) != len(class_idx).
    for idx, prob in zip(class_idx, probs):
        # weighted sum of the feature maps for this class
        cam = weight_softmax[idx].dot(feature_conv.reshape((nc, h * w)))
        cam = cam.reshape(h, w)
        # normalize to [0, 255] for visualization
        cam = cam - np.min(cam)
        cam_img = cam / np.max(cam)
        cam_img = np.uint8(255 * cam_img)
        output_cam_imgs.append(cv2.resize(cam_img, size_upsample))
    return output_cam_imgs
def untransform(transform_img):
    """Invert the dataset normalization on an image tensor for display.

    :param transform_img: float array of shape (3, H, W), normalized with
        mean [0.4001, 0.4401, 0.4687] and std [0.229, 0.224, 0.225]
        (the same statistics used by the data transform in main())
    :return: uint8 array of shape (H, W, 3) with channel order reversed

    Fix: the original applied ``*=``/``+=`` through the transposed view,
    silently mutating the caller's array; all operations now produce new
    arrays and the input is left untouched.
    """
    img = transform_img.transpose(1, 2, 0)
    img = img * [0.229, 0.224, 0.225]      # undo std scaling (new array, no aliasing)
    img = img + [0.4001, 0.4401, 0.4687]   # undo mean shift
    img = img * 255
    img = img.astype(np.uint8)
    return img[:, :, ::-1]                 # reverse channel order
def test(net, testloader):
    """Evaluate ``net`` on ``testloader`` and return accuracy in percent.

    Moves every batch to the GPU (requires CUDA) and uses the deprecated
    ``Variable``/``volatile`` API — NOTE(review): written for an old
    PyTorch (< 0.4); confirm before running on a modern install.
    The model is expected to return a (logits, features) pair.
    """
    net.eval()  # switch to inference mode (dropout/batch-norm)
    correct = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(testloader):
        inputs, targets = inputs.cuda(), targets.cuda()
        inputs, targets = Variable(inputs, volatile=True), Variable(targets)
        outputs, _ = net(inputs)
        # arg-max class per sample
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()
        '''''''''
        progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)'
            % (test_loss/(batch_idx+1), 100.*correct/total, correct, total))
        '''''''''
    print(100.* correct/total)
    return 100.*correct/total
def main():
    """Evaluate the CAM-enabled VGG on the Action-40 test split, save a
    class-activation-map figure per batch, and print overall accuracy.

    NOTE(review): requires CUDA; the 'print batch_idx' statement near the
    end is Python 2 syntax, so this file as a whole only runs under
    Python 2.
    """
    ######## load training data ########
    ######### action 40 ############
    # Same normalization statistics that untransform() inverts.
    normalize = trans.Normalize(mean=[0.4001, 0.4401, 0.4687],
                                std=[0.229, 0.224, 0.225])
    transform = trans.Compose([
        trans.Scale((224,224)),
        trans.ToTensor(),
        normalize,
    ])
    test_data = imgfolder.ImageFolder(os.path.join(configs.data_dir,'img/test'),transform=transform)
    test_loader = Data.DataLoader(test_data,batch_size=configs.batch_size,
                                  shuffle= False, num_workers= 4, pin_memory= True)
    # class index -> class name mapping
    classes = {int(key): value for (key, value)
               in parse_json(configs.class_info_dir).items()}
    ######### build vgg model ##########
    vgg_cam = models.vgg_cam()
    vgg_cam = vgg_cam.cuda()
    checkpoint = torch.load(configs.best_ckpt_dir)
    vgg_cam.load_state_dict(checkpoint['state_dict'])
    # hook the feature extractor
    features_blobs = []
    def hook_feature(module, input, output):
        # keep the activations of the hooked layer for CAM computation
        features_blobs.append(output.data.cpu().numpy())
    finalconv_name = 'classifier' # this is the last conv layer of the network
    vgg_cam._modules.get(finalconv_name).register_forward_hook(hook_feature)
    # get the softmax weight
    params = list(vgg_cam.parameters())
    weight_softmax = np.squeeze(params[-1].data.cpu().numpy())
    save_cam_dir = os.path.join(configs.py_dir,'predict')
    if not os.path.exists(save_cam_dir):
        os.mkdir(save_cam_dir)
    top_number = 5  # number of top-scoring classes visualized per image
    correct = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(test_loader):
        inputs, targets = inputs.cuda(), targets.cuda()
        # first image of the batch, de-normalized for plotting
        transformed_img = inputs.cpu().numpy()[0]
        target_name = classes[targets.cpu().numpy()[0]]
        transformed_img = untransform(transformed_img)
        inputs, targets = Variable(inputs, volatile=True), Variable(targets)
        outputs, _ = vgg_cam(inputs)
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()
        # class probabilities, sorted descending
        h_x = F.softmax(outputs).data.squeeze()
        probs, idx = h_x.sort(0, True)
        prob = probs.cpu().numpy()[:top_number]
        idx_ = idx.cpu().numpy()[:top_number]
        OUT_CAM = returnCAM(features_blobs[-1],weight_softmax,idx_,prob)
        save_fig_dir = os.path.join(save_cam_dir, 'cam_' + str(batch_idx) + '.jpg')
        # 2x3 figure: original image plus top-5 CAM overlays
        plt.figure(1, figsize=(8, 6))
        ax = plt.subplot(231)
        img1 = transformed_img[:, :, (2, 1, 0)]
        ax.set_title(('{}').format(target_name),fontsize=14)
        ax.imshow(img1)
        for b_index, (idx,prob_in,cam) in enumerate(zip(idx_,prob,OUT_CAM)):
            cl = str(classes[idx])
            #save_fig_dir1 = os.path.join(save_cam_dir, 'cam_cv_' + str(batch_idx) + '_' + cl + '.jpg')
            height, width, _ = transformed_img.shape
            # overlay the CAM heatmap on the input image
            heatmap = cv2.applyColorMap(cv2.resize(cam, (width, height)), cv2.COLORMAP_JET)
            result = heatmap * 0.3 + transformed_img * 0.7
            ax = plt.subplot(2,3,b_index+2)
            ax.imshow(result.astype(np.uint8)[:,:,(2,1,0)])
            ax.set_title(('{}:{}').format(cl,('%.3f' % prob_in)), fontsize=8)
        plt.savefig(save_fig_dir)
        print batch_idx  # Python 2 print statement (progress)
    print(100.* correct/total)
# Run the evaluation + CAM-generation pipeline when executed as a script.
if __name__ == '__main__':
    main()
| 33.41573 | 102 | 0.643746 | 809 | 5,948 | 4.538937 | 0.283066 | 0.039216 | 0.01634 | 0.03268 | 0.206427 | 0.206427 | 0.165033 | 0.130719 | 0.130719 | 0.097495 | 0 | 0.033319 | 0.212845 | 5,948 | 177 | 103 | 33.60452 | 0.750961 | 0.069771 | 0 | 0.135338 | 0 | 0.007519 | 0.042647 | 0.008824 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.157895 | null | null | 0.022556 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2f1d37bd8721b99e8bd17fdefb5d3f548a12c16 | 538 | py | Python | retuo.py | Azi-Dahaka/- | 8d47d8e18a4b4fcfee4d2649c8efa819d4cd357e | [
"MIT"
] | 1 | 2021-11-25T03:28:30.000Z | 2021-11-25T03:28:30.000Z | retuo.py | Azi-Dahaka/- | 8d47d8e18a4b4fcfee4d2649c8efa819d4cd357e | [
"MIT"
] | null | null | null | retuo.py | Azi-Dahaka/- | 8d47d8e18a4b4fcfee4d2649c8efa819d4cd357e | [
"MIT"
] | 2 | 2021-09-06T07:41:48.000Z | 2021-11-25T09:28:07.000Z | # -*- coding:utf-8 -*-
# 1. Import extensions
from flask import Flask
from flask_restful import Api
import config
from app.api.view.auth import wx_login
from app.api.view.talk import Reply
# 2. Create the Flask application instance; __name__ determines the resource root path
app = Flask(__name__)
app.config.from_object(config.DevelopmentConfig)
api = Api(app)
# 3. Define global variables
# 4. Define routes and view functions
# Define the RESTful API
# NOTE(review): routes are registered on the Flask app directly; the Api
# instance above is unused here.
app.add_url_rule('/auth/wxlogin', view_func=wx_login.as_view('wxlogin'))
app.add_url_rule('/reply', view_func=Reply.as_view('reply'))
# 4. Start the application
if __name__ == '__main__':
    app.run(debug=True)
| 20.692308 | 72 | 0.749071 | 86 | 538 | 4.360465 | 0.476744 | 0.048 | 0.053333 | 0.074667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012579 | 0.113383 | 538 | 25 | 73 | 21.52 | 0.773585 | 0.19145 | 0 | 0 | 0 | 0 | 0.091335 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.416667 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
e2f2a441790ca7a9000ae20c712a0f4467b4c1c4 | 1,334 | py | Python | Modulos/ProvasPassadas/aux_scraping.py | gabrielfava/asap | be6211190d4acfca7aacef45d7dc467e2237496d | [
"Apache-2.0"
] | 2 | 2018-03-16T19:24:35.000Z | 2018-03-20T01:15:21.000Z | Modulos/ProvasPassadas/aux_scraping.py | jvalv/asaPY | 97cdc9359d8afeb9747f4372b253b179131d2be4 | [
"Apache-2.0"
] | 1 | 2018-02-24T23:43:15.000Z | 2018-02-24T23:43:15.000Z | Modulos/ProvasPassadas/aux_scraping.py | gabrielfava/asapy | be6211190d4acfca7aacef45d7dc467e2237496d | [
"Apache-2.0"
] | 1 | 2018-02-28T14:45:52.000Z | 2018-02-28T14:45:52.000Z | #ASAPY
import requests
__URL_GLOBAL = "https://www.urionlinejudge.com.br";  # base URL of the URI Online Judge site
def printme(pagina):
    """Fetch URI Online Judge problem page ``pagina`` and print the body
    of its embedded description iframe."""
    body = getCorpo(__URL_GLOBAL + "/judge/pt/problems/view/" + pagina)
    inicio = find_str(body, "<iframe")
    resto = body[inicio:]
    fim = find_str(resto, ">") + 1
    tag = resto[:fim]
    caminho = getAttr(tag, "src")
    bodyframe = getCorpo(__URL_GLOBAL + caminho)
    print(bodyframe)
    return
def find_str(s, char):
    """Return the index of the first occurrence of substring ``char`` in
    ``s``, or -1 if it does not occur.

    Delegates to ``str.find``, which implements exactly this contract and
    also handles an empty search string (the previous manual scan crashed
    on ``char == ''`` via ``char[0]``).
    """
    return s.find(char)
#TODO - TRATAR EQUIVALENCIA DE SINTAXE !
def getAttr(tupla, atributo):
    """Return the double-quoted value of HTML attribute ``atributo``
    inside tag text ``tupla``, or "ERRO" when the attribute is absent.

    Fix: the original added ``len(atributo) + 2`` to the search result
    before testing for -1, so the "not found" branch was unreachable and
    a missing attribute returned garbage text instead of "ERRO".
    """
    inicioAtr = find_str(tupla, atributo)
    if inicioAtr == -1:
        return "ERRO"
    inicioAtr += len(atributo) + 2  # skip past 'atributo="' (name, '=', opening quote)
    fimAttr = find_str(tupla[inicioAtr:], '"')
    # If the closing quote is missing, fimAttr == -1 and the slice below
    # yields '' — same as the old behaviour for a found attribute.
    return tupla[inicioAtr:inicioAtr + fimAttr]
def getCorpo(req):
    """Return the body of an HTTP GET on URL ``req`` as a string."""
    resposta = requests.get(req)
    return str(resposta.content)
# Demo invocation: print the description of problem 2166.
printme("2166")
#print("titulo => URI Online Judge - Problema 2166 - Raiz Quadrada de 2")
#print("autor => M.C. Pinto, UNILA")
#print("probm => ma das formas de calcular a raiz quadrada de um n\xc3\xbamero natural")
e2f315499e462d747fce1af2b55052eeb6910f0b | 2,911 | py | Python | toontown/safezone/DistributedButterflyAI.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 1 | 2021-02-25T06:22:49.000Z | 2021-02-25T06:22:49.000Z | toontown/safezone/DistributedButterflyAI.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | null | null | null | toontown/safezone/DistributedButterflyAI.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 2 | 2020-11-08T03:38:35.000Z | 2021-09-02T07:03:47.000Z | from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
from direct.distributed.ClockDelta import *
import ButterflyGlobals
import random
class DistributedButterflyAI(DistributedObjectAI):
    """AI-side distributed butterfly that alternates between flying along
    routes from ButterflyGlobals and resting at landing points.

    Fix: the local variable ``next`` in ``__fly`` shadowed the builtin of
    the same name; it is renamed to ``next_pos``.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("DistributedButterflyAI")

    def __init__(self, air):
        DistributedObjectAI.__init__(self, air)
        # playground/area this butterfly belongs to
        self.area = 0
        self.playground = 0
        # replicated motion state (see setState)
        self.stateIndex = 0
        self.curIndex = 0
        self.destIndex = 0
        self.time = 0
        self.timestamp = 0

    def generate(self):
        """Pick a first route and start the fly/land cycle."""
        ButterflyGlobals.generateIndexes(self.doId, self.playground)
        fr = ButterflyGlobals.getFirstRoute(self.playground, self.area, self.doId)
        self.b_setState(ButterflyGlobals.FLYING, fr[1], fr[3], fr[4], globalClockDelta.getRealNetworkTime())
        # schedule the landing once the flight time (fr[4]) elapses
        taskMgr.doMethodLater(fr[4], self.__land, 'landButterfly%i' % self.doId, [])

    def __land(self):
        """Land at the current destination for a random while, then fly again."""
        ttl = random.uniform(0, ButterflyGlobals.MAX_LANDED_TIME)
        self.b_setState(ButterflyGlobals.LANDED, self.curIndex, self.destIndex, ttl, globalClockDelta.getRealNetworkTime())
        taskMgr.doMethodLater(ttl, self.__fly, 'flyButterfly%i' % self.doId, [])

    def __fly(self):
        """Choose the next landing point and fly there."""
        # renamed from 'next' to avoid shadowing the builtin
        next_pos = ButterflyGlobals.getNextPos(ButterflyGlobals.ButterflyPoints[self.playground][self.area][self.destIndex], self.playground, self.area, self.doId)
        self.b_setState(ButterflyGlobals.FLYING, self.destIndex, next_pos[1], next_pos[2], globalClockDelta.getRealNetworkTime())
        taskMgr.doMethodLater(next_pos[2], self.__land, 'landButterfly%i' % self.doId, [])

    def setArea(self, playground, area):
        # local setter
        self.area = area
        self.playground = playground

    def d_setArea(self, playground, area):
        # send the update via sendUpdate only
        self.sendUpdate('setArea', [playground, area])

    def b_setArea(self, playground, area):
        # local setter + sendUpdate
        self.setArea(playground, area)
        self.d_setArea(playground, area)

    def getArea(self):
        return [self.playground, self.area]

    def setState(self, stateIndex, curIndex, destIndex, time, timestamp):
        # local setter for the replicated motion state
        self.stateIndex = stateIndex
        self.curIndex = curIndex
        self.destIndex = destIndex
        self.time = time
        self.timestamp = timestamp

    def d_setState(self, stateIndex, curIndex, destIndex, time, timestamp):
        self.sendUpdate('setState', [stateIndex, curIndex, destIndex, time, timestamp])

    def b_setState(self, stateIndex, curIndex, destIndex, time, timestamp):
        self.setState(stateIndex, curIndex, destIndex, time, timestamp)
        self.d_setState(stateIndex, curIndex, destIndex, time, timestamp)

    def getState(self):
        return [self.stateIndex, self.curIndex, self.destIndex, self.time, self.timestamp]

    def avatarEnter(self):
        # no-op
        pass
| 41 | 159 | 0.684988 | 300 | 2,911 | 6.55 | 0.206667 | 0.071247 | 0.082443 | 0.094656 | 0.32112 | 0.263613 | 0.23715 | 0.151654 | 0.066158 | 0.066158 | 0 | 0.006536 | 0.211611 | 2,911 | 70 | 160 | 41.585714 | 0.849673 | 0 | 0 | 0 | 0 | 0 | 0.027835 | 0.00756 | 0 | 0 | 0 | 0 | 0 | 1 | 0.240741 | false | 0.018519 | 0.092593 | 0.037037 | 0.407407 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e2f4dd0ee18ce021318531b4d9a81e9c00ac0a21 | 1,637 | py | Python | proj01_ifelse/proj01.py | CalvinsHyper/Vanderbilt-2018 | fa67c1f08f5d29bac4bd7747ec4a9110e5b3de00 | [
"MIT"
] | null | null | null | proj01_ifelse/proj01.py | CalvinsHyper/Vanderbilt-2018 | fa67c1f08f5d29bac4bd7747ec4a9110e5b3de00 | [
"MIT"
] | null | null | null | proj01_ifelse/proj01.py | CalvinsHyper/Vanderbilt-2018 | fa67c1f08f5d29bac4bd7747ec4a9110e5b3de00 | [
"MIT"
] | null | null | null | # Name:
# Date:
# proj01: A Simple Program
# Part I:
# This program asks the user for his/her name and grade.
#Then, it prints out a sentence that says the number of years until they graduate.
# (Python 2 script: uses print statements and raw_input.)
print "Hello"
Your_Name = raw_input("What's your name?")
print "Your name is "+ Your_Name
Your_Grade = raw_input("What Grade are you in?")
print "you are in"+ Your_Grade
# years until graduation, assuming grade 16 = graduation year
x = 16-int(Your_Grade)
print "you wil graduate in" +str(x) + "Years"
# Part II:
# This program asks the user for his/her name and birth month.
# Then, it prints a sentence that says the number of days and months until their birthday
print "Part II"
Current_Month = int(raw_input("what is the current month NUMBER"))
Current_Day = int(raw_input("What is the current day NUMBER"))
Your_Month = int(raw_input("what is your birth month NUMBER?"))
Your_Day = int(raw_input("what day of the month is your Birthday NUMBER?"))
# NOTE(review): q/e are month/day differences but are printed under
# swapped labels below; w and r (the wrap-around cases) look arithmetically
# wrong (e.g. 12 - Current_Month - Your_Month should presumably be
# 12 - Current_Month + Your_Month) — confirm the intended formulas.
q = (Your_Month-Current_Month)
w = (12-Current_Month-Your_Month)
e = (Your_Day-Current_Day)
r = (30-Current_Day-Your_Day)
if Your_Month>Current_Month:
    print "the number of days until your bday is " + str( q)
else:
    print"the number of days until your bday is " + str( w)
if Your_Day >= Current_Day:
    print "the number of months until your bday is" + str(e)
else:
    print "The number of months until your birthday is" + str(r)
# If you complete extensions, describe your extensions here!
# Extension: movie-rating gate based on age.
Your_Age=int(raw_input("how old are you"))
if Your_Age<13:
    print ("you may only see G and PG movies")
if Your_Age>13:
    print ("You can see any movies except for R movies")
if Your_Age>17:
    print ("you can watch any rated movie")
| 29.232143 | 89 | 0.718998 | 291 | 1,637 | 3.934708 | 0.292096 | 0.048908 | 0.057642 | 0.052402 | 0.372926 | 0.339738 | 0.282969 | 0.132751 | 0.132751 | 0.132751 | 0 | 0.010479 | 0.183873 | 1,637 | 55 | 90 | 29.763636 | 0.846557 | 0.242517 | 0 | 0.064516 | 0 | 0 | 0.421657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.387097 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3900f50cc35a91f1b2e65d295de22b272d80a5f7 | 802 | py | Python | quizzes/mixins.py | NeedsSoySauce/testme | dfc11737779809c1df475e9224e753ca7117c981 | [
"MIT"
] | 1 | 2020-11-22T22:38:02.000Z | 2020-11-22T22:38:02.000Z | quizzes/mixins.py | NeedsSoySauce/testme | dfc11737779809c1df475e9224e753ca7117c981 | [
"MIT"
] | 3 | 2021-06-04T23:59:02.000Z | 2021-09-22T19:39:14.000Z | quizzes/mixins.py | NeedsSoySauce/testme | dfc11737779809c1df475e9224e753ca7117c981 | [
"MIT"
] | null | null | null | from rest_framework.mixins import CreateModelMixin
from rest_framework.viewsets import GenericViewSet
class CreateUserLinkedModelMixin(CreateModelMixin, GenericViewSet):
    """
    Attach the requesting user to objects created through this viewset.

    Usage:
        Subclass and set the ``.queryset`` and ``.serializer_class``
        attributes. If you override ``perform_create``, make sure to call
        the super method. Point ``USER_FIELD`` at the name of the model's
        user field (default is 'creator').
    """

    # Name of the model field that receives the requesting user.
    USER_FIELD = 'creator'

    def perform_create(self, serializer):
        # Anonymous requests save without a user; authenticated requests
        # pass the user under USER_FIELD.
        if self.request.user.is_anonymous:
            serializer.save()
        else:
            serializer.save(**{self.USER_FIELD: self.request.user})
| 34.869565 | 118 | 0.714464 | 104 | 802 | 5.394231 | 0.509615 | 0.064171 | 0.060606 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.216958 | 802 | 22 | 119 | 36.454545 | 0.893312 | 0.435162 | 0 | 0 | 0 | 0 | 0.016627 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.222222 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3909cb5cbf3d27aacd9d499216668e13a1241a5e | 3,779 | py | Python | serial/splitter.py | tf-czu/gyrorad | eb1c30a9715857a50631de170cecb443457c2752 | [
"MIT"
] | null | null | null | serial/splitter.py | tf-czu/gyrorad | eb1c30a9715857a50631de170cecb443457c2752 | [
"MIT"
] | null | null | null | serial/splitter.py | tf-czu/gyrorad | eb1c30a9715857a50631de170cecb443457c2752 | [
"MIT"
] | null | null | null | #!/usr/bin/python
"""
Split logged data into separate "channels"
usage:
./splitter.py <log file> <GPS|0..3|all>
"""
import sys
FIRST_LINE = "id,timeMs,accX,accY,accZ,temp,gyroX,gyroY,gyroZ\n"
GPS_SEPARATOR_BEGIN = chr(0x2)
GPS_SEPARATOR_END = chr(0x3)
def checksum( s ):
    """Return the NMEA checksum of *s*: XOR of all character codes,
    formatted as two uppercase hex digits.

    Fix: the accumulator was named `sum`, shadowing the builtin.
    """
    acc = 0
    for ch in s:
        acc ^= ord(ch)
    return "%02X" % (acc)
def ddmm2ddd( s ):
    """Convert an NMEA ddmm.mmmm coordinate string to decimal degrees.

    The string is left-padded with zeros so short inputs still split into
    a two-digit minutes part and a (up to) two-digit degrees part.
    """
    whole, frac = ('0000' + s).split('.')
    minutes = float(whole[-2:] + '.' + frac)
    degrees = float(whole[-4:-2])
    return degrees + minutes / 60.0
def parseNMEA( data ):
    """Extract (UTC-time-string, lat, lon) fixes from a blob of NMEA text.

    Only checksum-verified GPRMC (with valid-fix status 'A') and GPGGA
    sentences in the N/E hemispheres are accepted; coordinates are
    converted from ddmm.mmmm to decimal degrees via ddmm2ddd().
    """
    ret = []
    for line in data.replace('\r','\n').split('\n'):
        # A candidate sentence has a '$' start marker and a '*' before
        # its two-digit checksum.
        if '$' in line and '*' in line.split('$')[-1]:
            s = line.split('$')[-1].split('*')
            if len(s) > 1 and len(s[1]) >= 2:
                # Verify the XOR checksum of the payload.
                if checksum(s[0]) == s[1][:2]:
                    if s[0].startswith("GPRMC"):
                        s = s[0].split(',')[:7]
                        # Fields: s[1] time, s[2] status, s[3]/s[5] lat/lon,
                        # s[4]/s[6] hemisphere letters.
                        if len(s) >= 7 and s[2] == 'A' and s[4] == 'N' and s[6] == 'E':
                            ret.append( (s[1], ddmm2ddd(s[3]), ddmm2ddd(s[5])) )
                    elif s[0].startswith("GPGGA"):
                        s = s[0].split(',')[:6]
                        # Fields: s[1] time, s[2]/s[4] lat/lon,
                        # s[3]/s[5] hemisphere letters.
                        if len(s) >= 6 and s[3] == 'N' and s[5] == 'E':
                            ret.append( (s[1], ddmm2ddd(s[2]), ddmm2ddd(s[4])) )
    return ret
def stripHeader( data ):
    """Drop everything up to and including the CSV header line, if present."""
    marker = data.find(FIRST_LINE)
    if marker == -1:
        # No header found: return the data unchanged.
        return data
    return data[marker + len(FIRST_LINE):]
def splitter( data, selected ):
    """Split a mixed sensor/GPS log into per-channel data.

    GPS bytes are framed between GPS_SEPARATOR_BEGIN (STX) and
    GPS_SEPARATOR_END (ETX) control characters and may span lines;
    everything else is CSV sensor data whose first field is a device id.

    selected: 'GPS' returns the raw NMEA stream, '0'..'3' returns that
    device's CSV lines, 'ALL' returns parsed records (GPS fixes mixed with
    integer sensor rows).
    """
    assert selected in ['GPS','0','1','2','3','ALL'], selected
    gpsSection = False          # True while inside an STX..ETX GPS frame
    data = stripHeader( data )
    result, resultGPS = "", ""
    lastGPS = None              # last appended GPS fix, to avoid duplicates
    records = []
    lastSeek = 0                # offset into resultGPS already parsed
    for line in data.split('\n'):
        if GPS_SEPARATOR_BEGIN in line:
            if GPS_SEPARATOR_END in line:
                # Complete GPS frame within one line: take the middle part.
                resultGPS += line.split(GPS_SEPARATOR_BEGIN)[1].split(GPS_SEPARATOR_END)[0]
                line = line.split(GPS_SEPARATOR_BEGIN)[0] + line.split(GPS_SEPARATOR_END)[1]
                gpsSection = False
            else:
                # Frame opens here and continues on following lines.
                resultGPS += line.split(GPS_SEPARATOR_BEGIN)[1]
                line = line.split(GPS_SEPARATOR_BEGIN)[0]
                gpsSection = True
        elif GPS_SEPARATOR_END in line:
            # Frame closes here; the tail is sensor data again.
            resultGPS += line.split(GPS_SEPARATOR_END)[0]
            line = line.split(GPS_SEPARATOR_END)[1]
            gpsSection = False
        elif gpsSection:
            # Whole line belongs to the open GPS frame.
            resultGPS += line.strip() + '\n'
            line = ""
        # Incrementally parse only the yet-unseen GPS tail.
        arr = parseNMEA( resultGPS[lastSeek:] )
        if len(arr) > 0 and arr[-1] != lastGPS:
            lastSeek = max(0, len(resultGPS)-80) # max NMEA line is 80 characters
            lastGPS = arr[-1]
            records.append( lastGPS )
        if len(line.split(',')) >= 9:
            if line[:2] not in ['0,','1,','2,','3,']:
                # Line has junk before the device id: re-anchor on the last
                # 9 CSV fields and recover the id from the field's last char.
                parts = line.split(',')
                s = parts[-9]
                if len(s) > 0:
                    line = parts[-9][-1] + ',' + ",".join( parts[-8:] )
            if line.startswith( selected ) and '*' not in line:
                result += line.strip() + '\n'
                records.append( [int(x) for x in line.split(',') if '.' not in x] ) # ignore float temperature
    if selected == 'GPS':
        return resultGPS
    if selected == 'ALL':
        return records
    return result
# Command-line entry point (Python 2): first argument is the log file,
# second selects the channel (GPS, a device id 0..3, or "all").
if __name__ == "__main__":
    if len(sys.argv) < 3:
        print __doc__
        sys.exit(2)
    selected = sys.argv[2].upper()
    data = splitter( open(sys.argv[1], "rb").read(), selected=selected )
    if selected == "GPS":
        # Show the raw NMEA stream, then the parsed fixes.
        print data
        print "------------------"
        print parseNMEA( data )
    elif selected == "ALL":
        for row in data:
            print row
    else:
        print data
# vim: expandtab sw=4 ts=4
| 32.299145 | 106 | 0.493781 | 482 | 3,779 | 3.784232 | 0.248963 | 0.085526 | 0.074561 | 0.080592 | 0.195724 | 0.195724 | 0.195724 | 0.129386 | 0.097039 | 0.097039 | 0 | 0.035614 | 0.338714 | 3,779 | 116 | 107 | 32.577586 | 0.694278 | 0.025668 | 0 | 0.076087 | 0 | 0 | 0.044283 | 0.013733 | 0 | 0 | 0.001682 | 0 | 0.01087 | 0 | null | null | 0 | 0.01087 | null | null | 0.065217 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3909f60bd9bc4dcad09d01754d26fbed50773848 | 11,267 | py | Python | main.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | 1 | 2019-10-11T14:43:53.000Z | 2019-10-11T14:43:53.000Z | main.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | null | null | null | main.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import jinja2
import webapp2
import logging
import threading
from mandrill_email import *
from webapp2_extras import routes
from cookie import *
from settings import *
from decorators import *
from functions import *
from google.appengine.api import taskqueue
from google.appengine.datastore.datastore_query import Cursor
# HANDLERS
from application.handlers.pages.geoprocessing \
import GeoprocessingDashboardHandler
from application.handlers.pages.geoprocessing \
import GeoprocessingClassificationHandler
from application.handlers.pages.geoprocessing \
import GeoprocessingToolHandler
from application.handlers.pages.geoprocessing \
import GeoprocessingToolImagesHandler
from application.handlers.pages.geoprocessing \
import GeoprocessedPageHandler
from application.handlers.pages.geoprocessing \
import ForGeoprocessedPageHandler
from application.handlers.pages.statistics import StatisticsDashboard
from application.handlers.pages.statistics import StatisticsDashboard2
from application.handlers.pages.login import LoginHandler
from application.handlers.pages.loginoauth import LoginOauthHandler
from application.handlers.pages.verifylogincode import VerifyLoginCode
from application.handlers.pages.logoutapi import LogoutApiHandler
from application.handlers.pages.projectdashboard import ProjectDashboardHandler
from application.handlers.pages.logout import LogoutHandler
from application.handlers.pages.register import RegisterHandler
from application.handlers.pages.agencyadminregistration \
import AgencyAdminRegistrationHandler
from application.handlers.pages.dashboard import DashboardHandler
from application.handlers.pages.adminregister import AdminRegisterHandler
from application.handlers.pages.upload import UploadHandler
from application.handlers.pages.viewer import ViewerHandler
from application.handlers.pages.import_ import ImportHandler
from application.handlers.pages.invitedenvironment \
import InvitedEnvironmentHandler
from application.handlers.pages.scriptuploading import ScriptUploadingHandler
from application.handlers.pages.publicuserregistration \
import PublicUsersRegistrationHandler
from application.handlers.pages.passwordreset import PasswordResetHandler
from application.handlers.pages.verifyregister import VerifyRegisterHandler
from application.handlers.pages.sendverification import SendVerificationHandler
from application.handlers.pages.usergroups import UserGroupsHandler
from application.handlers.pages.classificationtokml \
import ClassificationToKMLHandler
from application.handlers.pages.environment import EnvironmentHandler
from application.handlers.pages.permission import PermissionHandler
from application.handlers.pages.taskqueueemails import TaskQueueEmailsHandler
from application.handlers.pages.taskcounter import TaskCounterHandler
from application.handlers.pages.taskimage import TaskImageHandler
from application.handlers.api.psgc import PSGCHandler
from application.handlers.api.redflags import RedFlagsHandler
from application.handlers.api.apiproxy import APIProxyHandler
from application.handlers.api.uacsapi import UACSAPIHandler
from application.handlers.api.uacsapiv2 import UACSAPIV2Handler
from application.handlers.api.usersapi import UsersApiHandler
from application.handlers.api.environmentsapi import EnvironmentsApiHandler
from application.handlers.api.usergroupsapi import UserGroupsApiHandler
from application.handlers.api.dataapi import DataApiHandler
from application.handlers.api.logs import LogsHandler
from application.handlers.api.classificationupload \
import ClassificationUploadHandler
from application.handlers.api.apikmldownloader import APIKMLDownloader
from application.handlers.api.dataapiupdate import DataApiUpdateHandler
from application.handlers.api.dataapipublish import DataApiPublishHandler
from application.handlers.api.dataapidetails import DataApiDetailsHandler
from application.handlers.api.kmllength import KMLLengthHandler
from application.handlers.api.program import ProgramAPIHandler
from application.handlers.pages.error import ErrorHandler
from application.handlers.pages.logexception import LogExceptionHandler
from application.handlers.pages.main_ import MainHandler
from application.handlers.pages.program import ProgramHandler
from application.handlers.pages.agency import AgencyHandler
from application.handlers.pages.workspace import WorkspaceHandler
from application.handlers.pages.new_statistics import NewStatisticsDashboard
from application.handlers.pages.generate_statistics import GenerateStatisticsHandler
from application.models.apidata import APIData
from google.appengine.ext import ndb
class TaskRePutHandler(webapp2.RequestHandler):
    """Task-queue endpoint that re-puts APIData entities in batches of 50.

    Each invocation saves one page of entities and, while full pages keep
    coming back, re-enqueues itself with the datastore cursor and the
    running count.
    """

    def post(self):
        batch_size = 50
        running_count = 0
        start_cursor = None
        cursor_arg = self.request.get('cursor')
        if cursor_arg:
            start_cursor = Cursor(urlsafe=cursor_arg)
        count_arg = self.request.get('count')
        if count_arg:
            running_count = int(count_arg)
        query = APIData.query().order(APIData.created_time)
        batch, next_cursor, more = query.fetch_page(batch_size, start_cursor=start_cursor)
        # Re-put the fetched page so datastore hooks/indexes run again.
        if batch:
            ndb.put_multi(batch)
            running_count += len(batch)
            logging.debug('count: ' + str(running_count))
        # A full page means there may be more records: chain the next task.
        if len(batch) == batch_size and next_cursor:
            taskqueue.add(
                url=('/api/v1/JMKr5roUu0EQyssRVv8mvkgXsmQBt3sgNDbfoBIkwoUi59dz'
                     'zQJnvmQ5jIlNtC4c'),
                params={'cursor': next_cursor.urlsafe(), 'count': str(running_count)}
            )
# Thread-local storage for per-request state.
this_thread = threading.local()

# Jinja2 environment used to render the frontend templates, with a custom
# date-formatting filter registered below.
jinja_workspace = jinja2.Environment(
    loader=jinja2.FileSystemLoader('application/frontend/'),
    autoescape=True,
    trim_blocks=True)
jinja_workspace.filters['to_date_format_only'] = to_date_format_only
# URL routing table. A catch-all DomainRoute serves every host; routes are
# matched top-down, so more specific paths must come first.
app = webapp2.WSGIApplication([
    routes.DomainRoute(r'<:.*>', [
        webapp2.Route('/', MainHandler),
        webapp2.Route('/dashboard', DashboardHandler),
        webapp2.Route('/dashboard/statistics', StatisticsDashboard),
        webapp2.Route('/dashboard/statistics2', StatisticsDashboard2),
        # webapp2.Route(r'/statistics/generate/<:.*>', GenerateStatisticsHandler),
        webapp2.Route('/statistics/generate', GenerateStatisticsHandler),
        webapp2.Route('/statistics', NewStatisticsDashboard),
        # Project pages accept a variable number of optional path segments.
        webapp2.Route(r'/projects/<:.*>/<:.*>/<:.*>/<:.*>/<:.*>/<:.*>',
                      ProjectDashboardHandler),
        webapp2.Route(r'/projects/<:.*>/<:.*>/<:.*>/<:.*>/<:.*>',
                      ProjectDashboardHandler),
        webapp2.Route(r'/projects/<:.*>/<:.*>/<:.*>/<:.*>',
                      ProjectDashboardHandler),
        webapp2.Route(r'/projects/<:.*>/<:.*>/<:.*>', ProjectDashboardHandler),
        webapp2.Route(r'/projects/<:.*>/<:.*>', ProjectDashboardHandler),
        webapp2.Route(r'/projects/<:.*>', ProjectDashboardHandler),
        webapp2.Route(r'/upload/<:.*>/<:.*>/<:.*>/<:.*>', UploadHandler),
        webapp2.Route(r'/upload/<:.*>/<:.*>/<:.*>', UploadHandler),
        webapp2.Route(r'/upload/<:.*>/<:.*>', UploadHandler),
        webapp2.Route(r'/upload/<:.*>', UploadHandler),
        webapp2.Route('/projects', ProjectDashboardHandler),
        webapp2.Route(r'/programs/<:.*>/<:.*>', ProgramHandler),
        webapp2.Route(r'/programs/<:.*>', ProgramHandler),
        webapp2.Route('/programs', ProgramHandler),
        webapp2.Route(r'/agencies/<:.*>', AgencyHandler),
        webapp2.Route('/agencies', AgencyHandler),
        webapp2.Route('/viewer', ViewerHandler),
        webapp2.Route('/import', ImportHandler),
        webapp2.Route(r'/import/<:.*>', ImportHandler),
        webapp2.Route(r'/invite/workspace/<:.*>', InvitedEnvironmentHandler),
        webapp2.Route(r'/su/<:.*>', ScriptUploadingHandler),
        # Authentication and account management.
        webapp2.Route('/login', LoginHandler),
        webapp2.Route('/login/authorize', LoginOauthHandler),
        webapp2.Route(r'/login/verify/<:.*>', VerifyLoginCode),
        webapp2.Route('/logout', LogoutHandler),
        webapp2.Route('/api/logout', LogoutApiHandler),
        webapp2.Route('/register', RegisterHandler),
        webapp2.Route('/admin/register', AdminRegisterHandler),
        webapp2.Route('/register/verify', VerifyRegisterHandler),
        webapp2.Route('/register/verify/send', SendVerificationHandler),
        webapp2.Route('/agency/admins', AgencyAdminRegistrationHandler),
        webapp2.Route('/users/registration', PublicUsersRegistrationHandler),
        webapp2.Route('/password/reset', PasswordResetHandler),
        webapp2.Route('/groups', UserGroupsHandler),
        webapp2.Route(r'/groups/<:.*>', UserGroupsHandler),
        webapp2.Route('/workspace', WorkspaceHandler),
        webapp2.Route(r'/workspace/<:.*>', WorkspaceHandler),
        # Geoprocessing pages.
        webapp2.Route('/geoprocessing/dashboard',
                      GeoprocessingDashboardHandler),
        webapp2.Route('/geoprocessing/for_geoprocessing',
                      ForGeoprocessedPageHandler),
        webapp2.Route('/geoprocessing/geoprocessed', GeoprocessedPageHandler),
        webapp2.Route('/geoprocessing/classification',
                      GeoprocessingClassificationHandler),
        webapp2.Route('/geoprocessing/tool', GeoprocessingToolHandler),
        webapp2.Route('/geoprocessing/tool/images',
                      GeoprocessingToolImagesHandler),
        webapp2.Route('/geoprocessing/kml/download',
                      ClassificationToKMLHandler),
        # TASKQUEUE
        webapp2.Route('/tasks/email/send', TaskQueueEmailsHandler),
        webapp2.Route('/tasks/counter', TaskCounterHandler),
        webapp2.Route('/tasks/images', TaskImageHandler),
        # API ENDPOINTS
        webapp2.Route('/api/v1/length', KMLLengthHandler),
        webapp2.Route(r'/api/v1/programs/<:.*>', ProgramAPIHandler),
        webapp2.Route('/api/v1/programs', ProgramAPIHandler),
        webapp2.Route('/api/v1/psgc', PSGCHandler),
        webapp2.Route('/api/v1/redflags', RedFlagsHandler),
        webapp2.Route('/api/v1/proxy', APIProxyHandler),
        webapp2.Route('/api/v1/uacs', UACSAPIHandler),
        webapp2.Route('/api/v2/uacs', UACSAPIV2Handler),
        webapp2.Route('/api/v1/permissions', PermissionHandler),
        webapp2.Route('/api/v1/users', UsersApiHandler),
        webapp2.Route(r'/api/v1/users/<:.*>', UsersApiHandler),
        webapp2.Route('/api/v1/workspaces', EnvironmentsApiHandler),
        webapp2.Route(r'/api/v1/workspaces/<:.*>', EnvironmentsApiHandler),
        webapp2.Route('/api/v1/groups', UserGroupsApiHandler),
        webapp2.Route(r'/api/v1/groups/<:.*>', UserGroupsApiHandler),
        webapp2.Route('/api/v1/classification', ClassificationUploadHandler),
        webapp2.Route('/api/v1/KML', APIKMLDownloader),
        webapp2.Route('/api/v1/data', DataApiHandler),
        webapp2.Route(r'/api/v1/data/<:.*>/update', DataApiUpdateHandler),
        webapp2.Route(r'/api/v1/data/<:.*>/publish', DataApiPublishHandler),
        webapp2.Route(r'/api/v1/data/<:.*>', DataApiDetailsHandler),
        webapp2.Route(r'/api/v1/logs', LogsHandler),
        # Catch-all: anything unmatched renders the error page.
        webapp2.Route(r'/<:.*>', ErrorHandler)
    ])
], debug=True)

# Route unhandled server errors through the custom exception logger.
app.error_handlers[500] = LogExceptionHandler.log_exception
| 50.075556 | 84 | 0.730097 | 1,028 | 11,267 | 7.978599 | 0.234436 | 0.112655 | 0.165447 | 0.14338 | 0.189344 | 0.169349 | 0.070105 | 0.060107 | 0.049256 | 0.049256 | 0 | 0.013543 | 0.148043 | 11,267 | 224 | 85 | 50.299107 | 0.840921 | 0.016065 | 0 | 0.045 | 0 | 0 | 0.132256 | 0.062291 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005 | false | 0.01 | 0.375 | 0 | 0.385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
390e452dfc5d623666ee9a6aa2a605d724a0f630 | 585 | py | Python | pylib/mps/util/push_util.py | xkmato/py77 | 9c44d8f8924f47a7331c29fd0287a4bb9416d316 | [
"MIT"
] | null | null | null | pylib/mps/util/push_util.py | xkmato/py77 | 9c44d8f8924f47a7331c29fd0287a4bb9416d316 | [
"MIT"
] | null | null | null | pylib/mps/util/push_util.py | xkmato/py77 | 9c44d8f8924f47a7331c29fd0287a4bb9416d316 | [
"MIT"
] | 2 | 2018-07-16T19:14:11.000Z | 2020-10-15T08:48:32.000Z | #!/usr/bin/env python
"""
A variety of push utility functions
"""
from pylib.util.git_util import GitUtil
__author__ = 'edelman@room77.com (Nicholas Edelman)'
__copyright__ = 'Copyright 2013 Room77, Inc.'
class PushUtil(object):
  """Helpers shared by the push tooling."""

  @classmethod
  def get_deployspec_name(cls, cluster_name):
    """Return the deployspec name for a cluster.

    Follows the convention $cluster-$current_branchname.
    Args:
      cluster_name - the cluster name
    Returns:
      the deployspec name for the current branch and cluster
    """
    branch = GitUtil.get_current_branch()
    return '%s-%s' % (cluster_name, branch)
| 24.375 | 65 | 0.716239 | 75 | 585 | 5.373333 | 0.613333 | 0.104218 | 0.099256 | 0.119107 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016771 | 0.184615 | 585 | 23 | 66 | 25.434783 | 0.828092 | 0.418803 | 0 | 0 | 0 | 0 | 0.228477 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
390ea59316bebdcf2ee6aecf82c4ccdade1f6444 | 1,899 | py | Python | shardingpy/parsing/lexer/dialect/mysql.py | hongfuli/sharding-py | a26a64aa9d9196c830e7e2fa4095a58bef608a40 | [
"Apache-2.0"
] | 1 | 2021-01-29T13:29:29.000Z | 2021-01-29T13:29:29.000Z | shardingpy/parsing/lexer/dialect/mysql.py | hongfuli/sharding-py | a26a64aa9d9196c830e7e2fa4095a58bef608a40 | [
"Apache-2.0"
] | null | null | null | shardingpy/parsing/lexer/dialect/mysql.py | hongfuli/sharding-py | a26a64aa9d9196c830e7e2fa4095a58bef608a40 | [
"Apache-2.0"
] | null | null | null | import enum
from shardingpy.parsing.lexer import lexer
from shardingpy.parsing.lexer import token
class MySQLKeyword(enum.IntEnum):
    """Keywords specific to the MySQL dialect (beyond standard SQL).

    The integer values are only identifiers for the token dictionary;
    they carry no semantic meaning.
    """
    SHOW = 1
    DUAL = 2
    LIMIT = 3
    OFFSET = 4
    VALUE = 5
    BEGIN = 6
    FORCE = 7
    PARTITION = 8
    DISTINCTROW = 9
    KILL = 10
    QUICK = 11
    BINARY = 12
    CACHE = 13
    SQL_CACHE = 14
    SQL_NO_CACHE = 15
    SQL_SMALL_RESULT = 16
    SQL_BIG_RESULT = 17
    SQL_BUFFER_RESULT = 18
    SQL_CALC_FOUND_ROWS = 19
    LOW_PRIORITY = 20
    HIGH_PRIORITY = 21
    OPTIMIZE = 22
    ANALYZE = 23
    IGNORE = 24
    CHANGE = 25
    FIRST = 26
    SPATIAL = 27
    ALGORITHM = 28
    COLLATE = 29
    DISCARD = 30
    IMPORT = 31
    VALIDATION = 32
    REORGANIZE = 33
    EXCHANGE = 34
    REBUILD = 35
    REPAIR = 36
    REMOVE = 37
    UPGRADE = 38
    KEY_BLOCK_SIZE = 39
    AUTO_INCREMENT = 40
    AVG_ROW_LENGTH = 41
    CHECKSUM = 42
    COMPRESSION = 43
    CONNECTION = 44
    DIRECTORY = 45
    DELAY_KEY_WRITE = 46
    ENCRYPTION = 47
    ENGINE = 48
    INSERT_METHOD = 49
    MAX_ROWS = 50
    MIN_ROWS = 51
    PACK_KEYS = 52
    ROW_FORMAT = 53
    DYNAMIC = 54
    FIXED = 55
    COMPRESSED = 56
    REDUNDANT = 57
    COMPACT = 58
    STATS_AUTO_RECALC = 59
    STATS_PERSISTENT = 60
    STATS_SAMPLE_PAGES = 61
    DISK = 62
    MEMORY = 63
    ROLLUP = 64
    RESTRICT = 65
    STRAIGHT_JOIN = 66
    REGEXP = 67
class MySQLLexer(lexer.Lexer):
    """Lexer for the MySQL dialect: MySQL keywords, '#' line comments,
    '@' variables, and '/*!' optimizer-hint detection."""

    dictionary = token.Dictionary(MySQLKeyword)

    def __init__(self, sql):
        super().__init__(sql, MySQLLexer.dictionary)

    def is_hint_begin(self):
        # MySQL optimizer hints open with the three characters '/*!'.
        if self.get_current_char(0) != '/':
            return False
        return self.get_current_char(1) == '*' and self.get_current_char(2) == '!'

    def is_comment_begin(self):
        # MySQL additionally allows '#' line comments.
        if self.get_current_char(0) == '#':
            return True
        return super().is_comment_begin()

    def is_variable_begin(self):
        # User variables are introduced by '@'.
        return '@' == self.get_current_char(0)
| 20.868132 | 118 | 0.61664 | 251 | 1,899 | 4.434263 | 0.697211 | 0.031447 | 0.062893 | 0.080863 | 0.186882 | 0.091644 | 0.091644 | 0.091644 | 0 | 0 | 0 | 0.09856 | 0.305424 | 1,899 | 90 | 119 | 21.1 | 0.745262 | 0 | 0 | 0 | 0 | 0 | 0.002633 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049383 | false | 0 | 0.049383 | 0.037037 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
391a5ba570f3e424763ecfe5d6242c832be51961 | 575 | py | Python | app/jwt.py | smolveau/Simple-Flask-Web-App-CI-CD | 99d0459cbcbbc8726968d7d191226fbdab46445e | [
"MIT"
] | null | null | null | app/jwt.py | smolveau/Simple-Flask-Web-App-CI-CD | 99d0459cbcbbc8726968d7d191226fbdab46445e | [
"MIT"
] | null | null | null | app/jwt.py | smolveau/Simple-Flask-Web-App-CI-CD | 99d0459cbcbbc8726968d7d191226fbdab46445e | [
"MIT"
] | null | null | null | # app/jwt.py
from os import environ as env
from itsdangerous import (
TimedJSONWebSignatureSerializer as Serializer,
BadSignature,
SignatureExpired,
)
def generate_jwt(claims, expiration=172800):
    """Serialize *claims* into a signed, time-limited token string.

    Signed with SECRET_KEY from the environment; expires after
    *expiration* seconds (default 172800 = 48 hours).
    """
    secret = env.get("SECRET_KEY")
    signer = Serializer(secret, expires_in=expiration)
    token_bytes = signer.dumps(claims)
    return token_bytes.decode("utf-8")
def load_jwt(token):
    """Deserialize *token* and return its claims.

    Raises:
        Exception: if the token has expired or its signature is invalid.
    """
    s = Serializer(env.get("SECRET_KEY"))
    try:
        data = s.loads(token)
    # Fix: both failure modes had byte-identical handlers; a single
    # tuple except clause removes the duplication.
    except (SignatureExpired, BadSignature) as err:
        raise Exception(str(err))
    return data
| 23 | 64 | 0.692174 | 72 | 575 | 5.458333 | 0.555556 | 0.05598 | 0.071247 | 0.086514 | 0.259542 | 0.259542 | 0 | 0 | 0 | 0 | 0 | 0.015351 | 0.206957 | 575 | 24 | 65 | 23.958333 | 0.846491 | 0.017391 | 0 | 0.111111 | 1 | 0 | 0.044405 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3922a73eca65de4ee19b6e71f0f70623869553df | 238 | py | Python | examples/client-context/client.py | barberj/bridge-python | 1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe | [
"MIT"
] | null | null | null | examples/client-context/client.py | barberj/bridge-python | 1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe | [
"MIT"
] | null | null | null | examples/client-context/client.py | barberj/bridge-python | 1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe | [
"MIT"
] | null | null | null | from BridgePython import Bridge
# Connect this process to the Bridge service using the application API key.
bridge = Bridge(api_key='myapikey')
class PongHandler(object):
    """Service handler whose pong() is invoked by remote ping calls."""

    def pong(self):
        # Log that a ping reached this client.
        print ("PONG!")
# Publish PongHandler remotely under the name "pong", invoke the remote
# "ping" service once, then start the Bridge event loop.
bridge.store_service("pong", PongHandler())
bridge.get_service("ping").ping()
bridge.connect()
| 18.307692 | 43 | 0.710084 | 29 | 238 | 5.724138 | 0.655172 | 0.144578 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.138655 | 238 | 12 | 44 | 19.833333 | 0.809756 | 0 | 0 | 0 | 0 | 0 | 0.088608 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.375 | 0.125 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
392cf46d90da25ce8a11ce807453e2474844bf87 | 867 | py | Python | visitors/models.py | maxhamz/prieds_test_hospital_queue_be | 44529f65dcd167caa48c84926e118d86a7d38b92 | [
"MIT"
] | null | null | null | visitors/models.py | maxhamz/prieds_test_hospital_queue_be | 44529f65dcd167caa48c84926e118d86a7d38b92 | [
"MIT"
] | null | null | null | visitors/models.py | maxhamz/prieds_test_hospital_queue_be | 44529f65dcd167caa48c84926e118d86a7d38b92 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Visitor(models.Model):
    """A visitor registration record for the hospital queue."""

    # Gender choice constants: (stored value, human-readable label).
    MALE = 'M'
    FEMALE = 'F'
    OTHER = 'X'
    GENDER_OPTIONS = [
        (MALE, 'Male'),
        (FEMALE, 'Female'),
        (OTHER, 'Other')
    ]

    # Set automatically when the row is first created.
    dtRegistered = models.DateTimeField(auto_now_add=True)
    strFullName = models.CharField(max_length=256, blank=False)
    eGender = models.CharField(max_length=2,
                               choices=GENDER_OPTIONS,
                               default=OTHER)  # SELECT M, F, OR X
    # NOTE(review): max_length has no effect on DateField -- confirm intent.
    dtBirth = models.DateField(max_length=8,
                               auto_now=False,
                               auto_now_add=False)  # YYYY-MM-DD FORMAT
    # Government-issued ID number -- TODO(review): confirm expected format.
    strGovtIdNo = models.CharField(max_length=16, blank=False)
    strAddress = models.TextField(default='Indonesia')

    class Meta:
        # Oldest registrations first.
        ordering = ['dtRegistered']
392daab33fb8dcb9b0b5a07380a98a73e1208a33 | 1,751 | py | Python | muchbettermoments.py | mirca/muchbettermoments | 8cc2bf18ff52abf86151a12358434691bea0857d | [
"MIT"
] | 1 | 2019-07-01T18:25:35.000Z | 2019-07-01T18:25:35.000Z | muchbettermoments.py | mirca/muchbettermoments | 8cc2bf18ff52abf86151a12358434691bea0857d | [
"MIT"
] | null | null | null | muchbettermoments.py | mirca/muchbettermoments | 8cc2bf18ff52abf86151a12358434691bea0857d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function
__all__ = ["quadratic_2d"]
import numpy as np
def quadratic_2d(data):
    """
    Compute the quadratic estimate of the centroid in a 2d-array.

    A quadratic surface is least-squares fitted to the 3x3 neighborhood of
    the brightest pixel and the vertex of that surface is returned.

    Args:
        data (2darray): two dimensional data array

    Returns
        center (tuple): centroid estimate on the row and column directions,
                        respectively
    """
    peak_index = np.argmax(data)
    i, j = np.unravel_index(peak_index, data.shape)
    w = data[i - 1:i + 2, j - 1:j + 2]
    # Model: f(x, y) = a + b*x + c*y + d*x^2 + e*x*y + f*y^2 over the 3x3
    # window. The closed-form least-squares coefficients below are unique;
    # their uncertainties (#TODO) can be computed following the derivations
    # of Vakili & Hogg (2016) and Teague & Foreman-Mackey (2018).
    try:
        a = (-w[0, 0] + 2 * w[0, 1] - w[0, 2] + 2 * w[1, 0] + 5 * w[1, 1] +
             2 * w[1, 2] - w[2, 0] + 2 * w[2, 1] - w[2, 2]) / 9
        b = (-w[0, 0] - w[0, 1] - w[0, 2] + w[2, 0] + w[2, 1] + w[2, 2]) / 6
        c = (-w[0, 0] + w[0, 2] - w[1, 0] + w[1, 2] - w[2, 0] + w[2, 2]) / 6
        d = (w[0, 0] + w[0, 1] + w[0, 2] - 2 * w[1, 0] - 2 * w[1, 1] -
             2 * w[1, 2] + w[2, 0] + w[2, 1] + w[2, 2]) / 6
        e = (w[0, 0] - w[0, 2] - w[2, 0] + w[2, 2]) * .25
        f = (w[0, 0] - 2 * w[0, 1] + w[0, 2] + w[1, 0] - 2 * w[1, 1] +
             w[1, 2] + w[2, 0] - 2 * w[2, 1] + w[2, 2]) / 6
    except IndexError:
        # Peak sits on the array border, so the 3x3 window is incomplete:
        # fall back to the integer-pixel maximum.
        return (i, j)
    # Vertex of the fitted paraboloid (where the gradient vanishes); see
    # https://en.wikipedia.org/wiki/Quadratic_function
    det = 4 * d * f - e ** 2
    xm = -(2 * f * b - c * e) / det
    ym = -(2 * d * c - b * e) / det
    return (i + xm, j + ym)
| 37.255319 | 82 | 0.503141 | 332 | 1,751 | 2.46988 | 0.322289 | 0.043902 | 0.040244 | 0.029268 | 0.168293 | 0.168293 | 0.163415 | 0.130488 | 0.120732 | 0.120732 | 0 | 0.103843 | 0.301542 | 1,751 | 46 | 83 | 38.065217 | 0.566639 | 0.368361 | 0 | 0 | 0 | 0 | 0.011299 | 0 | 0 | 0 | 0 | 0.021739 | 0 | 1 | 0.043478 | false | 0 | 0.086957 | 0 | 0.217391 | 0.043478 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
392e0fa49127c7455cbbc085060a98c4b07b4219 | 600 | py | Python | mmdetection_pipeline/tests/mmdet_test.py | KonstantinSviridov/mmdetection_pipeline | 7e17c4bb48af28713d018e087907f7295ef68d7e | [
"MIT"
] | null | null | null | mmdetection_pipeline/tests/mmdet_test.py | KonstantinSviridov/mmdetection_pipeline | 7e17c4bb48af28713d018e087907f7295ef68d7e | [
"MIT"
] | 2 | 2019-12-13T04:40:34.000Z | 2019-12-13T04:41:19.000Z | mmdetection_pipeline/tests/mmdet_test.py | musket-ml/instance_segmentation_pipeline | 7e17c4bb48af28713d018e087907f7295ef68d7e | [
"MIT"
] | null | null | null | import unittest
from musket_core import projects
from musket_core import parralel
import os
# Directory containing this test module; used to locate the bundled
# "project" fixture directory.
fl=__file__
fl=os.path.dirname(fl)
class TestCoders(unittest.TestCase):
    """End-to-end check that the bundled project experiment fits cleanly."""

    def test_basic_network(self):
        project = projects.Project(os.path.join(fl, "project"))
        experiment = project.byName("exp01")
        fit_tasks = experiment.fit()
        # Run the fit tasks on a single worker / single GPU slot.
        runner = parralel.get_executor(1, 1)
        runner.execute(fit_tasks)
        result = experiment.result()
        self.assertGreaterEqual(result, 0, "Result should be greater then zero")
        self.assertTrue(isinstance(result, float), "result should be float")
        print(result)
392e81963b0ccd94345db1a8d6229e0ef20fd753 | 3,038 | py | Python | Paleo_DB_Rip.py | matt-oak/DinoFinder | 8a66c6da77dae01b6083155d724479e02abb8440 | [
"MIT"
] | null | null | null | Paleo_DB_Rip.py | matt-oak/DinoFinder | 8a66c6da77dae01b6083155d724479e02abb8440 | [
"MIT"
] | null | null | null | Paleo_DB_Rip.py | matt-oak/DinoFinder | 8a66c6da77dae01b6083155d724479e02abb8440 | [
"MIT"
] | null | null | null | #Paleo_DB_Rip.py
#Python script to programmatically web-scrape from paleobiodb.org
#Author: Matt Oakley
#Date: 08/15/2016
# Imports #
from bs4 import BeautifulSoup
from time import sleep
from geopy.geocoders import Nominatim
import urllib2
import pycountry
import wget
import sys
import os.path
import codecs
# Globals #
# Dinosaur genera whose fossil-occurrence locations we scrape and cache.
listed_dinos = ["Tyrannosaurus", "Stegosaurus", "Velociraptor", "Triceratops", "Spinosaurus"]
def retrieve_webpage(dino_name):
    """Download the paleobiodb occurrence list (with locations) for
    *dino_name* and return the page as a list of text lines."""
    url = ("https://paleobiodb.org/data1.2/occs/list.txt?base_name=" +
           dino_name + "&show=loc")
    response = urllib2.urlopen(url)
    return str(BeautifulSoup(response, "lxml")).splitlines()
def extract_webpage_header(web_page):
    """Return the header row of *web_page* split into its quoted fields,
    with the comma delimiter elements removed."""
    fields = web_page[0].split("\"")
    return [field for field in fields if field != ","]
def construct_location_string(county, state, cc):
    """Build a 'county, state, country' string usable by geopy, or None
    when the two-letter country code is unknown."""
    try:
        # Resolve the ISO alpha-2 country code to its full name.
        country_name = str(pycountry.countries.get(alpha2=cc).name)
    except KeyError:
        return None
    pieces = [state, country_name]
    if county != "":
        # Prepend the county only when the record provides one.
        pieces.insert(0, county)
    return ", ".join(pieces)
def construct_GPS_coords(location):
    """Geocode *location* and return (latitude, longitude), or None when
    the lookup yields nothing. Sleeps 1s between lookups to stay within
    the Nominatim rate limit."""
    geocoded = Nominatim().geocode(location)
    sleep(1)
    if geocoded is None:
        return None
    return (geocoded.latitude, geocoded.longitude)
def parse_locations(web_page):
    """For each occurrence row in *web_page*, build a location string and
    geocode it; returns a list of (lat, lon) tuples (None entries where
    geocoding failed)."""
    # Get the indexes of country code, state, and county
    header = extract_webpage_header(web_page)
    index_of_country = header.index("cc")
    index_of_state = header.index("state")
    index_of_county = header.index("county")
    coords_list = []
    # For all locations, get the lat/lon coordinates and output to list
    # (range skips the header row and the trailing blank line).
    for i in range(1, len(web_page) - 1):
        entry = web_page[i].split("\"")
        entry[:] = [x for x in entry if x != ","]
        country = entry[index_of_country]
        state = entry[index_of_state]
        county = entry[index_of_county]
        location = construct_location_string(county, state, country)
        print location
        # Coords Format: (Lat, Lon)
        coords = construct_GPS_coords(location)
        coords_list.append(coords)
    return coords_list
def output_locations(locations, dino):
    """Write one location tuple per line to dinosaur_locs/<dino>.txt.

    Fix: the original never closed the file handle; the `with` block
    guarantees the buffer is flushed and the descriptor released.
    """
    filename = "dinosaur_locs/" + dino + ".txt"
    with open(filename, "w") as output_file:
        for location in locations:
            output_file.write(str(location) + "\n")
def check_if_file_exists(dino):
    """Return 1 when a cached location file already exists for *dino*,
    otherwise 0."""
    filename = "dinosaur_locs/" + dino + ".txt"
    return 1 if os.path.isfile(filename) else 0
# Scrape locations for every listed dinosaur, skipping genera that already
# have a cached output file.
for i in range(0, len(listed_dinos)):
    file_bool = check_if_file_exists(listed_dinos[i])
    # NOTE(review): the page is fetched even when the cache exists and the
    # result is then discarded -- confirm whether that is intentional.
    web_page = retrieve_webpage(listed_dinos[i])
    if file_bool == 0:
        locations = parse_locations(web_page)
        output_locations(locations, listed_dinos[i])
    else:
        print "kek"
        continue
3932406768d964a4c7968afb2b7511f4c0d4b671 | 2,569 | py | Python | apps/events/views.py | seanlefevre/openduty | 34ab21117f114ccc808d8b0aa2cb801c819bdb86 | [
"MIT"
] | 145 | 2016-04-11T06:53:13.000Z | 2022-03-22T05:15:49.000Z | apps/events/views.py | seanlefevre/openduty | 34ab21117f114ccc808d8b0aa2cb801c819bdb86 | [
"MIT"
] | 78 | 2017-09-24T10:59:49.000Z | 2022-02-12T07:36:27.000Z | apps/events/views.py | seanlefevre/openduty | 34ab21117f114ccc808d8b0aa2cb801c819bdb86 | [
"MIT"
] | 30 | 2016-04-11T06:53:16.000Z | 2021-12-29T11:39:26.000Z | from django.views.generic import DeleteView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib import messages
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from schedule.models import Calendar
from schedule.views import CreateEventView, EditEventView, EventMixin
from apps.events.forms import CustomEventForm
class CustomCreateEventView(CreateEventView):
    """Create an event inside a calendar, rendering the shared edit form."""

    form_class = CustomEventForm
    template_name = 'event/edit.html'

    def get_context_data(self, **kwargs):
        """Add the calendar resolved from the URL slug to the template context."""
        context = super(CustomCreateEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(Calendar, slug=self.kwargs.get('calendar_slug'))
        context.update({"calendar": calendar})
        return context

    def form_valid(self, form):
        """Persist the event, flash a notice and redirect to the calendar."""
        super(CustomCreateEventView, self).form_valid(form)
        # Bug fix: the success notice was emitted with messages.error(),
        # which renders "Event created successfully." as an error banner.
        messages.success(self.request, 'Event created successfully.')
        return HttpResponseRedirect(
            reverse('calendar_details', kwargs={'calendar_slug': self.kwargs.get('calendar_slug')})
        )
class CustomUpdateEventView(EditEventView):
    """Edit an existing event, rendering the shared edit form."""

    form_class = CustomEventForm
    template_name = 'event/edit.html'

    def get_context_data(self, **kwargs):
        """Add the calendar resolved from the URL slug to the template context."""
        context = super(CustomUpdateEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(Calendar, slug=self.kwargs.get('calendar_slug'))
        context.update({"calendar": calendar})
        return context

    def form_valid(self, form):
        """Persist the edit, flash a notice and redirect to the calendar."""
        super(CustomUpdateEventView, self).form_valid(form)
        # Bug fix: the success notice was emitted with messages.error(),
        # which renders "Event edited successfully." as an error banner.
        messages.success(self.request, 'Event edited successfully.')
        return HttpResponseRedirect(
            reverse('calendar_details', kwargs={'calendar_slug': self.kwargs.get('calendar_slug')})
        )
class CustomDeleteEventView(LoginRequiredMixin, EventMixin, DeleteView):
    """Delete Event"""

    template_name = 'event/delete.html'

    def get_success_url(self):
        # After deletion, return to the calendar the event belonged to.
        return reverse('calendar_details', args=[self.kwargs.get('calendar_slug')])

    def get_context_data(self, **kwargs):
        # Expose both the event being deleted and its parent calendar.
        context = super(CustomDeleteEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(Calendar, slug=self.kwargs.get('calendar_slug'))
        extra = {
            'event': self.object,
            'calendar': calendar
        }
        context.update(extra)
        return context
| 36.183099 | 99 | 0.695601 | 272 | 2,569 | 6.386029 | 0.224265 | 0.075993 | 0.048359 | 0.072539 | 0.563615 | 0.549223 | 0.549223 | 0.549223 | 0.52677 | 0.473805 | 0 | 0.005888 | 0.206695 | 2,569 | 70 | 100 | 36.7 | 0.846418 | 0.004671 | 0 | 0.438596 | 0 | 0 | 0.110153 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.157895 | 0.017544 | 0.508772 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
39374b6e345f46dad950a9f9736af1abd9167fd0 | 2,055 | py | Python | examples/experiments_code/amazon_reviews/sentiment_subsampling.py | fossabot/textlytics | d172211316d688604bcd18d3581c3aac26dcc404 | [
"MIT"
] | 26 | 2016-12-05T19:37:27.000Z | 2021-01-03T21:48:23.000Z | examples/experiments_code/amazon_reviews/sentiment_subsampling.py | fossabot/textlytics | d172211316d688604bcd18d3581c3aac26dcc404 | [
"MIT"
] | 3 | 2017-07-15T13:33:18.000Z | 2020-09-21T11:39:37.000Z | examples/experiments_code/amazon_reviews/sentiment_subsampling.py | fossabot/textlytics | d172211316d688604bcd18d3581c3aac26dcc404 | [
"MIT"
] | 14 | 2017-05-29T22:19:35.000Z | 2021-01-03T21:48:24.000Z | import dill
import glob
import csv
import os
from os.path import basename, join
from joblib import Parallel, delayed
# Root folder with one CSV per Amazon review domain; the balanced output
# goes into a sibling directory keeping only 1-, 3- and 5-star rows.
domain_path = '/datasets/amazon-data/new-julian/domains'
domain_subdirectory = 'only-overall-lemma-and-label-sampling-1-3-5'
domain_files = glob.glob(join(domain_path,
                              'only-overall-lemma-and-label/*.csv'))
all_stars_count = {}
output_csv = join(domain_path, domain_subdirectory)
try:
    os.makedirs(output_csv)
except OSError:
    # Pre-Python-3.2 idiom for makedirs(..., exist_ok=True): only re-raise
    # when the failure is something other than the directory already existing.
    if not os.path.isdir(output_csv):
        raise
def stars(domain_file):
    """Balance one domain CSV over star ratings 1, 3 and 5.

    First pass counts rows per star; the smallest class size becomes the
    cap.  Second pass rewrites the file keeping at most ``min_count`` rows
    per star, restricted to the used stars.  Returns per-file bookkeeping
    (distribution, cap, stars kept) keyed by the output file name.
    """
    stars_count = [0, 0, 0, 0, 0]
    stars_used = [1, 3, 5]
    # First pass: tally how many reviews each star rating (1-5) has.
    with open(domain_file, 'r') as f:
        for line in f:
            l = line.replace('\r\n', '').split(',')
            stars_count[int(l[0]) - 1] += 1
    f_name = '{}.csv'.format(basename(domain_file).split('.')[0])
    # NOTE(review): the cap is the minimum over *all five* stars even though
    # only stars 1/3/5 are written out -- confirm this is intentional.
    min_count = min(stars_count)
    print '\nDomain: {}\nStars count: {}\nMin star count: {}\n'.format(f_name,
                                                                      stars_count,
                                                                      min_count)
    stars_count = [0, 0, 0, 0, 0]
    # Second pass: copy through at most min_count rows per used star.
    with open(domain_file, 'r') as f:
        with open(join(output_csv, f_name), 'w') as csv_file:
            sent_writer = csv.writer(csv_file, delimiter=',', quotechar=' ',
                                     quoting=csv.QUOTE_MINIMAL)
            for line in f:
                l = line.replace('\r\n', '').split(',')
                star_label = int(l[0])
                idx = star_label - 1
                stars_count[idx] += 1
                if stars_count[idx] <= min_count and star_label in stars_used:
                    sent_writer.writerow(l)
    # NOTE(review): the 'skip_stars' key actually holds the stars that were
    # *kept* -- the key name looks inverted, but callers may rely on it.
    return {f_name: {'distribution': stars_count,
                     'star_threshold': min_count,
                     'skip_stars': stars_used}
            }
# Subsample every domain in parallel (one worker per core) and persist the
# per-file bookkeeping returned by stars() for later inspection.
# (Dataset-concatenation residue fused onto the final line was stripped.)
results = Parallel(n_jobs=-1)(delayed(stars)(i) for i in domain_files)
with open(join(domain_path, domain_subdirectory, 'results.pkl'), 'w') as f:
    dill.dump(results, f)
393bd4af7328cb9c5923034c08fd8225175f6552 | 1,248 | py | Python | lattly_tests/converter_tests.py | yfarrugia/lattly | 9c8d02ece253d9f61b09d66bc87b097a15970619 | [
"BSD-2-Clause"
] | null | null | null | lattly_tests/converter_tests.py | yfarrugia/lattly | 9c8d02ece253d9f61b09d66bc87b097a15970619 | [
"BSD-2-Clause"
] | null | null | null | lattly_tests/converter_tests.py | yfarrugia/lattly | 9c8d02ece253d9f61b09d66bc87b097a15970619 | [
"BSD-2-Clause"
] | null | null | null | __author__ = 'yanikafarrugia'
import unittest
import lattly_service.converter
class ConverterTests(unittest.TestCase):
    """Unit tests for the angle and coordinate conversion helpers."""

    def test_degrees_to_radians(self):
        converted = lattly_service.converter.Converter.degrees_to_radians(120)
        self.assertEqual(converted, 2.0943951023931953)
        self.assertIsNotNone(converted)
        self.assertTrue(converted > 2)

    def test_radians_to_degrees(self):
        converted = lattly_service.converter.Converter.radians_to_degrees(1.57)
        self.assertIsNotNone(converted)
        self.assertTrue(converted < 90.0)
        self.assertTrue(converted > 89.9)
        self.assertEqual(converted, 89.954373835539243)

    def test_radians_to_cartesian(self):
        point = lattly_service.converter.Converter.radians_to_cartesian(0.73091096, -1.5294285)
        self.assertIsNotNone(point)
        self.assertTrue(point[0] > 0.03079231)
        self.assertTrue(point[1] < -0.74392960)
        self.assertTrue(point[2] > 0.66754818)

    def test_cartesian_to_radians(self):
        cartesian = [0.12824063, -0.75020731, 0.64125282]
        converted = lattly_service.converter.Converter.cartesian_to_radians(cartesian)
        self.assertIsNotNone(converted)
        self.assertTrue(converted[0] > 0.70015084)
        self.assertTrue(converted[1] < -1.40149245)
if __name__ == '__main__':
    # Run the converter suite directly; verbosity=2 prints one line per test.
    suite = unittest.TestLoader().loadTestsFromTestCase(ConverterTests)
    unittest.TextTestRunner(verbosity = 2).run(suite)
| 32 | 87 | 0.776442 | 163 | 1,248 | 5.717791 | 0.325153 | 0.120172 | 0.118026 | 0.133047 | 0.242489 | 0.169528 | 0 | 0 | 0 | 0 | 0 | 0.126899 | 0.103365 | 1,248 | 38 | 88 | 32.842105 | 0.705987 | 0 | 0 | 0.066667 | 0 | 0 | 0.017628 | 0 | 0 | 0 | 0 | 0 | 0.466667 | 1 | 0.133333 | false | 0 | 0.066667 | 0 | 0.233333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
39407878cc72837d87538645a4bdabbbbb973ff4 | 21,829 | py | Python | pydl/tests/test_rnn.py | nash911/PyDL | b0b6f599184c0046f503b9ee1703dc3dfe9a89f2 | [
"MIT"
] | null | null | null | pydl/tests/test_rnn.py | nash911/PyDL | b0b6f599184c0046f503b9ee1703dc3dfe9a89f2 | [
"MIT"
] | null | null | null | pydl/tests/test_rnn.py | nash911/PyDL | b0b6f599184c0046f503b9ee1703dc3dfe9a89f2 | [
"MIT"
] | null | null | null | # ------------------------------------------------------------------------
# MIT License
#
# Copyright (c) [2021] [Avinash Ranganath]
#
# This code is part of the library PyDL <https://github.com/nash911/PyDL>
# This code is licensed under MIT license (see LICENSE.txt for details)
# ------------------------------------------------------------------------
import unittest
import numpy as np
import numpy.testing as npt
import itertools
from collections import OrderedDict
import copy
from pydl.nn.rnn import RNN
from pydl import conf
class TestRNN(unittest.TestCase):
    def test_score_fn(self):
        """Check RNN.score_fn against hand-computed affine recurrences."""
        def test(inp, w, seq_len, true_out, bias=False):
            # Drive the raw (pre-activation) score through seq_len steps
            # starting from a zero hidden state and compare with true_out.
            num_neur = w['hidden'].shape[0]
            rnn = RNN(inp, num_neur, w, bias, seq_len)
            out_rnn = np.zeros((1, num_neur), dtype=conf.dtype)
            for _ in range(seq_len):
                out_rnn = rnn.score_fn({'h': out_rnn, 'inp': inp})
            npt.assert_almost_equal(out_rnn, true_out, decimal=5)

        # Manually calculated
        # -------------------
        X = np.ones((1, 3), dtype=conf.dtype)
        wh = np.ones((7, 7), dtype=conf.dtype)
        wx = np.random.rand(3, 7)
        w = {'hidden': wh, 'inp': wx}
        bias = np.random.rand(7)
        # With all-ones inputs and all-ones hidden weights, two steps reduce
        # to a closed-form sum of the input weights and bias.
        true_out = np.array([np.sum(wx) + np.sum(bias)] * 7).reshape(1, -1) + \
            np.sum(wx, axis=0, keepdims=True) + bias
        test(X, w, seq_len=2, true_out=true_out, bias=bias)

        # Combinatorial Test Cases
        # ------------------------
        feature_size = [1, 2, 3, 5, 6, 11]
        num_neurons = [1, 2, 3, 5, 6, 11]
        scale = [1e-6, 1e-3, 1e-1, 1e-0, 2, 3, 10]
        batch = 1
        for feat, neur, scl in list(itertools.product(feature_size, num_neurons, scale)):
            X = np.ones((batch, feat), dtype=conf.dtype)
            wh = np.ones((neur, neur), dtype=conf.dtype)
            wx = np.random.rand(feat, neur) * scl
            w = {'hidden': wh, 'inp': wx}
            bias = np.random.rand(neur) * scl
            true_out = np.array([np.sum(wx) + np.sum(bias)] * neur).reshape(1, -1) + \
                np.sum(wx, axis=0, keepdims=True) + bias
            test(X, w, seq_len=2, true_out=true_out, bias=bias)
    def test_forward(self):
        """Compare RNN.forward against NumPy reference recurrences for every
        activation (Linear/Sigmoid/Tanh/ReLU/Softmax) over a combinatorial
        sweep of sequence length, sizes, dropout and architecture type."""
        def test(inp, w, seq_len, true_out, bias=False, init_h_state=None, actv_fn='Sigmoid',
                 p=None, mask=None, architecture_type='many_to_many'):
            num_neur = w['hidden'].shape[0]
            rnn = RNN(inp, num_neur, w, bias, seq_len=seq_len, activation_fn=actv_fn,
                      architecture_type=architecture_type, dropout=p,
                      tune_internal_states=(False if init_h_state is None else True))
            if init_h_state is not None:
                rnn.init_hidden_state = init_h_state
                rnn.reset_internal_states()
            out_rnn = rnn.forward(inp, mask=mask)
            # Check if the output has the right keys
            npt.assert_equal(out_rnn.keys(), true_out.keys())
            for k, v in out_rnn.items():
                npt.assert_almost_equal(v, true_out[k], decimal=5)

        # Combinatorial Test Cases
        # ------------------------
        sequence_length = [1, 2, 3, 5, 6, 11]
        reduce_size = [0, 1]
        feature_size = [1, 2, 3, 5, 6, 11]
        num_neurons = [1, 2, 3, 5, 6, 11]
        one_hot = [True, False]
        scale = [1e-6, 1e-3, 1e-1, 1e-0, 2]
        dropout = [True, False]
        architecture_type = ['many_to_many', 'many_to_one']
        tune_internal_states = [True, False]

        for seq_len, r_size, feat, neur, oh, scl, dout, a_type, tune in list(itertools.product(
                sequence_length, reduce_size, feature_size, num_neurons, one_hot, scale, dropout,
                architecture_type, tune_internal_states)):
            batch_size = seq_len - (r_size if seq_len > 1 else 0)
            if oh:
                # One-hot inputs: a single 1 per row at a random column.
                X = np.zeros((batch_size, feat), dtype=conf.dtype)
                rnd_idx = np.random.randint(feat, size=batch_size)
                X[range(batch_size), rnd_idx] = 1
            else:
                X = np.random.uniform(-scl, scl, (batch_size, feat))
            wh = np.random.rand(neur, neur) * scl
            wx = np.random.rand(feat, neur) * scl
            w = {'hidden': wh, 'inp': wx}
            bias = np.random.rand(neur) * scl

            # Linear
            if tune:
                h_init = np.array(np.random.rand(1, neur), dtype=conf.dtype) * scl
                h = np.copy(h_init)
            else:
                h = np.zeros((1, neur), dtype=conf.dtype)
                h_init = None
            true_out_linear = OrderedDict()
            p = None
            mask = None
            for i, x in enumerate(X):
                h = np.matmul(h, wh) + np.matmul(x.reshape(1, -1), wx) + bias
                if dout:
                    if p is None:
                        p = np.random.rand()
                        mask = list()
                    # Inverted dropout for linear units: kept values scaled by 1/p.
                    mask.append(np.array(np.random.rand(*h.shape) < p, dtype=conf.dtype) / p)
                    layer_out = h * mask[-1]
                else:
                    layer_out = h
                if a_type == 'many_to_one':
                    # Only the final timestep's output is kept.
                    if i == batch_size - 1:
                        true_out_linear = OrderedDict()
                        true_out_linear[i + 1] = layer_out
                else:
                    true_out_linear[i + 1] = layer_out
            test(X, w, seq_len, true_out_linear, bias, h_init, actv_fn='Linear', p=p, mask=mask,
                 architecture_type=a_type)

            # Sigmoid
            if tune:
                h_init = np.array(np.random.rand(1, neur), dtype=conf.dtype) * scl
                h = np.copy(h_init)
                h = 1.0 / (1.0 + np.exp(-h))
            else:
                h = np.zeros((1, neur), dtype=conf.dtype)
                h_init = None
            true_out_sigmoid = OrderedDict()
            p = None
            mask = None
            for i, x in enumerate(X):
                score = np.matmul(h, wh) + np.matmul(x.reshape(1, -1), wx) + bias
                h = 1.0 / (1.0 + np.exp(-score))
                if dout:
                    if p is None:
                        p = np.random.rand()
                        mask = list()
                    # NOTE: no 1/p scaling here, mirroring the implementation's
                    # treatment of saturating activations.
                    mask.append(np.array(np.random.rand(*h.shape) < p, dtype=conf.dtype))
                    layer_out = h * mask[-1]
                else:
                    layer_out = h
                if a_type == 'many_to_one':
                    if i == batch_size - 1:
                        true_out_sigmoid = OrderedDict()
                        true_out_sigmoid[i + 1] = layer_out
                else:
                    true_out_sigmoid[i + 1] = layer_out
            test(X, w, seq_len, true_out_sigmoid, bias, h_init, actv_fn='Sigmoid', p=p, mask=mask,
                 architecture_type=a_type)

            # Tanh
            if tune:
                h_init = np.array(np.random.rand(1, neur), dtype=conf.dtype) * scl
                h = np.copy(h_init)
                h = (2.0 / (1.0 + np.exp(-2.0 * h))) - 1.0
            else:
                h = np.zeros((1, neur), dtype=conf.dtype)
                h_init = None
            true_out_tanh = OrderedDict()
            p = None
            mask = None
            for i, x in enumerate(X):
                score = np.matmul(h, wh) + np.matmul(x.reshape(1, -1), wx) + bias
                h = (2.0 / (1.0 + np.exp(-2.0 * score))) - 1.0
                if dout:
                    if p is None:
                        p = np.random.rand()
                        mask = list()
                    mask.append(np.array(np.random.rand(*h.shape) < p, dtype=conf.dtype))
                    layer_out = h * mask[-1]
                else:
                    layer_out = h
                if a_type == 'many_to_one':
                    if i == batch_size - 1:
                        true_out_tanh = OrderedDict()
                        true_out_tanh[i + 1] = layer_out
                else:
                    true_out_tanh[i + 1] = layer_out
            test(X, w, seq_len, true_out_tanh, bias, h_init, actv_fn='Tanh', p=p, mask=mask,
                 architecture_type=a_type)

            # ReLU
            if tune:
                h_init = np.array(np.random.rand(1, neur), dtype=conf.dtype) * scl
                h = np.copy(h_init)
                h = np.maximum(0, h)
            else:
                h = np.zeros((1, neur), dtype=conf.dtype)
                h_init = None
            true_out_relu = OrderedDict()
            p = None
            mask = None
            for i, x in enumerate(X):
                score = np.matmul(h, wh) + np.matmul(x.reshape(1, -1), wx) + bias
                h = np.maximum(0, score)
                if dout:
                    if p is None:
                        p = np.random.rand()
                        mask = list()
                    # Inverted dropout (1/p scaling), as for Linear.
                    mask.append(np.array(np.random.rand(*h.shape) < p, dtype=conf.dtype) / p)
                    layer_out = h * mask[-1]
                else:
                    layer_out = h
                if a_type == 'many_to_one':
                    if i == batch_size - 1:
                        true_out_relu = OrderedDict()
                        true_out_relu[i + 1] = layer_out
                else:
                    true_out_relu[i + 1] = layer_out
            test(X, w, seq_len, true_out_relu, bias, h_init, actv_fn='ReLU', p=p, mask=mask,
                 architecture_type=a_type)

            # SoftMax
            if tune:
                h_init = np.array(np.random.rand(1, neur), dtype=conf.dtype) * scl
                h = np.copy(h_init)
                h = np.exp(h) / np.sum(np.exp(h), axis=-1, keepdims=True)
            else:
                h = np.zeros((1, neur), dtype=conf.dtype)
                h_init = None
            true_out_softmax = OrderedDict()
            p = None
            mask = None
            for i, x in enumerate(X):
                score = np.matmul(h, wh) + np.matmul(x.reshape(1, -1), wx) + bias
                unnorm_prob = np.exp(score)
                h = unnorm_prob / np.sum(unnorm_prob, axis=-1, keepdims=True)
                if dout:
                    if p is None:
                        p = np.random.rand()
                        mask = list()
                    mask.append(np.array(np.random.rand(*h.shape) < p, dtype=conf.dtype))
                    layer_out = h * mask[-1]
                else:
                    layer_out = h
                if a_type == 'many_to_one':
                    if i == batch_size - 1:
                        true_out_softmax = OrderedDict()
                        true_out_softmax[i + 1] = layer_out
                else:
                    true_out_softmax[i + 1] = layer_out
            test(X, w, seq_len, true_out_softmax, bias, h_init, actv_fn='Softmax', p=p, mask=mask,
                 architecture_type=a_type)
    def test_backward_gradients_finite_difference(self):
        """Validate analytic RNN gradients with central finite differences.

        For every combination of sizes/activation/dropout/architecture,
        the analytic gradients from RNN.backward are compared against
        (f(x+d) - f(x-d)) / 2d for hidden weights, input weights, bias,
        inputs and (when tuned) the initial hidden state.
        """
        self.delta = 1e-6
        tol = 8

        def test(inp, w, seq_len, inp_grad, bias=False, init_hidden_state=None, actv_fn='Sigmoid',
                 p=None, mask=None, architecture_type='many_to_many'):
            num_neur = w['hidden'].shape[0]
            wh = w['hidden']
            wx = w['inp']
            rnn = RNN(inp, num_neur, w, bias, seq_len=seq_len, activation_fn=actv_fn,
                      architecture_type=architecture_type, dropout=p,
                      tune_internal_states=(False if init_hidden_state is None else True))
            if init_hidden_state is not None:
                rnn.init_hidden_state = init_hidden_state
                rnn.reset_internal_states()
            # One forward pass to populate internal state, then the analytic
            # gradients that the finite differences below must reproduce.
            _ = rnn.forward(inp, mask=mask)
            inputs_grad = rnn.backward(inp_grad)
            hidden_weights_grad = rnn.hidden_weights_grad
            input_weights_grad = rnn.input_weights_grad
            bias_grad = rnn.bias_grad
            hidden_grad = rnn.hidden_state_grad

            # Hidden weights finite difference gradients
            hidden_weights_finite_diff = np.empty(hidden_weights_grad.shape)
            for i in range(hidden_weights_grad.shape[0]):
                for j in range(hidden_weights_grad.shape[1]):
                    w_delta = np.zeros_like(wh)
                    w_delta[i, j] = self.delta
                    rnn.hidden_weights = wh + w_delta
                    lhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                    rnn.hidden_weights = wh - w_delta
                    rhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                    # Weight the perturbed outputs by the upstream gradient
                    # (chain rule), summing over all emitted timesteps (>0).
                    lhs_sum = np.zeros_like(list(lhs.values())[0])
                    rhs_sum = np.zeros_like(list(rhs.values())[0])
                    for k in list(lhs.keys()):
                        if k > 0:
                            lhs_sum += lhs[k] * inp_grad[k]
                            rhs_sum += rhs[k] * inp_grad[k]
                    hidden_weights_finite_diff[i, j] = \
                        np.sum(((lhs_sum - rhs_sum) / (2 * self.delta)))
            rnn.hidden_weights = wh  # restore unperturbed weights

            # Input weights finite difference gradients
            input_weights_finite_diff = np.empty(input_weights_grad.shape)
            for i in range(input_weights_grad.shape[0]):
                for j in range(input_weights_grad.shape[1]):
                    w_delta = np.zeros_like(wx)
                    w_delta[i, j] = self.delta
                    rnn.input_weights = wx + w_delta
                    lhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                    rnn.input_weights = wx - w_delta
                    rhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                    lhs_sum = np.zeros_like(list(lhs.values())[0])
                    rhs_sum = np.zeros_like(list(rhs.values())[0])
                    for k in list(lhs.keys()):
                        if k > 0:
                            lhs_sum += lhs[k] * inp_grad[k]
                            rhs_sum += rhs[k] * inp_grad[k]
                    input_weights_finite_diff[i, j] = \
                        np.sum(((lhs_sum - rhs_sum) / (2 * self.delta)))
            rnn.input_weights = wx  # restore unperturbed weights

            # Bias finite difference gradients
            bias_finite_diff = np.empty(bias_grad.shape)
            for i in range(bias_grad.shape[0]):
                bias_delta = np.zeros(bias.shape, dtype=conf.dtype)
                bias_delta[i] = self.delta
                rnn.bias = bias + bias_delta
                lhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                rnn.bias = bias - bias_delta
                rhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                lhs_sum = np.zeros_like(list(lhs.values())[0])
                rhs_sum = np.zeros_like(list(rhs.values())[0])
                for k in list(lhs.keys()):
                    if k > 0:
                        lhs_sum += lhs[k] * inp_grad[k]
                        rhs_sum += rhs[k] * inp_grad[k]
                bias_finite_diff[i] = \
                    np.sum(((lhs_sum - rhs_sum) / (2 * self.delta)))
            rnn.bias = bias  # restore unperturbed bias

            # Inputs finite difference gradients
            # backward() returns per-timestep gradients newest-first; stack
            # them in input row order for the elementwise comparison below.
            inputs_grad = np.vstack(reversed(list(inputs_grad.values())))
            inputs_finite_diff = np.empty(inputs_grad.shape)
            for i in range(inp.shape[0]):
                for j in range(inp.shape[1]):
                    i_delta = np.zeros(inp.shape, dtype=conf.dtype)
                    i_delta[i, j] = self.delta
                    lhs = copy.deepcopy(rnn.forward(inp + i_delta, mask=mask))
                    rhs = copy.deepcopy(rnn.forward(inp - i_delta, mask=mask))
                    lhs_sum = np.zeros_like(list(lhs.values())[0])
                    rhs_sum = np.zeros_like(list(rhs.values())[0])
                    for k in list(lhs.keys()):
                        if k > 0:
                            lhs_sum += lhs[k] * inp_grad[k]
                            rhs_sum += rhs[k] * inp_grad[k]
                    inputs_finite_diff[i, j] = \
                        np.sum(((lhs_sum - rhs_sum) / (2 * self.delta)), keepdims=False)

            if init_hidden_state is not None:
                # Initial hidden state finite difference gradients
                hidden_finite_diff = np.empty(hidden_grad.shape)
                for i in range(init_hidden_state.shape[0]):
                    for j in range(init_hidden_state.shape[1]):
                        h_delta = np.zeros(init_hidden_state.shape, dtype=conf.dtype)
                        h_delta[i, j] = self.delta
                        rnn.init_hidden_state = init_hidden_state + h_delta
                        rnn.reset_internal_states()
                        lhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                        rnn.init_hidden_state = init_hidden_state - h_delta
                        rnn.reset_internal_states()
                        rhs = copy.deepcopy(rnn.forward(inp, mask=mask))
                        lhs_sum = np.zeros_like(list(lhs.values())[0])
                        rhs_sum = np.zeros_like(list(rhs.values())[0])
                        for k in list(lhs.keys()):
                            if k > 0:
                                lhs_sum += lhs[k] * inp_grad[k]
                                rhs_sum += rhs[k] * inp_grad[k]
                        hidden_finite_diff[i, j] = \
                            np.sum(((lhs_sum - rhs_sum) / (2 * self.delta)), keepdims=False)
                rnn.init_hidden_state = init_hidden_state
                rnn.reset_internal_states()

            npt.assert_almost_equal(hidden_weights_grad, hidden_weights_finite_diff, decimal=tol)
            npt.assert_almost_equal(input_weights_grad, input_weights_finite_diff, decimal=tol)
            npt.assert_almost_equal(inputs_grad, inputs_finite_diff, decimal=tol)
            if init_hidden_state is not None:
                npt.assert_almost_equal(hidden_grad, hidden_finite_diff, decimal=tol)
            # NOTE(review): bias gradients are not checked for ReLU --
            # presumably because of non-differentiability at 0; confirm.
            if not actv_fn == 'ReLU':
                npt.assert_almost_equal(bias_grad, bias_finite_diff, decimal=tol)

        # Combinatorial Test Cases
        # ------------------------
        sequence_length = [1, 2, 3, 11]
        reduce_size = [0, 1]
        feature_size = [1, 2, 3, 11]
        num_neurons = [1, 2, 3, 11]
        one_hot = [True, False]
        scale = [1e-2]
        unit_inp_grad = [True, False]
        activation_fn = ['Linear', 'Sigmoid', 'Tanh', 'ReLU', 'Softmax']
        dropout = [True, False]
        architecture_type = ['many_to_many', 'many_to_one']
        tune_internal_states = [True, False]
        repeat = list(range(1))

        for seq_len, r_size, feat, neur, oh, scl, unit, actv, dout, a_type, tune, r in \
                list(itertools.product(sequence_length, reduce_size, feature_size, num_neurons, one_hot,
                                       scale, unit_inp_grad, activation_fn, dropout, architecture_type,
                                       tune_internal_states, repeat)):
            batch_size = seq_len - (r_size if seq_len > 1 else 0)

            # Initialize inputs
            if oh:
                X = np.zeros((batch_size, feat), dtype=conf.dtype)
                rnd_idx = np.random.randint(feat, size=batch_size)
                X[range(batch_size), rnd_idx] = 1
            else:
                X = np.random.uniform(-scl, scl, (batch_size, feat))

            # Initialize weights and bias
            wh = np.random.rand(neur, neur) * scl
            wx = np.random.rand(feat, neur) * scl
            w = {'hidden': wh, 'inp': wx}
            bias = np.random.rand(neur) * scl
            init_h_state = np.random.rand(1, neur) if tune else None

            # Initialize input gradients
            inp_grad = OrderedDict()
            if a_type == 'many_to_many':
                for s in range(1, batch_size + 1):
                    inp_grad[s] = np.ones((1, neur), dtype=conf.dtype) if unit else \
                        np.random.uniform(-1, 1, (1, neur))
            else:
                # many_to_one: only the last timestep receives a gradient.
                inp_grad[batch_size] = np.ones((1, neur), dtype=conf.dtype) if unit else \
                    np.random.uniform(-1, 1, (1, neur))

            # Set dropout mask
            if dout:
                p = np.random.rand()
                mask = np.array(np.random.rand(batch_size, neur) < p, dtype=conf.dtype)
                # Inverted-dropout scaling only for non-saturating activations,
                # matching the forward-pass reference above.
                if actv in ['Linear', 'ReLU']:
                    mask /= p
            else:
                p = None
                mask = None

            test(X, w, seq_len, inp_grad, bias, init_h_state, actv, p, mask, a_type)
if __name__ == '__main__':
    # Discover and run all TestRNN cases via the standard unittest runner.
    unittest.main()
| 46.247881 | 100 | 0.492922 | 2,744 | 21,829 | 3.712464 | 0.0707 | 0.024737 | 0.03848 | 0.024737 | 0.77275 | 0.728772 | 0.67969 | 0.620693 | 0.586237 | 0.530676 | 0 | 0.017939 | 0.3871 | 21,829 | 471 | 101 | 46.346072 | 0.743479 | 0.081039 | 0 | 0.638021 | 0 | 0 | 0.014795 | 0 | 0 | 0 | 0 | 0 | 0.020833 | 1 | 0.015625 | false | 0 | 0.020833 | 0 | 0.039063 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3945dcb9dd48db259f43d38679d34c16f4543743 | 913 | py | Python | Joints/Pelvis.py | lcremer/Maya_Rigging | 8fe07e5f8d021a6828608bca4bf74e04f023b1cd | [
"Unlicense"
] | null | null | null | Joints/Pelvis.py | lcremer/Maya_Rigging | 8fe07e5f8d021a6828608bca4bf74e04f023b1cd | [
"Unlicense"
] | null | null | null | Joints/Pelvis.py | lcremer/Maya_Rigging | 8fe07e5f8d021a6828608bca4bf74e04f023b1cd | [
"Unlicense"
] | null | null | null | """
Creates Pelvis
"""
import maya.cmds as mc
from ..Utils import String as String
class Pelvis():
    """Builds the pelvis joint of a character rig in Maya."""

    def __init__(self,
                 characterName = '',
                 suffix = '',
                 name = 'Pelvis',
                 parent = ''):
        """Create the pelvis joint, stored in ``self.topJoint`` and
        ``self.endJoint`` (both refer to the same single joint).  When
        *parent* is given, the joint is snapped to the parent's position
        and orientation and parented underneath it.
        """
        self.characterName = characterName
        self.suffix = suffix
        self.name = name
        # Clear the selection so mc.joint() does not auto-parent to it.
        mc.select(cl = True)
        self.topJoint = mc.joint(n = String.combineWith_((characterName, name, suffix)), p = (0,3,0))
        self.endJoint = self.topJoint
        if parent:
            # Create-then-delete constraints: a cheap way to snap the joint
            # onto the parent's translation and rotation before parenting.
            mc.delete(mc.pointConstraint(parent, self.topJoint))
            mc.delete(mc.orientConstraint(parent, self.topJoint))
            mc.parent(self.topJoint, parent)
        mc.select(cl = True)
394feb4118d6e9b0b87ec29bc0be9b581f2100ed | 3,193 | py | Python | evaluate.py | uw-biomedical-ml/oct-irf-train | ebf8631f96883ec5ed91574201b05818f95c0f7d | [
"BSD-3-Clause"
] | 1 | 2021-07-24T06:44:06.000Z | 2021-07-24T06:44:06.000Z | evaluate.py | uw-biomedical-ml/oct-irf-train | ebf8631f96883ec5ed91574201b05818f95c0f7d | [
"BSD-3-Clause"
] | 5 | 2020-09-25T22:35:32.000Z | 2022-02-09T23:37:02.000Z | evaluate.py | uw-biomedical-ml/oct-irf-train | ebf8631f96883ec5ed91574201b05818f95c0f7d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
from PIL import Image
import sys, glob, tqdm, os
import numpy as np
from colour import Color
def usage():
    """Print the command-line usage to stdout and abort with exit code -1."""
    lines = (
        "./evaluate.py <imgdir> <outdir> <mode>",
        "",
        "\timgdir = folder of OCT B scans",
        "\toutdir = EMPTY folder to output segmentation masks",
        "\tmode = mask, mask_blend, blend",
    )
    for line in lines:
        print(line)
    sys.exit(-1)
# Abort with usage text unless exactly three arguments were supplied.
if len(sys.argv) != 4:
    usage()

# The heavy deep-learning import is deliberately delayed until after the
# argv check so that bad invocations fail fast.
import deeplearning.unet

(_, indir, outdir, mode) = sys.argv

if not os.path.isdir(indir):
    print("ERROR: %s is not a directory" % indir)
    sys.exit(-1)

if not os.path.isdir(outdir):
    print("ERROR: %s is not a directory" % outdir)
    sys.exit(-1)

# The output directory must be empty so masks never overwrite anything.
if len(glob.glob("%s/*" % outdir)) != 0:
    print("ERROR: %s is not empty" % outdir)
    sys.exit(-1)

# Collect the B-scan images (selected by extension) from the input folder.
imgs = []
for f in glob.glob("%s/*" % indir):
    (_, ext) = os.path.splitext(f)
    if ext in [".jpg", ".png", ".jpeg"]:
        imgs.append(f)

if len(imgs) == 0:
    print("ERROR: %s has no images!" % indir)
    sys.exit(-1)
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
model = deeplearning.unet.get_unet()
model.load_weights("runs/weights.hdf5", by_name=True)
image_rows = 432
image_cols = 32
my_cm = []
colors = list(Color("yellow").range_to(Color("red"),1001))
for c in colors:
my_cm.append((255 * np.array(c.rgb)).astype(np.uint8))
my_cm = np.array(my_cm)
# Segment every image: slide a 432x32 window across the B-scan, average the
# overlapping per-column predictions, then save in the requested mode.
for f in tqdm.tqdm(imgs):
    ji = Image.open(f)
    img = np.array(ji)
    # Bug fix: `np.float` was removed in NumPy 1.24; use float64 explicitly.
    img = img.astype(np.float64)
    # Normalise with the fixed mean/std (presumably the training-set
    # statistics -- TODO confirm against the training pipeline).
    img -= 28.991758347
    img /= 46.875888824
    totaloutput = np.zeros((img.shape[0], img.shape[1], 32))
    # Centre the vertical crop on the brightest row (row with max intensity sum).
    ym = np.argmax(np.sum(img, axis=1))
    y0 = int(ym - image_rows / 2)
    y1 = int(ym + image_rows / 2)
    if y0 < 0:
        y0 = 0
    if y1 >= img.shape[0]:
        y1 = img.shape[0] - 1
    for dx in tqdm.tqdm(range(0, img.shape[1] - 32)):
        sliori = np.zeros((image_rows, image_cols), dtype=np.float64)
        sliori[0:y1-y0, :] = img[y0:y1, dx:dx+image_cols]
        imgsbatch = sliori.reshape((1, 1, image_rows,image_cols))
        output = model.predict(imgsbatch, batch_size=1)
        # Each column offset modulo 32 gets its own plane; averaging the
        # planes below blends the overlapping window predictions.
        totaloutput[y0:y1,dx:dx+image_cols,dx % 32] = output[0,0,0:y1-y0,:]
    totaloutput = np.mean(totaloutput, 2)
    if (mode == "mask"):
        # for binary masks
        mask = (totaloutput > 0.5)
        mask = np.uint8(mask)
        mask *= 255
        mask = Image.fromarray(mask)
        mask.save(f.replace(indir,outdir))
    elif (mode == "mask_blend"):
        # for masked heatmap overlay: heatmap only where probability > 0.5
        mask = (totaloutput < 0.5)
        mask = np.uint8(mask)
        mask *= 255
        mask = Image.fromarray(mask)
        mapped_data = np.zeros((totaloutput.shape[0], totaloutput.shape[1],3), dtype="uint8")
        totalint = (1000 * totaloutput).astype(np.uint16)
        mapped_data = my_cm[totalint]
        j = Image.fromarray(mapped_data).convert('RGBA')
        ji = ji.convert("RGBA")
        Image.composite(ji, j,mask).save(f.replace(indir,outdir))
    elif (mode == "blend"):
        # for blend overlay: 50/50 alpha blend of scan and heatmap
        totalint = (1000 * totaloutput).astype(np.uint16)
        mapped_data = my_cm[totalint]
        j = Image.fromarray(mapped_data).convert('RGBA')
        ji = ji.convert("RGBA")
        Image.blend(ji, j,0.5).save(f.replace(indir,outdir))
print("\n\nFinished.")
| 29.293578 | 86 | 0.61259 | 486 | 3,193 | 3.958848 | 0.3107 | 0.012474 | 0.02079 | 0.015593 | 0.342516 | 0.264033 | 0.246362 | 0.219335 | 0.182952 | 0.182952 | 0 | 0.045982 | 0.216724 | 3,193 | 108 | 87 | 29.564815 | 0.723311 | 0.025681 | 0 | 0.218391 | 0 | 0 | 0.121339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.057471 | null | null | 0.114943 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3950e84c703f64f3f48f3a37d0f1cd0486c9f552 | 29,604 | py | Python | interfaz.py | ifigueroa065/Voluntariado | 375eab96adc7a95f8204244f942840bdce47c8b5 | [
"MIT"
] | null | null | null | interfaz.py | ifigueroa065/Voluntariado | 375eab96adc7a95f8204244f942840bdce47c8b5 | [
"MIT"
] | null | null | null | interfaz.py | ifigueroa065/Voluntariado | 375eab96adc7a95f8204244f942840bdce47c8b5 | [
"MIT"
] | null | null | null | from tkinter import *
import os
from datetime import datetime
import webbrowser
from tkinter import messagebox
from tkinter import ttk
import tkinter.filedialog
import tkinter as tk
import openpyxl
from REPORTE import *
# Module-level accumulators shared by the dialog callbacks below.
datos = []  # rows loaded from the report workbook
precios = []  # regular price list
preciosmq = []  # MQ price list
subtotales = []  # computed per-item subtotals
def CREAR_INTERFAZ():
    def DIALOGO():
        """Ask the user for the regular price-list workbook and load it.

        Appends one ``Datos(code, name, price)`` per non-empty data row
        into the module-level ``precios`` list and reports the load via
        the ``rut`` StringVar.
        """
        # Hidden throwaway Tk root so the file dialog can be shown alone.
        fd= tkinter.Tk()
        fd.withdraw()
        ruta=tkinter.filedialog.askopenfilename(
            initialdir="C:",
            filetypes=(
                ("Libro de Excel", "*.xlsx"),
                ("Libro de Excel 97 a Excel 2003", "*.xls"),
                ("Todos los Archivos de Excel","*.*")
            ),
            title = "ABRIR ARCHIVO"
        )
        if ruta=="":
            # User cancelled the dialog: warn that a file is required.
            messagebox.showinfo(message="Debe cargar un archivo", title="ERROR")
        else:
            try:
                print("------> "+ ruta)
                rut.set("CARGA EXITOSA")
                # data_only=True reads cached cell values instead of formulas.
                book2 = openpyxl.load_workbook(ruta, data_only=True)
                celdas2 = book2.active
                # Row 1 is the header; keep rows with a non-empty first cell.
                for row in range(2,celdas2.max_row +1):
                    if(celdas2.cell(row,1).value is not None):
                        precios.append(Datos(celdas2.cell(row,1).value,celdas2.cell(row,2).value, celdas2.cell(row,3).value))
            finally:
                # NOTE(review): try/finally with no except -- a load failure
                # still propagates after this banner is printed.
                print(" ************************** ")
                print(" SUCCESSFULLY ")
                print(" ************************** ")
    def DIALOGO2():
        """Ask the user for the MQ price-list workbook and load it.

        Same flow as DIALOGO, but rows go into the module-level
        ``preciosmq`` list and status is reported via the ``zm1`` StringVar.
        """
        # Hidden throwaway Tk root so the file dialog can be shown alone.
        fd= tkinter.Tk()
        fd.withdraw()
        ruta=tkinter.filedialog.askopenfilename(
            initialdir="C:",
            filetypes=(
                ("Libro de Excel", "*.xlsx"),
                ("Libro de Excel 97 a Excel 2003", "*.xls"),
                ("Todos los Archivos de Excel","*.*")
            ),
            title = "ABRIR ARCHIVO"
        )
        if ruta=="":
            # User cancelled the dialog: warn that a file is required.
            messagebox.showinfo(message="Debe cargar un archivo", title="ERROR")
        else:
            try:
                print("------> "+ ruta)
                zm1.set("CARGA EXITOSA")
                # data_only=True reads cached cell values instead of formulas.
                book2 = openpyxl.load_workbook(ruta, data_only=True)
                celdas2 = book2.active
                # Row 1 is the header; keep rows with a non-empty first cell.
                for row in range(2,celdas2.max_row +1):
                    if(celdas2.cell(row,1).value is not None):
                        preciosmq.append(Datos(celdas2.cell(row,1).value,celdas2.cell(row,2).value, celdas2.cell(row,3).value))
            finally:
                # NOTE(review): try/finally with no except -- a load failure
                # still propagates after this banner is printed.
                print(" ************************** ")
                print(" SUCCESSFULLY ")
                print(" ************************** ")
def DIALOGO_REPORTE():
    """Ask for the report workbook, load its rows into `datos`, then match
    each row against the selected price list (MQ or MED) and append the
    computed Subtotal entries to `subtotales`.

    Note: `datos` and `subtotales` are module-level lists, so repeated
    invocations accumulate across calls.
    """
    TP = TIPO.get()  # "MQ" selects preciosmq, anything else selects precios
    fd = tkinter.Tk()
    fd.withdraw()  # hidden helper root so the dialog has a parent
    ruta = tkinter.filedialog.askopenfilename(
        initialdir="C:",
        filetypes=(
            ("Libro de Excel", "*.xlsx"),
            ("Libro de Excel 97 a Excel 2003", "*.xls"),
            ("Todos los Archivos de Excel", "*.*")
        ),
        title="ABRIR ARCHIVO"
    )
    fd.destroy()  # BUG FIX: the helper root was never destroyed and leaked one window per call
    if ruta == "":
        messagebox.showinfo(message="Debe cargar un archivo", title="ERROR")
    else:
        try:
            print("------> " + ruta)
            book = openpyxl.load_workbook(ruta, data_only=True)
            celdas = book.active
            # BUG FIX: the original iterated range(2, max_row) and silently
            # skipped the last data row; the two price loaders use max_row + 1.
            for row in range(2, celdas.max_row + 1):
                if celdas.cell(row, 1).value is not None:
                    datos.append(Reporte(celdas.cell(row, 1).value,
                                         celdas.cell(row, 2).value,
                                         celdas.cell(row, 3).value))
            # The two original branches were identical except for the price
            # list used, so select the list once and share the loop.
            lista_precios = preciosmq if TP == "MQ" else precios
            print("--------------IMPRIMIENDO SUBTOTALES-------------")
            contador = 0
            for d in datos:
                for i in lista_precios:
                    # Names are compared case- and whitespace-insensitively.
                    if d.nombre.upper().replace(" ", "") == i.nombre.upper().replace(" ", ""):
                        contador += 1
                        subtotal = d.entregado_usuario * i.precio
                        print(str(contador) + ")" + d.nombre + "=" + str(subtotal))
                        subtotales.append(Subtotal(contador, d.codigo, d.nombre,
                                                   d.entregado_usuario, subtotal))
                        break
            print("----------------------------------------")
            TOTAL = 0
            for s in subtotales:
                TOTAL += s.subtotal
            print("TOTAL = Q" + str(TOTAL))
        except Exception as exc:
            # BUG FIX: the original try/finally printed the success banner even
            # when processing failed; report the failure to the user instead.
            messagebox.showinfo(message="No se pudo generar el reporte: %s" % exc,
                                title="ERROR")
        else:
            rut.set("CARGA EXITOSA")
            print(" ************************** ")
            print(" SUCCESSFULLY ")
            print(" ************************** ")
def VER_REPORTE():
    """Render REPORTE.html from the form inputs and the computed `subtotales`
    and open it in the default web browser.

    Reads the tkinter input variables (year, month range, department,
    district, municipality, service type) and writes an SB-Admin-2 styled
    report page with one table row per Subtotal entry.
    """
    # Snapshot the values of the tkinter input widgets.
    A = año.get()            # NOTE(review): read but never written into the report
    MO = Mes_inicial.get()
    M = Mes_final.get()
    DEPA = dpto.get()
    AR = area.get()          # NOTE(review): read but never written into the report
    MUN = municipio.get()
    TIPS = t_servicio.get()
    SERV = servicio.get()    # NOTE(review): read but never written into the report
    DIST = distrito.get()
    # 'with' guarantees the handle is closed even if a write fails
    # (the original relied on an explicit f.close()).
    with open('REPORTE.html', 'w', encoding="utf-8") as f:
        # Static page header, sidebar and topbar.
        f.write("""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="">
<meta name="author" content="">
<title>ÁREA DE SALUD</title>
<link href="img/icono.ico" rel="icon">
<!-- Custom fonts for this template-->
<link href="vendor/fontawesome-free/css/all.min.css" rel="stylesheet" type="text/css">
<link
href="https://fonts.googleapis.com/css?family=Nunito:200,200i,300,300i,400,400i,600,600i,700,700i,800,800i,900,900i"
rel="stylesheet">
<!-- Custom styles for this template-->
<link href="css/sb-admin-2.min.css" rel="stylesheet">
<link href="vendor/datatables/dataTables.bootstrap4.min.css" rel="stylesheet">
</head>
<body id="page-top">
<!-- Page Wrapper -->
<div id="wrapper">
<!-- Sidebar -->
<ul class="navbar-nav bg-gradient-primary sidebar sidebar-dark accordion" id="accordionSidebar">
<!-- Sidebar - Brand -->
<a class="sidebar-brand d-flex align-items-center justify-content-center" href="REPORTE.html">
<div class="sidebar-brand-icon rotate-n-15">
<i class="fas fa-laugh-wink"></i>
</div>
<div class="sidebar-brand-text mx-3">ANALISIS</div>
</a>
<!-- Divider -->
<hr class="sidebar-divider my-0">
<!-- Nav Item - Dashboard -->
<li class="nav-item active">
<a class="nav-link" href="REPORTE.html">
<i class="fas fa-bars"></i>
<span>REPORTE</span></a>
</li>
<!-- Divider -->
<hr class="sidebar-divider">
<!-- Heading -->
<div class="sidebar-heading">
OTROS
</div>
<!-- Nav Item - Utilities Collapse Menu -->
<li class="nav-item">
<a class="nav-link collapsed" href="#" data-toggle="collapse" data-target="#collapseUtilities"
aria-expanded="true" aria-controls="collapseUtilities">
<i class="fas fa-fw fa-2x"></i>
<span>BRESS</span>
</a>
</li>
<!-- Divider -->
<hr class="sidebar-divider d-none d-md-block">
<!-- Sidebar Toggler (Sidebar) -->
<div class="text-center d-none d-md-inline">
<button class="rounded-circle border-0" id="sidebarToggle"></button>
</div>
</ul>
<!-- End of Sidebar -->
<!-- Content Wrapper -->
<div id="content-wrapper" class="d-flex flex-column">
<!-- Main Content -->
<div id="content">
<!-- Topbar -->
<nav class="navbar navbar-expand navbar-light bg-white topbar mb-4 static-top shadow">
<!-- Sidebar Toggle (Topbar) -->
<button id="sidebarToggleTop" class="btn btn-link d-md-none rounded-circle mr-3">
<i class="fa fa-bars"></i>
</button>
<!-- Topbar Navbar -->
<ul class="navbar-nav ml-auto">
<!-- Nav Item - Search Dropdown (Visible Only XS) -->
<li class="nav-item dropdown no-arrow d-sm-none">
<a class="nav-link dropdown-toggle" href="#" id="searchDropdown" role="button"
data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<i class="fas fa-search fa-fw"></i>
</a>
</li>
<div class="topbar-divider d-none d-sm-block"></div>
<!-- Nav Item - User Information -->
<li class="nav-item dropdown no-arrow">
<a class="nav-link dropdown-toggle" href="#" id="userDropdown" role="button"
data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<span class="mr-2 d-none d-lg-inline text-gray-600 small">Administrador</span>
<img class="img-profile rounded-circle"
src="img/undraw_profile.svg">
</a>
</li>
</ul>
</nav>
<!-- End of Topbar -->
<!-- Begin Page Content -->
<div class="container-fluid">
<!-- Page Heading -->
<div class="d-sm-flex align-items-center justify-content-between mb-4">
<h1 class="h3 mb-0 text-gray-800">ÁREA DE SALUD DE CHIMALTENANGO</h1>
<a href="#" class="d-none d-sm-inline-block btn btn-sm btn-primary shadow-sm"><i
class="fas fa-download fa-sm text-white-50"></i> Descargar Reporte</a>
</div>
<!-- Content Row -->
<div class="row">
<!-- Earnings (Monthly) Card Example -->
<div class="col-xl-3 col-md-6 mb-4">
<div class="card border-left-primary shadow h-100 py-2">
<div class="card-body">
<div class="row no-gutters align-items-center">
<div class="col mr-2">
<div class="text-xs font-weight-bold text-primary text-uppercase mb-1">
Departamento</div>
<div class="h5 mb-0 font-weight-bold text-gray-800">
""")
        f.write(DEPA)  # DEPARTAMENTO card value
        f.write("""
</div>
</div>
<div class="col-auto">
<i class="fas fa-fw"></i>
</div>
</div>
</div>
</div>
</div>
<!-- Earnings (Monthly) Card Example -->
<div class="col-xl-3 col-md-6 mb-4">
<div class="card border-left-success shadow h-100 py-2">
<div class="card-body">
<div class="row no-gutters align-items-center">
<div class="col mr-2">
<div class="text-xs font-weight-bold text-success text-uppercase mb-1">
Distrito</div>
<div class="h5 mb-0 font-weight-bold text-gray-800">
""")
        f.write(DIST)  # DISTRITO card value
        f.write("""
</div>
</div>
<div class="col-auto">
<i class="fas fa-fw"></i>
</div>
</div>
</div>
</div>
</div>
<!-- Earnings (Monthly) Card Example -->
<div class="col-xl-3 col-md-6 mb-4">
<div class="card border-left-info shadow h-100 py-2">
<div class="card-body">
<div class="row no-gutters align-items-center">
<div class="col mr-2">
<div class="text-xs font-weight-bold text-info text-uppercase mb-1">Del Mes
</div>
<div class="row no-gutters align-items-center">
<div class="col-auto">
<div class="h5 mb-0 mr-3 font-weight-bold text-gray-800">
""")
        f.write(MO)  # MES INICIAL card value
        f.write("""
</div>
</div>
</div>
</div>
<div class="col-auto">
<i class="fas fa-calendar fa-2x text-gray-300"></i>
</div>
</div>
</div>
</div>
</div>
<!-- Pending Requests Card Example -->
<div class="col-xl-3 col-md-6 mb-4">
<div class="card border-left-warning shadow h-100 py-2">
<div class="card-body">
<div class="row no-gutters align-items-center">
<div class="col mr-2">
<div class="text-xs font-weight-bold text-warning text-uppercase mb-1">
Al mes</div>
<div class="h5 mb-0 font-weight-bold text-gray-800">
""")
        f.write(M)  # MES FINAL card value
        f.write("""
</div>
</div>
<div class="col-auto">
<i class="fas fa-calendar fa-2x text-gray-300"></i>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Content Row -->
<div class="row">
<!-- TABLA RESUMEN-->
<h1 class="h3 mb-2 text-gray-800">
""")
        f.write(MUN)  # MUNICIPIO heading
        f.write("""
</h1>
<p class="mb-4">Reporte de Balance, Requisición y Envío de Suministros</p>
<!-- TABLA DE MEDICAMENTOS Y MÉDIDO QUIRURGICO -->
<div class="card shadow mb-4">
<div class="card-header py-3">
<h6 class="m-0 font-weight-bold text-primary">
""")
        f.write(TIPS)  # TIPO DE SERVICIO table title
        f.write("""
</h6>
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-bordered" id="dataTable" width="100%" cellspacing="0">
<thead>
<tr>
<th>Número de orden</th>
<th>Código</th>
<th>Descripción de Articulo/Producto</th>
<th>Unidad de Medida</th>
<th>Cantidad Autorizada</th>
<th>Cantidad despachada</th>
<th>Subtotal</th>
</tr>
</thead>
<tfoot>
<th>Número de orden</th>
<th>Código</th>
<th>Descripción de Articulo/Producto</th>
<th>Unidad de Medida</th>
<th>Cantidad Autorizada</th>
<th>Cantidad despachada</th>
<th>Subtotal </th>
</tfoot>
<tbody>
""")
        # One table row per computed Subtotal entry.
        for i in subtotales:
            p = "{0:.2f}".format(float(i.subtotal))
            f.write("<tr>")
            f.write(" <td><center>"+str(i.id)+"</center></td>"
                    +"<td><center>"+str(i.codigo)+"</center></td>"
                    +"<td><center>"+str(i.nombre)+"</center></td>"
                    +"<td><center>"+"x"+"</center></td>"
                    +"<td><center>"+str(i.entregado)+"</center></td>"
                    +"<td><center>"+str(i.entregado)+"</center></td>"
                    +"<td><center>"+ "Q"+str(p)+"</center></td>"
                    )
            # BUG FIX: the original wrote the malformed closing tag "<t/r>".
            f.write("</tr>")
        # Static page footer, modal and script includes.
        f.write("""
</tbody>
</table>
</div>
</div>
</div>
<!-- Content Row -->
<div class="row">
<!-- Content Column -->
<div class="col-auto">
</div>
</div>
</div>
<!-- /.container-fluid -->
</div>
<!-- End of Main Content -->
<!-- Footer -->
<footer class="sticky-footer bg-white">
<div class="container my-auto">
<div class="copyright text-center my-auto">
<span>© Facultad de Ingeniería 2021</span>
</div>
</div>
</footer>
<!-- End of Footer -->
</div>
<!-- End of Content Wrapper -->
</div>
<!-- End of Page Wrapper -->
<!-- Scroll to Top Button-->
<a class="scroll-to-top rounded" href="#page-top">
<i class="fas fa-angle-up"></i>
</a>
<!-- Logout Modal-->
<div class="modal fade" id="logoutModal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLabel">Ready to Leave?</h5>
<button class="close" type="button" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">Select "Logout" below if you are ready to end your current session.</div>
<div class="modal-footer">
<button class="btn btn-secondary" type="button" data-dismiss="modal">Cancel</button>
<a class="btn btn-primary" href="login.html">Logout</a>
</div>
</div>
</div>
</div>
<!-- Bootstrap core JavaScript-->
<script src="vendor/jquery/jquery.min.js"></script>
<script src="vendor/bootstrap/js/bootstrap.bundle.min.js"></script>
<!-- Core plugin JavaScript-->
<script src="vendor/jquery-easing/jquery.easing.min.js"></script>
<!-- Custom scripts for all pages-->
<script src="js/sb-admin-2.min.js"></script>
<!-- Page level plugins -->
<script src="vendor/chart.js/Chart.min.js"></script>
<!-- Page level custom scripts -->
<script src="js/demo/chart-area-demo.js"></script>
<script src="js/demo/chart-pie-demo.js"></script>
<!-- Page level plugins -->
<script src="vendor/datatables/jquery.dataTables.min.js"></script>
<script src="vendor/datatables/dataTables.bootstrap4.min.js"></script>
<!-- Page level custom scripts -->
<script src="js/demo/datatables-demo.js"></script>
</body>
</html>
""")
    webbrowser.open_new_tab('REPORTE.html')
#--------------CREATING THE MAIN WINDOW--------------
root=Tk()
root.title("VOLUNTARIADO")
# NOTE(review): backslash path works only because '\i' is not an escape
# sequence; prefer raw strings or os.path.join for portability.
root.iconbitmap('img\icono.ico')
rut=StringVar()  # status text shared by the PRECIOS and REPORTE tabs
zm1=StringVar()  # status text for the MQ price loader
nt=ttk.Notebook(root)
nt.pack(fill="both",expand="yes")
s = ttk.Style()
# Create style used by default for all Frames
s.configure('TFrame', background='#1F618D')
#--------------"INICIO" (home) TAB--------------
s.configure('Frame1.TFrame', background='#1F618D')
V1 = ttk.Frame(nt, style='Frame1.TFrame')
nt.add(V1, text="INICIO")
#--------------"PRECIOS" (load price files) TAB--------------
s.configure('Frame2.TFrame', background='#1F618D')
V2 = ttk.Frame(nt, style='Frame2.TFrame')
nt.add(V2, text="PRECIOS")
Label(V2,textvariable=rut,font="Helvetica 16",bg="#1F618D").place(x=100,y=280)
rut.set("NO SE HA CARGADO NADA")
Button(V2,text="SELECCIONAR ARCHIVO",command=DIALOGO,font="Helvetica 12",height=5,width=25).place(x=120, y=110)
Label(V2,textvariable=zm1,font="Helvetica 16",bg="#1F618D").place(x=560,y=280)
zm1.set("NO SE HA CARGADO NADA")
Button(V2,text="SELECCIONAR ARCHIVO",command=DIALOGO2,font="Helvetica 12",height=5,width=25).place(x=520, y=110)
L1=StringVar()
l2=StringVar()
# NOTE(review): l3, xo and yo are created but never used in this function.
l3=StringVar()
xo=IntVar()
yo=IntVar()
Label(V2,textvariable=L1,font="Helvetica 16",bg="#1F618D").place(x=30,y=30)
L1.set("CARGAR ARCHIVO DE PRECIOS (MED)")
Label(V2,textvariable=l2,font="Helvetica 16",bg="#1F618D").place(x=500,y=30)
l2.set("CARGAR ARCHIVO DE PRECIOS (MQ)")
#--------------"VISUALIZAR REPORTE" TAB--------------
s.configure('Frame3.TFrame', background='#1F618D')
V3 = ttk.Frame(nt, style='Frame3.TFrame')
nt.add(V3, text=" VISUALIZAR REPORTE")
icodoct=PhotoImage(file="img\doct.png")
# NOTE(review): subsample() returns a new image; the result is discarded here,
# so this call has no visible effect.
icodoct.subsample(1,1)
#Button(V3,image=icodoct,font="Helvetica 14",width=300,height=300).place(x=100, y=130)
Label(V3,textvariable=rut,font="Helvetica 16",bg="#1F618D").place(x=150,y=400)
rut.set("NO SE HA CARGADO NADA")
Button(V3,text="SELECCIONAR ARCHIVO",command=DIALOGO_REPORTE,font="Helvetica 12").place(x=250, y=350)
Button(V3,text="VER REPORTE",command=VER_REPORTE,height=5,width=25,font="Helvetica 12").place(x=650, y=350)
# tkinter variables backing the report input form (read by VER_REPORTE).
L6=StringVar()
año=StringVar()
dpto=StringVar()
area=StringVar()
distrito=StringVar()
municipio=StringVar()
t_servicio=StringVar()
servicio=StringVar()
l9=StringVar()
l8=StringVar()
l7=StringVar()
l6=StringVar()
l5=StringVar()
l4=StringVar()
a=StringVar()
b=StringVar()
c=StringVar()
Label(V3,textvariable=L6,font="Helvetica 16",bg="#1F618D").place(x=70,y=30)
L6.set("DATOS PARA EL REPORTE")
Label(V3,textvariable=l9,font="Helvetica 12",bg="#1F618D",fg="white").place(x=75,y=140)
l9.set("Departamento")
# NOTE(review): the triple-quoted strings below are commented-out widget code
# kept as no-op string expressions.
"""Label(V3,textvariable=l8,font="Helvetica 12",bg="#1F618D",fg="white").place(x=75,y=180)
l8.set("Area")
Label(V3,textvariable=l7,font="Helvetica 12",bg="#1F618D",fg="white").place(x=75,y=220)
l7.set("Distrito")"""
Label(V3,textvariable=l6,font="Helvetica 12",bg="#1F618D",fg="white").place(x=75,y=180)
l6.set("Municipio")
Label(V3,textvariable=l5,font="Helvetica 12",bg="#1F618D",fg="white").place(x=475,y=180)
"""l5.set("Tipo de Servicio")
Label(V3,textvariable=l4,font="Helvetica 12",bg="#1F618D",fg="white").place(x=475,y=220)
l4.set("Servicio")"""
Label(V3,textvariable=a,font="Helvetica 12",bg="#1F618D",fg="white").place(x=450,y=40)
a.set("Año")
Label(V3,textvariable=b,font="Helvetica 12",bg="#1F618D",fg="white").place(x=570,y=40)
b.set("Del Mes")
Label(V3,textvariable=c,font="Helvetica 12",bg="#1F618D",fg="white").place(x=760,y=40)
c.set("Al mes")
Entry(V3,textvariable=año,font="Helvetica 11",width=5).place(x=500,y=40)
#Entry(V3,textvariable=Mes_inicial,font="Helvetica 11",width=10).place(x=650,y=40)
#Entry(V3,textvariable=Mes_final,font="Helvetica 11",width=10).place(x=820,y=40)
# Month selectors: read-only comboboxes replace the earlier free-text entries.
Mes_inicial=ttk.Combobox(V3,width=10,font="Helvetica 11",state="readonly")
Mes_inicial.place(x=650,y=40)
Mes_inicial['values']=('Enero','Febrero','Marzo ','Abril','Mayo','Junio','Julio','Agosto','Septiembre','Octubre','Noviembre','Diciembre')
Mes_final=ttk.Combobox(V3,width=10,font="Helvetica 11",state="readonly")
Mes_final.place(x=820,y=40)
Mes_final['values']=('Enero','Febrero','Marzo ','Abril','Mayo','Junio','Julio','Agosto','Septiembre','Octubre','Noviembre','Diciembre')
# Report type selector ('MED' or 'MQ'); read by DIALOGO_REPORTE.
TIPO=ttk.Combobox(V3,width=10,font="Helvetica 14",state="readonly")
TIPO.place(x=100,y=350)
TIPO['values']=('MED','MQ')
Entry(V3,textvariable=dpto,font="Helvetica 12").place(x=200,y=140)
#Entry(V3,textvariable=distrito,font="Helvetica 12").place(x=200,y=180)
#Entry(V3,textvariable=t_servicio,font="Helvetica 12").place(x=200,y=220)
Entry(V3,textvariable=distrito,font="Helvetica 12").place(x=200,y=180)
#Entry(V3,textvariable=municipio,font="Helvetica 12").place(x=600,y=180)
#Entry(V3,textvariable=servicio,font="Helvetica 12").place(x=600,y=220)
root.geometry("950x550")
root.mainloop()
CREAR_INTERFAZ() | 41.520337 | 141 | 0.420011 | 2,835 | 29,604 | 4.373898 | 0.196825 | 0.036774 | 0.016694 | 0.011613 | 0.516613 | 0.467661 | 0.435645 | 0.401694 | 0.375 | 0.353548 | 0 | 0.038541 | 0.430313 | 29,604 | 713 | 142 | 41.520337 | 0.69665 | 0.029455 | 0 | 0.454545 | 0 | 0.045455 | 0.677689 | 0.066841 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009091 | false | 0 | 0.018182 | 0 | 0.027273 | 0.036364 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a33943d2cf0f6c01fc1fd72edefaa54e0e682d5 | 3,911 | py | Python | distnet/keras_models/self_attention.py | jeanollion/dlutils | ea419e79486e1212219dc06d39c3a4f4c305ff49 | [
"Apache-2.0"
] | 4 | 2020-05-27T01:39:44.000Z | 2021-09-03T18:20:33.000Z | distnet/keras_models/self_attention.py | jeanollion/dlutils | ea419e79486e1212219dc06d39c3a4f4c305ff49 | [
"Apache-2.0"
] | null | null | null | distnet/keras_models/self_attention.py | jeanollion/dlutils | ea419e79486e1212219dc06d39c3a4f4c305ff49 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
from tensorflow.keras.layers import Layer, Dense, Reshape, Embedding, Concatenate, Conv2D
from tensorflow.keras.models import Model
import numpy as np
class SelfAttention(Model):
    """Single-head 2D self-attention with an optional learned positional
    embedding over the flattened spatial grid.

    adapted from: https://www.tensorflow.org/tutorials/text/transformer
    """

    def __init__(self, d_model, spatial_dims, positional_encoding=True, name="self_attention"):
        '''
        d_model : number of output channels
        spatial_dims : spatial dimensions of input tensor (y, x)
        if positional_encoding: depth must correspond to input channel number
        '''
        super().__init__(name=name)
        self.d_model = d_model
        self.spatial_dims = spatial_dims
        self.spatial_dim = np.prod(spatial_dims)  # flattened number of spatial positions
        self.wq = Dense(self.d_model, name=name + "_q")
        self.wk = Dense(self.d_model, name=name + "_k")
        self.wv = Dense(self.d_model, name=name + "_w")
        self.positional_encoding = positional_encoding
        if positional_encoding:
            # TODO test other positional encodings, in particular ones that
            # encode X and Y separately.
            self.pos_embedding = Embedding(self.spatial_dim, d_model, name=name + "pos_enc")

    def call(self, x):
        '''
        x : tensor with shape (batch_size, y, x, channels)
        returns (output, attention_weights); output has d_model channels.
        '''
        shape = tf.shape(x)
        batch_size = shape[0]
        if self.positional_encoding:
            x_index = tf.range(self.spatial_dim, dtype=tf.int32)
            pos_emb = self.pos_embedding(x_index)  # (spa_dim, d_model)
            # Reshape to the spatial grid so it broadcasts over the batch axis.
            pos_emb = tf.reshape(pos_emb, (self.spatial_dims[0], self.spatial_dims[1], self.d_model))
            x = x + pos_emb  # broadcast
        q = self.wq(x)  # (batch_size, *spa_dims, d_model)
        k = self.wk(x)  # (batch_size, *spa_dims, d_model)
        v = self.wv(x)  # (batch_size, *spa_dims, d_model)
        # BUG FIX: the projections above have d_model channels, but the
        # original reshaped with the *input* channel count (shape[3]), which
        # is wrong whenever the input depth differs from d_model.
        q = tf.reshape(q, (batch_size, -1, self.d_model))  # (batch_size, spa_dim, d_model)
        k = tf.reshape(k, (batch_size, -1, self.d_model))
        v = tf.reshape(v, (batch_size, -1, self.d_model))
        # scaled_attention.shape == (batch_size, spa_dim, d_model)
        # attention_weights.shape == (batch_size, spa_dim, spa_dim)
        scaled_attention, attention_weights = scaled_dot_product_attention(q, k, v)
        output = tf.reshape(scaled_attention, (batch_size, self.spatial_dims[0], self.spatial_dims[1], self.d_model))
        tf.identity(attention_weights, name=self.name + "_attention_weights")
        return output, attention_weights

    def compute_output_shape(self, input_shape):
        return input_shape[:-1] + (self.d_model,), (input_shape[0], self.spatial_dim, self.spatial_dim)
def scaled_dot_product_attention(q, k, v):
    """Compute softmax(q·kᵀ / sqrt(depth))·v.

    q, k, v must have matching leading dimensions and k, v must have a
    matching penultimate dimension (seq_len_k == seq_len_v).

    Args:
        q: query, shape (..., seq_len_q, depth)
        k: key, shape (..., seq_len_k, depth)
        v: value, shape (..., seq_len_v, depth_v)

    Returns:
        (output, attention_weights)

    from : https://www.tensorflow.org/tutorials/text/transformer
    """
    # Raw similarity scores: (..., seq_len_q, seq_len_k).
    logits = tf.matmul(q, k, transpose_b=True)
    # Scale by sqrt(key depth) to keep the softmax in a well-behaved range.
    depth = tf.cast(tf.shape(k)[-1], tf.float32)
    scaled_logits = logits / tf.math.sqrt(depth)
    # Normalize over the key axis so each query's weights sum to 1.
    attention_weights = tf.nn.softmax(scaled_logits, axis=-1)
    # Weighted sum of values: (..., seq_len_q, depth_v).
    output = tf.matmul(attention_weights, v)
    return output, attention_weights
| 43.455556 | 164 | 0.662746 | 563 | 3,911 | 4.353464 | 0.269982 | 0.039168 | 0.03264 | 0.03264 | 0.218686 | 0.164831 | 0.125255 | 0.074255 | 0.034272 | 0.034272 | 0 | 0.006899 | 0.221682 | 3,911 | 89 | 165 | 43.94382 | 0.798292 | 0.359755 | 0 | 0.045455 | 0 | 0 | 0.019214 | 0 | 0 | 0 | 0 | 0.011236 | 0 | 1 | 0.090909 | false | 0 | 0.090909 | 0.022727 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a394cf9c7eb99717e2514108e5f1a318701bbde | 666 | py | Python | src/modax/layers/network.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | 2 | 2021-12-10T14:36:37.000Z | 2022-02-10T11:47:03.000Z | src/modax/layers/network.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | null | null | null | src/modax/layers/network.py | GJBoth/modax | c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82 | [
"MIT"
] | 2 | 2020-12-22T14:49:13.000Z | 2021-04-09T08:52:08.000Z | from typing import Callable
from jax import lax
from flax import linen as nn
class MultiTaskDense(nn.Module):
    """A dense layer applied independently per task.

    Holds one (input_dim, features) kernel and one bias per task; the batched
    contraction maps an input of shape (n_tasks, batch, input_dim) to
    (n_tasks, batch, features).
    """
    features: int
    n_tasks: int
    kernel_init: Callable = nn.initializers.lecun_normal()
    bias_init: Callable = nn.initializers.zeros

    @nn.compact
    def __call__(self, inputs):
        kernel_shape = (self.n_tasks, inputs.shape[-1], self.features)
        kernel = self.param("kernel", self.kernel_init, kernel_shape)
        # Contract inputs' last axis with the kernel's middle axis, batching
        # over the leading task axis of both operands.
        out = lax.dot_general(
            inputs, kernel, dimension_numbers=(((2,), (1,)), ((0,), (0,)))
        )
        bias = self.param("bias", self.bias_init, (self.n_tasks, 1, self.features))
        return out + bias
| 28.956522 | 87 | 0.612613 | 87 | 666 | 4.528736 | 0.45977 | 0.045685 | 0.071066 | 0.13198 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012146 | 0.258258 | 666 | 22 | 88 | 30.272727 | 0.785425 | 0 | 0 | 0 | 0 | 0 | 0.015015 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.157895 | 0 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
1a3d73a6c52da2deb3d1d2f1db4c3862bf7713d4 | 350 | py | Python | functions/closeAll.py | chiluf/visvis.dev | 373846ea25044b7ca50f44c63dab4248e14deacd | [
"BSD-3-Clause"
] | null | null | null | functions/closeAll.py | chiluf/visvis.dev | 373846ea25044b7ca50f44c63dab4248e14deacd | [
"BSD-3-Clause"
] | null | null | null | functions/closeAll.py | chiluf/visvis.dev | 373846ea25044b7ca50f44c63dab4248e14deacd | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein
#
# Visvis is distributed under the terms of the (new) BSD License.
# The full license can be found in 'license.txt'.
import visvis as vv
def closeAll():
    """ closeAll()

    Closes all figures.
    """
    # Iterate over a snapshot: Destroy() is expected to deregister the figure
    # from BaseFigure._figures, and mutating a dict while iterating its live
    # values() view raises a RuntimeError on Python 3. Iterating a copy is
    # safe either way.
    for fig in list(vv.BaseFigure._figures.values()):
        fig.Destroy()
| 19.444444 | 65 | 0.614286 | 47 | 350 | 4.553191 | 0.787234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019305 | 0.26 | 350 | 17 | 66 | 20.588235 | 0.80695 | 0.565714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a4f33963cc653151cea3eb94ee867a8bc500078 | 660 | py | Python | test_RTC_DS1307.py | LeMaker/LeScratch | 0dde167925afe40cf63cf8ccba13321761494c25 | [
"Apache-2.0"
] | 4 | 2015-06-23T17:57:47.000Z | 2016-02-15T12:52:46.000Z | test_RTC_DS1307.py | LeMaker/LeScratch | 0dde167925afe40cf63cf8ccba13321761494c25 | [
"Apache-2.0"
] | 1 | 2021-08-18T03:17:45.000Z | 2021-08-18T03:17:45.000Z | test_RTC_DS1307.py | LeMaker/LeScratch | 0dde167925afe40cf63cf8ccba13321761494c25 | [
"Apache-2.0"
] | 4 | 2015-07-13T14:43:24.000Z | 2015-12-25T09:14:50.000Z | #!/usr/bin/env python
#
# Test RTC_DS1307
import sys
import time
import datetime
import RTC_DS1307
# Main Program (Python 2: uses `print` statements throughout).
print "Program Started at:"+ time.strftime("%Y-%m-%d %H:%M:%S")
# NOTE(review): `filename` is built but never used; the ':' characters would
# also make it an invalid filename on Windows.
filename = time.strftime("%Y-%m-%d%H:%M:%SRTCTest") + ".txt"
starttime = datetime.datetime.utcnow()
# Open the DS1307 RTC on I2C bus 2, address 0x68, and write the current time.
ds1307 = RTC_DS1307.RTC_DS1307(2, 0x68)
ds1307.write_now()
# Main loop - sleeps 10 seconds between iterations, then reads and prints
# both the system clock and the DS1307.
while True:
currenttime = datetime.datetime.utcnow()
# NOTE(review): `deltatime` is computed but never printed or otherwise used.
deltatime = currenttime - starttime
print ""
print "LeMaker Guitar=\t" + time.strftime("%Y-%m-%d %H:%M:%S")
print "DS1307=\t\t%s" % ds1307.read_datetime()
time.sleep(10.0)
| 18.333333 | 75 | 0.692424 | 101 | 660 | 4.465347 | 0.524752 | 0.079823 | 0.086475 | 0.093126 | 0.117517 | 0.117517 | 0.117517 | 0.079823 | 0 | 0 | 0 | 0.072695 | 0.145455 | 660 | 35 | 76 | 18.857143 | 0.72695 | 0.186364 | 0 | 0 | 0 | 0 | 0.206767 | 0.043233 | 0 | 0 | 0.007519 | 0 | 0 | 0 | null | null | 0 | 0.25 | null | null | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a4fe40aa6eef969719ab20b34d1e9156144719c | 4,536 | py | Python | VS State and Virtual IP Info/avi_virtual_service_info.py | jagmeetsingh91/AviSDK-Scripts | 371c9dadc561efe5087e57beac8b24191d48834d | [
"Apache-2.0"
] | null | null | null | VS State and Virtual IP Info/avi_virtual_service_info.py | jagmeetsingh91/AviSDK-Scripts | 371c9dadc561efe5087e57beac8b24191d48834d | [
"Apache-2.0"
] | null | null | null | VS State and Virtual IP Info/avi_virtual_service_info.py | jagmeetsingh91/AviSDK-Scripts | 371c9dadc561efe5087e57beac8b24191d48834d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# Created on Nov 14, 2017
# @author: aziz@avinetworks.com, jagmeet@avinetworks.com
#
# AVISDK based Script to get the status and configuration information of the Virtual Services
#
# Requires AVISDK ("pip install avisdk") and PrettyTable ("pip install PrettyTable")
# Usage:- python avi_virtual_service_info.py -c <Controller-IP> -u <user-name> -p <password>
# Note:- This script works for Avi Controler version 17.1.1 onwards
import json
import argparse
from avi.sdk.avi_api import ApiSession
from requests.packages import urllib3
from prettytable import PrettyTable
from prettytable import ALL as ALL
urllib3.disable_warnings()
def get_vs_list(api, api_version):
    """Return the UUIDs of all VirtualServices known to the controller."""
    response = api.get('virtualservice', api_version=api_version)
    return [entry['uuid'] for entry in response.json()['results']]
def get_vs_oper_info(api, api_version, vs_list):
    """Fetch per-VS inventory summaries and group them by operational state.

    Each summary dict carries state/name/uuid plus numbered vip_N and dns_N
    entries taken from the inventory config.
    """
    grouped = {}
    for uuid in vs_list:
        inventory = api.get('virtualservice-inventory/%s' % uuid,
                            api_version=api_version).json()
        state = inventory['runtime']['oper_status']['state']
        summary = {
            "state": state,
            "name": inventory['config']['name'],
            "uuid": inventory['config']['uuid'],
        }
        for idx, vip in enumerate(inventory['config']['vip'], start=1):
            summary["vip_" + str(idx)] = vip
        for idx, dns in enumerate(inventory['config']['dns_info'], start=1):
            summary["dns_" + str(idx)] = dns
        grouped.setdefault(state, []).append(summary)
    return grouped
def main():
#Getting Required Args
parser = argparse.ArgumentParser(description="AVISDK based Script to get the status and configuration"+
" information of the Virtual Services")
parser.add_argument("-u", "--username", required=True, help="Login username")
parser.add_argument("-p", "--password", required=True, help="Login password")
parser.add_argument("-c", "--controller", required=True, help="Controller IP address")
parser.add_argument("-t", "--tenant", required=False, help="Tenant Name")
parser.add_argument("-a", "--api_version", required=False, help="Tenant Name")
args = parser.parse_args()
user = args.username
host = args.controller
password = args.password
if args.tenant:
tenant=args.tenant
else:
tenant="*"
if args.api_version:
api_version=args.api_version
else:
api_version="17.1.1"
#Getting API session for the intended Controller.
api = ApiSession.get_session(host, user, password, tenant=tenant, api_version=api_version)
#Getting the list of VirtualService(s).
vs_list = get_vs_list(api, api_version)
#Getting VS information
oper_dict = get_vs_oper_info(api, api_version, vs_list)
#print "Final Oper Dict:" + str(oper_dict)
for state, vs in oper_dict.iteritems():
print("VS in State:%s [%s]" % (state, len(vs)))
table = PrettyTable(hrules=ALL)
table.field_names = ["VS Name","VIP_ID", "VIP_Address", "DNS_INFO"]
for vss in vs:
vips = list()
dns_info = list()
vip_count = 0
dns_count = 0
if 'vip_1' in vss.keys():
vips = [value for key, value in vss.iteritems() if 'vip' in key.lower()]
vip_count = len(vips)
if 'dns_1' in vss.keys():
dns_info = [value for key, value in vss.iteritems() if 'dns' in key.lower()]
dns_count = len(dns_info)
vs_name = vss['name']
vip_ids = ''
vips_list = ''
dns_list = ''
for vip in vips:
vip_ids += vip['vip_id'] + "\n"
vips_list += vip['ip_address']['addr']
if vip.get('floating_ip', None):
vips_list += '- ' + vip['floating_ip']['addr']
vips_list+='\n'
for dns in dns_info:
dns_list += dns['fqdn'] + "\n"
table.add_row([vs_name, vip_ids[:-1], vips_list[:-1], dns_list[:-1]])
print table
print "\n"
if __name__ == "__main__":
main()
| 37.8 | 111 | 0.592813 | 598 | 4,536 | 4.294314 | 0.232441 | 0.058411 | 0.017523 | 0.0331 | 0.246885 | 0.225857 | 0.16433 | 0.16433 | 0.125389 | 0.100467 | 0 | 0.008179 | 0.272266 | 4,536 | 119 | 112 | 38.117647 | 0.769767 | 0.132496 | 0 | 0.05618 | 0 | 0 | 0.153571 | 0.006888 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.033708 | 0.067416 | null | null | 0.033708 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a5078f614596e83a998507f278a0b9bd0a27b7f | 1,035 | py | Python | tomograph/transform.py | fkokosinski/tomograph | 4e988e37441efb94d7010e3f1e95aa8519a5a686 | [
"MIT"
] | 4 | 2019-06-22T22:33:52.000Z | 2021-04-21T09:17:26.000Z | tomograph/transform.py | fkokosinski/tomograph | 4e988e37441efb94d7010e3f1e95aa8519a5a686 | [
"MIT"
] | null | null | null | tomograph/transform.py | fkokosinski/tomograph | 4e988e37441efb94d7010e3f1e95aa8519a5a686 | [
"MIT"
] | null | null | null | import numpy as np
def projective(coords):
""" Convert 2D cartesian coordinates to homogeneus/projective. """
num = np.shape(coords)[0]
w = np.array([[1], ]*num)
return np.append(coords, w, axis=1)
def cartesian(coords):
""" Convert 2D homogeneus/projective coordinates to cartesian. """
return coords[:, :2]
def translate(x, y):
""" Return translation matrix. """
return np.array([
[1, 0, x],
[0, 1, y],
[0, 0, 1],
])
def rotate(a):
""" Return rotation matrix. """
return np.array([
[np.cos(a), -np.sin(a), 0],
[np.sin(a), np.cos(a), 0],
[0, 0, 1]
])
def transform_list(coords, matrix):
""" Apply transformation to a list of coordinates. """
return matrix.dot(coords.T).T
def transform_apply(coords, transforms):
    """Apply a sequence of homogeneous transforms to cartesian coordinates.

    Lifts ``coords`` to projective form, folds every matrix in
    ``transforms`` over them in order, and converts back to cartesian.
    """
    homogeneous = projective(coords)
    for matrix in transforms:
        homogeneous = transform_list(homogeneous, matrix)
    return cartesian(homogeneous)
| 21.5625 | 70 | 0.593237 | 136 | 1,035 | 4.492647 | 0.330882 | 0.03437 | 0.0491 | 0.062193 | 0.065466 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023286 | 0.25314 | 1,035 | 47 | 71 | 22.021277 | 0.767141 | 0.267633 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.038462 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a5498f81765fb1eac207c52c6344cd3eedbeb35 | 164 | py | Python | jp.atcoder/abc005/abc005_2/26220615.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-09T03:06:25.000Z | 2022-02-09T03:06:25.000Z | jp.atcoder/abc005/abc005_2/26220615.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-05T22:53:18.000Z | 2022-02-09T01:29:30.000Z | jp.atcoder/abc005/abc005_2/26220615.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | null | null | null | import sys
import typing
def main() -> None:
    """Read N followed by N integers from stdin and print the smallest.

    Input format (AtCoder ABC005 B): the first line holds N; the remaining
    whitespace-separated tokens on stdin are the N integers themselves.
    """
    # First line: the count N (consumed but not otherwise used below).
    n = int(input())
    # Unpack every remaining stdin token into a list of ints.
    (*t,) = map(int, sys.stdin.read().split())
    print(min(t))
main()
| 13.666667 | 47 | 0.536585 | 22 | 164 | 4 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.268293 | 164 | 11 | 48 | 14.909091 | 0.733333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0 | 0.428571 | 0.142857 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a54b28acedd9ff633d1db4868301520a6ba9dcb | 748 | py | Python | List Events.py | hcaushi/higgs-hunters | f433a71ab01470fb6e72ebd8b69e697e77ae3c94 | [
"MIT"
] | null | null | null | List Events.py | hcaushi/higgs-hunters | f433a71ab01470fb6e72ebd8b69e697e77ae3c94 | [
"MIT"
] | null | null | null | List Events.py | hcaushi/higgs-hunters | f433a71ab01470fb6e72ebd8b69e697e77ae3c94 | [
"MIT"
] | null | null | null | import csv
import sys
#This program was written in Python 3.6.3 by Henry Caushi. You are free to use it for any reason, without my permission, without having to inform myself or anyone else
#This program was was written to aid other programs, by providing a list of all event IDs so that they appear only once
#List of all event IDs
# List of all event IDs, in order of first appearance.
list_ids = []

filename = "Higgs_Hunters_data_ALL.csv"

# Collect each event ID exactly once.
# Fixes a syntax error in the original: open(filename+,"r").
# A companion set gives O(1) membership tests instead of scanning the
# list for every row, while list_ids preserves first-seen order.
seen = set()
with open(filename, "r") as f:
    reader = csv.reader(f)
    for row in reader:
        # Column 3 holds the event ID; record it only on first sighting.
        if row[3] not in seen:
            seen.add(row[3])
            list_ids.append(row[3])

# Dump the unique event IDs, one per line.
with open("List IDs.txt", "w") as f:
    for row in list_ids:
        f.write(row + "\n")
| 27.703704 | 167 | 0.713904 | 144 | 748 | 3.659722 | 0.527778 | 0.066414 | 0.053131 | 0.053131 | 0.064516 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008361 | 0.200535 | 748 | 26 | 168 | 28.769231 | 0.87291 | 0.573529 | 0 | 0.142857 | 0 | 0 | 0.134185 | 0.083067 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.142857 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a550f338065214a5625283d1ea0bc348f1499f6 | 268 | py | Python | custom_latex_cell_style/scenario2/ipython_nbconvert_config.py | isabella232/nbconvert-examples | 039724f4251cc8183f85534785fbee14809248ac | [
"BSD-3-Clause"
] | 120 | 2015-09-26T22:16:59.000Z | 2022-03-14T19:58:46.000Z | custom_latex_cell_style/scenario2/ipython_nbconvert_config.py | tarkantemizoz/nbconvert-examples | 039724f4251cc8183f85534785fbee14809248ac | [
"BSD-3-Clause"
] | 12 | 2015-09-23T19:52:38.000Z | 2021-08-04T23:30:37.000Z | custom_latex_cell_style/scenario2/ipython_nbconvert_config.py | tarkantemizoz/nbconvert-examples | 039724f4251cc8183f85534785fbee14809248ac | [
"BSD-3-Clause"
] | 82 | 2015-12-11T22:04:01.000Z | 2021-12-08T07:09:31.000Z | c = get_config()
#Export all the notebooks in the current directory to the sphinx_howto format.
c.NbConvertApp.notebooks = ['*.ipynb']
c.NbConvertApp.export_format = 'latex'
c.NbConvertApp.postprocessor_class = 'PDF'
c.Exporter.template_file = 'custom_article.tplx'
| 29.777778 | 78 | 0.779851 | 37 | 268 | 5.486486 | 0.702703 | 0.192118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108209 | 268 | 8 | 79 | 33.5 | 0.849372 | 0.287313 | 0 | 0 | 0 | 0 | 0.178947 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a612743d582b02908a4b3a8f29574ce5358d4cb | 865 | py | Python | POPGEN/flashpca_to_smartpca.py | Hammarn/Scripts | eb9fb51b614d29aea425168aa16c58410d975f46 | [
"MIT"
] | null | null | null | POPGEN/flashpca_to_smartpca.py | Hammarn/Scripts | eb9fb51b614d29aea425168aa16c58410d975f46 | [
"MIT"
] | null | null | null | POPGEN/flashpca_to_smartpca.py | Hammarn/Scripts | eb9fb51b614d29aea425168aa16c58410d975f46 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import argparse
import pandas as pd
def main(input_file, output):
    """Convert FlashPCA eigenvector output into SmartPCA-style output.

    Reads a tab-separated file with FID and IID columns, copies the original
    FID into a trailing 'last' column, rewrites each FID as "FID:IID", and
    writes the result to `output` (tab-separated, without the index).
    """
    pd_data = pd.read_csv(input_file, sep="\t")
    # Preserve the original family ID before it is rewritten below.
    pd_data['last'] = pd_data['FID']
    for i in pd_data.index:
        pd_data.loc[i, 'FID'] = "{}:{}".format(pd_data.loc[i, 'FID'],
                                               pd_data.loc[i, 'IID'])
    pd_data.to_csv(output, sep="\t", index=False)
    # Removed a stray debugging leftover ("import pdb") and replaced the
    # Python-2-only print statement with a form valid on both 2 and 3.
    print("Output written to {}".format(output))
if __name__ == "__main__":
    # Build the command-line interface and hand the arguments to main().
    arg_parser = argparse.ArgumentParser("""Converts FlashPCA output into SmartPCA output
    """)
    arg_parser.add_argument(
        "-i",
        "--input",
        default='king.kin',
        help="Input file from FlashPCA to convert to SmartPCA output format.",
    )
    arg_parser.add_argument(
        "-o",
        "--output",
        default='pca.evec',
        help="Name of Outputfile.",
    )
    parsed = arg_parser.parse_args()
    main(parsed.input, parsed.output)
| 30.892857 | 88 | 0.649711 | 124 | 865 | 4.346774 | 0.475806 | 0.089054 | 0.050093 | 0.055659 | 0.048237 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184971 | 865 | 27 | 89 | 32.037037 | 0.764539 | 0.049711 | 0 | 0 | 0 | 0 | 0.267073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.157895 | null | null | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a65dba3fb6b320ee85ba73a4571435a2d581c12 | 324 | py | Python | gwent/vendor/pygwinc_clone/gwinc/ifo/aLIGO/__init__.py | ark0015/GWDetectorDesignToolkit | 6ee2f7a633c973ea10b450257b1ad4dbd0323738 | [
"MIT"
] | 14 | 2019-10-16T13:27:19.000Z | 2022-03-15T02:14:49.000Z | gwent/vendor/pygwinc_clone/gwinc/ifo/aLIGO/__init__.py | ark0015/GWDetectorDesignToolkit | 6ee2f7a633c973ea10b450257b1ad4dbd0323738 | [
"MIT"
] | 1 | 2019-09-29T21:21:40.000Z | 2019-09-29T21:21:40.000Z | gwent/vendor/pygwinc_clone/gwinc/ifo/aLIGO/__init__.py | ark0015/gwent | 6ee2f7a633c973ea10b450257b1ad4dbd0323738 | [
"MIT"
] | 6 | 2019-11-27T09:45:31.000Z | 2022-03-15T02:14:31.000Z | from gwinc.ifo.noises import *
class aLIGO(nb.Budget):
    """Noise budget definition for the Advanced LIGO detector.

    NOTE(review): ``nb.Budget`` and every noise class listed below are
    expected to come from the star import of ``gwinc.ifo.noises`` at the
    top of this file — confirm against that module.
    """
    name = "Advanced LIGO"
    # Individual noise contributions that make up this budget.
    noises = [
        QuantumVacuum,
        Seismic,
        Newtonian,
        SuspensionThermal,
        CoatingBrownian,
        CoatingThermoOptic,
        SubstrateBrownian,
        SubstrateThermoElastic,
        ExcessGas,
    ]
| 17.052632 | 31 | 0.589506 | 22 | 324 | 8.681818 | 0.954545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.342593 | 324 | 18 | 32 | 18 | 0.896714 | 0 | 0 | 0 | 0 | 0 | 0.040123 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a6a468047e8c5ffc11c31806e4527c666198d73 | 5,535 | py | Python | firmware/m5mw.micropython.py | RAWSEQ/M5MouseWheel | 08e89d5e5e1b60eb40aba81a16d015bc48077a89 | [
"MIT"
] | null | null | null | firmware/m5mw.micropython.py | RAWSEQ/M5MouseWheel | 08e89d5e5e1b60eb40aba81a16d015bc48077a89 | [
"MIT"
] | null | null | null | firmware/m5mw.micropython.py | RAWSEQ/M5MouseWheel | 08e89d5e5e1b60eb40aba81a16d015bc48077a89 | [
"MIT"
] | 2 | 2021-05-29T16:19:26.000Z | 2021-09-05T13:24:02.000Z | from m5stack import *
from m5stack_ui import *
from uiflow import *
from ble import ble_uart
import face
screen = M5Screen()
screen.clean_screen()
screen.set_screen_bg_color(0x000000)
mb_click = None
rb_click = None
lb_click = None
snd_val = None
st_mode = None
stval = None
prval = None
faces_encode = face.get(face.ENCODE)
direction = M5Label('M5MouseWheel - Please dont touch for processing...', x=0, y=228, color=0xc7c7c7, font=FONT_MONT_12, parent=None)
LBtn = M5Btn(text='L', x=170, y=6, w=65, h=100, bg_c=0x000000, text_c=0xbcbcbc, font=FONT_UNICODE_24, parent=None)
RBtn = M5Btn(text='R', x=240, y=6, w=70, h=48, bg_c=0x000000, text_c=0xbebebe, font=FONT_UNICODE_24, parent=None)
d_w_x = M5Btn(text='WX', x=0, y=162, w=48, h=48, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_UNICODE_24, parent=None)
MBtn = M5Btn(text='M', x=240, y=58, w=70, h=48, bg_c=0x000000, text_c=0xbebebe, font=FONT_UNICODE_24, parent=None)
d_w_y = M5Btn(text='WY', x=52, y=162, w=48, h=48, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_UNICODE_24, parent=None)
b_step = M5Btn(text='STEP', x=0, y=6, w=100, h=100, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_UNICODE_24, parent=None)
d_y = M5Btn(text='Y', x=220, y=110, w=100, h=100, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_MONT_48, parent=None)
d_scr = M5Btn(text='SCR', x=0, y=110, w=100, h=48, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_UNICODE_24, parent=None)
d_x = M5Btn(text='X', x=110, y=110, w=100, h=100, bg_c=0x000000, text_c=0xd4d4d4, font=FONT_MONT_48, parent=None)
v_step = M5Label('1', x=121, y=38, color=0xc7c7c7, font=FONT_MONT_24, parent=None)
def changeMode():
  """Zero the outgoing value and announce the active mode over BLE + screen."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  snd_val = 0
  payload = str(st_mode) + str(snd_val)
  uart_ble.write(payload)
  direction.set_text(payload)
def resetMode():
  """Clear the active mode and restore every mode button to black."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  st_mode = ''
  for widget in (b_step, d_y, d_scr, d_w_x, d_w_y, d_x):
    widget.set_bg_color(0x000000)
def MBtn_pressed():
  """Toggle the middle mouse button and report its new state over BLE."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  mb_click = 1 if mb_click != 1 else 0
  payload = str('M') + str(mb_click)
  uart_ble.write(payload)
  # Highlight the button while the press is held down.
  MBtn.set_bg_color(0x666666 if mb_click == 1 else 0x000000)
  direction.set_text(payload)
MBtn.pressed(MBtn_pressed)
def LBtn_pressed():
  """Toggle the left mouse button and report its new state over BLE."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  lb_click = 1 if lb_click != 1 else 0
  payload = str('L') + str(lb_click)
  uart_ble.write(payload)
  # Highlight the button while the press is held down.
  LBtn.set_bg_color(0x666666 if lb_click == 1 else 0x000000)
  direction.set_text(payload)
LBtn.pressed(LBtn_pressed)
def RBtn_pressed():
  """Toggle the right mouse button and report its new state over BLE."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  rb_click = 1 if rb_click != 1 else 0
  payload = str('R') + str(rb_click)
  uart_ble.write(payload)
  # Highlight the button while the press is held down.
  RBtn.set_bg_color(0x666666 if rb_click == 1 else 0x000000)
  direction.set_text(payload)
RBtn.pressed(RBtn_pressed)
def b_step_pressed():
  """Switch to step-size mode ('T') unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'T':
    return
  resetMode()
  st_mode = 'T'
  b_step.set_bg_color(0x666666)
  faces_encode.setLed(0, 0xffffff)
  changeMode()
b_step.pressed(b_step_pressed)
def d_scr_pressed():
  """Switch to scroll mode ('S') unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'S':
    return
  resetMode()
  st_mode = 'S'
  d_scr.set_bg_color(0x666666)
  faces_encode.setLed(0, 0xff9900)
  changeMode()
d_scr.pressed(d_scr_pressed)
def d_x_pressed():
  """Switch to X-axis mode ('X') unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'X':
    return
  resetMode()
  st_mode = 'X'
  d_x.set_bg_color(0x666666)
  faces_encode.setLed(0, 0xff0000)
  changeMode()
d_x.pressed(d_x_pressed)
def d_y_pressed():
  """Switch to Y-axis mode ('Y') unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'Y':
    return
  resetMode()
  st_mode = 'Y'
  d_y.set_bg_color(0x666666)
  faces_encode.setLed(0, 0x3333ff)
  changeMode()
d_y.pressed(d_y_pressed)
def d_w_x_pressed():
  """Switch to mode 'U' (WX button) unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'U':
    return
  resetMode()
  st_mode = 'U'
  d_w_x.set_bg_color(0x666666)
  faces_encode.setLed(0, 0x33ff33)
  changeMode()
d_w_x.pressed(d_w_x_pressed)
def d_w_y_pressed():
  """Switch to mode 'V' (WY button) unless it is already active."""
  global mb_click, lb_click, rb_click, snd_val, st_mode, stval, prval
  if st_mode == 'V':
    return
  resetMode()
  st_mode = 'V'
  d_w_y.set_bg_color(0x666666)
  faces_encode.setLed(0, 0x00cccc)
  changeMode()
d_w_y.pressed(d_w_y_pressed)
# Initial state: clear all mode buttons, then default to scroll mode ('S')
# with a step multiplier of 1.
resetMode()
# Start the BLE UART service under this device name.
uart_ble = ble_uart.init('m5mw_01')
stval = 1
st_mode = 'S'
# Seed the previous encoder reading so the main loop only reacts to changes.
prval = faces_encode.getValue()
snd_val = 0
d_scr.set_bg_color(0x666666)
faces_encode.setLed(0, 0xff9900)
# Announce the initial mode/value over BLE and on the status line.
uart_ble.write((str(st_mode) + str(str(snd_val))))
direction.set_text(str((str(st_mode) + str(str(snd_val)))))
# Main polling loop: watch the rotary encoder for movement.
while True:
  if (faces_encode.getValue()) != prval:
    if st_mode == 'T':
      # Step mode: the encoder adjusts the local step multiplier only.
      stval = stval + ((faces_encode.getValue()) - prval)
      v_step.set_text(str(stval))
    else:
      # Any other mode: scale the encoder delta by the step multiplier,
      # accumulate it, and send "<mode><value>" over BLE.
      snd_val = snd_val + ((faces_encode.getValue()) - prval) * stval
      uart_ble.write((str(st_mode) + str(str(snd_val))))
      direction.set_text(str((str(st_mode) + str(str(snd_val)))))
    prval = faces_encode.getValue()
  # Short sleep keeps the loop responsive without spinning flat out.
  wait_ms(2)
| 31.271186 | 133 | 0.70551 | 988 | 5,535 | 3.659919 | 0.117409 | 0.054757 | 0.052544 | 0.045631 | 0.61781 | 0.564712 | 0.510509 | 0.504978 | 0.473451 | 0.399336 | 0 | 0.090155 | 0.150316 | 5,535 | 176 | 134 | 31.448864 | 0.678716 | 0.003975 | 0 | 0.341935 | 0 | 0 | 0.01706 | 0 | 0 | 0 | 0.06824 | 0 | 0 | 1 | 0.070968 | false | 0.058065 | 0.032258 | 0 | 0.103226 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
1a6d2d07b82c65f1c0ee7a25477d2386875f5077 | 12,181 | py | Python | main.py | FSlowkey/_csmentor_ | 19db2e43a9418df9cd999cdeaa5845b4e9b721c8 | [
"MIT"
] | null | null | null | main.py | FSlowkey/_csmentor_ | 19db2e43a9418df9cd999cdeaa5845b4e9b721c8 | [
"MIT"
] | null | null | null | main.py | FSlowkey/_csmentor_ | 19db2e43a9418df9cd999cdeaa5845b4e9b721c8 | [
"MIT"
] | null | null | null | import os
import webapp2
import data
import datetime
from google.appengine.ext.webapp import template
from google.appengine.api import users
from google.appengine.api import images
from google.appengine.api import blobstore
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import ndb
# email stuff
from google.appengine.api import app_identity
from google.appengine.api import mail
import datetime
# FUNCTION
def render_template(handler, file_name, template_values):
path = os.path.join(os.path.dirname(__file__), 'templates/', file_name)
handler.response.out.write(template.render(path, template_values))
def get_user_email():
user = users.get_current_user()
print(user)
if user:
return user.email()
else:
return None
def get_template_parameters():
values = {}
email = get_user_email()
if email:
values['learner'] = data.is_learner(email)
values['expert'] = data.is_expert(email)
values['logout_url'] = users.create_logout_url('/')
values['upload_url'] = blobstore.create_upload_url('/profile-save')
values['user'] = email
else:
values['login_url'] = users.create_login_url('/welcome')
values['upload_url'] = blobstore.create_upload_url('/profile-save')
return values
class MainHandler(webapp2.RequestHandler):
def get(self):
values = get_template_parameters()
email = get_user_email()
render_template(self, 'mainpage.html', values)
#PROFILE SETTING CODE STARS HERE
class DefineHandler(webapp2.RequestHandler):
def get(self):
values = get_template_parameters()
render_template(self, 'areyouor.html', values)
class SaveDefineHandler(webapp2.RequestHandler):
def post(self):
print('testing')
email = get_user_email()
data.save_email(email)
defineStat = self.request.get('defineStat')
if defineStat == "isLearner":
learnerStat = True
expertStat = False
elif defineStat == "isExpert":
expertStat = True
learnerStat = False
data.define_stat(email,learnerStat,expertStat)
self.response.out.write('hello?')
self.redirect('/edit-profile-student')
#PROFILE SAVING CODE STARTS HERE
class EditProfileHandler(webapp2.RequestHandler):
def get(self):
values = get_template_parameters()
render_template(self, 'edit-profile-student.html', values)
#IMAGE SAVING CODE STARTS HERE
class SaveProfileHandler(blobstore_handlers.BlobstoreUploadHandler):
def post(self):
values = get_template_parameters()
if get_user_email():
upload_files = self.get_uploads()
blob_info = upload_files[0]
type = blob_info.content_type
defineStat = self.request.get('defineStat')
email = get_user_email()
name = self.request.get('name')
biography = self.request.get('biography')
location =self.request.get('cityhidden')
if type in ['image/jpeg', 'image/png', 'image/gif', 'image/webp']:
name= self.request.get('name')
data.save_profile(email, name, biography, location, blob_info.key())
self.redirect('/my-feed')
class ImageHandler(webapp2.RequestHandler):
def get(self):
values = get_template_parameters()
image_id=self.request.get('id')
my_image = ndb.Key(urlsafe=image_id).get()
values['image_id'] = image_id
values['image_url'] = images.get_serving_url(
my_image.image, size=150, crop=True
)
values['image_name'] = my_image.name
values['biography'] = self.request.get('biography')
render_template(self, 'profilefeed.html', values)
class ViewPhotoHandler(blobstore_handlers.BlobstoreDownloadHandler):
def get(self):
user_id = self.request.get('id')
user_profile = ndb.Key(urlsafe=user_id).get()
blob_key = user_profile.profile_pic
self.send_blob(blob_key)
class ImageManipulationHandler(webapp2.RequestHandler):
def get(self):
image_id = self.request.get("id")
my_image = ndb.Key(urlsafe=image_id).get()
blob_key = my_image.image
img = images.Image(blob_key=blob_key)
print(img)
modified = False
h = self.request.get('height')
w = self.request.get('width')
fit = False
if self.request.get('fit'):
fit = True
if h and w:
img.resize(width=int(w), height=int(h), crop_to_fit=fit)
modified = True
optimize = self.request.get('opt')
if optimize:
img.im_feeling_lucky()
modified = True
flip = self.request.get('flip')
if flip:
img.vertical_flip()
modified = True
mirror = self.request.get('mirror')
if mirror:
img.horizontal_flip()
modified = True
rotate = self.request.get('rotate')
if rotate:
img.rotate(int(rotate))
modified = True
result = img
if modified:
result = img.execute_transforms(output_encoding=images.JPEG)
print("about to render image")
img.im_feeling_lucky()
self.response.headers['Content-Type'] = 'image/png'
self.response.out.write(img.execute_transforms(output_encoding=images.JPEG))
#IMAGE MANIPULATION CODE ENDS HERE
#FEED CONTROLLER STARTS HERE
def InterestsMatch(userExpert):
#This function checks to see that the user and expert have at least one interest in common
current_user_interests = data.get_user_interests(get_user_email())
expert_user_interests = data.get_user_interests(userExpert.email)
i = 0
for interest in current_user_interests:
if current_user_interests[interest] and expert_user_interests[interest]:
return True
return False
class FeedHandler(webapp2.RequestHandler):
def get(self):
p = get_user_email()
if p:
values = get_template_parameters()
profile = data.get_user_profile(p)
neededlocation = profile.location
values['image_url'] = '/profilepic?id=' + profile.key.urlsafe()
expert_profiles = data.get_expert_profiles(neededlocation)
expert_list = []
for expert_profile in expert_profiles:
if InterestsMatch(expert_profile):
expert_profile.keyUrl = expert_profile.key.urlsafe()
expert_list.append(expert_profile)
values['available_experts'] = expert_list
for expert in values['available_experts']:
values['expimg']='/profilepic?id=' + expert.key.urlsafe()
values['events'] = []
events_key_list = data.get_user_profile(get_user_email()).events_list
for events_key in events_key_list:
event = events_key.get()
values['events'].append(event)
values['name'] = profile.name
values['location'] = profile.location
values['biography'] = profile.biography
values['interests']= profile.interests
render_template(self, 'profilefeed.html', values)
else:
self.redirect('/')
#FEED CONTROLLER ENDS HERE
#PROFILE SAVING CODE ENDS HERE
#INTERESTS CODE STARTS HERE
class SaveInterestsHandler(webapp2.RequestHandler):
def post(self):
interests = self.request.get('interests')
values = get_template_parameters()
values['interests'] = data.get_user_interests(get_user_email())
for key in values['interests']:
enabled = self.request.get(key)
print(enabled)
if enabled == key:
values['interests'][key]=True
else:
values['interests'][key]=False
new_interests = values['interests']
data.save_interests(get_user_email(), new_interests)
print(new_interests)
self.redirect('/my-feed')
class EditInterestsHandler(webapp2.RequestHandler):
def get(self):
values = get_template_parameters()
if get_user_email():
if data.get_user_interests(get_user_email()):
values['interests'] = data.get_user_interests(get_user_email())
print(values['interests'])
values['interests']= values['interests'].items()
render_template(self, 'interest.html', values)
else:
interests={
"Java":False,
"Python":False,
"JavaScript":False,
"HTML":False,
"CSS":False,
"C#":False,
"Industry Insight":False,
"Internships and Experience":False,
"AI":False,
"Machine Learning":False,
}
render_template(self, 'interest.html', values)
#INTERESTS CODE ENDS HERE
#VIEWING EXPERT PROFILE CODE STARTS HERE
class ExpertProfileViewHandler(webapp2.RequestHandler):
def get(self, name):
values = get_template_parameters()
profile = data.get_user_profile(data.get_user_email_by_name(name))
print ">>>>Profile:"
print profile
if profile:
values['image_url'] = '/profilepic?id=' + profile.key.urlsafe()
values['profileid'] = profile.key.urlsafe()
values['name'] = profile.name
values['biography'] = profile.biography
values['location'] = profile.location
values['profile_pic'] = profile.profile_pic
values['interests'] = data.get_user_interests(get_user_email())
values['interests'] = values['interests'].items()
values['email'] = get_user_email()
values['events'] = []
events_key_list = profile.events_list
for events_key in events_key_list:
event = events_key.get()
values['events'].append(event)
render_template(self, 'expert-from-student.html', values)
class SendMailHandler(webapp2.RequestHandler):
def post(self):
values = get_template_parameters()
subject = "Hi! you have a new message from Hyperlink: " + self.request.get('subject')
body = get_user_email() + " sent you: " + self.request.get('body')
profile_id = self.request.get('profileid')
profile = data.get_profile_by_id(profile_id)
sender_address = 'NoReply@cssi-chat-2.appspotmail.com'
mail.send_mail(sender_address, profile.email, subject, body)
render_template(self, 'profilefeed.html', values)
class SaveEventHandler(webapp2.RequestHandler):
def post(self):
print("hello")
email = get_user_email()
name = self.request.get('name')
description = self.request.get('description')
cap= self.request.get('cap')
date = datetime.datetime.strptime(self.request.get('date'), "%Y-%m-%d")
data.save_event(email, name, date, description,cap)
self.redirect('/my-feed')
class SetUserHandler(webapp2.RequestHandler):
def get(self):
get_template_parameters()
email = get_user_email()
setvallea = data.is_learner(email)
setvalexp = data.is_expert(email)
if setvallea or setvalexp:
print('EMAIL REC.')
self.redirect('/my-feed')
else:
print('EMAIL UNREC.')
self.redirect('/set-profile')
app = webapp2.WSGIApplication([
('/welcome', SetUserHandler),
('/set-profile', DefineHandler),
('/definition', SaveDefineHandler),
('/edit-profile-student', EditProfileHandler),
('/profile-save', SaveProfileHandler),
('/image', ImageHandler),
('/my-feed', FeedHandler),
('/interests', EditInterestsHandler),
('/interests-save', SaveInterestsHandler),
('/p/(.*)', ExpertProfileViewHandler),
('/send-mail', SendMailHandler),
('/img', ImageManipulationHandler),
('/create_event', SaveEventHandler),
('/profilepic', ViewPhotoHandler),
('/.*', MainHandler)
])
| 33.372603 | 93 | 0.628848 | 1,333 | 12,181 | 5.575394 | 0.178545 | 0.027314 | 0.048977 | 0.036329 | 0.368811 | 0.243272 | 0.204252 | 0.167115 | 0.150027 | 0.055436 | 0 | 0.002318 | 0.256383 | 12,181 | 365 | 94 | 33.372603 | 0.818172 | 0.033084 | 0 | 0.302817 | 0 | 0 | 0.110157 | 0.01071 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.045775 | null | null | 0.042254 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a7483bf107ea0fb77bb68f2d2dcf10700bcb562 | 443 | py | Python | 12_find the output/03_In Python/01_GeeksForGeeks/02_Set two/problem_2.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | 1 | 2021-11-16T14:14:38.000Z | 2021-11-16T14:14:38.000Z | 12_find the output/03_In Python/01_GeeksForGeeks/02_Set two/problem_2.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | null | null | null | 12_find the output/03_In Python/01_GeeksForGeeks/02_Set two/problem_2.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | null | null | null | for i in range(2):
print(i) # print 0 then 1
for i in range(4,6):
print (i) # print 4 then 5
"""
Explanation:
If only single argument is passed to the range method,
Python considers this argument as the end of the range and the default start value of range is 0.
So, it will print all the numbers starting from 0 and before the supplied argument.
For the second for loop the starting value is explicitly supplied as 4 and ending is 5.
""" | 36.916667 | 98 | 0.742664 | 85 | 443 | 3.870588 | 0.529412 | 0.024316 | 0.036474 | 0.066869 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031073 | 0.200903 | 443 | 12 | 99 | 36.916667 | 0.898305 | 0.065463 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
1a7735124d5e69d466b80a312a23be896f940f79 | 505 | py | Python | game.py | pricob/Strategy-Game | df9011b87b6521d1bb156e512eeb120e0b09962e | [
"MIT"
] | null | null | null | game.py | pricob/Strategy-Game | df9011b87b6521d1bb156e512eeb120e0b09962e | [
"MIT"
] | null | null | null | game.py | pricob/Strategy-Game | df9011b87b6521d1bb156e512eeb120e0b09962e | [
"MIT"
] | null | null | null | def game_main():
### IMPORTS ###
import colorama
from colorama import Fore
from engine import engineScript
from engine import clearScript
from os import environ
environ['PYGAME_HIDE_SUPPORT_PROMPT'] = '1'
import pygame
### ENGINE INITIALIZATION ###
settings = ["width", "height"]
engineScript.InitEngine(Fore, settings)
pygame.init()
### PROGRAM TERMINATED ###
clearScript.run()
if __name__ == "__main__":
game_main() | 24.047619 | 48 | 0.625743 | 50 | 505 | 6.06 | 0.58 | 0.052805 | 0.105611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00271 | 0.269307 | 505 | 21 | 49 | 24.047619 | 0.818428 | 0.09901 | 0 | 0 | 0 | 0 | 0.110577 | 0.0625 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.428571 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
1a78717f3ade0f1b49b87652920497f50424fe03 | 387 | py | Python | src/elementary_flask/components/general/favicon.py | xaled/flaskly | 2ed66d89e42afba830d6c73c9f70f00d1dcac573 | [
"MIT"
] | null | null | null | src/elementary_flask/components/general/favicon.py | xaled/flaskly | 2ed66d89e42afba830d6c73c9f70f00d1dcac573 | [
"MIT"
] | null | null | null | src/elementary_flask/components/general/favicon.py | xaled/flaskly | 2ed66d89e42afba830d6c73c9f70f00d1dcac573 | [
"MIT"
] | null | null | null | __all__ = ['FavIcon']
from dataclasses import dataclass, field
from html import escape as html_escape
@dataclass
class FavIcon:
href: str
rel: str = "icon"
mimetype: str = "image/x-icon"
rendered: str = field(init=False, repr=False)
def __post_init__(self):
self.rendered = f'<link rel="{self.rel}" type="{self.mimetype}" href="{html_escape(self.href)}">'
| 25.8 | 105 | 0.674419 | 53 | 387 | 4.716981 | 0.528302 | 0.08 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.183463 | 387 | 14 | 106 | 27.642857 | 0.791139 | 0 | 0 | 0 | 0 | 0.090909 | 0.260982 | 0.139535 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.181818 | 0 | 0.727273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
1a7fc96b729905953b1c7215ffb1a13a615d4713 | 518 | py | Python | babysteps/6.combine_strings.py | mvoltz/realpython | 622d700721d8475b1e81964d14c781e7936d120f | [
"BSD-2-Clause"
] | null | null | null | babysteps/6.combine_strings.py | mvoltz/realpython | 622d700721d8475b1e81964d14c781e7936d120f | [
"BSD-2-Clause"
] | null | null | null | babysteps/6.combine_strings.py | mvoltz/realpython | 622d700721d8475b1e81964d14c781e7936d120f | [
"BSD-2-Clause"
] | null | null | null | # called concatenation sometimes..
str1 = 'abra, '
str2 = 'cadabra. '
str3 = 'i wanna reach out and grab ya.'
combo = str1 + str1 + str2 + str3
# you probably don't remember the song.
print(combo)
# you can also do it this way
print('I heat up', '\n', "I can't cool down", '\n', 'my life is spinning', '\n', 'round and round')
# notice the change in single and double quotes. hopefully the change makes sense.
print('not sure why the space for lines 2,3,4 above.', '\n', "i guess there's more to learn... :)")
| 25.9 | 99 | 0.660232 | 88 | 518 | 3.886364 | 0.738636 | 0.011696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024038 | 0.196911 | 518 | 19 | 100 | 27.263158 | 0.798077 | 0.34556 | 0 | 0 | 0 | 0 | 0.577844 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
1a82bd2a3228a557a4e93765d69c4bc3cf1313d3 | 3,781 | py | Python | mdot_rest/migrations/0002_auto_20150722_2054.py | uw-it-aca/mdot-rest | 3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8 | [
"Apache-2.0"
] | null | null | null | mdot_rest/migrations/0002_auto_20150722_2054.py | uw-it-aca/mdot-rest | 3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8 | [
"Apache-2.0"
] | 67 | 2015-07-23T23:22:14.000Z | 2022-02-04T21:39:43.000Z | mdot_rest/migrations/0002_auto_20150722_2054.py | uw-it-aca/mdot-rest | 3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration (0002, 2015-07-22).

    Adds the IntendedAudience and Resource models, then reworks
    ResourceLink from a record with per-store URL columns into a generic
    typed link (link_type/slug/title/url) related to resources via M2M.
    Generated by Django's makemigrations; edit with care.
    """
    dependencies = [
        ('mdot_rest', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='IntendedAudience',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=30)),
                ('slug', models.SlugField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=60)),
                ('slug', models.SlugField(max_length=60)),
                ('feature_desc', models.CharField(max_length=120)),
                ('featured', models.BooleanField(default=False)),
                ('accessible', models.BooleanField(default=False)),
                ('responsive_web', models.BooleanField(default=False)),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Strip the old per-platform URL columns and metadata off ResourceLink.
        migrations.RemoveField(
            model_name='resourcelink',
            name='Google_Play_url',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='Windows_Store_url',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='created_date',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='feature_desc',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='iTunes_url',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='last_modified',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='name',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='short_desc',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='support_url',
        ),
        migrations.RemoveField(
            model_name='resourcelink',
            name='web_url',
        ),
        # New generic link shape: one typed URL per ResourceLink row.
        migrations.AddField(
            model_name='resourcelink',
            name='link_type',
            field=models.CharField(default='WEB', max_length=3, choices=[(b'AND', b'Android'), (b'IOS', b'iOS'), (b'WEB', b'Web'), (b'WIP', b'Windows Phone')]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='resourcelink',
            name='slug',
            field=models.SlugField(default='default_slug', max_length=60),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='resourcelink',
            name='title',
            field=models.CharField(default='default_title', max_length=60),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='resourcelink',
            name='url',
            field=models.URLField(default='default_url'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='intendedaudience',
            name='resource',
            field=models.ManyToManyField(to='mdot_rest.Resource'),
        ),
        migrations.AddField(
            model_name='resourcelink',
            name='resource',
            field=models.ManyToManyField(to='mdot_rest.Resource'),
        ),
    ]
| 34.063063 | 160 | 0.545887 | 324 | 3,781 | 6.169753 | 0.262346 | 0.072036 | 0.157579 | 0.187594 | 0.601301 | 0.573287 | 0.44022 | 0.268634 | 0.268634 | 0.181091 | 0 | 0.008255 | 0.327162 | 3,781 | 110 | 161 | 34.372727 | 0.777516 | 0.005554 | 0 | 0.625 | 0 | 0 | 0.158329 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019231 | 0 | 0.048077 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a8bd1f13f351b7336c171be459cf320b1683b22 | 4,435 | py | Python | src/web/users/forms.py | werelaxe/drapo | 5f78da735819200f0e7efa6a5e6b3b45ba6e0d4b | [
"MIT"
] | 10 | 2017-04-15T05:00:17.000Z | 2019-08-27T21:08:48.000Z | src/web/users/forms.py | werelaxe/drapo | 5f78da735819200f0e7efa6a5e6b3b45ba6e0d4b | [
"MIT"
] | 2 | 2017-10-06T12:35:59.000Z | 2018-12-03T07:17:12.000Z | src/web/users/forms.py | werelaxe/drapo | 5f78da735819200f0e7efa6a5e6b3b45ba6e0d4b | [
"MIT"
] | 4 | 2017-03-08T21:17:21.000Z | 2019-05-10T16:22:58.000Z | from django import forms
from django.utils.translation import ugettext_lazy as _
class LoginForm(forms.Form):
    """Sign-in form: an email (used as the login name) and a password."""
    # NOTE(review): despite the label, this is a plain CharField, so no
    # email-format validation happens here -- confirm that is intended.
    email = forms.CharField(
        required=True,
        label=_('Email'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your email'),
            'autofocus': 'autofocus',
            'class': 'form-control-short',
        })
    )
    password = forms.CharField(
        required=True,
        label=_('Password'),
        max_length=128,
        widget=forms.PasswordInput(attrs={
            'placeholder': _('Enter password'),
            'class': 'form-control-short',
        })
    )
class FormWithRepeatedPassword(forms.Form):
    """Base form providing a password plus a confirmation field.

    Subclasses (RegisterForm, ChangePasswordForm) inherit both fields and
    the cross-field check in clean_password_repeat().
    """
    password = forms.CharField(
        required=True,
        label=_('Password'),
        max_length=128,
        widget=forms.PasswordInput(attrs={
            'placeholder': _('Enter password'),
            'class': 'form-control-short',
        })
    )
    password_repeat = forms.CharField(
        required=True,
        label=_('Password again'),
        max_length=128,
        widget=forms.PasswordInput(attrs={
            'placeholder': _('Repeat password'),
            'class': 'form-control-short',
        })
    )

    def clean_password_repeat(self):
        """Flag an error when the two password entries differ.

        Returns the cleaned value: Django's clean_<field>() contract
        requires it, and the previous implicit ``return None`` silently
        blanked cleaned_data['password_repeat'].
        """
        password = self.cleaned_data.get('password')
        password_repeat = self.cleaned_data.get('password_repeat')
        if password and password_repeat and password != password_repeat:
            self._errors['password_repeat'] = self.error_class(['Password are not equal'])
        return password_repeat
class RegisterForm(FormWithRepeatedPassword):
    """Account-creation form: username, email and names, plus the
    password/confirmation pair inherited from FormWithRepeatedPassword."""
    username = forms.CharField(
        required=True,
        label=_('Username'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Enter username'),
            'autofocus': 'autofocus',
            'class': 'form-control-short',
        })
    )
    email = forms.EmailField(
        required=True,
        label=_('Email'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Enter email'),
            'class': 'form-control-short',
        })
    )
    first_name = forms.CharField(
        label=_('First name'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your first name'),
            'class': 'form-control-short',
        })
    )
    last_name = forms.CharField(
        label=_('Last name'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your last name'),
            'class': 'form-control-short',
        })
    )

    def __init__(self, *args, **kwargs):
        """Impose a fixed field order, overriding any caller-supplied one.

        Fixed: the order list previously named a non-existent
        'password_validation' field; the confirmation field is
        'password_repeat', so it used to be ordered arbitrarily.
        """
        if 'field_order' in kwargs:
            del kwargs['field_order']
        super().__init__(field_order=['username', 'email', 'first_name', 'last_name', 'password', 'password_repeat'],
                         *args, **kwargs)
class EditUserForm(forms.Form):
    """Profile-edit form pre-populated from an existing user object."""
    username = forms.CharField(
        required=True,
        label=_('Username'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your username'),
            'autofocus': 'autofocus',
            'class': 'form-control-short',
        })
    )
    first_name = forms.CharField(
        label=_('First name'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your first name'),
            'class': 'form-control-short',
        })
    )
    last_name = forms.CharField(
        label=_('Last name'),
        max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _('Your last name'),
            'class': 'form-control-short',
        })
    )
    def __init__(self, user, *args, **kwargs):
        """Seed the form's initial values from `user`'s current profile."""
        super().__init__(*args, **kwargs)
        # Replaces (not updates) any `initial` passed through kwargs.
        self.initial = {
            'username': user.username,
            'first_name': user.first_name,
            'last_name': user.last_name
        }
class ChangePasswordForm(FormWithRepeatedPassword):
    """Password-change form: old password plus the inherited new
    password/confirmation pair from FormWithRepeatedPassword."""
    old_password = forms.CharField(
        required=True,
        label=_('Old password'),
        max_length=128,
        widget=forms.PasswordInput(attrs={
            'class': 'form-control-short'
        })
    )
    def __init__(self, *args, **kwargs):
        # Force a fixed field order; drop any caller-supplied field_order.
        if 'field_order' in kwargs:
            del kwargs['field_order']
        super().__init__(field_order=['old_password', 'password', 'password_repeat'], *args, **kwargs)
| 28.429487 | 121 | 0.559414 | 411 | 4,435 | 5.810219 | 0.16545 | 0.045226 | 0.080402 | 0.105528 | 0.717337 | 0.672948 | 0.623953 | 0.593384 | 0.551089 | 0.543551 | 0 | 0.011628 | 0.301917 | 4,435 | 155 | 122 | 28.612903 | 0.75969 | 0 | 0 | 0.656716 | 0 | 0 | 0.209921 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029851 | false | 0.186567 | 0.014925 | 0 | 0.171642 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
1a9b95beef4372766d5b6cf6a163695415727640 | 1,153 | py | Python | src/lib/todo_classes.py | louisroyer/todopy | 8aef035bc82b13a8053394e8942c34de72fae3bf | [
"CC0-1.0"
] | null | null | null | src/lib/todo_classes.py | louisroyer/todopy | 8aef035bc82b13a8053394e8942c34de72fae3bf | [
"CC0-1.0"
] | 2 | 2020-09-01T12:32:25.000Z | 2020-09-01T12:33:11.000Z | src/lib/todo_classes.py | louisroyer/todopy | 8aef035bc82b13a8053394e8942c34de72fae3bf | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Classes for todo files.'''
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
if __debug__:
if __package__:
from . import todo_parser as _todo_parser
else:
import todo_parser as _todo_parser
if __name__ != '__main__':
__author__ = 'Louis Royer'
__credits__ = '🄯 2018, Louis Royer - CC0-1.0'
__date__ = '2018-09-15'
__version__ = '0.0.1'
class Task:
    """A single todo task read from a file, with a validated status."""

    def __init__(self, title: str, filename: str, status):
        assert status in _todo_parser.TASK_STATUS, 'Invalid status'
        self._title = title
        self._filename = filename
        self._status = status
        # Tracks whether the status changed after loading (used by writers).
        self._updated_status = False

    @property
    def title(self) -> str:
        '''Task title.'''
        # Fixed: previously returned the undefined name `_title` (NameError).
        return self._title

    @property
    def filename(self) -> str:
        '''Filename where task was written.'''
        # Fixed: previously returned the undefined name `_filename`.
        return self._filename

    @property
    def status(self):
        '''Task status.'''
        # Fixed: previously returned the undefined name `_status`.
        return self._status

    @status.setter
    def status(self, value):
        # Fixed: the assert referenced undefined names `status`/`STATUS`;
        # validate the incoming value against the parser's status set.
        assert value in _todo_parser.TASK_STATUS, 'Invalid status'
        self._updated_status = True
        self._status = value
| 24.020833 | 67 | 0.61405 | 137 | 1,153 | 4.766423 | 0.452555 | 0.07657 | 0.049005 | 0.05513 | 0.085758 | 0.085758 | 0 | 0 | 0 | 0 | 0 | 0.027678 | 0.279271 | 1,153 | 47 | 68 | 24.531915 | 0.756919 | 0.153513 | 0 | 0.096774 | 0 | 0 | 0.095488 | 0 | 0 | 0 | 0 | 0.021277 | 0.064516 | 1 | 0.16129 | false | 0 | 0.064516 | 0 | 0.354839 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1a9c06b409cec023288fe5de8610286e3d8638d4 | 4,347 | py | Python | attendees/persons/admin.py | xjlin0/attendees | 3c337ee68c00f17cbbbe26f2e33131e57850e4ed | [
"MIT"
] | 1 | 2020-03-26T00:42:04.000Z | 2020-03-26T00:42:04.000Z | attendees/persons/admin.py | xjlin0/attendees | 3c337ee68c00f17cbbbe26f2e33131e57850e4ed | [
"MIT"
] | null | null | null | attendees/persons/admin.py | xjlin0/attendees | 3c337ee68c00f17cbbbe26f2e33131e57850e4ed | [
"MIT"
] | null | null | null | from django_summernote.admin import SummernoteModelAdmin
from django.contrib.postgres import fields
from django_json_widget.widgets import JSONEditorWidget
from django.contrib import admin
from attendees.occasions.models import *
from attendees.whereabouts.models import *
from .models import *
# Register your models here.
# --- Inline editors embedded in related admin pages (extra=0: no blank rows) ---
class AttendeeAddressInline(admin.StackedInline):
    model = AttendeeAddress
    extra = 0
class AttendingMeetInline(admin.StackedInline):
    model = AttendingMeet
    extra = 0
class RelationshipInline(admin.TabularInline):
    # Relationship has two FKs to Attendee; edit from the "from" side.
    model = Relationship
    fk_name = 'from_attendee'
    extra = 0
class FamilyAttendeeInline(admin.TabularInline):
    model = FamilyAttendee
    extra = 0
# --- ModelAdmin configurations ---
class CategoryAdmin(admin.ModelAdmin):
    readonly_fields = ['id', 'created', 'modified']
    # Auto-fill slug from display_name while typing in the admin form.
    prepopulated_fields = {"slug": ("display_name",)}
    list_display = ('id', 'display_name', 'slug', 'display_order', 'description', 'modified')
class FamilyAdmin(admin.ModelAdmin):
    readonly_fields = ['id', 'created', 'modified']
    inlines = (FamilyAttendeeInline,)
    list_display_links = ('display_name',)
    list_display = ('id', 'display_name', 'display_order', 'modified')
    fieldsets = (
        (None, {"fields": (tuple(['display_name', 'display_order']),
                           tuple(['id', 'created', 'modified']),
                           ), }),
    )
class FamilyAttendeeAdmin(admin.ModelAdmin):
    readonly_fields = ['id', 'created', 'modified']
    list_display = ('id', 'family', 'attendee', 'role', 'modified')
class RelationAdmin(admin.ModelAdmin):
    readonly_fields = ['id', 'created', 'modified']
    list_display_links = ('title',)
    list_display = ('id', 'title', 'reciprocal_ids', 'emergency_contact', 'scheduler', 'relative', 'display_order')
class AttendeeAdmin(admin.ModelAdmin):
    # Render JSON columns with an interactive editor widget.
    formfield_overrides = {
        fields.JSONField: {'widget': JSONEditorWidget},
    }
    search_fields = ('first_name', 'last_name', 'last_name2', 'first_name2')
    readonly_fields = ['id', 'created', 'modified']
    inlines = (AttendeeAddressInline, RelationshipInline)
    list_display_links = ('last_name',)
    list_display = ('id', 'first_name', 'last_name', 'last_name2', 'first_name2', 'progressions', 'infos')
class RegistrationAdmin(admin.ModelAdmin):
    formfield_overrides = {
        fields.JSONField: {'widget': JSONEditorWidget},
    }
    list_display_links = ('main_attendee',)
    list_display = ('id', 'main_attendee', 'assembly', 'infos', 'modified')
class AttendanceInline(admin.StackedInline):
    model = Attendance
    extra = 0
class AttendingAdmin(admin.ModelAdmin):
    formfield_overrides = {
        fields.JSONField: {'widget': JSONEditorWidget},
    }
    search_fields = ('attendee__first_name', 'attendee__last_name', 'attendee__first_name2', 'attendee__last_name2')
    list_display_links = ('attendee',)
    readonly_fields = ['id', 'created', 'modified']
    inlines = (AttendingMeetInline,)  # adding AttendanceInline fails on meet_names when creating a new Attending
    list_display = ('id', 'registration', 'attendee', 'meet_names', 'finish', 'infos')
class NoteAdmin(SummernoteModelAdmin):
    # Rich-text (summernote) editor for the note body.
    summernote_fields = ('body',)
    readonly_fields = ['id', 'created', 'modified']
    list_display = ('body', 'content_type', 'object_id', 'content_object', 'display_order', 'modified')
class RelationshipAdmin(admin.ModelAdmin):
    list_display_links = ('relation',)
    readonly_fields = ['id', 'created', 'modified']
    list_display = ('id', 'from_attendee', 'relation', 'to_attendee', 'emergency_contact', 'scheduler', 'in_family', 'finish')
class AttendingMeetAdmin(admin.ModelAdmin):
    list_display_links = ('attending',)
    readonly_fields = ['id', 'created', 'modified']
    list_display = ('id', 'attending', 'meet', 'character', 'category', 'modified')
admin.site.register(Category, CategoryAdmin)
admin.site.register(Note, NoteAdmin)
admin.site.register(Family, FamilyAdmin)
admin.site.register(Attendee, AttendeeAdmin)
admin.site.register(FamilyAttendee, FamilyAttendeeAdmin)
admin.site.register(Registration, RegistrationAdmin)
admin.site.register(Attending, AttendingAdmin)
admin.site.register(Relation, RelationAdmin)
admin.site.register(Relationship, RelationshipAdmin)
admin.site.register(AttendingMeet, AttendingMeetAdmin)
| 34.776 | 126 | 0.707844 | 431 | 4,347 | 6.932715 | 0.25058 | 0.062584 | 0.056894 | 0.069277 | 0.287483 | 0.266734 | 0.238956 | 0.170683 | 0.093039 | 0.054886 | 0 | 0.002986 | 0.152519 | 4,347 | 124 | 127 | 35.056452 | 0.808089 | 0.023004 | 0 | 0.222222 | 0 | 0 | 0.219133 | 0.004948 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.077778 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
1aa7720202db2a1c258c5499dab4c82e6d875c22 | 437 | py | Python | P0053.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | P0053.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | P0053.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | # Combinatoric selections
# https://projecteuler.net/problem=53
from collections import defaultdict
from copy import deepcopy
from itertools import permutations
from math import fmod, sqrt, factorial
from time import time
start = time()


def count_large_combinations(limit=1000000, max_n=100):
    """Return how many C(n, r), with 1 <= r <= n <= max_n, reach `limit`.

    Project Euler 53.  Uses integer (floor) division: the original used
    true division, which converts the huge factorials to float and raises
    OverflowError on Python 3.  Floor division is also exact, avoiding
    float rounding near the threshold.
    """
    f = [factorial(i) for i in range(max_n + 1)]
    total = 0
    for n in range(1, max_n + 1):
        for r in range(1, n + 1):
            # C(n, r) = n! / (r! * (n-r)!)
            if f[n] // (f[r] * f[n - r]) >= limit:
                total += 1
    return total


ans = count_large_combinations()
print(ans)
print(time() - start, "seconds") | 24.277778 | 54 | 0.688787 | 71 | 437 | 4.239437 | 0.507042 | 0.069767 | 0.053156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056022 | 0.183066 | 437 | 18 | 55 | 24.277778 | 0.787115 | 0.135011 | 0 | 0 | 0 | 0 | 0.018617 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.384615 | 0 | 0.384615 | 0.153846 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
1aab51dc0877d9fd63a1f310c0d32a392b291683 | 1,953 | py | Python | Client_side.py | SanRam/server-client-chat-python | 010a296db57c352a2ace7eac7206fa641981538b | [
"MIT"
] | null | null | null | Client_side.py | SanRam/server-client-chat-python | 010a296db57c352a2ace7eac7206fa641981538b | [
"MIT"
] | null | null | null | Client_side.py | SanRam/server-client-chat-python | 010a296db57c352a2ace7eac7206fa641981538b | [
"MIT"
] | null | null | null | # The client program connects to server and sends data to other connected
# clients through the server
import socket
import thread
import sys
def recv_data():
    "Receive data from other clients connected to server"
    # Runs on its own thread; loops until the server connection drops.
    # NOTE(review): the local variable `recv_data` shadows this function's
    # own name -- harmless here (the thread never calls it again), but
    # worth renaming.
    while 1:
        try:
            recv_data = client_socket.recv(4096)
        except:
            #Handle the case when server process terminates
            print ("Server closed connection, thread exiting.")
            thread.interrupt_main()
            break
        if not recv_data:
            # Recv with no data, server closed connection
            print ("Server closed connection, thread exiting.")
            thread.interrupt_main()
            break
        else:
            # Python 2 print statement: show the received chunk verbatim.
            print '{}'.format(recv_data)
def send_data():
    """Read console input and send it, prefixed with the username, to the
    server.  A bare "q" or "Q" sends the line and then quits the client."""
    while 1:
        send_data_1 = str(raw_input(''))
        send_data = name_id+': '+send_data_1
        # Fixed: the upper-case check previously tested the prefixed
        # string ("name: Q"), so typing "Q" never terminated the client.
        if send_data_1 == "q" or send_data_1 == "Q":
            client_socket.send(send_data)
            thread.interrupt_main()
            break
        else:
            client_socket.send(send_data)
if __name__ == "__main__":
    # Connect to the chat server, then run the send/receive loops on
    # background threads while the main thread spins until interrupted.
    print ('\t\t******* Socket Programming Using Python ********')
    print ('\t\t******* TCP/IP Chat Client ********')
    print ('\nConnecting to server at 173.253.224.102:5000')
    # NOTE(review): `global` at module level is a no-op; name_id is already
    # a module-level name that send_data() reads.
    global name_id
    name_id= str(raw_input('Enter Username: '))
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect(('173.253.224.102', 5000))
    print ('Connected to server at 173.253.224.102:5000')
    thread.start_new_thread(send_data,())
    thread.start_new_thread(recv_data,())
    try:
        # Busy-wait until a worker thread calls thread.interrupt_main().
        while 1:
            continue
    except:
        print ("Client program quits....")
        client_socket.close()
| 30.046154 | 75 | 0.561188 | 225 | 1,953 | 4.671111 | 0.351111 | 0.076118 | 0.048525 | 0.068506 | 0.344434 | 0.253092 | 0.253092 | 0.253092 | 0.203616 | 0.121789 | 0 | 0.044788 | 0.336918 | 1,953 | 64 | 76 | 30.515625 | 0.766795 | 0.096774 | 0 | 0.413043 | 0 | 0 | 0.261357 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.065217 | null | null | 0.173913 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1ac0e70ee50f70a3cd951509022bba75c1104f45 | 1,738 | gyp | Python | third_party/ctmalloc/ctmalloc.gyp | dandv/syzygy | 2444520c8e6e0b45b2f45b680d878d60b9636f45 | [
"Apache-2.0"
] | 1 | 2019-04-03T13:56:37.000Z | 2019-04-03T13:56:37.000Z | third_party/ctmalloc/ctmalloc.gyp | pombreda/syzygy | 7bac6936c0c28872bfabc10a1108e0157ff65d4a | [
"Apache-2.0"
] | 1 | 2015-03-19T18:20:25.000Z | 2015-03-19T18:20:25.000Z | third_party/ctmalloc/ctmalloc.gyp | sebmarchand/syzygy | 6c6db0e70e8161f1fec171138a825f6412e7778a | [
"Apache-2.0"
] | 1 | 2020-10-10T16:09:45.000Z | 2020-10-10T16:09:45.000Z | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Build configuration for ctmalloc. This is not a part of the original
# library.
{
'targets': [
{
'target_name': 'ctmalloc_lib',
'type': 'static_library',
'sources': [
'wtf/AsanHooks.cpp',
'wtf/AsanHooks.h',
'wtf/Assertions.h',
'wtf/Atomics.h',
'wtf/BitwiseOperations.h',
'wtf/ByteSwap.h',
'wtf/Compiler.h',
'wtf/config.h',
'wtf/CPU.h',
'wtf/malloc.cpp',
'wtf/PageAllocator.cpp',
'wtf/PageAllocator.h',
'wtf/PartitionAlloc.cpp',
'wtf/PartitionAlloc.h',
'wtf/ProcessID.h',
'wtf/SpinLock.h',
'wtf/WTFExport.h',
],
'defines': [
'CTMALLOC_NDEBUG',
],
'include_dirs': [
'<(src)/third_party/ctmalloc',
],
'all_dependent_settings': {
'defines': [
# We disable debug features of the CtMalloc heap as they are redundant
# given SyzyASan's extensive debug features.
'CTMALLOC_NDEBUG',
],
'include_dirs': [
'<(src)/third_party/ctmalloc',
],
},
},
],
}
| 28.491803 | 80 | 0.597814 | 205 | 1,738 | 5.014634 | 0.570732 | 0.046693 | 0.025292 | 0.031128 | 0.089494 | 0.089494 | 0.089494 | 0.089494 | 0 | 0 | 0 | 0.006385 | 0.279056 | 1,738 | 60 | 81 | 28.966667 | 0.814046 | 0.437284 | 0 | 0.341463 | 0 | 0 | 0.492179 | 0.148071 | 0 | 0 | 0 | 0 | 0.02439 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1ac4c3ae760d51232d569cf1431cc2c3ab3cdc2f | 1,499 | py | Python | _/Chapter 03/transfrauddetect.py | paullewallencom/hadoop-978-1-7839-8030-7 | 267f24e736dcee0910593d9ff76c10387e6406c3 | [
"Apache-2.0"
] | 2 | 2019-05-25T22:48:59.000Z | 2021-10-04T04:52:58.000Z | _/Chapter 03/transfrauddetect.py | paullewallencom/hadoop-978-1-7839-8030-7 | 267f24e736dcee0910593d9ff76c10387e6406c3 | [
"Apache-2.0"
] | null | null | null | _/Chapter 03/transfrauddetect.py | paullewallencom/hadoop-978-1-7839-8030-7 | 267f24e736dcee0910593d9ff76c10387e6406c3 | [
"Apache-2.0"
] | 6 | 2016-12-27T13:57:45.000Z | 2021-04-22T18:33:14.000Z | # Submit to spark using
# spark-submit /Users/anurag/hdproject/eclipse/chapt3/transfrauddetect.py
# You need the full path of the python script
from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.mllib.clustering import KMeans, KMeansModel
from pyspark.streaming import StreamingContext
from pyspark.mllib.linalg import Vectors
def detect(rdd):
count = rdd.count()
print "RDD -> ", count
if count > 0:
arrays = rdd.map(lambda line: [float(x) for x in line.split(" ")])
print arrays.collect()
indx = 0
while indx < count:
vec = Vectors.dense(arrays.collect()[indx])
indx += 1
clusternum = model.predict(vec)
print "Cluster -> ", clusternum, vec
return
# Create a local StreamingContext with two working thread and batch interval of 1 second
conf = SparkConf().setAppName("Fraud Detector")
conf = conf.setMaster("local[2]")
sc = SparkContext(conf=conf)
ssc = StreamingContext(sc, 10)  # 10-second micro-batches
# Create a DStream that will connect to hostname:port, like localhost:9999
lines = ssc.socketTextStream("localhost", 8999)
# Load the pre-trained KMeans model used by detect() for predictions.
model = KMeansModel.load(sc, "kmeansmodel01")
print model.clusterCenters
print "************************** Loaded the model *********************"
# NOTE(review): `words` is built but never used -- likely leftover from
# the word-count template this script started from.
words = lines.flatMap(lambda line: line.split(" "))
lines.foreachRDD(detect)
ssc.start() # Start the computation
ssc.awaitTermination() # Wait for the computation to terminate
| 33.311111 | 88 | 0.683122 | 186 | 1,499 | 5.505376 | 0.564516 | 0.053711 | 0.033203 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014864 | 0.192128 | 1,499 | 44 | 89 | 34.068182 | 0.830718 | 0.256171 | 0 | 0 | 0 | 0 | 0.116637 | 0.042495 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1ad00f5bdba69c5627c62e40a06d13f11f8f971b | 2,186 | py | Python | quotes/tests/requests/test_home_page.py | daviferreira/defprogramming | a4ec20a6a9d116eb1f82fd146e4bb7a2fad5a516 | [
"MIT"
] | 6 | 2016-01-17T02:21:51.000Z | 2020-09-01T20:16:36.000Z | quotes/tests/requests/test_home_page.py | daviferreira/defprogramming | a4ec20a6a9d116eb1f82fd146e4bb7a2fad5a516 | [
"MIT"
] | 3 | 2017-11-27T17:02:50.000Z | 2021-01-21T14:22:36.000Z | quotes/tests/requests/test_home_page.py | daviferreira/defprogramming | a4ec20a6a9d116eb1f82fd146e4bb7a2fad5a516 | [
"MIT"
] | null | null | null | # coding: utf-8
from lxml import html
from django.test import TestCase
from django.test.client import Client
from quotes.tests.utils import create_test_quote
class HomePageTestCase(TestCase):
    """Integration tests for the home page: response, title, quote card,
    and navigation menu.

    Fixed: the original used the ``assert expr, msg`` statement form, so
    each "comparison" was really a truthy assert with the expected value
    as the failure message -- none of the checks could ever fail.
    Rewritten with unittest's assertEqual so the expectations are
    actually verified.
    """

    def setUp(self):
        self.client = Client()
        self.quote = create_test_quote()
        self.dom = ''
        self.quotes = []

    def tearDown(self):
        self.quote = ''
        self.dom = ''
        self.quotes = []

    def __load_dom(self):
        # Fetch the home page and parse it for CSS-selector assertions.
        response = self.client.get('/')
        self.dom = html.fromstring(response.content)

    def testHomePageResponse(self):
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)

    def testHomePageShouldHaveTheRightTitle(self):
        self.__load_dom()
        self.assertEqual(self.dom.cssselect('h1 a')[0].text_content(), 'def programming')

    def testHomePageShouldListQuotes(self):
        self.__load_dom()
        self.assertEqual(len(self.dom.cssselect('div.quote-card')), 1)
        self.assertEqual(self.dom.cssselect('div.quote-card q')[0].text_content(), self.quote.body)
        self.assertEqual(self.dom.cssselect('div.quote-card .quote-card-author')[0].text_content(), 'Author 1 & Author 2')
        self.assertEqual(self.dom.cssselect('div.quote-card .quote-card-tags')[0].text_content(), 'tagged under Tag 1, Tag 2')
        # assert self.dom.cssselect('div.quote-card q a')[0].attrib['href'], ("/q/%s/" % self.quote.uuid)

    # TODO: not a home page test, more like a site test
    # should also test for footer links
    def testHomePageShouldShowMenu(self):
        self.__load_dom()
        menu_links = self.dom.cssselect('header nav a')
        # The original claimed 6 links but only checked 5 -- TODO confirm
        # the sixth against the template.
        self.assertEqual(len(menu_links), 6)
        self.assertEqual(menu_links[0].text_content(), 'Home')
        self.assertEqual(menu_links[0].attrib['href'], '/')
        self.assertEqual(menu_links[1].text_content(), 'Authors')
        self.assertEqual(menu_links[1].attrib['href'], '/authors')
        self.assertEqual(menu_links[2].text_content(), 'Tags')
        self.assertEqual(menu_links[2].attrib['href'], '/tags')
        self.assertEqual(menu_links[3].text_content(), 'Random')
        self.assertEqual(menu_links[3].attrib['href'], '/random')
        self.assertEqual(menu_links[4].text_content(), 'Submit')
assert menu_links[4].attrib['href'], '/submit' | 37.689655 | 115 | 0.643184 | 284 | 2,186 | 4.816901 | 0.28169 | 0.078947 | 0.109649 | 0.080409 | 0.245614 | 0.214912 | 0.115497 | 0.115497 | 0.062866 | 0 | 0 | 0.015116 | 0.213175 | 2,186 | 58 | 116 | 37.689655 | 0.780233 | 0.088289 | 0 | 0.209302 | 0 | 0 | 0.12368 | 0 | 0 | 0 | 0 | 0.017241 | 0.372093 | 1 | 0.162791 | false | 0 | 0.093023 | 0 | 0.27907 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1ad19b8e5fbe29f4b8f2f258cc293e0fd9d3e22f | 678 | py | Python | luckydonaldUtils/regex/telegram.py | luckydonald/python-utils | 455f5174707804a39384776185b8bc307223e19f | [
"MIT"
] | 5 | 2016-12-06T00:49:21.000Z | 2019-10-03T04:18:13.000Z | luckydonaldUtils/regex/telegram.py | luckydonald/python-utils | 455f5174707804a39384776185b8bc307223e19f | [
"MIT"
] | 5 | 2016-03-19T02:08:14.000Z | 2018-12-01T02:30:19.000Z | luckydonaldUtils/regex/telegram.py | luckydonald/python-utils | 455f5174707804a39384776185b8bc307223e19f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import re
__author__ = 'luckydonald'
__all__ = [
'USERNAME_REGEX', '_USERNAME_REGEX', 'USER_AT_REGEX', '_USER_AT_REGEX',
'FULL_USERNAME_REGEX', '_FULL_USERNAME_REGEX'
]
# Telegram username: starts with a letter, ends with a letter/digit,
# 5-32 characters total ({3,30} inner chars plus the two anchors); inner
# characters are alphanumerics or single underscores (no "__" runs).
_USERNAME_REGEX = '[a-zA-Z](?:[a-zA-Z0-9]|_(?!_)){3,30}[a-zA-Z0-9]'  # https://regex101.com/r/nZdOHS/2
USERNAME_REGEX = re.compile(_USERNAME_REGEX)
# "@username" mention form; the bare name is captured in group 'username'.
_USER_AT_REGEX = '@(?P<username>' + _USERNAME_REGEX + ')'
USER_AT_REGEX = re.compile(_USER_AT_REGEX)
from .urls.telegram import _TELEGRAM_DOMAIN_REGEX
# Either a telegram-domain link or an "@" mention; groups: 'prefix',
# 'domain' (only for links) and 'username'.
_FULL_USERNAME_REGEX = '(?P<prefix>(?P<domain>' + _TELEGRAM_DOMAIN_REGEX + ')|@)(?P<username>' + _USERNAME_REGEX + ')'
FULL_USERNAME_REGEX = re.compile(_FULL_USERNAME_REGEX)
| 35.684211 | 118 | 0.715339 | 95 | 678 | 4.536842 | 0.347368 | 0.361949 | 0.12761 | 0.148492 | 0.359629 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019704 | 0.10177 | 678 | 18 | 119 | 37.666667 | 0.688013 | 0.078171 | 0 | 0 | 0 | 0.076923 | 0.334405 | 0.110932 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46bb3deb8367127a5cfa628614d6868e96cd7fbc | 6,888 | py | Python | app.py | Antinator11/Creative-Space | 73bcd8eeed39c57e1d9098b3fe99e2c92a67e4e8 | [
"Apache-2.0"
] | null | null | null | app.py | Antinator11/Creative-Space | 73bcd8eeed39c57e1d9098b3fe99e2c92a67e4e8 | [
"Apache-2.0"
] | null | null | null | app.py | Antinator11/Creative-Space | 73bcd8eeed39c57e1d9098b3fe99e2c92a67e4e8 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, render_template, request, redirect, url_for, Markup, \
flash # Imports Flask and all required modules
import databasemanager # Provides the functionality to load stuff from the database
app = Flask(__name__)
import errormanager # Enum for types of errors
# DECLARE datamanager as TYPE: databasemanager
datamanager = databasemanager
# DECLARE errorman as TYPE: errormanager
errorman = errormanager
# DECLARE Current User as string
# Provides a means of the application knowing who is signed in
CurrentUser: str
# Route function for homepage.
# @return Returns render template of base.hmtl
@app.route('/')
def Home():
    """Reload stored entries and render the public home page."""
    datamanager.LoadContent()
    return render_template('base.html', entries=datamanager.entries, bFailure=False, app=datamanager)
# Checks the username and the password and handles any errors
# @route Homepage
# @method: POST
# @return redirect: Redirect to 'AdminHome' function after successful login
# @return render_template: base.html with failure condition
@app.route('/', methods=['POST'])
def Login():
    """Validate the posted credentials and sign the user in.

    On success records the user in the module-global CurrentUser and
    redirects to AdminHome with the encrypted auth token; otherwise
    re-renders the home page with the matching error flag.
    """
    if request.method == "POST":
        try:
            password = request.form['Password']
            username = request.form['Username']
            if (password != '') and (username != ''):
                if datamanager.CheckUser(username, password) == True:
                    global CurrentUser
                    CurrentUser = username
                    globals()
                    return redirect(url_for('AdminHome', auth=str(datamanager.Encrypt('True')), user=username))
                # Wrong username/password combination.
                return render_template('base.html', fail=errorman.EErrorType.FailedPassword,
                                       failenum=errorman.EErrorType,
                                       entries=datamanager.entries, bFailure=True, app=datamanager)
            # One or both fields left empty.
            # Fixed: this branch previously passed the entries under a
            # misspelled keyword ("entires"), so the template never saw them.
            return render_template('base.html', fail=errorman.EErrorType.FailedNone,
                                   failenum=errorman.EErrorType, bFailure=True,
                                   entries=datamanager.entries, app=datamanager)
        except Exception:
            # Missing form fields (KeyError) or backend failures end here.
            # Also made consistent with the other error renders by passing
            # `app` (the except branch previously omitted it).
            return render_template('base.html', fail=errorman.EErrorType.FailedNone,
                                   failenum=errorman.EErrorType, bFailure=True,
                                   entries=datamanager.entries, app=datamanager)
# Main route for admin homepage
# Checks for encrypted string to ensure access was granted
# @route: '/adminbase' <auth: encrypted string> <user: user's username>
# @param auth: Encrypted string used for security
# @param user: Username of user
# @return render_template: adminbase.html with entries, the username and the datamanager
# @return redirect: 'Home' will return the user to home if they don't have valid acsses
@app.route('/adminbase/<auth> <user>')
def AdminHome(auth, user):
    """Admin landing page; requires the encrypted 'True' token in the URL.

    NOTE(review): the token is the same fixed value for every session, so
    any captured URL grants admin access -- consider real sessions.
    """
    if auth == str(datamanager.Encrypt('True')):
        datamanager.LoadContent()
        print(datamanager.entries)
        return render_template('adminbase.html', entries=datamanager.entries, user=user, app=datamanager)
    else:
        # Invalid/missing token: bounce back to the public home page.
        return redirect(url_for('Home'))
# Gets the users inputted values for a new entry and adds them to the website
# @route: '/adminbase.html' <user: username of signed in user>
# @param user: username of the signed in user
# @return redirect: 'Admin Home' function with encryption string and username
@app.route('/adminbase.html/<user>', methods=["POST"])
def CreateNew(user: str):
    """Add a new entry from the submitted admin form, then return to the
    admin home page for `user`."""
    if request.method == "POST":
        title = request.form['Title']
        desc = request.form['Desc']
        image = request.form['Image']
        caption = request.form['Caption']
        # New entry goes at the end: its id doubles as its index.
        entry_id = len(datamanager.entries)
        datamanager.AddNewItem(title, desc, caption, image, entry_id, str(entry_id), 0)
        return redirect(url_for('AdminHome', auth=str(datamanager.Encrypt('True')), user=user))
# Deprecated
#@app.route('/adminbase', methods=["POST"])
#def Delete():
#if request.method == "POST":
# delete = request.form['Del']
# if delete == True:
# datamanager.RemoveItem(0)
# return render_template(url_for('AdminHome', auth=str(datamanager.Encrypt('True'))))
#else:
# return render_template(url_for('AdminHome', auth=str(datamanager.Encrypt('True'))))
# Main route for signup page
# @route: '/signup'
# @return render_template: signup.html
@app.route('/signup')
def SignUp():
    """Serve the static account sign-up page."""
    return render_template('signup.html')
# Gets the entry input values and adds to database also handles errors
# @route '/sign' methods: GET and POST
# @return redirect: 'Home'
# @return render_template: 'error.html' with error type
@app.route('/sign', methods=["POST", "GET"])
def AddNewUser():
    # NOTE(review): nesting reconstructed from a whitespace-stripped source —
    # confirm the fall-through behaviour against the original file.
    try:
        if request.method == "POST":
            AdminKey = request.form['Key']
            Password = request.form['Password']
            Username = request.form['Username']
            ConfirmPass = request.form['ConfirmPassword']
            # Only sign-ups presenting a valid admin key are accepted.
            if datamanager.CheckKey(AdminKey) == True:
                if ((Password != '') and (Username != '') and (ConfirmPass != '')):
                    if ConfirmPass == Password:
                        if datamanager.NewUser(Username, Password) == True:
                            return redirect(url_for('Home'))
                        else:
                            # NewUser rejected the pair (e.g. duplicate user).
                            return render_template('error.html', fail=errorman.EErrorType.FailedPassword,
                                                   failenum=errorman.EErrorType)
                    else:
                        # Password and confirmation did not match.
                        return render_template('error.html', fail=errorman.EErrorType.FailedNone,
                                               failenum=errorman.EErrorType)
        # Fall-through: GET request, bad admin key, or an empty field.
        return render_template('error.html')
    except:
        # Bare except: any unexpected failure renders a generic error page.
        return render_template('error.html', fail=errorman.EErrorType.FailedNone, failenum=errorman.EErrorType)
# Deprecated
@app.route('/likes/<id>')
def Like(id: int):
    """Add one like to entry *id*, then send the visitor back home."""
    # NOTE(review): Flask delivers path segments as str despite the int
    # annotation — presumably AddLike copes with the conversion; confirm.
    datamanager.AddLike(id)
    return redirect(url_for('Home'))
# Deprecated
@app.route('/deleteconfirm', methods=['GET'])
def ChangeDeleteTarget():
    # NOTE(review): the route only allows GET, yet request.form reads POST
    # body data — 'Delete' is unlikely to be present; verify before reviving.
    id = request.form['Delete']
    global deletetarget
    deletetarget = id
    print(deletetarget)
    globals()  # no-op: the returned mapping is discarded
    return 'hi' # This exists because Flask is bad
# Deprecated
@app.route('/delete')
def Delete():
    """Remove the currently targeted entry, then return to the admin page."""
    datamanager.RemoveItem(datamanager.deletetarget)
    global CurrentUser
    CurrentUser = 'user'
    # NOTE(review): 'app' is not a parameter of the AdminHome route, so
    # url_for presumably appends it as a query argument — confirm intent.
    return redirect(url_for('AdminHome', auth=str(datamanager.Encrypt('True')), user=CurrentUser, app=datamanager))
# Main Flask Loop
if __name__ == '__main__':
    # Session-signing key comes from the data manager's encryption helper.
    app.secret_key = datamanager.Encrypt('key')
    app.run()
| 41.745455 | 116 | 0.63313 | 735 | 6,888 | 5.878912 | 0.228571 | 0.06156 | 0.078685 | 0.027771 | 0.330248 | 0.249479 | 0.227262 | 0.227262 | 0.187919 | 0.174265 | 0 | 0.00039 | 0.255226 | 6,888 | 164 | 117 | 42 | 0.84191 | 0.301974 | 0 | 0.23 | 0 | 0 | 0.080993 | 0.00479 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09 | false | 0.1 | 0.03 | 0.01 | 0.29 | 0.02 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
46bff1adfd5577acabc4e0dff9a754921e785d60 | 1,312 | py | Python | filter_plugins/containers2volumes.py | gabriel-duque/sadm | 00483d486b336c71066e244a61c29042a924e75c | [
"MIT"
] | 4 | 2020-07-28T00:22:43.000Z | 2020-12-01T16:03:01.000Z | filter_plugins/containers2volumes.py | gabriel-duque/sadm | 00483d486b336c71066e244a61c29042a924e75c | [
"MIT"
] | 8 | 2020-07-05T23:23:42.000Z | 2020-09-04T00:30:58.000Z | filter_plugins/containers2volumes.py | zuh0/sadm | 00483d486b336c71066e244a61c29042a924e75c | [
"MIT"
] | null | null | null | from ansible.errors import AnsibleFilterError
def container2volumes(container, vol_type="all"):
    """Return the volume dicts of *container*, optionally filtered by type.

    :param container: mapping with an optional "volumes" sub-mapping
    :param vol_type: "all" or one of generated/persistent/volatile
    :raises AnsibleFilterError: on an unknown *vol_type*
    """
    allowed_types = ("generated", "persistent", "volatile")
    if vol_type != "all" and vol_type not in allowed_types:
        raise AnsibleFilterError(
            f"container2volumes: {vol_type} is not in allowed volume types ('all', 'generated', 'persistent', 'volatile')"
        )
    volumes = container.get("volumes", {}).values()
    return [vol for vol in volumes
            if vol_type == "all" or vol.get("type") == vol_type]
def containers2volumes(containers, vol_type="all"):
    """Collect the volumes of every container in *containers*.

    :param containers: mapping of container name -> container dict
    :param vol_type: "all" or one of generated/persistent/volatile
    :raises AnsibleFilterError: on an unknown *vol_type*
    """
    allowed_types = ("generated", "persistent", "volatile")
    if vol_type != "all" and vol_type not in allowed_types:
        raise AnsibleFilterError(
            f"containers2volumes: {vol_type} is not in allowed volume types ('all', 'generated', 'persistent', 'volatile')"
        )
    collected = []
    for container in containers.values():
        collected.extend(container2volumes(container, vol_type))
    return collected
class FilterModule(object):
    """Get volume information from a container or a container list."""

    def filters(self):
        # Map Jinja2 filter names to their implementations (Ansible
        # filter-plugin entry point).
        return {
            "containers2volumes": containers2volumes,
            "container2volumes": container2volumes,
        }
| 32.8 | 123 | 0.637957 | 143 | 1,312 | 5.692308 | 0.314685 | 0.094595 | 0.132678 | 0.031941 | 0.481572 | 0.481572 | 0.481572 | 0.481572 | 0.481572 | 0.481572 | 0 | 0.009082 | 0.244665 | 1,312 | 39 | 124 | 33.641026 | 0.812311 | 0.045732 | 0 | 0.266667 | 1 | 0.066667 | 0.264848 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.033333 | 0.033333 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46c085ac7c0934855c0208b4c1f43ee8a0d905c0 | 381 | py | Python | lowest-unique.py | leaen/Codeeval-solutions | fa83cb4fba3e56f79c0a6b00361c18cd3092c3f0 | [
"MIT"
] | null | null | null | lowest-unique.py | leaen/Codeeval-solutions | fa83cb4fba3e56f79c0a6b00361c18cd3092c3f0 | [
"MIT"
] | null | null | null | lowest-unique.py | leaen/Codeeval-solutions | fa83cb4fba3e56f79c0a6b00361c18cd3092c3f0 | [
"MIT"
] | null | null | null | import sys
def lowest_unique_number(line):
    """Return the 1-based position of the lowest unique number in *line*.

    *line* is a whitespace-separated list of integers. If no number occurs
    exactly once, return 0.

    Fix: the position was previously derived from the character index
    (``line.index(str(e))//2+1``), which assumes single-digit numbers
    separated by single spaces and is wrong otherwise (e.g. "12 12 5").
    It is now the index within the token list.
    """
    numbers = [int(tok) for tok in line.split()]
    for value in sorted(numbers):
        if numbers.count(value) == 1:
            # 1-based position in the original ordering.
            return numbers.index(value) + 1
    return 0
def main():
    """Print the result for each line of the file named on the command line."""
    with open(sys.argv[1]) as input_file:
        for line in input_file:
            print(lowest_unique_number(line.strip()))

if __name__ == '__main__':
    main()
| 22.411765 | 53 | 0.606299 | 56 | 381 | 3.875 | 0.589286 | 0.110599 | 0.165899 | 0.202765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017731 | 0.259843 | 381 | 16 | 54 | 23.8125 | 0.751773 | 0 | 0 | 0 | 0 | 0 | 0.020997 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0.076923 | 0 | 0.384615 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46c2b16869fd2a8293075005691ad0df8c253672 | 830 | py | Python | Company/thoughtworks/FizzBuzzWhizz/solution-python/FizzBuzzWhizz.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 1 | 2020-12-01T18:35:24.000Z | 2020-12-01T18:35:24.000Z | Company/thoughtworks/FizzBuzzWhizz/solution-python/FizzBuzzWhizz.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 18 | 2020-11-10T05:48:29.000Z | 2020-11-26T08:39:20.000Z | Company/thoughtworks/FizzBuzzWhizz/solution-python/FizzBuzzWhizz.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 5 | 2020-11-09T07:43:00.000Z | 2021-12-02T14:59:37.000Z | # This is python2 version.
def FizzBuzzWhizz(args):
    """Build a closure that substitutes a word for multiples of a number.

    args[0] = Fizz, Buzz, Whizz
    args[1] = 3, 5, 7
    """
    def FBW(Number):
        # Explicit conditional instead of the fragile `and/or` idiom;
        # same result for every integer input.
        return args[0] if Number % args[1] == 0 else Number
    return FBW
def sayWhat(l_sayWhat, Number):
    """Render one number from its per-rule substitution list.

    l_sayWhat holds one entry per rule: either the rule's word (a str) or
    Number itself. Fewer than three occurrences of Number means at least
    one rule matched, so the matched words are concatenated; otherwise the
    number is shown unchanged.
    """
    if l_sayWhat.count(Number) < 3:
        words = "".join([s for s in l_sayWhat if type(s) is str])
        if words:
            return words
    return Number
def zmap(func, seq):
    """Apply each callable in *func* to *seq*; return the list of results."""
    return [apply_one(seq) for apply_one in func]
def even_filter(nums, rule):
    """Yield the FizzBuzzWhizz rendering for each number in 1..nums-1."""
    for num in range(1, nums):
        substitutions = zmap(map(FizzBuzzWhizz, rule), num)
        yield sayWhat(substitutions, num)
# Word/divisor rules, applied in order.
rule = [("Fizz",3),("Buzz", 5),("Whizz",7)]
count = 101
for even in even_filter(count,rule):
    print even  # Python 2 print statement
# One-liner equivalent for numbers 1..d-1 (relies on '' or x falling back
# to the number when no word matched).
fiz = lambda a,b,c,d:['Fizz'*(x%a==0)+'Buzz'*(x%b==0)+'Whizz'*(x%c==0) or x for x in range(1,d)]
print fiz(3,5,7,101) | 25.9375 | 100 | 0.616867 | 144 | 830 | 3.5 | 0.368056 | 0.047619 | 0.011905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.039755 | 0.212048 | 830 | 32 | 101 | 25.9375 | 0.730887 | 0.028916 | 0 | 0 | 0 | 0 | 0.034806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46c39db6dd0b69722eb312b2a6c9c225e95716f4 | 3,022 | py | Python | figures/perception/randomwalk.py | patricknaughton01/RoboticSystemsBook | 0fc67cbccee0832b5f9b00d848c55697fa69bedf | [
"Apache-2.0"
] | 116 | 2018-08-27T15:32:59.000Z | 2022-02-28T10:41:37.000Z | figures/perception/randomwalk.py | patricknaughton01/RoboticSystemsBook | 0fc67cbccee0832b5f9b00d848c55697fa69bedf | [
"Apache-2.0"
] | 2 | 2021-05-04T12:56:40.000Z | 2022-02-18T23:13:33.000Z | figures/perception/randomwalk.py | patricknaughton01/RoboticSystemsBook | 0fc67cbccee0832b5f9b00d848c55697fa69bedf | [
"Apache-2.0"
] | 29 | 2019-06-20T20:13:36.000Z | 2022-02-20T14:01:34.000Z | import matplotlib.pyplot as plt
import numpy as np
from kalman import *
def kf_trace(F, g, P, H, j, Q, Xmean, Xvar, Z):
    """Run a Kalman filter over the observation sequence Z.

    Scalar arguments are coerced to 1-element numpy vectors/matrices.
    When H is None the update step is skipped (prediction only).
    Returns the lists of means and covariances, starting with the
    initial state, so each list has len(Z)+1 entries.
    """
    def as_vector(value):
        return value if isinstance(value, np.ndarray) else np.array([value])

    def as_matrix(value):
        return value if isinstance(value, np.ndarray) else np.array([[value]])

    F = as_matrix(F)
    g = as_vector(g)
    P = as_matrix(P)
    if H is not None:
        H = as_matrix(H)
        j = as_vector(j)
        Q = as_matrix(Q)
    Xmean = as_vector(Xmean)
    Xvar = as_matrix(Xvar)

    cur_mean, cur_cov = Xmean, Xvar
    res_mean = [cur_mean]
    res_cov = [cur_cov]
    for z in Z:
        z = as_vector(z)
        cur_mean, cur_cov = kalman_filter_predict(cur_mean, cur_cov, F, g, P)
        if H is not None:
            cur_mean, cur_cov = kalman_filter_update(cur_mean, cur_cov, F, g, P, H, j, Q, z)
        res_mean.append(cur_mean)
        res_cov.append(cur_cov)
    return res_mean, res_cov
# Simulation parameters: T steps of size dt, N sample paths.
T = 100
N = 20
dt = 0.1
motion_noise_magnitude = 1.0
noise_magnitude = 0.3

# Left panel: ensemble of random walks plus the KF prediction envelope.
fig1 = plt.figure(figsize=(10,4))
ax1 = fig1.add_subplot(1, 2, 1)
ax1.set_xlabel("Time")
ax1.set_ylabel("State")
ax1.set_ylim(-3,3)
ax1.set_xlim(0,10)
x = np.array(range(T))*dt
for i in xrange(N):
    eps = np.random.normal(size=T)*motion_noise_magnitude
    y = np.cumsum(eps*dt)
    ax1.plot(x,y)
# Prediction-only trace (H=None): uses eps from the last loop iteration.
y,yvar = kf_trace(F=1,g=0,P=motion_noise_magnitude*dt**2,H=None,j=None,Q=noise_magnitude**2,Xmean=0,Xvar=0,Z=eps)
y = np.array([yi[0] for yi in y])
yvar = np.array([yi[0,0] for yi in yvar])
kf_pred, = ax1.plot(x,y[:-1],label="KF prediction")
# Two-sigma envelope around the prediction.
ax1.plot(x,y[:-1]+2.0*np.sqrt(yvar)[:-1],label="KF prediction + 2*std",lw=0.5,color='k',linestyle='--')
ax1.plot(x,y[:-1]-2.0*np.sqrt(yvar)[:-1],label="KF prediction + 2*std",lw=0.5,color='k',linestyle='--')
ax1.legend(handles=[kf_pred])

# Right panel: noisy observations of a sinusoid and the filtered estimate.
ax2 = fig1.add_subplot(1, 2, 2)
ax2.set_xlabel("Time")
ax2.set_ylabel("State")
ax2.set_ylim(-3,3)
ax2.set_xlim(0,10)
#eps_truth = np.random.normal(size=T)
#y_truth = np.cumsum(eps*dt)
y_truth = np.sin(np.array(range(T))*dt*0.5)*1.0
x = np.array(range(T))*dt
z = y_truth + np.random.normal(size=T)*noise_magnitude
y,yvar = kf_trace(F=1,g=0,P=motion_noise_magnitude*dt**2,H=1,j=0,Q=noise_magnitude**2,Xmean=0,Xvar=0,Z=z)
y = np.array([yi[0] for yi in y])
yvar = np.array([yi[0,0] for yi in yvar])
# Root-sum-square errors of the raw observations vs. the KF estimate
# (Python 2 print statements).
Zmse = np.sqrt(np.sum((z-y_truth)**2))
KFmse = np.sqrt(np.sum((y[:-1]-y_truth)**2))
print "Z MSE",Zmse
print "KF MSE",KFmse
print "Reduction (%)",(Zmse-KFmse)/Zmse*100
ground_truth, = ax2.plot(x,y_truth,label="Ground truth",color='k')
obs = ax2.scatter(x,z,label="Observations",color='gray',s=9)
kf_estimate, = ax2.plot(x,y[:-1],label="KF estimate")
ax2.plot(x,y[:-1]+2.0*np.sqrt(yvar)[:-1],label="KF estimate + 2*std",lw=0.5,color='k',linestyle='--')
ax2.plot(x,y[:-1]-2.0*np.sqrt(yvar)[:-1],label="KF estimate + 2*std",lw=0.5,color='k',linestyle='--')
ax2.legend(handles=[ground_truth,obs,kf_estimate])
plt.show()
| 39.246753 | 113 | 0.657512 | 605 | 3,022 | 3.181818 | 0.180165 | 0.058182 | 0.07013 | 0.021818 | 0.413506 | 0.377662 | 0.282597 | 0.25039 | 0.25039 | 0.221299 | 0 | 0.043643 | 0.128061 | 3,022 | 76 | 114 | 39.763158 | 0.686907 | 0.020847 | 0 | 0.114286 | 0 | 0 | 0.06324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.042857 | null | null | 0.042857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46c6c52458aed5ead8d1c69894d74a5069c08e0c | 998 | py | Python | mundo2/exercicio065.py | beatriznaimaite/Exercicios-Python-Curso-Em-Video | e4213c2054a67d7948aa9023f2f0f33ab7e8eb96 | [
"MIT"
] | null | null | null | mundo2/exercicio065.py | beatriznaimaite/Exercicios-Python-Curso-Em-Video | e4213c2054a67d7948aa9023f2f0f33ab7e8eb96 | [
"MIT"
] | null | null | null | mundo2/exercicio065.py | beatriznaimaite/Exercicios-Python-Curso-Em-Video | e4213c2054a67d7948aa9023f2f0f33ab7e8eb96 | [
"MIT"
] | null | null | null | """
Crie um programa que leia vários números inteiros pelo teclado. No final da execução, mostre a média entre
todos os valores e qual foi o maior e o menor valores lidos. O programa deve perguntar ao usuário se ele
quer ou não continuar a digitar valores.
"""
resposta = 'S'
cont = soma = maior = menor = media = 0
# Keep reading numbers while the user answers 'S' (sim/yes).
while resposta == 'S':
    numero = int(input('Digite um número: '))
    cont += 1
    soma += numero
    if cont == 1:
        # The first value initialises both extremes.
        maior = menor = numero
    else:
        if numero > maior:
            maior = numero
        if numero < menor:
            menor = numero
    resposta = str(input('Quer continuar? [S/N]: ')).strip().upper()[0]
    # Re-prompt until a valid S/N answer is given.
    while resposta not in 'SN':
        resposta = str(input('Quer continuar? [S/N]: ')).strip().upper()[0]
    if resposta == 'N':
        # Redundant with the loop condition (any non-'S' already exits),
        # kept byte-identical.
        resposta = False
        print('Finalizando...')
# Average of all values read.
media = soma/cont
print(f'A média entre os valores lidos foi de {media:.2f}.')
print(f'O maior valor digitado foi {maior} e o menor {menor}.')
| 30.242424 | 106 | 0.614228 | 143 | 998 | 4.286713 | 0.482517 | 0.019576 | 0.035889 | 0.039152 | 0.137031 | 0.137031 | 0.137031 | 0.137031 | 0.137031 | 0.137031 | 0 | 0.008152 | 0.262525 | 998 | 32 | 107 | 31.1875 | 0.824728 | 0.252505 | 0 | 0.090909 | 0 | 0 | 0.252033 | 0 | 0 | 0 | 0 | 0.03125 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.136364 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46cb93aa92bbe683bb38be532385096924e02464 | 915 | py | Python | crawler/crawler/spiders/all_591_cities.py | eala/tw-rental-house-data | 5f595e6bfac8cc85ddff0746b3ee6806e83dec3a | [
"MIT"
] | null | null | null | crawler/crawler/spiders/all_591_cities.py | eala/tw-rental-house-data | 5f595e6bfac8cc85ddff0746b3ee6806e83dec3a | [
"MIT"
] | null | null | null | crawler/crawler/spiders/all_591_cities.py | eala/tw-rental-house-data | 5f595e6bfac8cc85ddff0746b3ee6806e83dec3a | [
"MIT"
] | null | null | null | all_591_cities = [
    # Region-id lookup for 591.com.tw rental listings: one entry per
    # Taiwan city/county, keyed by its Chinese display name.
    {"city": "台北市", "id": "1"},
    {"city": "新北市", "id": "3"},
    {"city": "桃園市", "id": "6"},
    {"city": "新竹市", "id": "4"},
    {"city": "新竹縣", "id": "5"},
    {"city": "基隆市", "id": "2"},
    {"city": "宜蘭縣", "id": "21"},
    {"city": "台中市", "id": "8"},
    {"city": "彰化縣", "id": "10"},
    {"city": "苗栗縣", "id": "7"},
    {"city": "雲林縣", "id": "14"},
    {"city": "南投縣", "id": "11"},
    {"city": "高雄市", "id": "17"},
    {"city": "台南市", "id": "15"},
    {"city": "嘉義市", "id": "12"},
    {"city": "屏東縣", "id": "19"},
    {"city": "嘉義縣", "id": "13"},
    {"city": "花蓮縣", "id": "23"},
    {"city": "台東縣", "id": "22"},
    {"city": "金門縣", "id": "25"},
    {"city": "澎湖縣", "id": "24"}
]
| 10.517241 | 18 | 0.259016 | 87 | 915 | 2.701149 | 0.54023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069418 | 0.417486 | 915 | 86 | 19 | 10.639535 | 0.371482 | 0 | 0 | 0 | 0 | 0 | 0.243716 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46d61300693e53921017fdd21edfe3a6e707f091 | 4,267 | py | Python | mrtarget/common/Redis.py | pieterlukasse/data_pipeline-1 | 823645a36a999e76dc51584aa784f5f9e3f245e7 | [
"Apache-2.0"
] | null | null | null | mrtarget/common/Redis.py | pieterlukasse/data_pipeline-1 | 823645a36a999e76dc51584aa784f5f9e3f245e7 | [
"Apache-2.0"
] | null | null | null | mrtarget/common/Redis.py | pieterlukasse/data_pipeline-1 | 823645a36a999e76dc51584aa784f5f9e3f245e7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import simplejson as json
from collections import Counter
import jsonpickle
from mrtarget.common import require_all
from mrtarget.common.connection import new_redis_client
jsonpickle.set_preferred_backend('simplejson')
import logging
import uuid
import datetime
import numpy as np
import cProfile
np.seterr(divide='warn', invalid='warn')
from mrtarget.Settings import Config
try:
import cPickle as pickle
except ImportError:
import pickle
import time
from multiprocessing import Process, current_process
logger = logging.getLogger(__name__)
import signal
class TimeoutException(Exception):
    """Raised when a SIGALRM alarm fires (see timeout_handler below)."""
    pass


def timeout_handler(signum, frame):
    # Standard signal-handler signature: (signal number, interrupted frame).
    raise TimeoutException


# Route SIGALRM through the handler so timeouts surface as exceptions.
signal.signal(signal.SIGALRM, timeout_handler)
def millify(n):
    """Format *n* with a metric suffix, e.g. 1500 -> '1.5K'.

    Falls back to ``str(n)`` when *n* cannot be formatted (non-numeric
    input, or zero, whose log10 raises).

    Fix: uses the stdlib ``math`` module instead of the accidental
    ``np.math`` alias, which was removed in NumPy >= 1.25.
    """
    import math  # local import keeps the fix self-contained
    try:
        n = float(n)
        millnames = ['', 'K', 'M', 'G', 'P']
        # Index of the 1000-step magnitude bucket, clamped to the table.
        millidx = max(0, min(len(millnames) - 1,
                             int(math.floor(math.log10(abs(n)) / 3))))
        return '%.1f%s' % (n / 10 ** (3 * millidx), millnames[millidx])
    except Exception:
        return str(n)
class RedisLookupTable(object):
    '''
    Simple Redis-based key value store for string-based objects. Faster than
    its subclasses since it does not serialise and unserialise strings. By
    default keys will expire in 2 days.

    Allows to store a lookup table (key/value store) in memory/redis so that it
    can be accessed quickly from multiple processes, reducing memory usage by
    sharing.
    '''

    LOOK_UPTABLE_NAMESPACE = 'lookuptable:%(namespace)s'
    KEY_NAMESPACE = '%(namespace)s:%(key)s'

    def __init__(self,
                 namespace=None,
                 r_server=None,
                 ttl=60 * 60 * 24 + 2):
        # NOTE(review): the docstring promises a 2-day TTL but this default
        # is 1 day + 2 seconds; `60*60*24*2` was probably intended — confirm.
        if namespace is None:
            namespace = uuid.uuid4()
        self.namespace = self.LOOK_UPTABLE_NAMESPACE % {'namespace': namespace}
        self.r_server = new_redis_client() if not r_server else r_server
        self.default_ttl = ttl
        require_all(self.r_server is not None)

    def set(self, key, obj, r_server=None, ttl=None):
        """Store *obj* under *key*, expiring after *ttl* (or the default)."""
        # NOTE(review): redis-py >= 3.0 expects setex(name, time, value);
        # this argument order matches the pre-3.0 API — verify the client
        # version before changing.
        self._get_r_server(r_server).setex(self._get_key_namespace(key),
                                           self._encode(obj),
                                           ttl or self.default_ttl)

    def get(self, key, r_server=None):
        """Return the decoded value for *key*; raise KeyError if absent."""
        server = self._get_r_server(r_server)
        value = server.get(self._get_key_namespace(key))
        if value is not None:
            return self._decode(value)
        raise KeyError(key)

    def keys(self, r_server=None):
        """List all keys in this table, stripped of the namespace prefix."""
        prefix = self.namespace + ':'
        return [key.replace(prefix, '')
                for key in self._get_r_server(r_server).keys(self.namespace + '*')]

    def set_r_server(self, r_server):
        self.r_server = r_server

    def _get_r_server(self, r_server=None):
        # Per-call server override falls back to the instance's client.
        return r_server if r_server else self.r_server

    def _get_key_namespace(self, key, r_server=None):
        return self.KEY_NAMESPACE % {'namespace': self.namespace, 'key': key}

    def _encode(self, obj):
        # Identity here; subclasses serialise.
        return obj

    def _decode(self, obj):
        return obj

    def __contains__(self, key, r_server=None):
        server = self._get_r_server(r_server)
        return server.exists(self._get_key_namespace(key))

    def __getitem__(self, key, r_server=None):
        # Fixed: previously namespaced the key twice (get() already does it)
        # and never returned the fetched value.
        return self.get(key, r_server=self._get_r_server(r_server))

    def __setitem__(self, key, value, r_server=None):
        # Fixed: previously namespaced the key twice (set() already does it)
        # and read `self.lt_reuse`, which this class never defines; default
        # to writing when the attribute is absent.
        if not getattr(self, 'lt_reuse', False):
            self.set(key, value, r_server=self._get_r_server(r_server))
class RedisLookupTableJson(RedisLookupTable):
    '''
    Simple Redis-based key value store for Json serialised objects
    By default keys will expire in 2 days
    '''

    def _encode(self, obj):
        # Serialise to a JSON string before storage.
        return json.dumps(obj)

    def _decode(self, obj):
        # Parse the stored JSON string back into Python objects.
        return json.loads(obj)
class RedisLookupTablePickle(RedisLookupTable):
    '''
    Simple Redis-based key value store for pickled objects
    By default keys will expire in 2 days
    '''

    def _encode(self, obj):
        # base64-wrap the pickle so it survives string-oriented storage.
        # Fix: encodebytes/decodebytes replace encodestring/decodestring,
        # which were removed in Python 3.9; output is identical.
        return base64.encodebytes(pickle.dumps(obj, pickle.HIGHEST_PROTOCOL))

    def _decode(self, obj):
        return base64.decodebytes(obj)
| 28.446667 | 81 | 0.65948 | 569 | 4,267 | 4.739895 | 0.307557 | 0.090842 | 0.036707 | 0.036337 | 0.299221 | 0.209121 | 0.15647 | 0.144605 | 0.077123 | 0.077123 | 0 | 0.008359 | 0.243028 | 4,267 | 149 | 82 | 28.637584 | 0.826625 | 0.134989 | 0 | 0.152174 | 0 | 0 | 0.026825 | 0.012721 | 0 | 0 | 0 | 0 | 0 | 1 | 0.195652 | false | 0.01087 | 0.195652 | 0.097826 | 0.597826 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
46d79143d42745acfc58bf24b940e5ad645fcc18 | 401 | py | Python | project/app/migrations/0006_housing_description.py | ryan-lam/hackupc2021 | 2f63b47f831f3d6d01077a9bf2f94d9babe6bfce | [
"MIT"
] | null | null | null | project/app/migrations/0006_housing_description.py | ryan-lam/hackupc2021 | 2f63b47f831f3d6d01077a9bf2f94d9babe6bfce | [
"MIT"
] | null | null | null | project/app/migrations/0006_housing_description.py | ryan-lam/hackupc2021 | 2f63b47f831f3d6d01077a9bf2f94d9babe6bfce | [
"MIT"
] | 2 | 2021-05-23T04:36:35.000Z | 2021-05-27T04:27:04.000Z | # Generated by Django 3.2 on 2021-05-16 05:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0005_auto_20210515_1932'),
]
operations = [
migrations.AddField(
model_name='housing',
name='description',
field=models.CharField(default='null', max_length=500),
),
]
| 21.105263 | 67 | 0.603491 | 43 | 401 | 5.511628 | 0.837209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114187 | 0.279302 | 401 | 18 | 68 | 22.277778 | 0.705882 | 0.107232 | 0 | 0 | 1 | 0 | 0.134831 | 0.064607 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46d8731739d55091c02beebba8b44113b38fd70d | 13,068 | py | Python | drizzlepac/haputils/make_poller_files.py | check-spelling/drizzlepac | 19baaf5a416c72f272889800b13d251f33f76d2c | [
"BSD-3-Clause"
] | 28 | 2016-08-16T04:16:32.000Z | 2022-03-27T15:39:29.000Z | drizzlepac/haputils/make_poller_files.py | check-spelling/drizzlepac | 19baaf5a416c72f272889800b13d251f33f76d2c | [
"BSD-3-Clause"
] | 822 | 2016-03-10T01:19:28.000Z | 2022-03-30T20:25:34.000Z | drizzlepac/haputils/make_poller_files.py | check-spelling/drizzlepac | 19baaf5a416c72f272889800b13d251f33f76d2c | [
"BSD-3-Clause"
] | 33 | 2016-03-16T19:18:03.000Z | 2021-12-27T04:20:44.000Z | #!/usr/bin/env python
"""Generates a poller file that will be used as input to runsinglehap.py, hapsequencer.py, runmultihap.py or
hapmultisequencer.py based on the files or rootnames listed user-specified list file.
USAGE
>>> python drizzlepac/haputils/make_poller_files.py <input filename> -[ost]
- input filename: Name of a file containing a list of calibrated fits files (ending with "_flt.fits" or
"_flc.fits") or rootnames (9 characters, usually ending with a "q" to process. The corresponding
flc.fits or flt.fits files must exist in the user-specified path, the current working directory or the
online cache
- The '-o' optional input allows users to input the name of an output poller file that will be created.
If not explicitly specified, the poller file will be named "poller_file.out".
- The '-s' optional input allows users to input the Name of the skycell. The correct syntax for skycell
names is "skycell-pNNNNxXXyXX", where NNNN is the 4-digit projection cell number, and XX and YY are the
two-digit X and Y skycell indices, respectively. NOTE: this input argument is not needed for SVM poller
file creation, but *REQUIRED* for MVM poller file creation. Users can determine the skycell(s) that
their observations occupy using the ``haputils.which_skycell`` script.
- The '-t' optional input allows users to specify the type of poller file that will be created. The
valid input options are "svm" to create a poller file for use with the single-visit mosaics pipeline
or "mvm" to create a poller file for use with the multiple-visit mosaics pipeline. If not explicitly
specified, the default value is "svm". NOTE: if creating a MVM poller file, one must specify the
skycell name using the "-s" input argument.
Python USAGE:
>>> python
>>> from drizzlepac.haputils import make_poller_files
>>> make_poller_files.generate_poller_file(input_list, poller_file_type='svm', output_poller_filename="poller_file.out", skycell_name=None):
"""
import argparse
import os
import re
import sys
from astropy.io import fits
from drizzlepac.haputils import poller_utils
__taskname__ = 'make_poller_files'
def generate_poller_file(input_list, poller_file_type='svm', output_poller_filename="poller_file.out",
                         skycell_name=None):
    """Creates a properly formatted SVM or MVM poller file.

    Parameters
    ----------
    input_list : str
        Name of the text file containing the list of filenames or rootnames to process

    poller_file_type : str, optional
        Type of poller file to create. 'svm' for single visit mosaic, 'mvm' for multi-visit mosaic. Default
        value is 'svm'.

    output_poller_filename : str, optional
        Name of the output poller file that will be created. Default value is 'poller_file.out'.

    skycell_name : str, optional
        Name of the skycell to use when creating a MVM poller file. skycell_name is REQUIRED for the creation
        of a MVM poller file, but completely unnecessary for the creation of a SVM poller file. The correct
        syntax for skycell names is 'skycell-pNNNNxXXyXX', where NNNN is the 4-digit projection cell number,
        and XX and YY are the two-digit X and Y skycell indices, respectively. Default value is logical
        'None'. NOTE: this input argument is not needed for SVM poller file creation, but *REQUIRED* for MVM
        poller file creation. Users can determine the skycell(s) that their observations occupy using the
        ``haputils.which_skycell`` script.

    Returns
    -------
    Nothing.
    """
    # NOTE(review): indentation reconstructed from a whitespace-stripped
    # source — confirm branch placement against the original file.
    if poller_file_type == 'svm' and skycell_name:
        print("PROTIP: Users only need to provide a skycell name for the creation of MVM poller files, not SVM poller files.")
    # Open rootname list file
    f = open(input_list, 'r')
    rootname_list = f.readlines()
    f.close()
    output_list = []
    for rootname in rootname_list:
        rootname = rootname.strip()
        fullfilepath = locate_fitsfile(rootname)
        if len(fullfilepath) > 0:
            if rootname.endswith(".fits"):
                print("Found fits file {}".format(fullfilepath))
            else:
                print("Rootname {}: Found fits file {}".format(rootname, fullfilepath))
            imgname = fullfilepath.split(os.sep)[-1]
        else:
            # Warn user if no fits file can be located for a given rootname, and skip processing of the file.
            if rootname.endswith(".fits"):
                item_type = "filename"
            else:
                item_type = "rootname"
            print("WARNING: No fits file found for {} '{}'. This {} will be omitted from the poller file.".format(item_type, rootname, item_type))
            continue
        # Build each individual poller file line
        linelist = []
        linelist.append(imgname)
        imghdu = fits.open(fullfilepath)
        imghdr = imghdu[0].header
        linelist.append("{}".format(imghdr['proposid']))
        linelist.append(imgname.split("_")[-2][1:4].upper())
        linelist.append(imghdr['linenum'].split(".")[0])
        linelist.append("{}".format(imghdr['exptime']))
        # Filter keywords differ by instrument: ACS uses FILTER1/FILTER2,
        # WFC3 a single FILTER.
        if imghdr['INSTRUME'].lower() == "acs":
            filter = poller_utils.determine_filter_name("{};{}".format(imghdr['FILTER1'], imghdr['FILTER2']))
        elif imghdr['INSTRUME'].lower() == "wfc3":
            filter = poller_utils.determine_filter_name(imghdr['FILTER'])
        linelist.append(filter.upper())
        linelist.append(imghdr['detector'].upper())
        if poller_file_type == 'mvm':  # Additional stuff to add to MVM poller files
            if skycell_name:
                pattern = re.compile("(skycell-p\d{4}x\d{2}y\d{2})")
                skycell_name_format_check = pattern.match(skycell_name)
                if skycell_name_format_check:
                    linelist.append("{}".format(skycell_name))
                else:
                    raise ValueError("'{}' is an improperly formatted skycell name. Please refer to documentation for information regarding correct skycell name syntax.".format(skycell_name))
            else:
                raise Exception("No skycell name was provided. The name of the skycell that the observations occupy is required for MVM poller file creation.")
            linelist.append("NEW")
        linelist.append(fullfilepath)
        imghdu.close()
        # Append newly created poller file line to the list of lines to be written to the output file.
        output_list.append(",".join(linelist))

    # adding carriage returns to all but the very last line in the output file.
    list_size = len(output_list)
    for ctr in range(0, list_size):
        if ctr != list_size-1:
            trailing_char = "\n"
        else:
            trailing_char = ""
        output_list[ctr] = output_list[ctr]+trailing_char

    # write output poller file
    with open(output_poller_filename, 'w') as f:
        f.writelines(output_list)
    print("wrote {} poller file '{}'.".format(poller_file_type.upper(), output_poller_filename))
# ============================================================================================================


def locate_fitsfile(search_string):
    """returns full file name (fullpath + filename) for a specified rootname or filename. The search
    algorithm looks for the file in the following order:

    - Search for a _flc.fits file in the current working directory
    - Search for a _flt.fits file in the current working directory
    - Search for a _flc.fits file in subdirectory in the path specified in $DATA_PATH
    - Search for a _flt.fits file in subdirectory in the path specified in $DATA_PATH

    Parameters
    ----------
    search_string : str
        rootname or filename to locate

    Returns
    -------
    fullfilepath : str
        full file path + image name of specified search_string.
    """
    if search_string.endswith("_flt.fits") or search_string.endswith("_flc.fits"):  # Process search_string as a full filename
        # Look in user-provided path (assuming they provided one)
        if os.path.exists(search_string) and os.sep in search_string:
            return search_string
        # Look for files in CWD
        if os.path.exists(search_string) and os.sep not in search_string:
            return os.getcwd() + os.sep + search_string
        # If not found in CWD, look elsewhere...
        if not os.getenv("DATA_PATH"):
            sys.exit("ERROR: Undefined online cache data root path. Please set environment variable 'DATA_PATH'")
        fullfilepath = "{}{}{}{}{}{}{}".format(os.getenv("DATA_PATH"), os.sep, search_string[:4],
                                               os.sep, search_string[:-9], os.sep, search_string)
        # Fix: test the constructed online-cache path. The previous code
        # re-tested `search_string` (already known not to exist here), so
        # cache lookups for filename inputs always returned "".
        if os.path.exists(fullfilepath):
            return fullfilepath
        return ""  # Return a null string if no file is found
    else:  # Process search_string as a rootname
        # Look for files in CWD first
        for fits_ext in ["flc", "flt"]:
            if os.path.exists("{}_{}.fits".format(search_string, fits_ext)):
                return "{}{}{}_{}.fits".format(os.getcwd(), os.sep, search_string, fits_ext)
        # If not found in CWD, look elsewhere...
        if not os.getenv("DATA_PATH"):
            sys.exit("ERROR: Undefined online cache data root path. Please set environment variable 'DATA_PATH'")
        filenamestub = "{}{}{}{}{}{}{}".format(os.getenv("DATA_PATH"), os.sep, search_string[:4],
                                               os.sep, search_string, os.sep, search_string)
        for fits_ext in ["flc", "flt"]:
            if os.path.exists("{}_{}.fits".format(filenamestub, fits_ext)):
                return "{}_{}.fits".format(filenamestub, fits_ext)
        # it should never get here unless no file was found either locally or elsewhere in $DATA_PATH.
        return ""  # Return a null string if no file is found
# ============================================================================================================


if __name__ == '__main__':
    # Parse input arguments
    parser = argparse.ArgumentParser(description='Create a HAP SVM or MVM poller file')
    parser.add_argument('input_list',
                        help='Name of a file containing a list of calibrated fits files (ending with '
                             '"_flt.fits" or "_flc.fits") or rootnames (9 characters, usually ending '
                             'with a "q" to process. The corresponding flc.fits or flt.fits files must '
                             'exist in the user-specified path, the current working directory or the online '
                             'cache')
    parser.add_argument('-o', '--output_poller_filename', required=False, default="poller_file.out",
                        help='Name of an output poller file that will be created. If not explicitly '
                             'specified, the poller file will be named "poller_file.out".')
    parser.add_argument('-s', '--skycell_name', required=False, default="None",
                        help='Name of the skycell. The correct syntax for skycell names is '
                             '"skycell-pNNNNxXXyXX", where NNNN is the 4-digit projection cell number, and '
                             'XX and YY are the two-digit X and Y skycell indices, respectively. NOTE: this '
                             'input argument is not needed for SVM poller file creation, but *REQUIRED* for '
                             'MVM poller file creation. Users can determine the skycell(s) that their '
                             'observations occupy using the haputils.which_skycell.py script.')
    parser.add_argument('-t', '--poller_file_type', required=False, choices=['svm', 'mvm'], default='svm',
                        help='Type of poller file to be created. "svm" to create a poller file for use with '
                             'the single-visit mosaics pipeline and "mvm" to create a poller file for use '
                             'with the multiple-visit mosaics pipeline. If not explicitly '
                             'specified, the default value is "svm". NOTE: if creating a MVM poller file, '
                             'one must specify the skycell name using the "-s" input argument.')
    in_args = parser.parse_args()

    # reformat input args: the string sentinel "None" becomes a real None.
    if in_args.skycell_name == 'None':
        in_args.skycell_name = None

    # logic to make sure user has specified the skycell name if a MVM poller file is to be created.
    if in_args.poller_file_type == "mvm" and in_args.skycell_name is None:
        parser.error("ERROR: To create a MVM poller file, a skycell name must be specified with the '-s' argument.")

    generate_poller_file(in_args.input_list,
                         poller_file_type=in_args.poller_file_type,
                         output_poller_filename=in_args.output_poller_filename,
                         skycell_name=in_args.skycell_name)
| 54.224066 | 191 | 0.633762 | 1,712 | 13,068 | 4.726051 | 0.180491 | 0.065505 | 0.019281 | 0.016809 | 0.502781 | 0.430355 | 0.404771 | 0.396737 | 0.396737 | 0.379928 | 0 | 0.002382 | 0.261019 | 13,068 | 240 | 192 | 54.45 | 0.835456 | 0.38491 | 0 | 0.140625 | 1 | 0.03125 | 0.315769 | 0.0126 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015625 | false | 0 | 0.046875 | 0 | 0.117188 | 0.039063 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46dd367bc104a82f56c26623af9a311c62796d6c | 3,932 | py | Python | native/jni/external/selinux/python/sepolicy/sepolicy/templates/rw.py | Joyoe/Magisk-nosbin_magisk-nohide | 449441921740bf85926c14f41b3532822ca0eb65 | [
"MIT"
] | 2 | 2022-01-16T00:59:54.000Z | 2022-02-09T12:00:48.000Z | native/jni/external/selinux/python/sepolicy/sepolicy/templates/rw.py | Joyoe/Magisk-nosbin_magisk-nohide | 449441921740bf85926c14f41b3532822ca0eb65 | [
"MIT"
] | null | null | null | native/jni/external/selinux/python/sepolicy/sepolicy/templates/rw.py | Joyoe/Magisk-nosbin_magisk-nohide | 449441921740bf85926c14f41b3532822ca0eb65 | [
"MIT"
] | 2 | 2022-02-09T12:00:39.000Z | 2022-02-21T18:34:46.000Z | # Copyright (C) 2007-2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# policygentool is a tool for the initial generation of SELinux policy
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307 USA
#
#
########################### tmp Template File #############################
# Type declaration fragment: TEMPLATETYPE_rw_t labels the application's
# read/write content; files_type() registers it as a generic file type.
te_types="""
type TEMPLATETYPE_rw_t;
files_type(TEMPLATETYPE_rw_t)
"""
# TE rules: the application domain TEMPLATETYPE_t may fully manage dirs,
# files and symlinks labeled TEMPLATETYPE_rw_t.
te_rules="""
manage_dirs_pattern(TEMPLATETYPE_t, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
manage_files_pattern(TEMPLATETYPE_t, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
manage_lnk_files_pattern(TEMPLATETYPE_t, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
"""
########################### Interface File #############################
# Interfaces generated for every rw type: search / read / manage access to
# the TEMPLATETYPE_rw_t dirs and files, parameterized on the caller's
# domain ($1).
if_rules="""
########################################
## <summary>
## Search TEMPLATETYPE rw directories.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`TEMPLATETYPE_search_rw_dir',`
gen_require(`
type TEMPLATETYPE_rw_t;
')
allow $1 TEMPLATETYPE_rw_t:dir search_dir_perms;
files_search_rw($1)
')
########################################
## <summary>
## Read TEMPLATETYPE rw files.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`TEMPLATETYPE_read_rw_files',`
gen_require(`
type TEMPLATETYPE_rw_t;
')
read_files_pattern($1, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
allow $1 TEMPLATETYPE_rw_t:dir list_dir_perms;
files_search_rw($1)
')
########################################
## <summary>
## Manage TEMPLATETYPE rw files.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`TEMPLATETYPE_manage_rw_files',`
gen_require(`
type TEMPLATETYPE_rw_t;
')
manage_files_pattern($1, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
')
########################################
## <summary>
## Create, read, write, and delete
## TEMPLATETYPE rw dirs.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`TEMPLATETYPE_manage_rw_dirs',`
gen_require(`
type TEMPLATETYPE_rw_t;
')
manage_dirs_pattern($1, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
')
"""
# TE rule emitted when the template generates unix-stream-socket support:
# the domain may manage socket files in its rw locations.
te_stream_rules="""
manage_sock_files_pattern(TEMPLATETYPE_t, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t)
"""
# Interface letting a client domain ($1) connect to the daemon's unix
# stream socket located under the rw type.
if_stream_rules="""\
########################################
## <summary>
## Connect to TEMPLATETYPE over a unix stream socket.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`TEMPLATETYPE_stream_connect',`
gen_require(`
type TEMPLATETYPE_t, TEMPLATETYPE_rw_t;
')
stream_connect_pattern($1, TEMPLATETYPE_rw_t, TEMPLATETYPE_rw_t, TEMPLATETYPE_t)
')
"""
# Fragment appended to the generated admin interface's gen_require block.
if_admin_types="""
type TEMPLATETYPE_rw_t;"""
# Admin interface body: full administrative (admin_pattern) control over
# the rw type for the admin domain ($1).
if_admin_rules="""
files_search_etc($1)
admin_pattern($1, TEMPLATETYPE_rw_t)
"""
########################### File Context ##################################
# File-context entry for a regular file (--): labels FILENAME with the
# rw type.
fc_file="""
FILENAME -- gen_context(system_u:object_r:TEMPLATETYPE_rw_t,s0)
"""
# File-context entry for a named socket (-s): label it with the rw type
# declared in te_types above. The original entry referenced
# TEMPLATETYPE_etc_rw_t, a type this template never declares (apparent
# copy-paste from the etc_rw template), which would make the generated
# file-context line reference an undefined type.
fc_sock_file="""\
FILENAME -s gen_context(system_u:object_r:TEMPLATETYPE_rw_t,s0)
"""
# File-context entry for a directory tree: FILENAME and everything below
# it get the rw type.
fc_dir="""
FILENAME(/.*)? gen_context(system_u:object_r:TEMPLATETYPE_rw_t,s0)
"""
| 24.72956 | 81 | 0.653611 | 483 | 3,932 | 5.031056 | 0.289855 | 0.190123 | 0.179012 | 0.085597 | 0.584774 | 0.528395 | 0.499588 | 0.444033 | 0.383951 | 0.322634 | 0 | 0.010601 | 0.136317 | 3,932 | 158 | 82 | 24.886076 | 0.704947 | 0.240336 | 0 | 0.617391 | 0 | 0 | 0.920817 | 0.314941 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46ebb2375f6354f283088526e6acc20b627eadfb | 1,340 | py | Python | rss/resources.py | victorchen796/reddit-submission-scraper | 01401c6b35af8547eb9640e441a28633c38408bd | [
"MIT"
] | null | null | null | rss/resources.py | victorchen796/reddit-submission-scraper | 01401c6b35af8547eb9640e441a28633c38408bd | [
"MIT"
] | null | null | null | rss/resources.py | victorchen796/reddit-submission-scraper | 01401c6b35af8547eb9640e441a28633c38408bd | [
"MIT"
] | null | null | null | import json
import os
# Absolute path of this module and its containing directory; all
# resources/*.json paths below are resolved relative to script_dir so the
# helpers work regardless of the process's current working directory.
script_path = os.path.abspath(__file__)
script_dir = os.path.split(script_path)[0]
def get_config():
    """Load and return the parsed contents of resources/config.json."""
    path = os.path.join(script_dir, 'resources/config.json')
    with open(path, 'r') as f:
        # json.load reads straight from the file object — no need for the
        # json.loads(f.read()) round trip.
        return json.load(f)
def get_submissions():
    """Load and return the parsed contents of resources/submissions.json."""
    path = os.path.join(script_dir, 'resources/submissions.json')
    with open(path, 'r') as f:
        # json.load parses the stream directly (was json.loads(f.read())).
        return json.load(f)
def get_subreddits():
    """Load and return the parsed contents of resources/subreddits.json."""
    path = os.path.join(script_dir, 'resources/subreddits.json')
    with open(path, 'r') as f:
        # json.load parses the stream directly (was json.loads(f.read())).
        return json.load(f)
def update_config(config):
    """Serialize config to resources/config.json, pretty-printed."""
    path = os.path.join(script_dir, 'resources/config.json')
    with open(path, 'w') as f:
        # json.dump writes straight to the file object — same output as
        # f.write(json.dumps(config, indent=2)).
        json.dump(config, f, indent=2)
def update_submissions(submissions):
    """Serialize submissions to resources/submissions.json, pretty-printed.

    default=str stringifies any value json cannot encode natively
    (presumably datetime fields — matches the original behavior).
    """
    path = os.path.join(script_dir, 'resources/submissions.json')
    with open(path, 'w') as f:
        json.dump(submissions, f, indent=2, default=str)
def update_subreddits(subreddits):
    """Serialize subreddits to resources/subreddits.json, pretty-printed."""
    path = os.path.join(script_dir, 'resources/subreddits.json')
    with open(path, 'w') as f:
        # json.dump writes straight to the file object.
        json.dump(subreddits, f, indent=2)
46f05cf6545f1bd019299906868ea89580724e08 | 331 | py | Python | lrthubcore/ratings/admin.py | xrojan/lrthub-core | 757189942c87f7136fd1f1fee536375d248d8233 | [
"BSD-3-Clause"
] | null | null | null | lrthubcore/ratings/admin.py | xrojan/lrthub-core | 757189942c87f7136fd1f1fee536375d248d8233 | [
"BSD-3-Clause"
] | null | null | null | lrthubcore/ratings/admin.py | xrojan/lrthub-core | 757189942c87f7136fd1f1fee536375d248d8233 | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from .models import Rating
# Register your models here.
@admin.register(Rating)
class RatingAdmin(admin.ModelAdmin):
    # Admin configuration for Rating objects.
    # Date-based drill-down navigation on the creation timestamp.
    date_hierarchy = 'created_on'
    # Search by the rating user's username (follows the user_id FK) or by
    # the rating value.
    search_fields = ['user_id__username', 'value']
    # Columns shown in the change list.
    list_display = ('user_id', 'value',)
    # Sidebar filters; 'is_deleted' suggests the model soft-deletes —
    # TODO(review): confirm the field exists on Rating.
    list_filter = ('user_id', 'value', 'is_deleted')
| 27.583333 | 52 | 0.719033 | 42 | 331 | 5.404762 | 0.666667 | 0.079295 | 0.096916 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151057 | 331 | 11 | 53 | 30.090909 | 0.807829 | 0.07855 | 0 | 0 | 0 | 0 | 0.217822 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
46f2fb3f4a355efbd5abadbe36f9f51a55519a5b | 20,406 | py | Python | scitbx/math/tests/tst_gaussian.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | scitbx/math/tests/tst_gaussian.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | scitbx/math/tests/tst_gaussian.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | from __future__ import division
from scitbx.examples import immoptibox_ports
from scitbx.math import gaussian
from scitbx.array_family import flex
from libtbx.test_utils import approx_equal, eps_eq
from libtbx.utils import format_cpu_times
try:
import cPickle as pickle
except ImportError:
import pickle
from cStringIO import StringIO
import math
import sys
def finite_gradient_dx_at_x(gaussian, x, eps=1.e-5):
  """Central finite-difference estimate of d(gaussian.at_x)/dx at x.

  Returns 0 at x == 0 (the symmetric stencil would otherwise step to
  negative x); for any other x the caller must keep x >= eps.
  """
  if (x == 0): return 0
  assert x >= eps
  below = gaussian.at_x(x - eps)
  above = gaussian.at_x(x + eps)
  return (above - below) / (2 * eps)
def exercise_gradient_dx(gaussian, x_max=1., n_points=50):
  """Check gaussian.gradient_dx_at_x against a finite-difference estimate
  on an even grid of n_points+1 samples over [0, x_max]."""
  for i in xrange(n_points+1):
    x = x_max * i / n_points
    assert eps_eq(
      finite_gradient_dx_at_x(gaussian, x),
      gaussian.gradient_dx_at_x(x))
def exercise_integral_dx(gaussian, x_max=1., n_points=1000):
  """Compare gaussian.integral_dx_at_x against a trapezoidal sum."""
  # Trapezoidal rule over [0, x_max], accumulated in units of x_step
  # (multiplied out in the final comparison below).
  numerical_integral = 0
  x_step = x_max / n_points
  for i in xrange(n_points+1):
    x = x_max * i / n_points
    new_value = gaussian.at_x(x)
    if (i):
      numerical_integral += (prev_value + new_value) * .5
    prev_value = new_value
  # After the loop x == x_max. The explicit 1.e-3 second argument is
  # presumably a precision/step control — must give the same result as
  # the default (checked next). TODO(review): confirm its meaning in
  # integral_dx_at_x's signature.
  analytical_integral = gaussian.integral_dx_at_x(x, 1.e-3)
  assert eps_eq(analytical_integral, gaussian.integral_dx_at_x(x))
  assert eps_eq(numerical_integral*x_step, analytical_integral, eps=1.e-5)
def term_finite_gradient_d_ab_at_x(term, x, eps=1.e-5):
  """Finite-difference gradients of a single term's value at x with
  respect to its coefficients a and b, packed into a gaussian.term."""
  def central_diff(minus_term, plus_term):
    # Central difference of at_x(x) between two perturbed terms.
    return (plus_term.at_x(x) - minus_term.at_x(x)) / (2*eps)
  d_a = central_diff(
    gaussian.term(term.a-eps, term.b),
    gaussian.term(term.a+eps, term.b))
  d_b = central_diff(
    gaussian.term(term.a, term.b-eps),
    gaussian.term(term.a, term.b+eps))
  return gaussian.term(d_a, d_b)
def exercise_term_gradients_d_ab(term, x_max=1., n_points=50):
  """Check the term's analytical d/d{a,b} gradients (evaluated at x^2)
  against finite differences on an even grid over [0, x_max]."""
  for i in xrange(n_points+1):
    x = x_max * i / n_points
    numerical = term_finite_gradient_d_ab_at_x(term, x)
    analytical = term.gradients_d_ab_at_x_sq(x*x)
    for attr in ("a", "b"):
      assert eps_eq(getattr(numerical, attr), getattr(analytical, attr))
def exercise_term():
  """Exercise gaussian.term: construction, evaluation, gradients,
  integrals."""
  t = gaussian.term(2,3)
  assert approx_equal(t.a, 2)
  assert approx_equal(t.b, 3)
  # Term value is a*exp(-b*x^2); at_x_sq takes x^2, at_x takes x.
  assert approx_equal(t.at_x_sq(4), 2*math.exp(-3*4))
  assert approx_equal(t.at_x(2), 2*math.exp(-3*4))
  eps = 1.e-5
  # Analytical x-gradient vs central finite difference on [0, 0.9].
  for ix in (xrange(10)):
    x = ix/10.
    assert eps_eq((t.at_x(x+eps)-t.at_x(x-eps))/(2*eps), t.gradient_dx_at_x(x))
  # Cover positive/negative a combined with positive, zero, tiny and
  # negative b.
  for f in [1,-1]:
    for t in [gaussian.term(f*2,3),
              gaussian.term(f*3,0),
              gaussian.term(f*4,1.e-4),
              gaussian.term(f*5,-1)]:
      exercise_gradient_dx(t)
      exercise_integral_dx(t)
      exercise_term_gradients_d_ab(t)
def exercise_sum():
  """Exercise gaussian.sum: constructors, parameter access, evaluation,
  pickling, show() and sort()."""
  # Constant-only constructors; c defaults to "used".
  g = gaussian.sum(0)
  assert g.n_terms() == 0
  assert g.array_of_a() == ()
  assert g.array_of_b() == ()
  assert approx_equal(g.c(), 0)
  assert g.use_c()
  assert g.n_parameters() == 1
  assert approx_equal(g.parameters(), [0])
  g = gaussian.sum(0, True)
  assert g.use_c()
  g = gaussian.sum(0, False)
  assert not g.use_c()
  g = gaussian.sum(1)
  assert g.n_terms() == 0
  assert g.array_of_a() == ()
  assert g.array_of_b() == ()
  assert approx_equal(g.c(), 1)
  assert g.use_c()
  assert g.n_parameters() == 1
  assert approx_equal(g.parameters(), [1])
  # Empty a/b sequences: no terms and c unused unless given explicitly.
  g = gaussian.sum((), ())
  assert g.n_terms() == 0
  assert g.array_of_a() == ()
  assert g.array_of_b() == ()
  assert g.c() == 0
  assert not g.use_c()
  assert g.n_parameters() == 0
  assert g.parameters().size() == 0
  g = gaussian.sum((), (), -2)
  assert g.n_terms() == 0
  assert g.array_of_a() == ()
  assert g.array_of_b() == ()
  assert approx_equal(g.c(), -2)
  # Flat flex.double constructor: pairs (a,b)*n, even length means no c.
  g = gaussian.sum(flex.double((1,2,3,4)))
  assert approx_equal(g.array_of_a(), (1,3))
  assert approx_equal(g.array_of_b(), (2,4))
  assert approx_equal(g.c(), 0)
  assert not g.use_c()
  assert approx_equal(g.parameters(), [1,2,3,4])
  g = gaussian.sum(flex.double((1,2,3,4)), 0, True)
  assert approx_equal(g.c(), 0)
  assert g.use_c()
  g = gaussian.sum(flex.double((1,2,3,4)), 5)
  assert approx_equal(g.c(), 5)
  assert g.use_c()
  assert approx_equal(g.parameters(), [1,2,3,4,5])
  # Odd length: trailing value is taken as the constant c.
  g = gaussian.sum(flex.double((1,2,3,4,5)))
  assert approx_equal(g.c(), 5)
  assert g.use_c()
  assert approx_equal(g.parameters(), [1,2,3,4,5])
  # Evaluation: scalar and flex overloads of at_x_sq/at_x must agree.
  g = gaussian.sum((1,-2,3,-4,5), (-.1,.2,-.3,.4,-.5), 6)
  assert g.n_terms() == 5
  assert approx_equal(g.array_of_a(),(1,-2,3,-4,5))
  assert approx_equal(g.array_of_b(),(-.1,.2,-.3,.4,-.5))
  assert approx_equal(g.c(), 6)
  assert approx_equal(g.at_x_sq(3/4.), 13.4251206)
  assert approx_equal(g.at_x_sq(flex.double([2/4.,3/4.])),
                      [11.8723031, 13.4251206])
  assert approx_equal(g.at_x(math.sqrt(3/4.)), 13.4251206)
  assert approx_equal(g.at_x(flex.sqrt(flex.double([2/4.,3/4.]))),
                      [11.8723031, 13.4251206])
  # Pickle round-trip preserves terms, c and the use_c flag.
  s = pickle.dumps(g)
  l = pickle.loads(s)
  assert l.n_terms() == g.n_terms()
  assert approx_equal(l.array_of_a(), g.array_of_a())
  assert approx_equal(l.array_of_b(), g.array_of_b())
  assert approx_equal(l.c(), g.c())
  assert l.use_c()
  s = pickle.dumps(gaussian.sum((),()))
  l = pickle.loads(s)
  assert not l.use_c()
  # Gradient/integral consistency on representative sums.
  exercise_gradient_dx(gaussian.sum(
    [5.5480], [10.4241], 0))
  exercise_gradient_dx(gaussian.sum(
    [2.657506,1.078079,1.490909,-4.241070,0.713791],
    [14.780758,0.776775,42.086842,-0.000294,0.239535],
    4.297983))
  exercise_integral_dx(gaussian.sum([5.5480], [10.4241]))
  exercise_integral_dx(gaussian.sum([5.5480], [10.4241], 3))
  exercise_integral_dx(gaussian.sum([5.5480], [0], 0))
  exercise_integral_dx(gaussian.sum([5.5480], [-0.01]))
  exercise_integral_dx(gaussian.sum(
    [2.657506,1.078079,1.490909,-4.241070,0.713791],
    [14.780758,0.776775,42.086842,-0.000294,0.239535],
    4.297983))
  # show(): token count differs with/without the constant term.
  g = gaussian.sum((1,-2,3,-4,5), (-.1,.2,-.3,.4,-.5), 6)
  s = StringIO()
  g.show(s)
  assert len(s.getvalue().split()) == 14
  g = gaussian.sum((3,-2,1,-4,5), (-.3,.2,-.1,.4,-.5))
  s = StringIO()
  g.show(s)
  assert len(s.getvalue().split()) == 12
  # sort(): terms ordered by decreasing |a|; c and use_c preserved.
  assert isinstance(g.sort(), gaussian.sum)
  assert approx_equal(g.sort().array_of_a(), (5,-4,3,-2,1))
  assert approx_equal(g.sort().array_of_b(), (-.5,.4,-.3,.2,-.1))
  assert not g.sort().use_c()
  g = gaussian.sum((1,2),(3,4),5)
  assert approx_equal(g.sort().array_of_a(), (2,1))
  assert approx_equal(g.sort().array_of_b(), (4,3))
  assert approx_equal(g.sort().c(), 5)
  assert g.sort().use_c()
def fit_finite_diff_gradients(gfit, x, eps=1.e-2):
  """Finite-difference gradients of gfit.at_x(x) w.r.t. each parameter.

  Returns a flex.double in the order a0,b0,a1,b1,...[,c] — the same
  layout gradients_d_abc produces (checked in exercise_fit).
  """
  gr = flex.double()
  c = gfit.c()
  use_c = gfit.use_c()
  for i in xrange(gfit.n_terms()):
    # d/da_i: perturb the i-th a by +/-eps, central difference.
    t = []
    for seps in (eps, -eps):
      a = list(gfit.array_of_a())
      a[i] += seps
      t.append(
        gaussian.sum(a, gfit.array_of_b(), c, use_c).at_x(x))
    gr.append((t[0]-t[1])/(2*eps))
    # d/db_i: perturb the i-th b.
    t = []
    for seps in (eps, -eps):
      b = list(gfit.array_of_b())
      b[i] += seps
      t.append(
        gaussian.sum(gfit.array_of_a(), b, c, use_c).at_x(x))
    gr.append((t[0]-t[1])/(2*eps))
  # d/dc, only when the constant term participates in the sum.
  if (use_c):
    t = []
    for seps in (eps, -eps):
      t.append(
        gaussian.sum(
          gfit.array_of_a(), gfit.array_of_b(), c+seps, use_c).at_x(x))
    gr.append((t[0]-t[1])/(2*eps))
  return gr
def fit_finite_diff_target_gradients(gfit, power, use_sigmas, eps=1.e-2):
  """Finite-difference gradients of gfit's target_function w.r.t. each
  parameter, for a fit with exactly one table point.

  Returns a flex.double in the order a0,b0,a1,b1,...[,c], matching the
  layout of gradients_d_abc (checked in exercise_fit).

  Change vs original: removed the local ``weight`` (computed from the
  first sigma but never read anywhere in the function).
  """
  assert gfit.table_x().size() == 1
  gr = flex.double()
  c = gfit.c()
  use_c = gfit.use_c()
  for i in xrange(gfit.n_terms()):
    # d(target)/da_i via central difference on the i-th a.
    t = []
    for seps in (eps, -eps):
      a = list(gfit.array_of_a())
      a[i] += seps
      gf = gaussian.fit(
        gfit.table_x(),
        gfit.table_y(),
        gfit.table_sigmas(),
        gaussian.sum(a, gfit.array_of_b(), c, use_c))
      t.append(gf.target_function(power, use_sigmas, gf.differences()))
    gr.append((t[0]-t[1])/(2*eps))
    # d(target)/db_i.
    t = []
    for seps in (eps, -eps):
      b = list(gfit.array_of_b())
      b[i] += seps
      gf = gaussian.fit(
        gfit.table_x(),
        gfit.table_y(),
        gfit.table_sigmas(),
        gaussian.sum(gfit.array_of_a(), b, c, use_c))
      t.append(gf.target_function(power, use_sigmas, gf.differences()))
    gr.append((t[0]-t[1])/(2*eps))
  # d(target)/dc, only when the constant term is in use.
  if (use_c):
    t = []
    for seps in (eps, -eps):
      gf = gaussian.fit(
        gfit.table_x(),
        gfit.table_y(),
        gfit.table_sigmas(),
        gaussian.sum(gfit.array_of_a(), gfit.array_of_b(), c+seps, use_c))
      t.append(gf.target_function(power, use_sigmas, gf.differences()))
    gr.append((t[0]-t[1])/(2*eps))
  return gr
def exercise_fit():
  """Exercise gaussian.fit: construction, differences, apply_shifts,
  target function and its analytical gradients."""
  # Fit against explicit table values.
  x = flex.double((0.1, 0.2, 0.5))
  y = flex.double((3,2,1))
  sigmas = flex.double((0.04,0.02,0.01))
  gf = gaussian.fit(
    x, y, sigmas,
    gaussian.sum((1,2), (4,5)))
  assert approx_equal(gf.array_of_a(), (1,2))
  assert approx_equal(gf.array_of_b(), (4,5))
  assert approx_equal(gf.c(), 0)
  assert not gf.use_c()
  assert approx_equal(gf.table_x(), x)
  assert approx_equal(gf.table_y(), y)
  assert approx_equal(gf.table_sigmas(), sigmas)
  assert approx_equal(gf.fitted_values(),
    [2.8632482881537511, 2.4896052951221748, 0.94088903489182252])
  # Fit against a reference gaussian: table_y is the reference evaluated
  # at the table_x points.
  reference_gaussian = gaussian.sum((1,2,3), (4,5,6))
  gf = gaussian.fit(
    x, reference_gaussian, sigmas,
    gaussian.sum((1,2), (4,5)))
  assert approx_equal(gf.array_of_a(), (1,2))
  assert approx_equal(gf.array_of_b(), (4,5))
  assert approx_equal(gf.c(), 0)
  assert approx_equal(gf.table_x(), x)
  assert approx_equal(gf.table_y(), reference_gaussian.at_x(x))
  assert approx_equal(gf.table_sigmas(), sigmas)
  # sort() keeps the table data intact.
  assert isinstance(gf.sort(), gaussian.fit)
  assert gf.sort().table_x() == gf.table_x()
  assert gf.sort().table_y() == gf.table_y()
  assert gf.sort().table_sigmas() == gf.table_sigmas()
  assert approx_equal(gf.differences(), gf.at_x(x)-reference_gaussian.at_x(x))
  # 3-term fit against a 5-term reference: pinned differences and
  # significant_relative_errors.
  c_fit = gaussian.fit(
    flex.double([0.0, 0.066666666666666666, 0.13333333333333333,
                 0.2, 0.26666666666666666]),
    gaussian.sum(
      (2.657506, 1.078079, 1.490909, -4.2410698, 0.71379101),
      (14.780758, 0.776775, 42.086842, -0.000294, 0.239535),
      4.2979832),
    flex.double(5, 0.0005),
    gaussian.sum(
      (1.1423916, 4.1728425, 0.61716694),
      (0.50733125, 14.002512, 41.978928)))
  differences = flex.double([-0.064797341823577881, 0.003608505180995536,
    0.098159179757290715, 0.060724224581695019, -0.10766283796372011])
  assert approx_equal(c_fit.differences(), differences)
  assert approx_equal(c_fit.significant_relative_errors(),
    [0.0107212, 0.0005581, 0.0213236, 0.0169304, 0.0385142])
  # bound_flags/apply_shifts without a constant term. With the boolean
  # flag True, b shifts act on sqrt(b): new_b = (sqrt(b)+shift)^2.
  gf = gaussian.fit(
    x, reference_gaussian, flex.double(x.size(), 1),
    gaussian.sum((1,2), (4,5)))
  assert list(gf.bound_flags(False, False)) == [False,False,False,False]
  assert list(gf.bound_flags(True, False)) == [True,False,True,False]
  assert list(gf.bound_flags(False, True)) == [False,True,False,True]
  sgf = gf.apply_shifts(flex.double((3,-3,4,6)), True)
  assert approx_equal(sgf.array_of_a(), (1+3,2+4))
  assert approx_equal(sgf.array_of_b(),
    ((math.sqrt(4)-3)**2,(math.sqrt(5)+6)**2))
  assert approx_equal(sgf.c(), 0)
  assert not sgf.use_c()
  sgf = gf.apply_shifts(flex.double((3,-3,4,6)), False)
  assert approx_equal(sgf.array_of_a(), (1+3,2+4))
  assert approx_equal(sgf.array_of_b(), (4-3,5+6))
  assert approx_equal(sgf.c(), 0)
  assert not sgf.use_c()
  # Target function values and analytical gradients; sigmas are all 1 so
  # the use_sigmas flag must not change the result.
  differences = sgf.differences()
  for use_sigmas in [False, True]:
    assert approx_equal(sgf.target_function(2, use_sigmas, differences),
      25.0320634)
    assert approx_equal(sgf.target_function(4, use_sigmas, differences),
      256.2682575)
    assert approx_equal(
      sgf.gradients_d_abc(2, use_sigmas, differences),
      [15.6539271, -4.1090114, 10.4562306, -1.6376781])
  # Same exercises with a constant term (5 parameters).
  gfc = gaussian.fit(
    x, reference_gaussian, flex.double(x.size(), 1),
    gaussian.sum((1,2), (4,5), 6))
  assert list(gfc.bound_flags(False, False)) == [False,False,False,False,False]
  assert list(gfc.bound_flags(True, False)) == [True,False,True,False,True]
  assert list(gfc.bound_flags(False, True)) == [False,True,False,True,False]
  sgfc = gfc.apply_shifts(flex.double((3,-3,4,6,-5)), True)
  assert approx_equal(sgfc.array_of_a(), (1+3,2+4))
  assert approx_equal(sgfc.array_of_b(),
    ((math.sqrt(4)-3)**2,(math.sqrt(5)+6)**2))
  assert approx_equal(sgfc.c(), 6-5)
  assert sgfc.use_c()
  sgfc = gfc.apply_shifts(flex.double((3,-3,4,6,-5)), False)
  assert approx_equal(sgfc.array_of_a(), (1+3,2+4))
  assert approx_equal(sgfc.array_of_b(), (4-3,5+6))
  assert approx_equal(sgfc.c(), 6-5)
  assert sgfc.use_c()
  differences = sgfc.differences()
  for use_sigmas in [False, True]:
    assert approx_equal(sgfc.target_function(2, use_sigmas, differences),
      44.8181444)
    assert approx_equal(sgfc.target_function(4, use_sigmas, differences),
      757.3160329)
    assert approx_equal(
      sgfc.gradients_d_abc(2, use_sigmas, differences),
      [21.1132071, -6.0532695, 13.6638274, -2.2460994, 22.7860809])
  # gradients_d_abc and gradients_d_shifts on the 3-term fit.
  differences = c_fit.differences()
  gabc = c_fit.gradients_d_abc(2, False, differences)
  assert approx_equal(
    gabc,
    [-0.016525391425206391, 0.0074465239375589107, 0.020055876723667564,
     0.00054794635257838251, -0.018754011379726425, -0.0011194004809549143])
  assert approx_equal(
    c_fit.gradients_d_shifts(flex.double((0.1,0.4,0.2,0.5,0.3,0.6)), gabc),
    [-0.0165254, 0.01656512, 0.0200559, 0.0046488, -0.0187540, -0.0158487])
  # 5-term reference (WK1995 carbon coefficients); fit the 4 largest-|a|
  # terms, with and without a constant term.
  g5c = gaussian.sum(
    (2.657505989074707, 1.0780789852142334, 1.4909089803695679,
     -4.2410697937011719, 0.71379101276397705),
    (14.780757904052734, 0.77677500247955322, 42.086841583251953,
     -0.00029399999766610563, 0.23953500390052795),
    4.2979831695556641)
  for include_constant_term in (False, True):
    a = flex.double(g5c.array_of_a())
    b = flex.double(g5c.array_of_b())
    permutation = flex.sort_permutation(data=flex.abs(a), reverse=True)[:4]
    gf = gaussian.fit(
      flex.double([0]),
      g5c,
      flex.double(1, 1),
      gaussian.sum(
        iter(a.select(permutation)),
        iter(b.select(permutation)), 0, include_constant_term))
    assert approx_equal(gf.differences(), [-5.01177418232])
    shifts = flex.double(8,-1)
    if (include_constant_term): shifts.append(-.2)
    sgf = gf.apply_shifts(shifts, False)
    assert approx_equal(sgf.array_of_a(),
      [-5.2410698, 1.657506, 0.49090898, 0.078078985])
    assert approx_equal(sgf.array_of_b(),
      [-1.0002940, 13.780758, 41.086842, -0.223225])
    if (include_constant_term):
      assert approx_equal(sgf.c(), -.2)
    # At x == 0 the value is sum(a)+c, so d/da_i == 1 and d/db_i == 0.
    expected_gradients = [1,0,1,0,1,0,1,0]
    if (include_constant_term): expected_gradients.append(1)
    assert approx_equal(
      fit_finite_diff_gradients(sgf, 0),
      expected_gradients,
      eps=1.e-4)
    # Analytical vs finite-difference gradients over a grid of x values,
    # sigmas and target powers.
    for i in xrange(10):
      gf = gaussian.fit(
        flex.double([i / 10.]),
        g5c,
        flex.double(1, 1),
        sgf)
      differences = flex.double([0.5])
      assert approx_equal(
        gf.gradients_d_abc(2, False, differences),
        fit_finite_diff_gradients(gf, gf.table_x()[0]),
        eps=1.e-3)
      for sigma in [0.04,0.02,0.01]:
        gf = gaussian.fit(
          flex.double([i / 20.]),
          g5c,
          flex.double([sigma]),
          sgf)
        for power in [2,4]:
          for use_sigmas in [False, True]:
            differences = gf.differences()
            an=gf.gradients_d_abc(power, use_sigmas, differences)
            fi=fit_finite_diff_target_gradients(gf, power, use_sigmas)
            assert eps_eq(an, fi, eps=1.e-3)
# Tabulated carbon curve: flat list of alternating (s, y) pairs —
# presumably s = sin(theta)/lambda sampling points and y = scattering
# factor values (TODO confirm units); unzipped via [0::2]/[1::2] in
# carbon_fit.__init__.
carbon_s_y_table = [
  0.00, 6.000, 0.01, 5.990, 0.02, 5.958, 0.03, 5.907, 0.04, 5.837, 0.05, 5.749,
  0.06, 5.645, 0.07, 5.526, 0.08, 5.396, 0.09, 5.255, 0.10, 5.107, 0.11, 4.952,
  0.12, 4.794, 0.13, 4.633, 0.14, 4.472, 0.15, 4.311, 0.16, 4.153, 0.17, 3.998,
  0.18, 3.847, 0.19, 3.701, 0.20, 3.560, 0.22, 3.297, 0.24, 3.058, 0.25, 2.949,
  0.26, 2.846, 0.28, 2.658, 0.30, 2.494, 0.32, 2.351, 0.34, 2.227, 0.35, 2.171,
  0.36, 2.120, 0.38, 2.028, 0.40, 1.948, 0.42, 1.880, 0.44, 1.821, 0.45, 1.794,
  0.46, 1.770, 0.48, 1.725, 0.50, 1.685, 0.55, 1.603, 0.60, 1.537, 0.65, 1.479,
  0.70, 1.426, 0.80, 1.322, 0.90, 1.219, 1.00, 1.114, 1.10, 1.012, 1.20, 0.914,
  1.30, 0.822, 1.40, 0.736, 1.50, 0.659, 1.60, 0.588, 1.70, 0.525, 1.80, 0.468,
  1.90, 0.418, 2.00, 0.373, 2.50, 0.216, 3.00, 0.130, 3.50, 0.081, 4.00, 0.053,
  5.00, 0.025, 6.00, 0.013]
class tabulated_fit:
  """Container for one reference fit: the s-range limit over which it is
  valid and a flat coefficient list (interleaved a,b pairs, with an odd
  trailing value taken as the constant c by gaussian.sum)."""
  def __init__(self, limit, coefficients):
    # Upper bound of the s range covered by this fit.
    self.limit = limit
    # Flat list passed to gaussian.sum(flex.double(...)).
    self.coefficients = coefficients
# Reference Gaussian fits to the carbon curve with 6 down to 1 terms
# (interleaved a,b pairs, no constant).
carbon_fit_6 = tabulated_fit(6.0, [
  2.18188567686, 13.4533708328,
  1.77612377639, 32.5790123523,
  1.08772011297, 0.747293264573,
  0.641460989931, 0.251251498175,
  0.207885994451, 80.9799313275,
  0.105219184507, 0.0587297979816])
carbon_fit_5 = tabulated_fit(6.0, [
  2.65463431663, 14.7665037505,
  1.49420264709, 42.0409767208,
  1.05563210943, 0.780856499884,
  0.688021531597, 0.258963998784,
  0.104681246572, 0.0579465611728])
carbon_fit_4 = tabulated_fit(3.0, [
  2.21557580709, 12.7523000206,
  1.98306066831, 36.4905110196,
  1.31636728472, 0.632825354093,
  0.480812064621, 0.148079120135])
carbon_fit_3 = tabulated_fit(1.4, [
  2.51340127252, 31.8053433708,
  1.74867019409, 0.445605499982,
  1.72398202356, 10.5831679451])
carbon_fit_2 = tabulated_fit(0.5, [
  3.54355550695, 25.6239838191,
  2.42579673735, 1.50364460774])
carbon_fit_1 = tabulated_fit(0.15, [
  5.96792806111, 14.8957682987])
# Published coefficient sets: odd-length lists, so the trailing value is
# the constant term c (see the gaussian.sum(flex.double(...)) behavior
# exercised in exercise_sum).
carbon_it1992 = tabulated_fit(2.0, [
  2.31000, 20.8439,
  1.02000, 10.2075,
  1.58860, 0.568700,
  0.865000, 51.6512,
  0.215600])
carbon_wk1995 = tabulated_fit(6.0, [
  2.657506, 14.780758,
  1.078079, 0.776775,
  1.490909, 42.086842,
  -4.241070, -0.000294,
  0.713791, 0.239535,
  4.297983])
class carbon_fit(immoptibox_ports.test_function):
  """Least-squares refit of a tabulated carbon reference fit, driven by
  the immoptibox_ports test harness (which calls f / jacobian_analytical
  / hessian_analytical and checks them against finite differences)."""
  def __init__(self, tab_fit, perturb, verbose):
    self.tab_fit = tab_fit
    self.perturb = perturb
    self.verbose = verbose
    # Unzip the flat (s, y) table and keep only points within the fit's
    # validity range (with a small tolerance on the limit).
    carbon_ss = flex.double(carbon_s_y_table)[0::2]
    carbon_ys = flex.double(carbon_s_y_table)[1::2]
    selection = carbon_ss <= tab_fit.limit + 1.e-3
    self.fit = gaussian.fit(
      carbon_ss.select(selection),
      carbon_ys.select(selection),
      flex.double(selection.count(True), 1),
      gaussian.sum(flex.double(tab_fit.coefficients)))
    n = self.fit.n_parameters()
    # Finite-difference checking is skipped for 7- and 11-parameter
    # cases (only n <= 6 or n == 9 are checked).
    immoptibox_ports.test_function.__init__(self,
      m=self.fit.table_x().size(),
      n=n,
      check_with_finite_differences=(n <= 6 or n == 9),
      verbose=verbose)
  def initialization(self):
    # Start from the tabulated parameters; optionally perturb them by up
    # to 1% (seeded, hence reproducible) so the minimizer has work to do.
    self.x0 = self.fit.parameters()
    self.capital_f_x_star = 0.5*self.f(x=self.x0).norm()**2
    if (self.perturb):
      mersenne_twister = flex.mersenne_twister(seed=0)
      self.x0 *= 1 + mersenne_twister.random_double(
        size=self.x0.size(), factor=0.01)
    self.tau0 = 1e-8
    self.delta0 = 10
    # Exact minimizer unknown; only the residual is checked.
    self.x_star = None
  def label(self):
    return "carbon_fit(n=%d, perturb=%s)" % (
      self.fit.n_parameters(), str(self.perturb))
  def check_minimized_capital_f_x_star(self, f_x_star, tolerance=1.e-3):
    # The minimized residual may exceed the tabulated solution's residual
    # only when we started from a perturbed point, and then only by a
    # small absolute amount.
    capital_f_x_star = 0.5*f_x_star.norm()**2
    if (capital_f_x_star > self.capital_f_x_star):
      assert capital_f_x_star < tolerance, (
        capital_f_x_star, self.capital_f_x_star)
      if (self.verbose):
        # Python 2 print statements (consistent with run() below).
        print "  WARNING: minimization converged to larger residual", \
          "than original solution:"
        print "  original:", self.capital_f_x_star
      assert self.perturb
  def f(self, x):
    # Residual vector for parameter vector x.
    fit = gaussian.fit(
      self.fit.table_x(), self.fit.table_y(), self.fit.table_sigmas(),
      gaussian.sum(x))
    return fit.differences()
  def jacobian_analytical(self, x):
    fit = gaussian.fit(
      self.fit.table_x(), self.fit.table_y(), self.fit.table_sigmas(),
      gaussian.sum(x))
    return fit.least_squares_jacobian_abc()
  def hessian_analytical(self, x):
    # j is computed for parity with the harness interface; the hessian
    # comes from the fit object directly.
    j = self.jacobian_analytical(x=x)
    fit = gaussian.fit(
      self.fit.table_x(), self.fit.table_y(), self.fit.table_sigmas(),
      gaussian.sum(x))
    return fit.least_squares_hessian_abc_as_packed_u() \
      .matrix_packed_u_as_symmetric()
def exercise_fit_jacobian_and_hessian(verbose):
  """Run the carbon_fit harness for every tabulated reference fit, both
  unperturbed and with perturbed starting parameters."""
  reference_fits = [carbon_fit_1, carbon_fit_2, carbon_fit_3,
                    carbon_fit_4, carbon_fit_5, carbon_fit_6,
                    carbon_it1992, carbon_wk1995]
  for tab_fit in reference_fits:
    for perturb in (False, True):
      carbon_fit(tab_fit=tab_fit, perturb=perturb, verbose=verbose)
def run():
  # Run the full gaussian test suite; pass --verbose for per-fit output.
  exercise_term()
  exercise_sum()
  exercise_fit()
  exercise_fit_jacobian_and_hessian(verbose="--verbose" in sys.argv[1:])
  print format_cpu_times()  # Python 2 print statement
if (__name__ == "__main__"):
  run()
| 36.053004 | 79 | 0.653925 | 3,473 | 20,406 | 3.654477 | 0.131011 | 0.065868 | 0.100457 | 0.036874 | 0.540025 | 0.475654 | 0.413568 | 0.379609 | 0.326741 | 0.294595 | 0 | 0.16433 | 0.171567 | 20,406 | 565 | 80 | 36.116814 | 0.586454 | 0 | 0 | 0.330206 | 0 | 0 | 0.006518 | 0 | 0 | 0 | 0 | 0 | 0.255159 | 0 | null | null | 0 | 0.022514 | null | null | 0.005629 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46f4ca2022403d4568c5fcb36b8d0df73945b02b | 366 | py | Python | accounts/migrations/0005_auto_20210104_0129.py | julesc00/CRM1 | ec5955b2cb84e2bb7631bea7201bf6de1f8d8d4b | [
"MIT"
] | null | null | null | accounts/migrations/0005_auto_20210104_0129.py | julesc00/CRM1 | ec5955b2cb84e2bb7631bea7201bf6de1f8d8d4b | [
"MIT"
] | null | null | null | accounts/migrations/0005_auto_20210104_0129.py | julesc00/CRM1 | ec5955b2cb84e2bb7631bea7201bf6de1f8d8d4b | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2021-01-04 01:29
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0004_auto_20210103_1820'),
]
operations = [
migrations.RenameField(
model_name='order',
old_name='products',
new_name='product',
),
]
| 19.263158 | 48 | 0.587432 | 39 | 366 | 5.358974 | 0.820513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121094 | 0.300546 | 366 | 18 | 49 | 20.333333 | 0.695313 | 0.122951 | 0 | 0 | 1 | 0 | 0.159875 | 0.0721 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46fa92d6b3fc162164fdf17f192beafbb5b9a007 | 1,227 | py | Python | ppci/cli/yacc.py | jsdelivrbot/ppci-mirror | 67195d628275e2332ceaf44c9e13fc58d0877157 | [
"BSD-2-Clause"
] | null | null | null | ppci/cli/yacc.py | jsdelivrbot/ppci-mirror | 67195d628275e2332ceaf44c9e13fc58d0877157 | [
"BSD-2-Clause"
] | null | null | null | ppci/cli/yacc.py | jsdelivrbot/ppci-mirror | 67195d628275e2332ceaf44c9e13fc58d0877157 | [
"BSD-2-Clause"
] | null | null | null | """ Parser generator utility.
This script can generate a python script from a grammar description.
Invoke the script on a grammar specification file:
.. code::
$ ppci-yacc test.x -o test_parser.py
And use the generated parser by deriving a user class:
.. code::
import test_parser
class MyParser(test_parser.Parser):
pass
p = MyParser()
p.parse()
Alternatively you can load the parser on the fly:
.. code::
import yacc
parser_mod = yacc.load_as_module('mygrammar.x')
class MyParser(parser_mod.Parser):
pass
p = MyParser()
p.parse()
"""
import argparse
from .base import base_parser, LogSetup
from ..lang.tools.yacc import transform
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[base_parser])
parser.add_argument(
'source', type=argparse.FileType('r'), help='the parser specification')
parser.add_argument(
'-o', '--output', type=argparse.FileType('w'), required=True)
def yacc(args=None):
args = parser.parse_args(args)
with LogSetup(args):
transform(args.source, args.output)
args.output.close()
if __name__ == '__main__':
yacc()
| 20.45 | 75 | 0.691932 | 158 | 1,227 | 5.21519 | 0.468354 | 0.036408 | 0.026699 | 0.046117 | 0.06068 | 0.06068 | 0 | 0 | 0 | 0 | 0 | 0 | 0.197229 | 1,227 | 59 | 76 | 20.79661 | 0.836548 | 0.484923 | 0 | 0.111111 | 1 | 0 | 0.080257 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.166667 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
46fc1f3a2a61d15198e5a0cff38cbad84fddfcdc | 402 | py | Python | authors/apps/profiles/migrations/0022_auto_20190123_1211.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | [
"BSD-3-Clause"
] | null | null | null | authors/apps/profiles/migrations/0022_auto_20190123_1211.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | [
"BSD-3-Clause"
] | 26 | 2019-01-07T14:22:05.000Z | 2019-02-28T17:11:48.000Z | authors/apps/profiles/migrations/0022_auto_20190123_1211.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | [
"BSD-3-Clause"
] | 3 | 2019-09-19T22:16:09.000Z | 2019-10-16T21:16:16.000Z | # Generated by Django 2.1.4 on 2019-01-23 12:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profiles', '0021_auto_20190122_1723'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='bio',
field=models.TextField(blank=True, max_length=200),
),
]
| 21.157895 | 63 | 0.606965 | 44 | 402 | 5.431818 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117241 | 0.278607 | 402 | 18 | 64 | 22.333333 | 0.706897 | 0.11194 | 0 | 0 | 1 | 0 | 0.126761 | 0.064789 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2005c90121b8ada17b872ccdb477c07d716b48b8 | 444 | py | Python | megnet/tests/test_losses.py | abdalazizrashid/megnet | 8ad0fca246465bd57d66392f790c5310c610dfff | [
"BSD-3-Clause"
] | null | null | null | megnet/tests/test_losses.py | abdalazizrashid/megnet | 8ad0fca246465bd57d66392f790c5310c610dfff | [
"BSD-3-Clause"
] | null | null | null | megnet/tests/test_losses.py | abdalazizrashid/megnet | 8ad0fca246465bd57d66392f790c5310c610dfff | [
"BSD-3-Clause"
] | null | null | null | import unittest
import numpy as np
import tensorflow as tf
from megnet.losses import mean_squared_error_with_scale
class TestLosses(unittest.TestCase):
    """Unit tests for megnet loss functions."""

    def test_mse(self):
        """Scaled MSE must equal the plain MSE multiplied by the scale factor."""
        pred = np.array([0.1, 0.2, 0.3])
        target = np.array([0.05, 0.15, 0.25])
        expected = np.mean((pred - target) ** 2) * 100
        result = mean_squared_error_with_scale(pred, target, scale=100)
        self.assertAlmostEqual(result.numpy(), expected)
if __name__ == "__main__":
    # Run this module's tests when executed directly.
    unittest.main()
| 23.368421 | 73 | 0.657658 | 70 | 444 | 3.928571 | 0.542857 | 0.08 | 0.116364 | 0.145455 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 0.207207 | 444 | 18 | 74 | 24.666667 | 0.71875 | 0 | 0 | 0 | 0 | 0 | 0.018018 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.083333 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
200d13d0ad19224b088e6f4e7f46fd1116b6eb06 | 525 | py | Python | src/rust/iced-x86-py/src/iced_x86/CC_g.py | clayne/iced | dcd3db725b1137fec4d2bda9b17587cead49bf4d | [
"MIT"
] | 1,018 | 2018-09-07T20:12:43.000Z | 2021-01-17T18:41:10.000Z | src/rust/iced-x86-py/src/iced_x86/CC_g.py | clayne/iced | dcd3db725b1137fec4d2bda9b17587cead49bf4d | [
"MIT"
] | 127 | 2018-09-07T19:33:48.000Z | 2021-01-17T22:20:33.000Z | src/rust/iced-x86-py/src/iced_x86/CC_g.py | clayne/iced | dcd3db725b1137fec4d2bda9b17587cead49bf4d | [
"MIT"
] | 146 | 2018-09-09T12:38:30.000Z | 2021-01-18T23:37:11.000Z | # SPDX-License-Identifier: MIT
# Copyright (C) 2018-present iced project and contributors
# ⚠️This file was generated by GENERATOR!🦹♂️
# pylint: disable=invalid-name
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
"""
Mnemonic condition code selector (eg. ``JG`` / ``JNLE``)
"""
import typing
if typing.TYPE_CHECKING:
	# Static type checkers resolve the real extension type.
	from ._iced_x86_py import CC_g
else:
	# At runtime the selector values are plain ints.
	CC_g = int

G: CC_g = 0 # type: ignore
"""
``JG``, ``CMOVG``, ``SETG``
"""
NLE: CC_g = 1 # type: ignore
"""
``JNLE``, ``CMOVNLE``, ``SETNLE``
"""
| 18.75 | 58 | 0.655238 | 76 | 525 | 4.5 | 0.736842 | 0.035088 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017978 | 0.152381 | 525 | 27 | 59 | 19.444444 | 0.737079 | 0.577143 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
20108249c7501a803109aa38a4367c232811fb45 | 6,491 | py | Python | uis/horsy_package.py | horsy-ml/horsy | 1161df2e83c201784ea674bd1d53e76831b15a0f | [
"MIT"
] | null | null | null | uis/horsy_package.py | horsy-ml/horsy | 1161df2e83c201784ea674bd1d53e76831b15a0f | [
"MIT"
] | null | null | null | uis/horsy_package.py | horsy-ml/horsy | 1161df2e83c201784ea674bd1d53e76831b15a0f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'D:\RAZNOE\prgrming\horsy\Source\client\uis\horsy_package.ui'
#
# Created by: PyQt5 UI code generator 5.15.6
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated layout for the package-editing window.

    NOTE(review): this class is auto-generated (see header warning); edit the
    .ui file and regenerate rather than changing this code by hand.
    """
    def setupUi(self, MainWindow):
        """Create, position and style every widget on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(331, 433)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setStyleSheet("QWidget{\n"
"    background-color: rgb(30, 30, 30);\n"
"}\n"
"")
        self.centralwidget.setObjectName("centralwidget")
        self.packagename_box = QtWidgets.QLineEdit(self.centralwidget)
        self.packagename_box.setGeometry(QtCore.QRect(20, 20, 151, 31))
        self.packagename_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.packagename_box.setText("")
        self.packagename_box.setReadOnly(True)
        self.packagename_box.setObjectName("packagename_box")
        self.main_exe_box = QtWidgets.QLineEdit(self.centralwidget)
        self.main_exe_box.setGeometry(QtCore.QRect(20, 305, 291, 31))
        self.main_exe_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.main_exe_box.setObjectName("main_exe_box")
        self.source_url_box = QtWidgets.QLineEdit(self.centralwidget)
        self.source_url_box.setGeometry(QtCore.QRect(20, 200, 291, 31))
        self.source_url_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.source_url_box.setObjectName("source_url_box")
        self.url_of_exe_box = QtWidgets.QLineEdit(self.centralwidget)
        self.url_of_exe_box.setGeometry(QtCore.QRect(20, 165, 291, 31))
        self.url_of_exe_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.url_of_exe_box.setObjectName("url_of_exe_box")
        self.dependency_url_box = QtWidgets.QLineEdit(self.centralwidget)
        self.dependency_url_box.setGeometry(QtCore.QRect(20, 235, 291, 31))
        self.dependency_url_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.dependency_url_box.setObjectName("dependency_url_box")
        self.dependency_run_box = QtWidgets.QLineEdit(self.centralwidget)
        self.dependency_run_box.setGeometry(QtCore.QRect(20, 270, 291, 31))
        self.dependency_run_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.dependency_run_box.setObjectName("dependency_run_box")
        self.package_desc_box = QtWidgets.QTextBrowser(self.centralwidget)
        self.package_desc_box.setGeometry(QtCore.QRect(20, 60, 256, 101))
        self.package_desc_box.setStyleSheet("background-color: rgb(74, 76, 83);\n"
"border-radius: 5px; \n"
"color: rgb(242, 242, 242);")
        self.package_desc_box.setAcceptRichText(False)
        self.package_desc_box.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextEditable|QtCore.Qt.TextEditorInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
        self.package_desc_box.setObjectName("package_desc_box")
        self.update_button = QtWidgets.QPushButton(self.centralwidget)
        self.update_button.setEnabled(True)
        self.update_button.setGeometry(QtCore.QRect(20, 360, 291, 50))
        self.update_button.setMinimumSize(QtCore.QSize(0, 50))
        self.update_button.setStyleSheet("QPushButton {\n"
"    color: rgb(204, 204, 204);\n"
"    border-width: 1px;\n"
"    border-radius:6px;\n"
"    border-style: solid;\n"
"    background-color: rgb(28, 30, 33);\n"
"    border-color: rgb(66, 143, 225);\n"
"}\n"
"QPushButton:hover{\n"
"    border-width: 2px;\n"
"}\n"
"QPushButton:pressed{\n"
"    background-color: rgb(50, 60, 63);\n"
"}\n"
"QPushButton:disabled{\n"
"    border-width: 0px;\n"
"    background-color: rgb(92, 99, 109);\n"
"}")
        self.update_button.setObjectName("update_button")
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install all user-visible (translatable) strings."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "horsy - editing package"))
        self.packagename_box.setPlaceholderText(_translate("MainWindow", "Editing package"))
        self.main_exe_box.setPlaceholderText(_translate("MainWindow", "Main executable command (file.exe, python main.py, etc)"))
        self.source_url_box.setPlaceholderText(_translate("MainWindow", "Url of source (project on GitHub, source archive)"))
        self.url_of_exe_box.setPlaceholderText(_translate("MainWindow", "Url of executable (ends on .exe or .zip)"))
        self.dependency_url_box.setPlaceholderText(_translate("MainWindow", "Dependency URL (installer in .exe)"))
        self.dependency_run_box.setPlaceholderText(_translate("MainWindow", "Dependency run (run this during installation)"))
        self.package_desc_box.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
        self.package_desc_box.setPlaceholderText(_translate("MainWindow", "Package description. It should be a short text under 256 characters"))
        self.update_button.setText(_translate("MainWindow", "Update"))
if __name__ == "__main__":
    import sys
    # Launch the window standalone for quick manual inspection of the layout.
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
| 52.346774 | 278 | 0.706671 | 836 | 6,491 | 5.339713 | 0.287081 | 0.035842 | 0.044355 | 0.043011 | 0.301971 | 0.240143 | 0.183692 | 0.12948 | 0.12948 | 0.12948 | 0 | 0.050054 | 0.147435 | 6,491 | 123 | 279 | 52.772358 | 0.756596 | 0.049453 | 0 | 0.168224 | 1 | 0 | 0.299578 | 0.007303 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018692 | false | 0 | 0.018692 | 0 | 0.046729 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
201150abd59f44043c0cf22c47036ec2f4759cde | 871 | py | Python | day11/test_lib.py | heijp06/AoC-2021 | f6afead5e1fe9a839d608a5792f84e54803742c1 | [
"MIT"
] | null | null | null | day11/test_lib.py | heijp06/AoC-2021 | f6afead5e1fe9a839d608a5792f84e54803742c1 | [
"MIT"
] | null | null | null | day11/test_lib.py | heijp06/AoC-2021 | f6afead5e1fe9a839d608a5792f84e54803742c1 | [
"MIT"
] | null | null | null | import pytest
from lib import flashing_neighbours, part1, part2
def test_part1():
assert part1(data) == 1656
def test_part2():
assert part2(data) == 195
@pytest.mark.parametrize("steps", range(1, 3))
def test_part1_small(steps):
assert part1(small, steps=1) == 9
@pytest.mark.parametrize(("grid", "expected"), ((["98"], 2), (["988", 3])))
def test_part1_ripple(grid, expected):
assert part1(grid, 1) == expected
def test_octopus_only_flashes_once():
assert part1(["96", "08"], 1) == 2
def test_flashing_neighbours():
assert flashing_neighbours([[10, 9]], 0, 1) == 1
small = [
"11111",
"19991",
"19191",
"19991",
"11111"
]
data = [
"5483143223",
"2745854711",
"5264556173",
"6141336146",
"6357385478",
"4167524645",
"2176841721",
"6882881134",
"4846848554",
"5283751526"
]
| 17.078431 | 75 | 0.614237 | 102 | 871 | 5.107843 | 0.480392 | 0.080614 | 0.069098 | 0.049904 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.242733 | 0.210103 | 871 | 50 | 76 | 17.42 | 0.514535 | 0 | 0 | 0.057143 | 0 | 0 | 0.173364 | 0 | 0 | 0 | 0 | 0 | 0.171429 | 1 | 0.171429 | false | 0 | 0.057143 | 0 | 0.228571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2013ed9ff566c0c9215f3514a117ffdd2d27c869 | 529 | py | Python | src/Python/01_Interakcja_z_konsola/Zad7.py | djeada/Nauka-programowania | b1eb6840c15b830acf552f0a0fc5cc692759152f | [
"MIT"
] | 3 | 2020-09-19T21:38:30.000Z | 2022-03-30T11:02:26.000Z | src/Python/01_Interakcja_z_konsola/Zad7.py | djeada/Nauka-programowania | b1eb6840c15b830acf552f0a0fc5cc692759152f | [
"MIT"
] | null | null | null | src/Python/01_Interakcja_z_konsola/Zad7.py | djeada/Nauka-programowania | b1eb6840c15b830acf552f0a0fc5cc692759152f | [
"MIT"
] | 1 | 2022-02-04T09:13:20.000Z | 2022-02-04T09:13:20.000Z | if __name__ == "__main__":
"""
Pobierz podstawe i wysokosc trojkata i wypisz pole.
"""
print("podaj podstawe i wysokosc trojkata:")
a = int(input())
h = int(input())
print(
"pole trojkata o podstawie ", a, " i wysokosci ", h, " jest rowne ", a * h / 2
)
"""
Pobierz dlugosci bokow prostokata i wypisz pole.
"""
print("podaj dlogosci bokow prostokata:")
a = int(input())
b = int(input())
print("pole prostokata o bokach ", a, " i ", b, " jest rowne ", a * b)
| 20.346154 | 86 | 0.561437 | 67 | 529 | 4.313433 | 0.41791 | 0.110727 | 0.117647 | 0.17301 | 0.145329 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002667 | 0.291115 | 529 | 25 | 87 | 21.16 | 0.768 | 0 | 0 | 0.181818 | 0 | 0 | 0.411911 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.363636 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
201969c88f34f0fa220bab32fdd8cbaf6e2e16f3 | 2,689 | py | Python | ca_node/scripts/ranking_controller.py | hidmic/create_autonomy | 3aec14c9a6aa2d9a7b817d119bfb82b089e60219 | [
"BSD-3-Clause"
] | null | null | null | ca_node/scripts/ranking_controller.py | hidmic/create_autonomy | 3aec14c9a6aa2d9a7b817d119bfb82b089e60219 | [
"BSD-3-Clause"
] | 4 | 2019-10-24T17:19:50.000Z | 2020-02-20T01:06:27.000Z | ca_node/scripts/ranking_controller.py | hidmic/create_autonomy | 3aec14c9a6aa2d9a7b817d119bfb82b089e60219 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
import rospy
import threading
from ca_msgs.msg import Bumper
from geometry_msgs.msg import Twist, Vector3
class StateMachine(object):
    """Bumper-reaction state machine.

    Queues escape maneuvers in response to bumper events and publishes the
    corresponding velocity commands on ``/cmd_vel``.
    """

    def __init__(self):
        self.pub = rospy.Publisher("/cmd_vel", Twist, queue_size=10)
        # FIFO queue of {'goal': callable, 'velocity': float, 'duration': s}.
        self.goal_queue = []

    def rotate(self, ang_vel):
        """Spin in place at the given angular velocity (positive = left)."""
        self.move(0., ang_vel)

    def rotate_left(self, ang_vel):
        self.rotate(ang_vel)

    def rotate_right(self, ang_vel):
        self.rotate(-ang_vel)

    def set_goal(self, data):
        """Translate a Bumper message into queued escape maneuvers."""
        if data.is_left_pressed and data.is_right_pressed:
            # Head-on contact: back up longer, no turn.
            self.goal_queue.append({'goal': self.move_backward, 'velocity': 0.1, 'duration': 3.})
        elif data.is_left_pressed:
            # BUG FIX: this was `if`, so a both-pressed event also queued the
            # left-only maneuver on top of the head-on one.
            self.goal_queue.append({'goal': self.move_backward, 'velocity': 0.1, 'duration': 1.5})
            self.goal_queue.append({'goal': self.rotate_right, 'velocity': 0.3, 'duration': 2.})
        elif data.is_right_pressed:
            self.goal_queue.append({'goal': self.move_backward, 'velocity': 0.1, 'duration': 1.5})
            self.goal_queue.append({'goal': self.rotate_left, 'velocity': 0.3, 'duration': 2.})
        else:
            self.goal_queue.append({'goal': self.move_straight, 'velocity': 0.2, 'duration': 0.})

    def stop(self):
        self.move(0., 0.)

    def close(self):
        """Halt the robot and discard all pending goals."""
        self.stop()
        self.goal_queue = []

    def move(self, lin_vel, ang_vel):
        """Publish one Twist with the given linear and angular velocity."""
        msg = Twist()
        msg.linear.x = lin_vel
        msg.angular.z = ang_vel
        self.pub.publish(msg)

    def move_straight(self, lin_vel):
        self.move(lin_vel, 0.)

    def move_backward(self, lin_vel):
        self.move_straight(-lin_vel)

    def run(self):
        """One scheduling step: execute the next queued goal, else cruise."""
        if len(self.goal_queue) > 0:
            # BUG FIX: pop(0) preserves FIFO order; pop() executed goals in
            # reverse (e.g. rotating before backing away from the obstacle).
            goal = self.goal_queue.pop(0)
            end_time = rospy.Time.now().secs + goal.get('duration')
            while end_time > rospy.Time.now().secs:
                goal.get('goal')(goal.get('velocity'))
        else:
            # No pending goal: keep moving straight ahead.
            self.move_straight(0.2)
class RankingController():
    """ROS node that reacts to bumper presses by driving escape maneuvers."""
    def __init__(self):
        rospy.init_node("ranking_controller", log_level=rospy.INFO)
        self.sub = rospy.Subscriber("bumper", Bumper, self.callback)
        self.state_machine = StateMachine()
        self.rate = rospy.Rate(10)  # control-loop frequency in Hz
        rospy.on_shutdown(self.stop)
        # The control loop runs on a background thread; spin() then blocks
        # this thread to service subscriber callbacks until shutdown.
        threading.Thread(name="ranking_controller", target=self.run).start()
        rospy.spin()
    def callback(self, data):
        # Forward every Bumper message to the state machine as new goals.
        rospy.logdebug("{} {}".format(data.is_left_pressed, data.is_right_pressed))
        self.state_machine.set_goal(data)
    def stop(self):
        # Shutdown hook: halt the robot and clear pending goals.
        rospy.loginfo("Thread stopped.")
        self.state_machine.close()
    def run(self):
        """Control loop: step the state machine at the fixed rate until shutdown."""
        rospy.loginfo("Thread started.")
        while not rospy.is_shutdown():
            self.state_machine.run()
            self.rate.sleep()
if __name__ == "__main__":
    # Instantiate the node; the constructor blocks in rospy.spin().
    rc = RankingController()
| 29.549451 | 92 | 0.664559 | 387 | 2,689 | 4.405685 | 0.260982 | 0.046921 | 0.076246 | 0.066862 | 0.328446 | 0.253959 | 0.253959 | 0.235777 | 0.166569 | 0.166569 | 0 | 0.014579 | 0.183711 | 2,689 | 90 | 93 | 29.877778 | 0.762187 | 0.020454 | 0 | 0.173913 | 0 | 0 | 0.088593 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.057971 | 0 | 0.304348 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
201f3558e4dbdd368aeeee6e9f098d5308313493 | 235 | py | Python | music/filename.py | JohanLi/uncharted-waters-2-research | fe6d40a28baed38e894a301da85a80c89e7153fa | [
"MIT"
] | null | null | null | music/filename.py | JohanLi/uncharted-waters-2-research | fe6d40a28baed38e894a301da85a80c89e7153fa | [
"MIT"
] | null | null | null | music/filename.py | JohanLi/uncharted-waters-2-research | fe6d40a28baed38e894a301da85a80c89e7153fa | [
"MIT"
] | null | null | null | import os
path = './converted/'

# Normalise every file name in the directory: lowercase, spaces -> hyphens,
# and strip right single quotation marks.
for original_name in os.listdir(path):
    cleaned = original_name.lower().replace(' ', '-').replace('’', '')
    os.rename(path + original_name, path + cleaned.lower())

renamed = os.listdir(path)
print(renamed)
| 19.583333 | 69 | 0.642553 | 28 | 235 | 5.392857 | 0.5 | 0.119205 | 0.172185 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157447 | 235 | 11 | 70 | 21.363636 | 0.762626 | 0 | 0 | 0 | 0 | 0 | 0.06383 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0.142857 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2020fcd0b6330a2f620511b1f0629988385a2358 | 2,792 | py | Python | django_rest_resetpassword/tests.py | fasfoxcom/django-rest-resetpassword | b459c44f4ff426b190cb7303b32a23bf7b06b823 | [
"MIT"
] | 4 | 2020-01-14T14:25:57.000Z | 2021-03-21T10:51:48.000Z | django_rest_resetpassword/tests.py | fasfoxcom/django-rest-resetpassword | b459c44f4ff426b190cb7303b32a23bf7b06b823 | [
"MIT"
] | 3 | 2020-09-16T14:09:58.000Z | 2021-03-07T10:53:29.000Z | django_rest_resetpassword/tests.py | fasfoxcom/django-rest-resetpassword | b459c44f4ff426b190cb7303b32a23bf7b06b823 | [
"MIT"
] | 3 | 2020-04-07T10:11:39.000Z | 2022-03-07T04:25:33.000Z | from django.conf import settings
from django.contrib.auth.models import User
from django.urls import reverse
from rest_framework.test import APITestCase
class BaseAPITest(APITestCase):
    """Shared fixture: an authenticated user plus a helper to mint extras."""

    def setUp(self, password=None) -> None:
        """Create the primary test user and authenticate the test client."""
        primary = User(username="John Smith", email="john@example.com")
        primary.set_password("123")
        primary.save()
        self.user = primary
        self.client.force_authenticate(user=self.user)

    def user_factory(self, username="peter", email="peter@example.com", password="123"):
        """Persist and return an additional user with the given credentials."""
        extra = User(username=username, email=email, password=password)
        extra.save()
        return extra
class ResetPasswordAPITest(BaseAPITest):
    """Password-reset request behaviour under different lookup-field settings.

    NOTE(review): these tests assign attributes on the module-level ``settings``
    object without restoring them, so outcomes may depend on test order —
    confirm / consider ``override_settings``.
    """
    def test_request_password_with_no_settings(self):
        # make sure that if no setting, the default password request reset field is the email.
        user = self.user_factory()
        data = {"email": user.username}
        response = self.client.post(reverse("reset-password-request"), data=data)
        self.assertEqual(response.status_code, 400)
        data = {"email": user.email}
        response = self.client.post(reverse("reset-password-request"), data=data)
        self.assertEqual(response.status_code, 200)
        msg = "A password reset token has been sent to the provided email address"
        self.assertEqual(response.data["message"], msg)
    def test_request_password_with_django_rest_lookup_field_setting(self):
        # Make sure we can still use DJANGO_REST_LOOKUP_FIELD setting for backward compatibility.
        settings.DJANGO_REST_LOOKUP_FIELD = "username"
        user = self.user_factory()
        data = {"email": user.username}
        response = self.client.post(reverse("reset-password-request"), data=data)
        self.assertEqual(response.status_code, 200)
        msg = "A password reset token has been sent to the provided email address"
        self.assertEqual(response.data["message"], msg)
    def test_request_password_with_django_rest_lookup_fields_setting(self):
        # Make sure new users can use DJANGO_REST_LOOKUP_FIELDS setting.
        settings.DJANGO_REST_LOOKUP_FIELDS = ["email", "username"]
        user = self.user_factory()
        data = {"email": user.username}
        response = self.client.post(reverse("reset-password-request"), data=data)
        self.assertEqual(response.status_code, 200)
        msg = "A password reset token has been sent to the provided email address"
        self.assertEqual(response.data["message"], msg)
        data = {"email": user.email}
        response = self.client.post(reverse("reset-password-request"), data=data)
        self.assertEqual(response.status_code, 200)
        msg = "A password reset token has been sent to the provided email address"
        self.assertEqual(response.data["message"], msg)
| 47.322034 | 98 | 0.69735 | 355 | 2,792 | 5.357746 | 0.225352 | 0.070978 | 0.108833 | 0.057834 | 0.634069 | 0.577287 | 0.577287 | 0.577287 | 0.577287 | 0.577287 | 0 | 0.009388 | 0.198782 | 2,792 | 58 | 99 | 48.137931 | 0.840858 | 0.085244 | 0 | 0.543478 | 0 | 0 | 0.196863 | 0.043137 | 0 | 0 | 0 | 0 | 0.195652 | 1 | 0.108696 | false | 0.369565 | 0.086957 | 0 | 0.26087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
2020fead74782498dcbbc501d6a342b6a06a76e1 | 948 | py | Python | day2/netmiko_ex1rudy.py | rudy5rudy/pynet-ons-feb19 | 1fa0b30af35aaae73ced2f77c04ab1cb5f2ac5fc | [
"Apache-2.0"
] | null | null | null | day2/netmiko_ex1rudy.py | rudy5rudy/pynet-ons-feb19 | 1fa0b30af35aaae73ced2f77c04ab1cb5f2ac5fc | [
"Apache-2.0"
] | null | null | null | day2/netmiko_ex1rudy.py | rudy5rudy/pynet-ons-feb19 | 1fa0b30af35aaae73ced2f77c04ab1cb5f2ac5fc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""Exercises using Netmiko"""
from __future__ import print_function
from getpass import getpass
from netmiko import ConnectHandler
#def save_file(filename, show_run):
# """Save the show run to a file"""
# with open(filename, "w") as f:
# f.write(show_run)
def main():
    """Connect to cisco3, show version and config, and save the running config.

    Prompts for the SSH password, prints the device prompt plus the output of
    ``show ver`` and ``show run``, and writes the running config to
    ``cisc003.txt``. The SSH session is always closed on exit.
    """
    password = getpass()
    cisco3 = {
        "device_type": "cisco_ios",
        "host": "cisco3.lasthop.io",
        "username": "pyclass",
        "password": password,
    }
    netconnect = ConnectHandler(**cisco3)
    try:
        print(netconnect.find_prompt())
        output = netconnect.send_command("show ver")
        print(output)
        output = netconnect.send_command("show run")
        print(output)
        save_file("cisc003.txt", output)
    finally:
        # FIX: the SSH session was previously left open (resource leak).
        netconnect.disconnect()
#write the file
def save_file(filename, show_run):
    """Write *show_run* to *filename*, replacing any existing content."""
    with open(filename, "w") as out_file:
        out_file.write(show_run)
# NOTE(review): runs on import too — consider an `if __name__ == "__main__":` guard.
main()
| 21.066667 | 48 | 0.64135 | 120 | 948 | 4.916667 | 0.416667 | 0.083051 | 0.071186 | 0.064407 | 0.376271 | 0.271186 | 0.271186 | 0.271186 | 0.271186 | 0.271186 | 0 | 0.00813 | 0.221519 | 948 | 44 | 49 | 21.545455 | 0.791328 | 0.264768 | 0 | 0.090909 | 0 | 0 | 0.136296 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.136364 | 0.136364 | 0 | 0.227273 | 0.181818 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
20214e44beff67803045cc13f9f17cbaa929c06c | 326 | py | Python | tests/test_pytorch.py | szymonmaszke/torchtraining | 1ddf169325b7239d6d6686b20072a406b69a0180 | [
"MIT"
] | 3 | 2020-08-26T06:11:58.000Z | 2020-08-27T08:11:15.000Z | tests/test_pytorch.py | klaudiapalasz/torchtraining | 7ac54009eea2fd84aa635b6f3cbfe306f317d087 | [
"MIT"
] | 1 | 2020-08-25T19:19:43.000Z | 2020-08-25T19:19:43.000Z | tests/test_pytorch.py | klaudiapalasz/torchtraining | 7ac54009eea2fd84aa635b6f3cbfe306f317d087 | [
"MIT"
] | 1 | 2021-04-15T18:55:57.000Z | 2021-04-15T18:55:57.000Z | """Core pytorch operations regarding optimization (optimize, schedule) are placed in general tests."""
import pytest
import torch
import torchtraining.pytorch as P
def test_backward():
    """The Backward operation populates gradients on a leaf tensor."""
    op = P.Backward()
    leaf = torch.randn(10, requires_grad=True)
    loss = (leaf ** 2).sum()
    op(loss)
    assert leaf.grad is not None
| 25.076923 | 102 | 0.708589 | 46 | 326 | 4.978261 | 0.73913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011364 | 0.190184 | 326 | 12 | 103 | 27.166667 | 0.856061 | 0.294479 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
2037cfe78c4cb57f5b145bb1327426566cfe164f | 8,190 | py | Python | slack_sdk/scim/v1/user.py | priya1puresoftware/python-slack-sdk | 3503182feaaf4d41b57fd8bf10038ebc99f1f3c7 | [
"MIT"
] | 2,486 | 2016-11-03T14:31:43.000Z | 2020-10-26T23:07:44.000Z | slack_sdk/scim/v1/user.py | priya1puresoftware/python-slack-sdk | 3503182feaaf4d41b57fd8bf10038ebc99f1f3c7 | [
"MIT"
] | 721 | 2016-11-03T21:26:56.000Z | 2020-10-26T12:41:29.000Z | slack_sdk/scim/v1/user.py | priya1puresoftware/python-slack-sdk | 3503182feaaf4d41b57fd8bf10038ebc99f1f3c7 | [
"MIT"
] | 627 | 2016-11-02T19:04:19.000Z | 2020-10-25T19:21:13.000Z | from typing import Optional, Any, List, Dict, Union
from .default_arg import DefaultArg, NotGiven
from .internal_utils import _to_dict_without_not_given, _is_iterable
from .types import TypeAndValue
class UserAddress:
    """A postal address entry on a SCIM user record.

    Fields left as ``NotGiven`` are distinguishable from an explicit ``None``.
    """
    country: Union[Optional[str], DefaultArg]
    locality: Union[Optional[str], DefaultArg]
    postal_code: Union[Optional[str], DefaultArg]
    primary: Union[Optional[bool], DefaultArg]
    region: Union[Optional[str], DefaultArg]
    street_address: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        country: Union[Optional[str], DefaultArg] = NotGiven,
        locality: Union[Optional[str], DefaultArg] = NotGiven,
        postal_code: Union[Optional[str], DefaultArg] = NotGiven,
        primary: Union[Optional[bool], DefaultArg] = NotGiven,
        region: Union[Optional[str], DefaultArg] = NotGiven,
        street_address: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        """Keyword-only constructor; unrecognized kwargs go to ``unknown_fields``."""
        self.country = country
        self.locality = locality
        self.postal_code = postal_code
        self.primary = primary
        self.region = region
        self.street_address = street_address
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)
class UserEmail(TypeAndValue):
    """An email address entry; a plain ``type``/``value`` pair."""
    pass
class UserPhoneNumber(TypeAndValue):
    """A phone number entry; a plain ``type``/``value`` pair."""
    pass
class UserRole(TypeAndValue):
    """A role entry; a plain ``type``/``value`` pair."""
    pass
class UserGroup:
    """A group membership reference (display name plus value/id)."""
    display: Union[Optional[str], DefaultArg]
    value: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        display: Union[Optional[str], DefaultArg] = NotGiven,
        value: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        """Keyword-only constructor; unrecognized kwargs go to ``unknown_fields``."""
        self.display = display
        self.value = value
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)
class UserMeta:
    """Resource metadata (creation timestamp and resource location)."""
    created: Union[Optional[str], DefaultArg]
    location: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        # NOTE(review): unlike sibling models these parameters are not
        # keyword-only (no bare `*`) — confirm this is intentional.
        created: Union[Optional[str], DefaultArg] = NotGiven,
        location: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.created = created
        self.location = location
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)
class UserName:
    """Structured name (family and given name components)."""
    family_name: Union[Optional[str], DefaultArg]
    given_name: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        family_name: Union[Optional[str], DefaultArg] = NotGiven,
        given_name: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.family_name = family_name
        self.given_name = given_name
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)
class UserPhoto:
    """A photo entry (``type``/``value`` pair)."""
    type: Union[Optional[str], DefaultArg]  # shadows the builtin; kept for API compat
    value: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        type: Union[Optional[str], DefaultArg] = NotGiven,
        value: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.type = type
        self.value = value
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)
class User:
    """SCIM v1 ``User`` resource model.

    Fields left as ``NotGiven`` are distinguishable from an explicit ``None``,
    so partial payloads can be built. Nested dicts passed to the constructor
    are coerced into the matching model classes.
    """
    active: Union[Optional[bool], DefaultArg]
    addresses: Union[Optional[List[UserAddress]], DefaultArg]
    display_name: Union[Optional[str], DefaultArg]
    emails: Union[Optional[List[TypeAndValue]], DefaultArg]
    external_id: Union[Optional[str], DefaultArg]
    groups: Union[Optional[List[UserGroup]], DefaultArg]
    id: Union[Optional[str], DefaultArg]
    meta: Union[Optional[UserMeta], DefaultArg]
    name: Union[Optional[UserName], DefaultArg]
    nick_name: Union[Optional[str], DefaultArg]
    phone_numbers: Union[Optional[List[TypeAndValue]], DefaultArg]
    photos: Union[Optional[List[UserPhoto]], DefaultArg]
    profile_url: Union[Optional[str], DefaultArg]
    roles: Union[Optional[List[TypeAndValue]], DefaultArg]
    schemas: Union[Optional[List[str]], DefaultArg]
    timezone: Union[Optional[str], DefaultArg]
    title: Union[Optional[str], DefaultArg]
    user_name: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        active: Union[Optional[bool], DefaultArg] = NotGiven,
        addresses: Union[
            Optional[List[Union[UserAddress, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        display_name: Union[Optional[str], DefaultArg] = NotGiven,
        emails: Union[
            Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        external_id: Union[Optional[str], DefaultArg] = NotGiven,
        groups: Union[
            Optional[List[Union[UserGroup, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        id: Union[Optional[str], DefaultArg] = NotGiven,
        meta: Union[Optional[Union[UserMeta, Dict[str, Any]]], DefaultArg] = NotGiven,
        name: Union[Optional[Union[UserName, Dict[str, Any]]], DefaultArg] = NotGiven,
        nick_name: Union[Optional[str], DefaultArg] = NotGiven,
        phone_numbers: Union[
            Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        photos: Union[
            Optional[List[Union[UserPhoto, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        profile_url: Union[Optional[str], DefaultArg] = NotGiven,
        roles: Union[
            Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg
        ] = NotGiven,
        schemas: Union[Optional[List[str]], DefaultArg] = NotGiven,
        timezone: Union[Optional[str], DefaultArg] = NotGiven,
        title: Union[Optional[str], DefaultArg] = NotGiven,
        user_name: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        """Coerce nested dict values into model objects; store the rest as-is.

        Unrecognized keyword arguments are preserved in ``unknown_fields``.
        """
        self.active = active
        # For each list-valued field: wrap plain-dict elements in the model
        # class, but pass non-iterable values (e.g. None/NotGiven) through.
        self.addresses = (  # type: ignore
            [a if isinstance(a, UserAddress) else UserAddress(**a) for a in addresses]
            if _is_iterable(addresses)
            else addresses
        )
        self.display_name = display_name
        self.emails = (  # type: ignore
            [a if isinstance(a, TypeAndValue) else TypeAndValue(**a) for a in emails]
            if _is_iterable(emails)
            else emails
        )
        self.external_id = external_id
        self.groups = (  # type: ignore
            [a if isinstance(a, UserGroup) else UserGroup(**a) for a in groups]
            if _is_iterable(groups)
            else groups
        )
        self.id = id
        self.meta = (  # type: ignore
            UserMeta(**meta) if meta is not None and isinstance(meta, dict) else meta
        )
        self.name = (  # type: ignore
            UserName(**name) if name is not None and isinstance(name, dict) else name
        )
        self.nick_name = nick_name
        self.phone_numbers = (  # type: ignore
            [
                a if isinstance(a, TypeAndValue) else TypeAndValue(**a)
                for a in phone_numbers
            ]
            if _is_iterable(phone_numbers)
            else phone_numbers
        )
        self.photos = (  # type: ignore
            [a if isinstance(a, UserPhoto) else UserPhoto(**a) for a in photos]
            if _is_iterable(photos)
            else photos
        )
        self.profile_url = profile_url
        self.roles = (  # type: ignore
            [a if isinstance(a, TypeAndValue) else TypeAndValue(**a) for a in roles]
            if _is_iterable(roles)
            else roles
        )
        self.schemas = schemas
        self.timezone = timezone
        self.title = title
        self.user_name = user_name
        self.unknown_fields = kwargs

    def to_dict(self):
        # Serialize; NotGiven attributes are presumably skipped (per helper name).
        return _to_dict_without_not_given(self)

    def __repr__(self):
        return f"<slack_sdk.scim.{self.__class__.__name__}: {self.to_dict()}>"
| 33.842975 | 86 | 0.627961 | 900 | 8,190 | 5.548889 | 0.097778 | 0.1666 | 0.134561 | 0.218662 | 0.672407 | 0.468162 | 0.330396 | 0.324189 | 0.304966 | 0.27533 | 0 | 0 | 0.264713 | 8,190 | 241 | 87 | 33.983402 | 0.829293 | 0.012576 | 0 | 0.326923 | 0 | 0 | 0.007428 | 0.005199 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0.014423 | 0.019231 | 0.033654 | 0.341346 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2049a81f1692b22d6927802aa4aae5e254614b90 | 664 | py | Python | minilabs/test-hypothesis-by-simulating-statistics/m7_l1_tests/q2.py | ebaccay/inferentialthinking | 9f839c76062169b9de498c1e044f668e7517ee94 | [
"MIT"
] | 1 | 2022-02-24T20:32:17.000Z | 2022-02-24T20:32:17.000Z | minilabs/test-hypothesis-by-simulating-statistics/m7_l1_tests/q2.py | ebaccay/inferentialthinking | 9f839c76062169b9de498c1e044f668e7517ee94 | [
"MIT"
] | null | null | null | minilabs/test-hypothesis-by-simulating-statistics/m7_l1_tests/q2.py | ebaccay/inferentialthinking | 9f839c76062169b9de498c1e044f668e7517ee94 | [
"MIT"
] | 3 | 2021-03-04T06:44:47.000Z | 2021-05-05T06:00:33.000Z | test = {
"name": "q2",
"points": 1,
"hidden": True,
"suites": [
{
"cases": [
{
"code": r"""
>>> sample_population(test_results).num_rows
3000
""",
"hidden": False,
"locked": False,
},
{
"code": r"""
>>> "Test Result" in sample_population(test_results).labels
True
""",
"hidden": False,
"locked": False,
},
{
"code": r"""
>>> round(apply_statistic(test_results, "Village Number", np.average), 4)
8.1307
""",
"hidden": False,
"locked": False,
},
],
"scored": False,
"setup": "",
"teardown": "",
"type": "doctest"
},
]
} | 17.025641 | 78 | 0.460843 | 61 | 664 | 4.901639 | 0.606557 | 0.050167 | 0.170569 | 0.220736 | 0.180602 | 0.180602 | 0 | 0 | 0 | 0 | 0 | 0.026726 | 0.323795 | 664 | 39 | 79 | 17.025641 | 0.639198 | 0 | 0 | 0.307692 | 0 | 0 | 0.52782 | 0.169925 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2052abf9b427d7f9d0208d82b5b74f383c928ce5 | 455 | py | Python | inventory/admin.py | Riphiphip/website | dc5bf64f24d5cf78661686af0281705f4d1d2576 | [
"MIT"
] | null | null | null | inventory/admin.py | Riphiphip/website | dc5bf64f24d5cf78661686af0281705f4d1d2576 | [
"MIT"
] | null | null | null | inventory/admin.py | Riphiphip/website | dc5bf64f24d5cf78661686af0281705f4d1d2576 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Item
@admin.register(Item)
class ItemAdmin(admin.ModelAdmin):
    # Django admin configuration for Item.
    # Editable attributes are grouped under 'Item'; 'views' (presumably a
    # per-item view counter -- confirm against the model) sits under 'Meta'.
    fieldsets = [
        ('Item', {
            'fields': [
                'name',
                'stock',
                'description',
                'thumbnail'
            ]
        }),
        ('Meta', {
            'fields': [
                'views',
            ]
        }),
    ]
    # Admin list page allows searching items by name only.
    search_fields = [
        'name',
    ]
| 18.958333 | 34 | 0.382418 | 29 | 455 | 5.965517 | 0.689655 | 0.115607 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.487912 | 455 | 23 | 35 | 19.782609 | 0.742489 | 0 | 0 | 0.272727 | 0 | 0 | 0.127473 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.227273 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2053cab2dbdb69606035ee74c6e1b50faa72a65b | 14,806 | py | Python | a4plot/python/rooplot/stacks/stacks.py | a4/a4 | e1de89260cb3894908f1d01dfacea125abc79da9 | [
"BSL-1.0"
] | 4 | 2015-04-07T20:25:16.000Z | 2019-04-27T15:04:02.000Z | a4plot/python/rooplot/stacks/stacks.py | a4/a4 | e1de89260cb3894908f1d01dfacea125abc79da9 | [
"BSL-1.0"
] | null | null | null | a4plot/python/rooplot/stacks/stacks.py | a4/a4 | e1de89260cb3894908f1d01dfacea125abc79da9 | [
"BSL-1.0"
] | 1 | 2021-06-02T17:22:35.000Z | 2021-06-02T17:22:35.000Z | from ROOT import gROOT, gStyle, Double
from ROOT import TLegend, TLatex, TCanvas, THStack, TLine, TBox
from ROOT import kYellow, kBlack, kWhite, kRed, kWhite, kOrange
import os
import random
from colors import set_color_1D, set_color_2D, set_data_style, set_MCTotal_style, set_signal_style_1D
# Shared text sizing for all plots; the axis-title offsets scale inversely
# with tsize so labels keep their visual distance when tsize changes
# (stack_1D rebinds these via `global`).
tsize = 0.06
tyoffset = 1.1 * 0.06 / tsize
txoffset = 2.5 * 0.06 / tsize
lmargin = 0.14
def get_legend(data, sum_mc, list_mc, signals):
    # Build a two-column TLegend in the top-right corner; its height grows
    # with the number of entries (data + MC-total + backgrounds + signals).
    #legend = TLegend(0.2,0.65,0.4,0.94)
    llen = 1 + len(data) + len(list_mc) + len(signals)
    #mtop, mright, width, hinc = 0.01, 0.01, 0.38, 0.05
    #mtop, mright, width, hinc = 0.07, 0.25, 0.15, 0.01
    if tsize == 0.06:
        mtop, mright, width, hinc = 0.13, 0.07, 0.25, 0.6666*tsize
    else:
        mtop, mright, width, hinc = 0.13, 0.1, 0.3, 0.6666*tsize
    x1, y1, x2, y2 = 1.0 - mright - width, 1.0 - mtop, 1.0 - mright, 1.0 - mtop - hinc*llen
    print x1, y1, x2, y2
    legend = TLegend(x1, y1, x2, y2)
    legend.SetNColumns(2)
    legend.SetColumnSeparation(0.05)
    legend.SetBorderSize(0)
    legend.SetTextFont(42)
    legend.SetTextSize(tsize)
    legend.SetFillColor(0)
    legend.SetFillStyle(0)
    legend.SetLineColor(0)
    # Entry labels come from the histogram title's basename, with a
    # trailing '.root' stripped off.
    for d in data:
        legend.AddEntry(d, os.path.split(d.GetTitle())[1][:-5] if d.GetTitle()[-5:]=='.root' else os.path.split(d.GetTitle())[1], "p")
    if sum_mc:
        legend.AddEntry(sum_mc,"MC (stat)","flp") # <== NB: omit this entry for 2D histogram
    for h in list_mc: # sorted by initial XS
        legend.AddEntry(h, os.path.split(h.GetTitle())[1][:-5] if h.GetTitle()[-5:]=='.root' else os.path.split(h.GetTitle())[1],"f")
    for s in signals:
        legend.AddEntry(s, os.path.split(s.GetTitle())[1][:-5] if s.GetTitle()[-5:]=='.root' else os.path.split(s.GetTitle())[1],"l")
    return legend
#NB: [ATLAS Preliminary label for when plots are approved only:
def get_lumi_label(lumi="168 pb^{-1}",centermass="8", atlas=True, draft=True):
    # Draw the integrated-luminosity / sqrt(s) label, and optionally the
    # ATLAS "work in progress"/"preliminary" tag.  Returns the TLatex
    # objects so the caller can keep them alive (ROOT garbage-collects
    # unreferenced objects).
    x, y = lmargin + 0.03, (0.75 if atlas else 0.77)
    n = TLatex()
    n.SetNDC()
    n.SetTextFont(32)
    n.SetTextColor(kBlack)
    n.SetTextSize(tsize*1.25)
    n.DrawLatex(x, y,"#intL dt = %s, #sqrt{s} = %s TeV" % (lumi,centermass))
    #x, y = 0.21, 0.65
    x, y = 0.18, 0.85
    if not atlas:
        # No ATLAS tag requested: only the lumi label is drawn.
        return n, None
    l = TLatex()
    l.SetNDC()
    l.SetTextFont(42)
    l.SetTextColor(kBlack)
    if draft:
        l.DrawLatex(x,y,"#bf{#it{ATLAS work in progress}}")
    else:
        l.DrawLatex(x,y,"#bf{#it{ATLAS preliminary}}")
    return n, l
def create_mc_sum(mc_list, existing_mc_sum=None):
    # Build the total-MC "SM (stat)" histogram (hatched error band) plus a
    # line-only clone for the outline.  If an existing sum is supplied it
    # is styled and cloned instead of recomputed.
    if not mc_list:
        return None, None
    if existing_mc_sum:
        mc_sum = existing_mc_sum
    else:
        mc_sum = mc_list[0].Clone("mc_sum")
        mc_sum.SetDirectory(0)
        for h in mc_list[1:]:
            for b in xrange(1, h.GetXaxis().GetNbins()+1):
                # If there is negative weight in one channel, it should not
                # be subtracted from other channels
                if not (0 < h.GetBinContent(b)):
                    h.SetBinContent(b, 0.0)
                # Sometimes negative Errors occur - they play havoc with the
                # Display of error bands...
                if not (0 < h.GetBinError(b)):
                    h.SetBinError(b, 0.0)
            mc_sum.Add(h)
    mc_sum.SetMarkerSize(0)
    mc_sum.SetLineColor(kRed)
    mc_sum.SetFillColor(kOrange)
    mc_sum.SetFillStyle(3144)
    mc_sum_line = mc_sum.Clone("mc_sum_line")
    mc_sum_line.SetDirectory(0)
    mc_sum_line.SetFillStyle(0)
    mc_sum_line.SetFillColor(kWhite)
    #mc_sum.SetLineStyle(0)
    mc_sum.SetTitle("SM (stat)")
    return mc_sum_line, mc_sum
def create_cuts(cuts_left, cuts_right, ymin, ymax, w):
    """Draw vertical cut lines plus hatched exclusion bands.

    Returns the created ROOT objects so the caller can hold references
    (otherwise ROOT would delete them and the drawing would vanish).
    """
    artifacts = []
    band_w = 0.01*w
    # A red vertical line at every cut position, both directions.
    for x in cuts_left + cuts_right:
        line = TLine(x, ymin, x, ymax)
        line.SetLineColor(kRed)
        line.Draw()
        artifacts.append(line)
    #gStyle.SetHatchesSpacing(0.01)
    #gStyle.SetHatchesLineWidth(2)
    # Hatched band on the excluded (left) side of each left cut...
    for x in cuts_left:
        band = TBox(x, ymin, x - band_w, ymax)
        band.SetFillStyle(3345)
        band.SetFillColor(kRed)
        band.SetLineStyle(0)
        band.Draw()
        artifacts.append(band)
    # ...and on the excluded (right) side of each right cut.
    for x in cuts_right:
        band = TBox(x, ymin, x + band_w, ymax)
        band.SetFillStyle(3354)
        band.SetFillColor(kRed)
        band.SetLineStyle(0)
        band.Draw()
        artifacts.append(band)
    return artifacts
#-----------------
#Axis labels:
#y-axis labels: Entries / x Units (x = bin width, Units = e.g. GeV)
#x-axis labels: Quantity [Unit] (Quantity = e.g. M_{eff}, Units = e.g. GeV)
#----------------
#Other:
#no plot titles - histogram->SetTitle("");
#to change the maximum number of digits displayed - e.g. TGaxis::SetMaxDigits(3);
#Drawing 2D plots
#- Draw("box") for first MC (dijets)
#- then Draw("boxsame") for subsequent MC (W+jets)
#- Draw("psame") for data
def set_styles(data, mcs, signals):
    """Apply the shared style conventions to every histogram group."""
    for hist in data:
        set_data_style(hist)
    for hist in signals:
        set_signal_style_1D(hist)
    # Background colours are assigned by title and position in the list.
    for idx, hist in enumerate(mcs):
        set_color_1D(hist, hist.GetTitle(), idx)
from ROOT import gPad, kOrange, kRed
saver = []
def stack_1D(name, data, list_mc, signals, lumi="X",centermass="8", rebin=1, sum_mc=None, rebin_to=None, range=None, compare=False, sigma=False, log=False, prelim=False, cuts_left=(), cuts_right=()):
    # Draw a stacked data/MC/signal comparison on the current pad.
    # Optionally adds a lower ratio pad (compare=Data/MC, sigma=pull) and
    # vertical cut markers.  Returns (legend, mcstack, mc_sum, mc_sum_line,
    # save) -- all ROOT objects the caller must keep referenced.
    # NOTE(review): the `range` parameter shadows the builtin; kept for
    # interface compatibility.
    data = [h.Clone() for h in data]
    list_mc = [h.Clone() for h in list_mc]
    signals = [h.Clone() for h in signals]
    sum_mc = sum_mc.Clone() if sum_mc else sum_mc
    all_histos = list_mc + signals + data
    saver.extend(all_histos)
    saver.append(sum_mc)
    h = all_histos[0]
    xaxis = h.GetXaxis()
    b1, b2 = h.GetXaxis().GetFirst(), h.GetXaxis().GetLast()
    if range:
        x1, x2 = range
        # Nudge the upper edge down so FindBin lands in the last included bin.
        x2 -= 0.000001
        range = (x1, x2)
        b1, b2 = xaxis.FindBin(x1), xaxis.FindBin(x2)
    if rebin_to:
        # Derive a rebin factor that yields approximately rebin_to bins.
        nbins = xaxis.GetNbins()
        if range:
            nbins = b2 - b1 + 1
        rebin = int(round(nbins*1.0/rebin_to))
        if rebin < 1:
            rebin = 1
    if rebin != 1:
        for histo in all_histos:
            histo.Rebin(rebin)
    if True: # squash overflow bins
        # Fold under-/overflow into the first/last displayed bin so no
        # events disappear when a range is applied.
        e = Double()
        for histo in all_histos + [sum_mc] if sum_mc else []:
            c = histo.IntegralAndError(0, b1, e)
            histo.SetBinContent(b1, c)
            histo.SetBinError(b1, e)
            c = histo.IntegralAndError(b2, histo.GetNbinsX() + 1, e)
            histo.SetBinContent(b2, c)
            histo.SetBinError(b2, e)
    x1, x2 = h.GetXaxis().GetBinLowEdge(b1), h.GetXaxis().GetBinLowEdge(b2)
    # set up pads
    cpad = gPad.func()
    wh, ww = cpad.GetWh(), cpad.GetWw()
    pad_fraction = 0
    global tsize, tyoffset, txoffset
    if compare or sigma:
        # Two pads: main plot on top, ratio/pull pad below.
        tsize = 0.06 # was 0.06
        tyoffset = 1.1 * 0.06 / tsize
        txoffset = 2.5 * 0.06 / tsize
        pad_fraction = 0.3
        cpad.Divide(1, 2, 0.01, 0.01)
        cpad.cd(1).SetPad(0, pad_fraction, 1, 1.0)
        #cpad.cd(1).SetBottomMargin(0.15)
        cpad.cd(1).SetTopMargin(0.08)
        cpad.cd(1).SetBottomMargin(0.0)
        cpad.cd(1).SetLeftMargin(lmargin)
        cpad.cd(1).SetFillStyle(4000)
        #cpad.cd(1).SetGridx()
        #cpad.cd(1).SetGridy()
        if log:
            cpad.cd(1).SetLogy()
        cpad.cd(2).SetPad(0, 0.0, 1, pad_fraction+0.1)
        cpad.cd(2).SetGridx()
        cpad.cd(2).SetGridy()
        cpad.cd(2).SetFillStyle(4000)
        cpad.cd(2).SetTopMargin(0.25)
        cpad.cd(2).SetBottomMargin(0.4)
        cpad.cd(2).SetLeftMargin(lmargin)
        cpad.cd(1)
        down_pad_fraction = pad_fraction+0.1
    else:
        # Single pad layout.
        tsize = 0.04 # was 0.06
        tyoffset = 1.1 * 0.06 / tsize
        txoffset = 2.5 * 0.06 / tsize
        cpad.SetTopMargin(0.08)
        cpad.SetBottomMargin(0.16)
        cpad.SetLeftMargin(lmargin)
        if log:
            cpad.SetLogy()
    # sort backgrounds by integral; QCD is always drawn first (bottom).
    list_mc.sort(key=lambda h : h.Integral())
    list_mc.sort(key=lambda h : h.GetTitle() != "QCD")
    hsave, mcstack = None, None
    if list_mc:
        mc_sum_line, mc_sum = create_mc_sum(list_mc, sum_mc)
        all_histos.append(mc_sum)
        all_histos.append(mc_sum_line)
        # Create MC stack
        mcstack = THStack()
        for h in list_mc:
            mcstack.Add(h)
        #all_histos.append(mcstack)
    # set range
    if range:
        h = all_histos[0]
        xa = h.GetXaxis()
        original_size = xa.GetBinLowEdge(xa.GetFirst()), xa.GetBinUpEdge(xa.GetLast())
        for histo in all_histos:
            xaxis = histo.GetXaxis()
            xaxis.SetRangeUser(*range)
    # get min/max (with headroom for the legend; more on log plots)
    ymax = (max(h.GetMaximum() for h in all_histos) + 1) * (1.5 if not log else 100)
    ymin = max(1.0 if log else 0.01, min(h.GetMinimum() for h in all_histos))
    # unset range for mc (THStack applies its own range when drawn)
    if range:
        for histo in list_mc:
            xaxis = histo.GetXaxis()
            xaxis.SetRangeUser(*original_size)
    # Draw everything.  The first thing drawn owns the axes; stack first,
    # then the error band, signals, and data points on top.
    axis = None
    if list_mc:
        axis = mcstack
        mcstack.Draw("Hist")
        if range:
            mcstack.GetXaxis().SetRangeUser(*range)
        mc_sum.Draw("e2same")
        mc_sum_line.Draw("hist same")
    else:
        mc_sum = None
        mc_sum_line = None
    for signal in signals:
        if not list_mc and signal == signals[0]:
            axis = signal
            signal.Draw("hist")
        else:
            signal.Draw("hist same")
    for d in data:
        if not signals and not list_mc and d == data[0]:
            axis = d
            d.Draw("pe")
        else:
            d.Draw("pe same")
    # With a ratio pad the x labels live on the lower pad, so zero them here.
    comparefactor = 1
    if compare:
        comparefactor = 0
    pad_factor = 1.0/(1 - pad_fraction)
    axis.GetYaxis().SetLabelSize(tsize * pad_factor)
    axis.GetYaxis().SetTitleSize(tsize * pad_factor)
    axis.GetYaxis().SetTitleOffset(tyoffset / pad_factor)
    axis.GetXaxis().SetLabelSize(tsize * pad_factor * comparefactor)
    axis.GetXaxis().SetTitleSize(tsize * pad_factor * comparefactor)
    axis.GetXaxis().SetTitleOffset(comparefactor * tyoffset / pad_factor)
    legend = get_legend(data,mc_sum,list(reversed(list_mc)),signals)
    legend.Draw()
    save = []
    save.extend(create_cuts(cuts_left, cuts_right, 0 if log else ymin, ymax/(1.3 if not log else 50), x2-x1))
    # Try to fix the limits...
    axis.SetMaximum(ymax)
    axis.SetMinimum(ymin)
    dhist = mcstack if mcstack else [signals + data][0]
    lumiLabel, atlasLabel = get_lumi_label(lumi, centermass, atlas=prelim, draft=True)
    lumiLabel.Draw()
    if atlasLabel:
        atlasLabel.Draw()
    save.extend((atlasLabel, lumiLabel))
    if (compare or sigma) and mcstack:
        # Lower pad: Data/MC ratio (compare) or (Data-MC)/sigma pull (sigma).
        cpad.cd(2)
        # Create MC sum
        cdata = [d.Clone() for d in data]
        save.extend(cdata)
        for cd in cdata:
            cd.SetDirectory(0)
        cmc = mc_sum_line.Clone("mc_sum_zero")
        cmc2 = mc_sum_line.Clone("mc_sum_zero_line")
        cmc.SetFillColor(kOrange)
        cmc.SetFillStyle(2001)
        cmc2.SetLineColor(kRed)
        cmc2.SetFillStyle(0)
        cmc.SetDirectory(0)
        cmc2.SetDirectory(0)
        save.append(cmc)
        save.append(cmc2)
        Nbins = int(mcstack.GetXaxis().GetNbins())
        if sigma and cdata:
            # Pull: (data - MC) / sqrt(mc_err^2 + data_stat^2) per bin.
            for i in xrange(Nbins + 2):
                mc, mcerr = cmc.GetBinContent(i), cmc.GetBinError(i)
                for cd in cdata:
                    d, dstat = cd.GetBinContent(i), cd.GetBinError(i)
                    if dstat < 1:
                        dstat = 1
                    sf = (mcerr**2 + dstat**2)**0.5
                    if d > 0:
                        cd.SetBinContent(i, (d - mc)/sf)
                        cd.SetBinError(i, dstat/sf)
                    else:
                        pass # content and error are both already zero
                cmc.SetBinContent(i, 0.0)
                cmc.SetBinError(i, mcerr/sf)
                cmc2.SetBinContent(i, 0.0)
                cmc2.SetBinError(i, 0.0)
            #cmc2.GetYaxis().SetTitle("( Data - SM ) / #sigma_{stat,MC+Data} ")
            cmc2.GetYaxis().SetTitle("( Data - MC ) / #sigma_{stat}")
        else:
            # Ratio: divide each data bin by the MC sum for that bin.
            for i in xrange(Nbins + 2):
                sf = cmc.GetBinContent(i)
                if sf > 0:
                    cmc.SetBinError(i, cmc.GetBinError(i)/sf)
                    for cd in cdata:
                        cd.SetBinContent(i, cd.GetBinContent(i)/sf)
                        cd.SetBinError(i, cd.GetBinError(i)/sf)
                else:
                    cmc.SetBinError(i, 1.0)
                    for cd in cdata:
                        cd.SetBinContent(i, 0)
                        cd.SetBinError(i, 0)
                cmc.SetBinContent(i, 1.0)
                cmc2.SetBinContent(i, 1.0)
            cmc2.GetYaxis().SetTitle("Data / MC")
        #cmc2.GetXaxis().SetTitle("")
        if cdata:
            # Choose y-limits that contain all data points (clamped for ratios).
            mx = max(cd.GetBinContent(cd.GetMaximumBin())+0.2*cd.GetBinError(cd.GetMaximumBin()) for cd in cdata)
            #mn = min(cd.GetBinContent(cd.GetMinimumBin())-0.2*cd.GetBinError(cd.GetMinimumBin()) for cd in cdata)
            mn = mx
            for cd in cdata:
                minc, minbin = min([(cd.GetBinContent(i),i) for i in xrange(1, cd.GetNbinsX()+1) if cd.GetBinContent(i) > 0])
                mn = min(minc - 0.2*cd.GetBinError(minbin), mn)
            if compare:
                mx = min(max(1.3, mx), 2)
                mn = min(0.7, mn)
            for h in cdata + [cmc, cmc2]:
                h.SetMaximum(mx)
                h.SetMinimum(mn)
        cmc2.GetYaxis().SetNdivisions(5,0,0)
        cmc2.Draw("hist")
        cmc.Draw("e2 same")
        for cd in cdata:
            cd.Draw("pe same")
        sf = 1.0
        ysf = 0.7
        pad_factor = 1.0/down_pad_fraction
        cmc2.GetYaxis().SetLabelSize(tsize*pad_factor*sf*ysf)
        cmc2.GetYaxis().SetTitleSize(tsize*pad_factor*sf)
        cmc2.GetYaxis().SetTitleOffset(tyoffset / pad_factor / sf)
        cmc2.GetXaxis().SetLabelSize(tsize*pad_factor*sf)
        cmc2.GetXaxis().SetTitleSize(tsize*pad_factor*sf)
        cmc2.GetXaxis().SetTitleOffset(txoffset / pad_factor / sf)
        save.extend(create_cuts(cuts_left, cuts_right, mn, mx, x2-x1))
        cpad.cd()
    return legend, mcstack, mc_sum, mc_sum_line, save
def plot_1D(name, data, list_mc, signals, **kwargs):
    # Convenience wrapper: apply the standard styles, then build the stack
    # plot; kwargs are forwarded to stack_1D unchanged.
    set_styles(data, list_mc, signals)
    return stack_1D(name, data, list_mc, signals, **kwargs)
#All MC stacked in this order:
#- ttbar 1st
#- Z+jets 2nd
#- W+jets 3rd
#- dijets last
#(i.e. inversely by cross-section)
#If a separate signal sample is drawn - it should not be added to the stack, but instead drawn as a separate line (black and SetLineWidth(4)).
#-----------------
| 33.881007 | 199 | 0.575645 | 2,083 | 14,806 | 4.003361 | 0.181469 | 0.025183 | 0.01403 | 0.011512 | 0.253508 | 0.135028 | 0.094975 | 0.051205 | 0.035376 | 0.026022 | 0 | 0.043292 | 0.291706 | 14,806 | 436 | 200 | 33.958716 | 0.751883 | 0.119344 | 0 | 0.179104 | 0 | 0 | 0.022183 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.002985 | 0.020896 | null | null | 0.002985 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
205a30215e693e91361fba6a10043eebc790a8b7 | 278 | py | Python | web/setup.py | ISTU-Labs/pt-2271-2018 | 4b35f9265420604a6c0d83e5af83936674448185 | [
"Apache-2.0"
] | null | null | null | web/setup.py | ISTU-Labs/pt-2271-2018 | 4b35f9265420604a6c0d83e5af83936674448185 | [
"Apache-2.0"
] | null | null | null | web/setup.py | ISTU-Labs/pt-2271-2018 | 4b35f9265420604a6c0d83e5af83936674448185 | [
"Apache-2.0"
] | null | null | null | from setuptools import setup
requires = [
'pyramid',
'waitress',
'python-dateutil'
]
setup(name='hello',
install_requires=requires,
package_dir={'': "hello"},
entry_points="""\
[paste.app_factory]
main = hello:main
""",
)
| 16.352941 | 32 | 0.564748 | 26 | 278 | 5.884615 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.280576 | 278 | 16 | 33 | 17.375 | 0.765 | 0 | 0 | 0 | 0 | 0 | 0.352518 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
6447640aa52e97e82796d679333b4a3d179ae7bb | 24,311 | py | Python | ryu/ryu/app/Ryuretic/Ryuretic_Intf_v6.py | Ryuretic/RAP | 7b0e58af7d8a932770e3c7f7620024e16992b531 | [
"Apache-2.0"
] | 2 | 2019-09-16T17:52:31.000Z | 2021-06-24T17:45:01.000Z | ryu/ryu/app/Ryuretic/Ryuretic_Intf_v6.py | Ryuretic/RAP | 7b0e58af7d8a932770e3c7f7620024e16992b531 | [
"Apache-2.0"
] | null | null | null | ryu/ryu/app/Ryuretic/Ryuretic_Intf_v6.py | Ryuretic/RAP | 7b0e58af7d8a932770e3c7f7620024e16992b531 | [
"Apache-2.0"
] | 3 | 2019-09-23T07:21:40.000Z | 2021-03-03T13:24:25.000Z | #########################################################################
# Ryuretic: A Modular Framework for RYU #
# !/ryu/ryu/app/Ryuretic/Ryuretic_Intf.py #
# Authors: #
# Jacob Cox (jcox70@gatech.edu) #
# Sean Donovan (sdonovan@gatech.edu) #
# Ryuretic_Intf.py #
# date 28 April 2016 #
#########################################################################
# Copyright (C) 2016 Jacob Cox - All Rights Reserved #
# You may use, distribute and modify this code under the #
# terms of the Ryuretic license, provided this work is cited #
# in the work for which it is used. #
# For latest updates, please visit: #
# https://github.com/Ryuretic/RAP #
#########################################################################
"""How To Run This Program
1) Ensure you have Ryu installed.
2) Save the following files to /home/ubuntu/ryu/ryu/app/Ryuretic directory
a) Ryuretic_Intf.py
b) Ryuretic.py
c) Pkt_Parse13.py
d) switch_mod13.py
3) In your controller terminal type: cd ryu
4) Enter PYTHONPATH=. ./bin/ryu-manager ryu/app/Ryuretic/Ryuretic_Intf_v6.py
"""
#########################################################################
from Ryuretic import coupler
#################1 Import Needed Libraries 1###################
#[1] Import needed libraries here #
#########################################################################
import string, random
class Ryuretic_coupler(coupler):
    def __init__(self, *args, **kwargs):
        super(Ryuretic_coupler, self).__init__(*args, **kwargs)
        ############## 2 Add User Variables 2       ###################
        #[2] Add new global variables here.                             #
        #    Ex. ICMP_ECHO_REQUEST = 8, self.netView = {}               #
        #################################################################
        # Controller identity and the one NAT device allowed to decrement TTL
        # (see TTL_Check).
        self.cntrl = {'mac':'ca:ca:ca:ad:ad:ad','ip':'192.168.0.40','port':None}
        self.validNAT = {'mac':'aa:aa:aa:aa:aa:aa','ip':'192.168.0.224'}
        self.t_agentIP = '192.168.0.1'
        self.t_agent = {}       #Records TA parameter from respond_to_ping
        self.dns_tbl = {}       #Use to redirect DNS
        self.tcp_tbl = {}       #Use to redirect TCP
        self.port_mac_map = {}  #Used by multi-mac detector
        self.port_AV = {}       #Tracks per port Time-2-ack average
        self.tta = {}           #Tracks TCP handshake per (src,srcip,srcport,dstip)
        self.tcpConnCount = 0   #Future var for tracking total TCP connections
        self.policyTbl = {}     #Tracks policies applied to port/mac
        self.netView = {}       #Maps switch connections by port,mac,ip
        # Per-port / per-MAC / per-IP flag tables populated by flagHost.
        self.portTbl, self.macTbl, self.ipTbl = {},{},{}
        self.testIP = '0.0.0.0' #'192.168.0.22'  IP flagged for redirection tests
        #self.portTbl[9]='test'
        #self.macTbl['aa:aa:aa:aa:00:22'] = 'test'
        #self.ipTbl['192.168.0.22'] = 'test'
        #Assigns flag to MAC/Port
        self.keyID = 101        # next policyTbl key handed out by flagHost
        # NOTE(review): the two names below are unused locals here; they
        # look intended as class/module constants -- confirm before use.
        ICMP_ECHO_REPLY = 0
        ICMP_ECHO_REQUEST = 8
################ 3 Proactive Rule Sets 3 ###################
#[3] Insert proactive rules defined below. Follow format below #
# Options include drop or redirect, fwd is the default. #
#####################################################################
    def get_proactive_rules(self, dp, parser, ofproto):
        # Rules installed proactively when a switch connects; returning
        # (None, None) installs nothing.  Uncomment the lines below to
        # pre-install the honeypot rule set instead.
        return None, None
        #fields, ops = self.honeypot(dp, parser, ofproto)
        #return fields, ops
################# 4 Reactive Rule Sets 4 #####################
#[4] use below handles to direct packets to reactive user modules #
# defined in location #[5]. If no rule is added, then #
# the default self.default_Fields_Ops(pkt) must be used #
#####################################################################
# Determine highest priority fields and ops pair, if needed #
# xfields = [fields0, fields1, fields2] #
# xops = [ops0, ops1, ops2] #
# fields,ops = self._build_FldOps(xfields,xops) #
#####################################################################
    def handle_eth(self,pkt):
        # Plain Ethernet frames (no more specific handler): log and forward
        # with the default rule.
        print "Handle Ether: ", pkt['srcmac'],'->',pkt['dstmac']
        fields, ops = self.default_Field_Ops(pkt)
        self.install_field_ops(pkt,fields,ops)
#def handle_arp(self,pkt):
#print "-------------------------------------------------------------"
#print "Handle ARP: ",pkt['srcmac'],"->",pkt['dstmac']
#print "Handle ARP: ",pkt['srcip'],"->",pkt['dstip']
#fields, ops = self.respond_to_arp(pkt)
##Determin if mac or port has a status
##pkt_status = self.check_net_tbl(pkt['srcmac'],pkt['inport'])
##print pkt_status
#self.install_field_ops(pkt,fields,ops)
    def handle_arp(self,pkt):
        # ARP entry point: respond_to_arp (defined further down) builds the
        # reply fields/ops, which are then installed on the switch.
        print "-------------------------------------------------------------"
        print "Handle ARP: ",pkt['srcmac'],"->",pkt['dstmac']
        print "Handle ARP: ",pkt['srcip'],"->",pkt['dstip']
        fields, ops = self.respond_to_arp(pkt)
        self.install_field_ops(pkt,fields,ops)
    def handle_ip(self,pkt):
        # IP packets routed here by the coupler: forward with defaults.
        # Swap in TTL_Check (Lab 9) to drop suspected NAT/tether traffic.
        print "-------------------------------------------------------------"
        print "Handle IP"
        #fields, ops = self.TTL_Check(pkt)   #Lab 9
        fields, ops = self.default_Field_Ops(pkt)
        self.install_field_ops(pkt,fields,ops)
    def handle_icmp(self,pkt):
        # ICMP entry point: respond_to_ping builds the reply fields/ops
        # (per __init__, it also records the trusted agent's parameters).
        print "-------------------------------------------------------------"
        print "Handle ICMP: ",pkt['srcmac'],"->",pkt['dstmac']
        print "Handle ICMP: ",pkt['srcip'],"->",pkt['dstip']
        fields,ops = self.respond_to_ping(pkt)
        self.install_field_ops(pkt, fields, ops)
def handle_tcp(self,pkt):
#print "-------------------------------------------------------------"
#print "Handle TCP: ",pkt['srcmac'],"->",pkt['dstmac']
#print "Handle TCP: ",pkt['srcip'],"->",pkt['dstip']
#print "Handle TCP: ",pkt['srcport'],"->",pkt['dstport']
pkt_status = self.check_ip_tbl(pkt)
if pkt_status == 'test': #test src and dest
fields,ops = self.redirect_TCP(pkt)
elif pkt_status == 'deny':
fields,ops = self.redirect_TCP(pkt)
else:
#fields,ops = self.default_Field_Ops(pkt)
#fields,ops = self.test_TCP(pkt)
fields,ops = self.TTA_analysis(pkt)
self.install_field_ops(pkt, fields, ops)
def test_TCP(self,pkt):
fields,ops = self.default_Field_Ops(pkt)
if pkt['srcip'] == self.testIP:
print "IP detected: ", pkt['srcip']
self.flagHost(pkt,'test')
fields,ops=self.redirect_TCP(pkt)
return fields,ops
return fields,ops
    def redirect_TCP(self,pkt):
        # Transparently rewrite flagged TCP flows toward the trusted agent,
        # and rewrite the agent's replies back to look like the original
        # destination.  State lives in self.tcp_tbl keyed by (ip, port).
        print "Redirect_TCP: "
        print "pkt info: ", pkt['srcmac'],' ',pkt['dstmac'],' ',pkt['srcip'],' ',pkt['dstip']
        print pkt['srcport'],' ',pkt['dstport']
        #Uses ipTbl, tcp_tbl, and t_agent
        fields,ops = self.default_Field_Ops(pkt)
        if self.ipTbl.has_key(pkt['srcip']):
            # Outbound leg: flagged source -> remember the real destination
            # and point the flow at the trusted agent instead.
            if self.ipTbl[pkt['srcip']] in ['test','deny']:
                print "ipTbl Contents", self.ipTbl
                key = (pkt['srcip'],pkt['srcport'])
                print "Key is : ", key
                self.tcp_tbl[key] = {'dstip':pkt['dstip'],'dstmac':pkt['dstmac'],
                                     'dstport':pkt['dstport']}
                fields.update({'srcmac':pkt['srcmac'],'srcip':pkt['srcip']})
                fields.update({'dstmac':self.t_agent['mac'],'dstip':self.t_agent['ip']})
                #if pkt['dstport'] == 443:
                    #fields['dstport'] = 80
                ops = {'hard_t':None, 'idle_t':None, 'priority':100,\
                       'op':'mod', 'newport':self.t_agent['port']}
                print "TCP Table: ", self.tcp_tbl[key]
        elif self.ipTbl.has_key(pkt['dstip']):
            # Return leg: restore the saved destination as the apparent source.
            print "Returning to ", pkt['dstip']
            if self.ipTbl[pkt['dstip']] in ['test','deny']:
                print "ipTbl Contents", self.ipTbl
                key = (pkt['dstip'],pkt['dstport'])
                print "Key and table: ", key, ' ', self.tcp_tbl[key]
                fields.update({'srcmac':self.tcp_tbl[key]['dstmac'],
                               'srcip':self.tcp_tbl[key]['dstip']})
                #if self.tcp_tbl[key]['dstport'] == 443:
                    #fields.update({'srcport':443})
                fields.update({'dstmac':pkt['dstmac'], 'dstip':pkt['dstip']})
                ops = {'hard_t':None, 'idle_t':None, 'priority':100,\
                       'op':'mod', 'newport':None}
                #self.tcp_tbl.pop(key)
                #print "TCP Table: ", self.tcp_tbl
        return fields, ops
# Add flag to policyTbl, macTbl, portTbl
    def flagHost(self,pkt,flag):
        # Record `flag` (e.g. 'test'/'deny'/'norm') for the host's MAC,
        # ingress port and IP.  Non-'norm' flags also get a policyTbl entry
        # with a random passkey, and the trusted agent is notified.
        print 'Flag Host: ', pkt['srcmac'],'->',flag
        self.macTbl[pkt['srcmac']]={'stat':flag,'port':pkt['inport'],
                                    'ip':pkt['srcip']}
        self.portTbl[pkt['inport']]=flag
        self.ipTbl[pkt['srcip']] = flag
        if flag != 'norm':
            keyID = self.keyID
            self.keyID += 1
            #create passkey: 8 random ASCII letters.
            # NOTE(review): `random` is not cryptographically secure;
            # consider os.urandom/secrets if the passkey matters.
            passkey =''.join(random.choice(string.ascii_letters) for x in range(8))
            #update policy table
            self.policyTbl[keyID]={'inport':pkt['inport'],'srcmac':pkt['srcmac'],
                                   'ip':pkt['srcip'],'passkey':passkey,'stat':flag}
            #Notify trusted agent of newly flagged client
            self.update_TA(pkt, keyID, 'l') #load message'
def handle_udp(self,pkt):
print "-------------------------------------------------------------"
print "Handle UDP: ",pkt['srcmac'],"->",pkt['dstmac']
print "Handle UDP: ",pkt['srcip'],'->',pkt['dstip']
#Added to build MAC and port associations
pkt_status = self.check_ip_tbl(pkt)
if pkt_status == 'test': #test src and dest
fields,ops = self.redirect_DNS(pkt)
elif pkt_status == 'deny':
fields,ops = self.redirect_DNS(pkt)
else:
fields,ops = self.test_DNS(pkt)
self.install_field_ops(pkt, fields, ops)
def test_DNS(self,pkt):
print "Testing DNS"
fields,ops = self.default_Field_Ops(pkt)
if pkt['srcip'] == self.testIP:
print "IP detected: ", pkt['srcip']
self.flagHost(pkt,'test')
fields,ops=self.redirect_DNS(pkt)
return fields,ops
return fields,ops
    def redirect_DNS(self,pkt):
        # Rewrite a flagged host's DNS traffic toward the trusted agent and
        # restore the original resolver's identity on the way back.
        # State lives in self.dns_tbl keyed by (ip, port).
        print "Redirect_DNS: "
        #Uses macTbl, dns_tbl, and t_agent
        fields,ops = self.default_Field_Ops(pkt)
        if self.ipTbl.has_key(pkt['srcip']):
            # Outbound leg: remember the real resolver, send to the agent.
            if self.ipTbl[pkt['srcip']]== 'test':
                key = (pkt['srcip'],pkt['srcport'])
                print key
                self.dns_tbl[key] = {'dstip':pkt['dstip'],'dstmac':pkt['dstmac']}
                fields.update({'dstmac':self.t_agent['mac'],
                               'dstip':self.t_agent['ip']})
                fields.update({'srcmac':pkt['srcmac'],'srcip':pkt['srcip']})
                ops = {'hard_t':None, 'idle_t':None, 'priority':100,\
                       'op':'mod', 'newport':self.t_agent['port']}
        elif self.ipTbl.has_key(pkt['dstip']):
            # Return leg: make the reply appear to come from the real resolver.
            if self.ipTbl[pkt['dstip']]== 'test':
                key = (pkt['dstip'],pkt['dstport'])
                print key
                fields.update({'srcmac':self.dns_tbl[key]['dstmac'],
                               'srcip':self.dns_tbl[key]['dstip']})
                fields.update({'dstmac':pkt['dstmac'], 'dstip':pkt['dstip']})
                ops = {'hard_t':None, 'idle_t':None, 'priority':100,\
                       'op':'mod', 'newport':None}
                #self.dns_tbl.pop(key)
                #print "DNS Table: ", self.dns_tbl
        return fields, ops
#Check status of port and mac.
def check_ip_tbl(self,pkt):
#print "Check_ip_tbl:"
srcip,dstip = pkt['srcip'],pkt['dstip']
if self.ipTbl.has_key(srcip):
#print "Found: ", srcip,'->', self.ipTbl[srcip]
return self.ipTbl[srcip]
elif self.ipTbl.has_key(dstip):
#print "Found: ", dstip,'->', self.ipTbl[dstip]
return self.ipTbl[dstip]
else:
#print "Not Found: ", srcip, ', ', dstip
return 'No_Flag'
# All packets not defined above are handled here.
def handle_unk(self,pkt):
print "-------------------------------------------------------------"
print "Handle Uknown"
fields, ops = self.default_Field_Ops(pkt)
self.install_field_ops(pkt, fields, ops)
######################################################################
# The following are from the old NFG file.
def default_Field_Ops(self,pkt):
def _loadFields(pkt):
#keys specifies match fields for action. Default is
#inport and srcmac. ptype used for craft icmp, udp, etc.
fields = {'keys':['inport','srcmac'],'ptype':[], 'dp':pkt['dp'],
'ofproto':pkt['ofproto'], 'msg':pkt['msg'],
'inport':pkt['inport'], 'srcmac':pkt['srcmac'],
'ethtype':pkt['ethtype'], 'dstmac':None, 'srcip':None,
'proto':None, 'dstip':None, 'srcport':None, 'dstport':None,
'com':None, 'id':0}
return fields
def _loadOps():
#print "Loading ops"
#Specifies the timeouts, priority, operation and outport
#options for op: 'fwd','drop', 'mir', 'redir', 'craft'
ops = {'hard_t':None, 'idle_t':None, 'priority':10, \
'op':'fwd', 'newport':None}
return ops
#print "default Field_Ops called"
fields = _loadFields(pkt)
ops = _loadOps()
return fields, ops
######################################################################
############ 5 Ryuretic Network Application Modules 5 ##############
#[5] Add user created methods below. Examples are provided to assist #
# the user with basic python, dictionary, list, and function calls #
######################################################################
# Confirm mac has been seen before and no issues are recorded
def TTL_Check(self, pkt):
#initialize fields and ops with default settings
fields, ops = self.default_Field_Ops(pkt)
if pkt['srcmac'] != self.validNAT['mac']:
if pkt['ttl']==63 or pkt['ttl']==127:
print 'TTL Decrement Detected on ',pkt['srcmac'],' TTL is :',pkt['ttl']
fields, ops = self.add_drop_params(pkt,fields,ops)
else:
ops['idle_t'] = 5
print "Packet TTL: ", pkt['ttl'], ' ', pkt['srcip'],' ', \
pkt['inport'],' ', pkt['srcmac']
else:
ops['idle_t'] = 20
priority = 10
return fields, ops
def Multi_MAC_Checker(self, pkt):
fields, ops = self.default_Field_Ops(pkt)
print "*** Checking MAC ***"
#self.port_mac_map = {}
if self.port_mac_map.has_key(pkt['inport']):
if pkt['srcmac'] != self.port_mac_map[pkt['inport']]:
print " Multi-mac port detected "
fields, ops = self.add_drop_params(pkt,fields,ops)
else:
fields, ops = self.fwd_persist(pkt,fields,ops)
else:
self.port_mac_map[pkt['inport']] = pkt['srcmac']
return fields, ops
#change name to monitor_TCP for RAP
    def TTA_analysis(self,pkt):
        # Track TCP handshakes per flow and keep a per-port exponential
        # moving average of the SYN->ACK time ("time to ack", TTA).
        # `bits` is the TCP flags byte: 2=SYN, 18=SYN|ACK, 16=ACK,
        # 20=RST|ACK, 17=FIN|ACK, 24=PSH|ACK (standard TCP flag encoding).
        fields, ops = self.default_Field_Ops(pkt)
        bits = pkt['bits']
        dst, dstip, dstport = pkt['dstmac'], pkt['dstip'], pkt['dstport']
        src, srcip, srcport = pkt['srcmac'], pkt['srcip'], pkt['srcport']
        inport = pkt['inport']
        # Flow keys for the two directions of the same connection.
        send = (src,srcip,srcport,dstip)
        arrive = (dst,dstip,dstport,srcip)
        t_in = pkt['t_in']
        #print"*****\n"+self.tta+"/n******/n"+self.port_AV+"/n*****"
        if bits == 20:
            # RST|ACK: connection refused -- reset the handshake state.
            if self.tta.has_key(send):
                self.tta[send]['stage'] = 0
            elif self.tta.has_key(arrive):
                #print pkt
                self.tta[arrive]['stage'] = 0
            return fields, ops
        if bits == 2:
            # SYN: stage 1, remember the client's ingress port.
            if self.tta.has_key(send):
                self.tta[send].update({'inport':inport,'stage':1})
            else:
                self.tta.update({send:{'inport':inport,'stage':1}})
            return fields, ops
        if bits == 18:
            # SYN|ACK from the server: stage 2, timestamp it.
            if self.tta.has_key(arrive):
                if self.tta[arrive]['stage']==1:
                    self.tta[arrive].update({'syn':t_in,'stage':2})
            return fields,ops
        if bits == 16:
            # Final ACK: compute TTA and fold it into the per-port EMA
            # (9/10 old average + 1/10 new sample).
            if self.tta.has_key(send):
                if self.tta[send]['stage']==2:
                    tta = t_in - self.tta[send]['syn']
                    self.tta[send].update({'stage':3, 'ack':t_in, 'tta':tta})
                    #print '** Calc TTA :', tta
                    if self.port_AV.has_key(self.tta[send]['inport']):
                        portAV = ((self.port_AV[self.tta[send]['inport']] * \
                                   9) + tta)/10
                        self.port_AV[self.tta[send]['inport']] = portAV
                    else:
                        # Seed the average with a nominal 1 ms baseline.
                        portAV = ((0.001*9)+tta)/10
                        self.port_AV.update({self.tta[send]['inport']:portAV})
                    #print "****"
                    #print "Port and TTA: ", inport, self.tta[send]['tta']
                    print '****\nPort Averages: ', self.port_AV, '\n****'
                    #print "****"
                    del self.tta[send]
                    return fields, ops
            #print "Persist"
            # Plain data ACK: install a short-lived per-flow rule.
            fields, ops = self.tcp_persist(pkt,fields,ops)
            return fields, ops
        if bits == 24:
            # PSH|ACK (e.g. HTTP push): nothing to track.
            #print "HTTP Push"
            return fields, ops
        if bits == 17:
            # FIN|ACK: connection teardown -- drop any handshake state.
            print 'Port Averages: ', self.port_AV
            if self.tta.has_key(send):
                del self.tta[send]
            elif self.tta.has_key(arrive):
                del self.tta[arrive]
            return fields, ops
        print "Packet not addressed", bits, inport, src, dstip
        return fields, ops
# Call to temporarily install drop parameter for a packet to switch
def add_drop_params(self, pkt, fields, ops):
    """Configure a temporary, high-priority drop rule for this packet.

    The rule matches only on the ingress port and expires after 60s of
    inactivity, overriding normal forwarding rules in the meantime.
    """
    # Match solely on the port the offending packet arrived on.
    fields['keys'] = ['inport']
    fields['inport'] = pkt['inport']
    ops.update({'priority': 100, 'idle_t': 60, 'op': 'drop'})
    return fields, ops
# Call to temporarily install TCP flow connection on switch
def tcp_persist(self, pkt, fields, ops):
    """Install a short-lived flow entry matching this TCP connection."""
    # Match on the source-side identifiers so only this flow is persisted.
    fields['keys'] = ['inport', 'srcmac', 'srcip', 'ethtype', 'srcport']
    fields.update(srcport=pkt['srcport'], srcip=pkt['srcip'])
    # Short idle timeout: the entry expires 5s after the flow goes quiet.
    ops.update(idle_t=5, priority=10)
    return fields, ops
def fwd_persist(self, pkt, fields, ops):
    """Persist the caller-prepared forwarding rule for a brief period."""
    # Keep whatever match fields were set upstream; only timing/priority.
    ops.update(idle_t=3, priority=10)
    return fields, ops
def arp_persist(self, pkt):
    """Install a low-priority flow entry that briefly persists ARP traffic."""
    fields, ops = self.default_Field_Ops(pkt)
    # Match ARP flows by ingress port, source MAC and ethertype.
    fields['keys'] = ['inport', 'srcmac', 'ethtype']
    ops.update(idle_t=10, priority=2)
    return fields, ops
################################################################
"""
The following code is implemented to allow the trusted agent to communicate
with the controller and vice versa.
"""
################################################################
#Receive and respond to arp
def respond_to_arp(self,pkt):
    """Handle an incoming ARP request.

    Learns the source MAC/port pairing, and when the request targets the
    controller's IP, prepares a crafted ARP reply from the controller
    back out of the ingress port.

    Returns:
        (fields, ops); ops['op'] == 'craft' when a reply is to be sent.
    """
    print 'Respond to Arp:', pkt['srcmac'],'->',pkt['dstmac']
    print 'Respond to Arp:', pkt['srcip'],'->',pkt['dstip']
    fields, ops = self.default_Field_Ops(pkt)
    # Learn the MAC<->port association on first sight; status unknown.
    if not self.macTbl.has_key(pkt['srcmac']):
        self.macTbl[pkt['srcmac']] = {'port':pkt['inport'], 'stat':'unk'}
    if pkt['dstip'] == self.cntrl['ip']:
        print "Message to Controller"
        # Build the reply: swap src/dst so it returns to the requester.
        fields['keys']=['srcmac', 'srcip', 'ethtype', 'inport']
        fields['ptype'] = 'arp'
        fields['dstip'] = pkt['srcip']
        fields['srcip'] = self.cntrl['ip']
        fields['dstmac'] = pkt['srcmac']
        fields['srcmac'] = self.cntrl['mac']
        fields['ethtype'] = 0x0806  # ARP ethertype
        ops['op'] = 'craft'
        ops['newport'] = pkt['inport']
    return fields, ops
#Respond to ping. Forward or respond if to cntrl from trusted agent.
def respond_to_ping(self,pkt):
def get_fields(keyID):
srcmac = self.policyTbl[keyID]['srcmac']
inport = self.policyTbl[keyID]['inport']
srcip = self.policyTbl[keyID]['ip']
print inport, ', ', srcmac, ', ', srcip
return srcmac, inport, srcip
def remove_keyID(keyID):
print "Policy Table Contents: ", self.policyTbl
if self.policyTbl.has_key(keyID):
srcmac, inport, srcip = get_fields(keyID)
if self.macTbl.has_key(srcmac):
print "Removing MAC", srcmac
self.macTbl.pop(srcmac)
if self.portTbl.has_key(inport):
print "Removing Port", inport
self.portTbl.pop(inport)
if self.ipTbl.has_key(srcip):
print "Removing IP", srcip
self.ipTbl.pop(srcip)
self.policyTbl.pop(keyID)
print "Respond to Ping: ", pkt['srcmac'],'->',pkt['dstmac']
fields, ops = self.default_Field_Ops(pkt)
if pkt['dstip'] == self.cntrl['ip'] and pkt['srcip'] == self.t_agentIP:
#print'respond to ping'
rcvData = pkt['data'].data
#Actions {a-acknowledge, i-init, d-delete, r-result, v-verify}
#action, keyID = rcvData.split(',')
#keyID = keyID.rstrip(' \t\r\n\0')
print rcvData
try:
action, keyID, result = rcvData.split(',')
result = result.rstrip(' \t\r\n\0')
print "Received Result"
except:
action, keyID = rcvData.split(',')
print "Received Revocation."
keyID = keyID.rstrip(' \t\r\n\0')
print "Key ID Length: ", len(keyID)
keyID = int(keyID)
print "KeyID is ", keyID, ', ', type(keyID)
print "Action is ", action, "\n\n\n*********"
######################################################
if action == 'i':
self.t_agent = {'ip':pkt['srcip'],'mac':pkt['srcmac'],
'port':pkt['inport'],'msg':pkt['msg'],
'ofproto':pkt['ofproto'], 'dp':pkt['dp']}
print "T_AGENT Loaded"
elif action == 'd':
#Deleting flagged host policy
print "Removing (",keyID,") from Policy Table"
print "Existing Keys: ", self.policyTbl.keys()
remove_keyID(keyID)
elif action == 'r':
print "Validating result"
print "Key present?", self.policyTbl.has_key(keyID)
if self.policyTbl.has_key(keyID):
print "Test Result is: ", result
if result == 'P':
print "Removing keyID"
remove_keyID(keyID)
elif result =='F':
print "Flagging Host: ", self.policyTbl[keyID]['ip']
self.policyTbl[keyID]['stat'] = 'deny'
srcmac, inport, srcip = get_fields(keyID)
self.macTbl[srcmac].update({'stat':'deny'})
self.portTbl[inport],self.ipTbl[srcip] ='deny','deny'
self.update_TA(pkt, keyID,'e') #send edit message
#Notify TA of update_TA(self,pkt, keyID)
else:
print "An Error Occured"
elif action is 'u':
#This is more complicated it requires data not being stored
#may need to add fields to policyTable. Maybe not.
pass
elif action is 'a':
#Acknowledge receipt
pass
else:
print "No match"
fields.update({'srcmac':self.cntrl['mac'], 'dstmac':pkt['srcmac']})
fields.update({'srcip':self.cntrl['ip'], 'dstip':pkt['srcip']})
fields.update({'ptype':'icmp','ethtype':0x0800, 'proto':1})
fields['com'] = 'a,'+rcvData
ops.update({'op':'craft', 'newport':pkt['inport']})
return fields, ops
#Crafts tailored ICMP message for trusted agent
def update_TA(self,pkt, keyID, message):
    """Craft a tailored ICMP message telling the trusted agent about a
    policy-table change for `keyID`, and install it.

    Args:
        pkt: the triggering packet (passed through to install_field_ops).
        keyID: key into self.policyTbl for the affected host.
        message: one-letter action code placed first in the payload
            (e.g. 'e' for edit).
    """
    table = self.policyTbl[keyID]
    print 'Update Table: ', pkt['srcmac'],'->',keyID,'->',table['stat']
    print 'Update Table: ', table['srcmac'],'->',keyID,'->',table['stat']
    fields, ops = {},{}
    fields['keys'] = ['inport', 'srcip']
    # Address the ICMP packet from the controller to the trusted agent.
    fields.update({'dstip':self.t_agent['ip'], 'srcip':self.cntrl['ip']})
    fields.update({'dstmac':self.t_agent['mac'], 'srcmac':self.cntrl['mac']})
    fields.update({'dp':self.t_agent['dp'], 'msg':self.t_agent['msg']})
    fields.update({'inport':self.t_agent['port'],'ofproto':\
        self.t_agent['ofproto']})
    fields.update({'ptype':'icmp', 'ethtype':0x0800, 'proto':1, 'id':0})
    # CSV payload: action, host MAC, port, passkey, status, key id.
    fields['com'] = message+','+table['srcmac']+','+str(table['inport'])+\
        ','+str(table['passkey'])+','+table['stat']+\
        ','+str(keyID)
    ops = {'hard_t':None, 'idle_t':None, 'priority':0, \
        'op':'craft', 'newport':self.t_agent['port']}
    self.install_field_ops(pkt, fields, ops)
################################################################
"""
The following code controls the redirection of packets from their intended
destination to our trusted agent. This occurs when a port is flagged.
"""
################################################################
#TODO (unfinished note): create a method to inject a redirect whenever the sta4 IP address appears.
#Check status of port and mac.
def check_net_tbl(self,pkt):
mac, ip, port = pkt['srcmac'], pkt['srcip'], pkt['inport']
print "(536) Check NetTbl: ", mac, ' & ', port,'->',self.macTbl.keys()
if mac in self.macTbl.keys():
#print "Found: ", mac,'->', self.macTbl[mac]['stat']
return self.macTbl[mac]['stat']
elif port in self.portTbl.keys():
#print "Port ", port, " found in table."
return self.portTbl[port]
elif ip in self.ipTbl.keys():
#print "IP ", ip, " found in table."
return self.ipTbl[ip]
else:
#print "Not Found: ", mac
return 'new'
#Redirect ICMP packets to trusted agent
def Icmp_Redirect(self,pkt):
    """Redirect ICMP traffic from a flagged source to the trusted agent.

    Rewrites the destination MAC/IP to the trusted agent and installs a
    high-priority redirect rule with a 3-minute idle timeout.

    Returns:
        (fields, ops) flow-rule parameter dicts with ops['op'] == 'redir'.
    """
    print "Redirecting ICMP", pkt['srcmac'],'->',pkt['dstmac'],'||',self.t_agent['mac']
    fields, ops = self.default_Field_Ops(pkt)
    fields['keys'] = ['inport', 'ethtype']
    fields['dstmac'] = self.t_agent['mac']
    fields['dstip'] = self.t_agent['ip']
    fields['ethtype'] = pkt['ethtype']
    ops['op'] = 'redir'
    ops['newport'] = self.t_agent['port']
    ops['priority'] = 100
    ops['idle_t'] = 180
    return fields, ops
| 37.003044 | 87 | 0.569948 | 3,140 | 24,311 | 4.324522 | 0.146178 | 0.051035 | 0.03255 | 0.022093 | 0.393328 | 0.3012 | 0.24965 | 0.207747 | 0.173356 | 0.139922 | 0 | 0.01131 | 0.192629 | 24,311 | 656 | 88 | 37.059451 | 0.680507 | 0.238493 | 0 | 0.314904 | 0 | 0 | 0.201075 | 0.018624 | 0 | 0 | 0.001099 | 0 | 0 | 0 | null | null | 0.012019 | 0.004808 | null | null | 0.158654 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
64487297478b72747418471787c1d20d2191f34f | 510 | py | Python | vpn-proxy/app/migrations/0005_tunnel_protocol.py | dimrozakis/priv-net | 3eadea10c3b437ea82d8233579b31f60eaac51b1 | [
"Apache-2.0"
] | null | null | null | vpn-proxy/app/migrations/0005_tunnel_protocol.py | dimrozakis/priv-net | 3eadea10c3b437ea82d8233579b31f60eaac51b1 | [
"Apache-2.0"
] | null | null | null | vpn-proxy/app/migrations/0005_tunnel_protocol.py | dimrozakis/priv-net | 3eadea10c3b437ea82d8233579b31f60eaac51b1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-01 13:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the 'protocol' column to the
    # Tunnel model (transport protocol of the tunnel, defaulting to UDP).

    dependencies = [
        ('app', '0004_remove_forwarding_src_addr'),
    ]

    operations = [
        migrations.AddField(
            model_name='tunnel',
            name='protocol',
            field=models.CharField(choices=[('udp', 'UDP'), ('tcp', 'TCP')], default='udp', max_length=3),
        ),
    ]
| 24.285714 | 106 | 0.605882 | 58 | 510 | 5.137931 | 0.810345 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054545 | 0.245098 | 510 | 20 | 107 | 25.5 | 0.719481 | 0.131373 | 0 | 0 | 1 | 0 | 0.143182 | 0.070455 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
644fa6c7575de74309c593d18054afd49f392625 | 1,343 | py | Python | mhvdb2/models.py | kjnsn/mhvdb2 | ce3fc77f76ca32e2aaeff928b291cc45d041b68f | [
"MIT"
] | null | null | null | mhvdb2/models.py | kjnsn/mhvdb2 | ce3fc77f76ca32e2aaeff928b291cc45d041b68f | [
"MIT"
] | null | null | null | mhvdb2/models.py | kjnsn/mhvdb2 | ce3fc77f76ca32e2aaeff928b291cc45d041b68f | [
"MIT"
] | null | null | null | from mhvdb2 import database
from peewee import *
class BaseModel(Model):
    """Common base model binding every table to the shared database."""
    class Meta:
        # All subclasses store their tables in this peewee database.
        database = database
class Entity(BaseModel):
    """
    An Entity sends money to the organisation or receives money from the
    organisation. Members are a special type of entity.
    """
    is_member = BooleanField()  # Is the entity a member (past or present)
    name = CharField()
    email = CharField(null=True)  # Email is required for members
    phone = CharField(null=True)
    reminder_date = DateField(null=True)  # When to send reminder to member
    joined_date = DateField(null=True)  # Date the person first joined
    agreement_date = DateField(null=True)  # Date the person agreed to rules
class Payment(BaseModel):
    """
    A Payment is a transaction between an entity and the organisation. A
    payment can be either incoming or outgoing, depending on the sign of
    "amount".
    """
    time = DateTimeField()  # Date & time the payment occurred
    entity = ForeignKeyField(Entity, related_name='payments')
    amount = FloatField()  # Signed: positive/negative encode direction
    source = IntegerField(choices=[(0, 'Other'), (1, 'Bank Transfer')])
    is_donation = BooleanField()  # For members, donation vs payment for goods
    notes = TextField(null=True)
    bank_reference = CharField(null=True)  # For bank transfers
    pending = BooleanField()  # True until the payment is confirmed -- TODO confirm semantics
| 35.342105 | 78 | 0.696947 | 170 | 1,343 | 5.464706 | 0.5 | 0.06028 | 0.054898 | 0.067815 | 0.073197 | 0.073197 | 0.073197 | 0 | 0 | 0 | 0 | 0.00286 | 0.218913 | 1,343 | 37 | 79 | 36.297297 | 0.882745 | 0.392405 | 0 | 0 | 0 | 0 | 0.033943 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.954545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
6459d5e70633b4a25bd89627161b0973bbe59d67 | 3,382 | py | Python | run_w2v.py | hugochan/K-Competitive-Autoencoder-for-Text-Analytics | 5433de649028a4e021b8ad17cd0ec5da8c726031 | [
"BSD-3-Clause"
] | 133 | 2017-05-30T20:28:24.000Z | 2022-03-10T01:27:43.000Z | run_w2v.py | hugochan/K-Competitive-Autoencoder-for-Text-Analytics | 5433de649028a4e021b8ad17cd0ec5da8c726031 | [
"BSD-3-Clause"
] | 34 | 2017-09-04T08:04:50.000Z | 2022-02-10T01:12:17.000Z | run_w2v.py | hugochan/K-Competitive-Autoencoder-for-Text-Analytics | 5433de649028a4e021b8ad17cd0ec5da8c726031 | [
"BSD-3-Clause"
] | 49 | 2017-07-08T09:30:17.000Z | 2021-07-30T04:37:29.000Z | '''
Created on Jan, 2017
@author: hugo
'''
from __future__ import absolute_import
import argparse
from os import path
import timeit
import numpy as np
from autoencoder.baseline.word2vec import Word2Vec, save_w2v, load_w2v
from autoencoder.baseline.doc_word2vec import doc_word2vec
from autoencoder.utils.io_utils import load_json, dump_json, write_file
from autoencoder.preprocessing.preprocessing import load_corpus
# from autoencoder.datasets.reuters import CorpusIterReuters
from autoencoder.datasets.the20news import CorpusIter20News
# from autoencoder.datasets.movie_review_data import CorpusIterMRD
# from autoencoder.datasets.wiki10plus import CorpusIterWiki10plus
def train(args):
vocab = load_json(args.vocab)
# import pdb;pdb.set_trace()
# load corpus
corpus = CorpusIter20News(args.corpus[0], recursive=True, stem=True, with_docname=False)
# corpus = CorpusIterMRD(args.corpus[0], load_json(args.docnames), stem=True, with_docname=False)
# corpus = CorpusIterWiki10plus(args.corpus[0], load_json(args.docnames), stem=True, with_docname=False)
# corpus = CorpusIterReuters(args.corpus, load_json(args.docnames), with_docname=False)
# print len([1 for x in corpus])
corpus_iter = lambda: ([word for word in sentence if word in vocab] for sentence in corpus)
w2v = Word2Vec(args.n_dim, window=args.window_size, \
negative=args.negative, epoches=args.n_epoch)
start = timeit.default_timer()
w2v.train(corpus_iter)
print 'runtime: %ss' % (timeit.default_timer() - start)
save_w2v(w2v.model, args.save_model)
import pdb;pdb.set_trace()
def test(args):
    """Derive document codes from a trained word2vec model.

    Loads the preprocessed corpus and computes per-document vectors
    (averaged word vectors), writing them to args.output.

    NOTE(review): `revdict` is not imported in this file -- presumably a
    vocab-inverting helper; confirm it is in scope at runtime.
    """
    corpus = load_corpus(args.corpus[0])
    docs, vocab_dict = corpus['docs'], corpus['vocab']
    doc_codes = doc_word2vec(docs, revdict(vocab_dict), args.load_model, args.output, avg=True)
def main():
    """Parse command-line arguments and dispatch to train() or test()."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--train', action='store_true', help='train flag')
    parser.add_argument('--corpus', nargs='*', required=True, type=str, help='path to the corpus dir (in training phase) or file (in test phase)')
    parser.add_argument('-doc', '--docnames', type=str, help='path to the docnames file (in training phase)')
    parser.add_argument('--vocab', required=True, type=str, help='path to the vocab file')
    parser.add_argument('-ne', '--n_epoch', required=True, type=int, help='num of epoches')
    parser.add_argument('-nd', '--n_dim', type=int, help='num of dimensions')
    parser.add_argument('-ws', '--window_size', required=True, type=int, help='window size')
    parser.add_argument('-neg', '--negative', required=True, type=int, help='num of negative samples')
    parser.add_argument('-sm', '--save_model', type=str, default='w2v.mod', help='path to the output model')
    parser.add_argument('-lm', '--load_model', type=str, help='path to the trained model')
    parser.add_argument('-o', '--output', type=str, help='path to the output doc codes file')
    args = parser.parse_args()
    if args.train:
        # n_dim is only required when training a new model.
        if not args.n_dim:
            raise Exception('n_dim arg needed in training phase')
        train(args)
    else:
        # Inference requires a trained model and an output path.
        if not args.output:
            raise Exception('output arg needed in test phase')
        if not args.load_model:
            raise Exception('load_model arg needed in test phase')
        test(args)


if __name__ == '__main__':
    main()
| 44.5 | 146 | 0.715257 | 472 | 3,382 | 4.974576 | 0.275424 | 0.042164 | 0.079642 | 0.03322 | 0.202726 | 0.141397 | 0.103066 | 0.079216 | 0.051959 | 0.051959 | 0 | 0.011942 | 0.15819 | 3,382 | 75 | 147 | 45.093333 | 0.812785 | 0.160556 | 0 | 0 | 0 | 0 | 0.202873 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.215686 | null | null | 0.019608 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
645b1f549815ff06f8102522d4899632169c198c | 713 | py | Python | code/udls/datasets/sol_string.py | acids-ircam/lottery_mir | 1440d717d7fd688ac43c1a406602aaf2d5a3842d | [
"MIT"
] | 10 | 2020-07-29T23:12:15.000Z | 2022-03-23T16:27:43.000Z | code/udls/datasets/sol_string.py | acids-ircam/lottery_mir | 1440d717d7fd688ac43c1a406602aaf2d5a3842d | [
"MIT"
] | null | null | null | code/udls/datasets/sol_string.py | acids-ircam/lottery_mir | 1440d717d7fd688ac43c1a406602aaf2d5a3842d | [
"MIT"
] | 1 | 2022-02-06T11:42:28.000Z | 2022-02-06T11:42:28.000Z | from .. import DomainAdaptationDataset, SimpleDataset
SolV4folders = [
"/fast-2/datasets/Solv4_strings_wav/audio/Cello",
"/fast-2/datasets/Solv4_strings_wav/audio/Contrabass",
"/fast-2/datasets/Solv4_strings_wav/audio/Violin",
"/fast-2/datasets/Solv4_strings_wav/audio/Viola"
]
def Solv4Strings_DomainAdaptation(out_database_location, preprocess_function):
return DomainAdaptationDataset(out_database_location, SolV4folders,
preprocess_function, "*.wav", 1e11)
def Solv4Strings_Simple(out_database_location, preprocess_function):
return SimpleDataset(out_database_location, SolV4folders,
preprocess_function, "*.wav", 1e11)
| 37.526316 | 78 | 0.734923 | 73 | 713 | 6.876712 | 0.369863 | 0.039841 | 0.103586 | 0.143426 | 0.657371 | 0.657371 | 0.486056 | 0.223108 | 0 | 0 | 0 | 0.03204 | 0.168303 | 713 | 18 | 79 | 39.611111 | 0.814503 | 0 | 0 | 0.153846 | 0 | 0 | 0.280505 | 0.26648 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0.076923 | 0.153846 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
646208f0693f4cb46abcaf9ae8ce2a78afead206 | 2,205 | py | Python | loaner/deployments/lib/password.py | gng-demo/travisfix | 6d64de6dac44d89059eb92f76410fdcc2d41a247 | [
"Apache-2.0"
] | 175 | 2018-03-28T20:33:39.000Z | 2022-03-27T06:02:39.000Z | loaner/deployments/lib/password.py | gng-demo/travisfix | 6d64de6dac44d89059eb92f76410fdcc2d41a247 | [
"Apache-2.0"
] | 111 | 2018-05-22T18:50:59.000Z | 2022-01-23T23:11:15.000Z | loaner/deployments/lib/password.py | gng-demo/travisfix | 6d64de6dac44d89059eb92f76410fdcc2d41a247 | [
"Apache-2.0"
] | 70 | 2018-03-30T01:52:06.000Z | 2021-10-13T11:20:10.000Z | # Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This library provides a random password generator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
from absl import flags
from absl import logging
_MIN = 8
_MAX = 100
FLAGS = flags.FLAGS
flags.DEFINE_integer(
'password_length', _MAX,
'The length of the password to be generated for the Grab n Go Role Account.'
'\nNOTE: The length must be between 8 and 100 and must be compliant with '
'the G Suite Admin password settings.\nThe Security Settings can be found '
'in the Google Admin console: admin.google.com'
)
flags.register_validator(
'password_length', lambda length: length >= _MIN and length <= _MAX,
'Password length must be between {} and {} characters.'.format(_MIN, _MAX),
)
def generate(length):
"""Generates a new password of a given length.
Args:
length: int, the length of the password to generate.
Returns:
A random password of type string with the given length.
Raises:
ValueError: if the length provided is invalid.
"""
if length < _MIN or length > _MAX:
raise ValueError(
'password length must be between {!r} and {!r} characters length '
'provided was: {!r}'.format(_MIN, _MAX, length))
logging.debug('Generating a password with length: %r.', length)
chars = (
'abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789'
'!$%^&*()-_=+@:;~#,.<>? '
)
password = ''
rand = random.SystemRandom()
while len(password) < length:
password += rand.choice(chars)
return password
| 29.4 | 80 | 0.711111 | 300 | 2,205 | 5.126667 | 0.476667 | 0.039012 | 0.031209 | 0.037061 | 0.06632 | 0.031209 | 0 | 0 | 0 | 0 | 0 | 0.014723 | 0.199093 | 2,205 | 74 | 81 | 29.797297 | 0.856172 | 0.386848 | 0 | 0 | 0 | 0 | 0.420091 | 0.056317 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0.297297 | 0.162162 | 0 | 0.216216 | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
64628052cc79203f1662d5c3075c0ef300636aa0 | 732 | py | Python | debug/test_call.py | ccj5351/hmr_rgbd | d1dcf81d72c11e1f502f2c494cd86425f384d9cc | [
"MIT"
] | null | null | null | debug/test_call.py | ccj5351/hmr_rgbd | d1dcf81d72c11e1f502f2c494cd86425f384d9cc | [
"MIT"
] | 1 | 2020-12-09T07:29:00.000Z | 2020-12-09T07:29:00.000Z | debug/test_call.py | ccj5351/hmr_rgbd | d1dcf81d72c11e1f502f2c494cd86425f384d9cc | [
"MIT"
] | null | null | null | # !/usr/bin/env python3
# -*-coding:utf-8-*-
# @file: test_call.py
# @brief:
# @author: Changjiang Cai, ccai1@stevens.edu, caicj5351@gmail.com
# @version: 0.0.1
# @creation date: 09-07-2019
# @last modified: Tue 09 Jul 2019 07:09:07 PM EDT
class Stuff(object):
    """Small demo class exercising __call__ and __del__ special methods."""

    def __init__(self, x, y, rge):
        super(Stuff, self).__init__()
        self.x = x
        self.y = y
        self.range = rge

    def __call__(self, x, y):
        """Calling the instance like a function updates its coordinates."""
        self.x = x
        self.y = y
        # FIX: was a Python 2 print statement -- a syntax error under the
        # python3 shebang this file declares.
        print('__call__ with (%d,%d)' % (self.x, self.y))

    def __del__(self):
        # Explicitly drop attributes before the instance is reclaimed.
        del self.x
        del self.y
        del self.range
        print('delete all')
# Demo: build an instance, then invoke it twice via __call__.
if __name__ == "__main__":
    s = Stuff(1,2,3)
    print (s.x)
    s(7, 8)
    s(14, 10)
| 20.333333 | 65 | 0.546448 | 115 | 732 | 3.226087 | 0.530435 | 0.080863 | 0.048518 | 0.053908 | 0.06469 | 0.06469 | 0 | 0 | 0 | 0 | 0 | 0.075435 | 0.293716 | 732 | 35 | 66 | 20.914286 | 0.642166 | 0.304645 | 0 | 0.2 | 0 | 0 | 0.078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.15 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
6479a0308b3995ce53fc23a94f0f47e1afdd3615 | 3,846 | py | Python | usfm_references/__init__.py | anthonyraj/usfm-references | e0e2cc804545b029df55f9780c1361a8a2702f9c | [
"MIT"
] | null | null | null | usfm_references/__init__.py | anthonyraj/usfm-references | e0e2cc804545b029df55f9780c1361a8a2702f9c | [
"MIT"
] | null | null | null | usfm_references/__init__.py | anthonyraj/usfm-references | e0e2cc804545b029df55f9780c1361a8a2702f9c | [
"MIT"
] | null | null | null | """
USFM References Tools
"""
import re

__version__ = '1.1.0'

# Reference patterns. A USFM reference is BOOK.CHAPTER[.VERSE], where the
# chapter may carry a "_N" sub-chapter suffix or be an INTRO section.
ANY_REF = re.compile(r'^[1-9A-Z]{3}\.([0-9]{1,3}(_[0-9]+)?(\.[0-9]{1,3})?|INTRO\d+)$')
CHAPTER = re.compile(r'^[1-6A-Z]{3}\.[0-9]{1,3}(_[0-9]+)?$')
CHAPTER_OR_INTRO = re.compile(r'^[1-9A-Z]{3}\.([0-9]{1,3}(_[0-9]+)?|INTRO\d+)$')
SINGLE_CHAPTER_OR_VERSE = re.compile(r'^([A-Za-z]{3})\.([1-9]+\.{0,1}[1-9]*)$')  # not used below; kept for API compatibility
VERSE = re.compile(r'^[1-6A-Z]{3}\.[0-9]{1,3}(_[0-9]+)?\.[0-9]{1,3}$')

# Canonical 3-letter USFM book codes (protocanon plus deuterocanon/extras).
BOOKS = [
    'GEN', 'EXO', 'LEV', 'NUM', 'DEU', 'JOS', 'JDG', 'RUT', '1SA', '2SA', '1KI', '2KI', '1CH',
    '2CH', 'EZR', 'NEH', 'EST', 'JOB', 'PSA', 'PRO', 'ECC', 'SNG', 'ISA', 'JER', 'LAM', 'EZK',
    'DAN', 'HOS', 'JOL', 'AMO', 'OBA', 'JON', 'MIC', 'NAM', 'HAB', 'ZEP', 'HAG', 'ZEC', 'MAL',
    'MAT', 'MRK', 'LUK', 'JHN', 'ACT', 'ROM', '1CO', '2CO', 'GAL', 'EPH', 'PHP', 'COL', '1TH',
    '2TH', '1TI', '2TI', 'TIT', 'PHM', 'HEB', 'JAS', '1PE', '2PE', '1JN', '2JN', '3JN', 'JUD',
    'REV', 'TOB', 'JDT', 'ESG', 'WIS', 'SIR', 'BAR', 'LJE', 'S3Y', 'SUS', 'BEL', '1MA', '2MA',
    '3MA', '4MA', '1ES', '2ES', 'MAN', 'PS2', 'ODA', 'PSS', 'EZA', '5EZ', '6EZ', 'DAG', 'PS3',
    '2BA', 'LBA', '2MQ', '3MQ', 'REP', '4BA', 'LAO', 'LKA'
]


def _book_of(ref):
    """Return the book-code prefix of *ref* (text before the first period)."""
    return ref.split('.')[0]


def valid_chapter(ref):
    """
    Succeeds if the given string is a validly structured USFM Bible chapter reference.

    A valid, capitalized (English) book abbreviation,
    followed by a period (.) and a (chapter) number of any length,
    optionally followed by an underscore (_) and a (sub-chapter?) number of any length.
    """
    # Consistency fix: use the precompiled pattern's .match() like the
    # other validators (was re.match(CHAPTER, ref)).
    return bool(CHAPTER.match(ref)) and _book_of(ref) in BOOKS


def valid_chapter_or_intro(ref):
    """
    Succeeds if the given string is a validly structured USFM Bible chapter reference or an INTRO.

    A valid, capitalized (English) book abbreviation,
    followed by a period (.) and a (chapter) number of any length,
    optionally followed by an underscore (_) and a (sub-chapter?) number of any length.
    OR
    followed by a period (.) and INTRO, followed by a number.
    """
    return bool(CHAPTER_OR_INTRO.match(ref)) and _book_of(ref) in BOOKS


def valid_usfm(ref):
    """
    Succeeds if the given string is a validly structured USFM Bible reference.

    A valid, capitalized (English) book abbreviation,
    followed by a period (.) and a (chapter) number of any length,
    optionally followed by an underscore (_) and a (sub-chapter?) number of any length,
    optionally followed by a period (.) and a (verse) number of any length.
    """
    return bool(ANY_REF.match(ref)) and _book_of(ref) in BOOKS


def valid_verse(ref):
    """
    Succeeds if the given string is a validly structured USFM Bible verse reference.

    A valid, capitalized (English) book abbreviation,
    followed by a period (.) and a (chapter) number of any length,
    optionally followed by an underscore (_) and a (sub-chapter?) number of any length,
    followed by a period (.) and a (verse) number of any length.
    """
    # Consistency fix: use the precompiled pattern (was re.match(VERSE, ref)).
    return bool(VERSE.match(ref)) and _book_of(ref) in BOOKS


def valid_multi_usfm(ref, delimiter='+'):
    """
    Succeeds if the given string is a validly structured set of USFM Bible references.

    Each piece must satisfy valid_usfm(); pieces are separated by
    *delimiter* (plus sign by default).

    Example (James 1:1-5): JAS.1.1+JAS.1.2+JAS.1.3+JAS.1.4+JAS.1.5
    With a COMMA delimiter: JAS.1.1,JAS.1.2,JAS.1.3,JAS.1.4,JAS.1.5
    """
    # Idiom fix: `all(...)` generator instead of `any([not ...])` over a
    # materialised list -- same result, short-circuits, no temporary list.
    return all(valid_usfm(usfm) for usfm in ref.split(delimiter))
| 46.337349 | 99 | 0.606084 | 600 | 3,846 | 3.833333 | 0.296667 | 0.065217 | 0.062174 | 0.096087 | 0.675217 | 0.666522 | 0.666522 | 0.666522 | 0.656087 | 0.61087 | 0 | 0.036665 | 0.198648 | 3,846 | 82 | 100 | 46.902439 | 0.709604 | 0.50806 | 0 | 0 | 0 | 0.172414 | 0.311008 | 0.132207 | 0 | 0 | 0 | 0 | 0 | 1 | 0.172414 | false | 0 | 0.034483 | 0 | 0.413793 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.