text stringlengths 0 1.25M | meta stringlengths 47 1.89k |
|---|---|
"""Design Matrix Constructor Functions
This file contains the classes
This file can also be imported as a module and contains the following
classes:
* Equation
"""
# TODO: Consider making a param maps super class and have param map be a
# subclass that returns a Equation class when they're added together.
# Al... | {"hexsha": "5f190b92e69ae34d2aa5fbed8dc3589bbd15e4ca", "size": 9703, "ext": "py", "lang": "Python", "max_stars_repo_path": "lininvbox/lininvbox/equation.py", "max_stars_repo_name": "uofuseismo/YPMLRecalibration", "max_stars_repo_head_hexsha": "18a4231eb12775cf808d83d38a11cc02664b3e35", "max_stars_repo_licenses": ["MIT"... |
#
# This file is part of the ErlotinibGefitinib repository
# (https://github.com/DavAug/ErlotinibGefitinib/) which is released under the
# BSD 3-clause license. See accompanying LICENSE.md for copyright notice and
# full license details.
#
import unittest
import numpy as np
import pints
import pints.toy
import pkpd
... | {"hexsha": "fa5422f73469b3e6fbd2c6f772e209aa686a28b7", "size": 3036, "ext": "py", "lang": "Python", "max_stars_repo_path": "pkpd/tests/test_optimisation.py", "max_stars_repo_name": "DavAug/ErlotinibGefinitib", "max_stars_repo_head_hexsha": "f0f2a3918dfaeb360bd5c27e8502d070dbe87160", "max_stars_repo_licenses": ["BSD-3-C... |
#
#
#
#
import numpy
import Image
import struct
import argparse
#
parser = argparse.ArgumentParser()
parser.add_argument('src')
parser.add_argument('dst')
args = parser.parse_args()
#
def izdvoji_piksele(im, dst):
#
h = im.shape[0]
w = im.shape[1]
#
f = open(dst, 'wb')
tmp = [None]*w*h
for y in range(0, h):... | {"hexsha": "8f090ec97691f81ffdb184b3267370ab38ddef57", "size": 601, "ext": "py", "lang": "Python", "max_stars_repo_path": "prikazipiksele/izdvojipiksele.py", "max_stars_repo_name": "dariodsa/Information-Theory", "max_stars_repo_head_hexsha": "8102af59be9258159d480d3a079cb3d8938154f3", "max_stars_repo_licenses": ["MIT"]... |
function outn=gt(x,y)
precAndSize
for ii=1:max(ex,ey)
imag=false;
if ex==1
[xrval,xival]=getVals(x,1);
[yrval,yival]=getVals(y,ii);
elseif ey==1
[xrval,xival]=getVals(x,ii);
[yrval,yival]=getVals(y,1);
else
[xrval,xival]=getVals(x,ii);
[yrval,yival]=getVals(y,ii);
end
outn(ii)=mpfr_gt(precision,xr... | {"author": "opencobra", "repo": "cobratoolbox", "sha": "e60274d127f65d518535fd0814d20c53dc530f73", "save_path": "github-repos/MATLAB/opencobra-cobratoolbox", "path": "github-repos/MATLAB/opencobra-cobratoolbox/cobratoolbox-e60274d127f65d518535fd0814d20c53dc530f73/external/analysis/mptoolbox/@mp/gt.m"} |
"""
This code is for the visualization of the spike number generated by every neuron.
The spike numbers are loaded from the 'iteration_x' folder.
"""
"""
on 25th May
by xiaoquinNUDT
version 0.0
"""
"""
test: no
"""
"""
optimazation record:
"""
##------------------------------------------------------------------------... | {"hexsha": "c93005a933de2c0d1bd76e191c3fafbf55f06945", "size": 1601, "ext": "py", "lang": "Python", "max_stars_repo_path": "SNN simulation/evaluation/spike_counter_visualization.py", "max_stars_repo_name": "XiaoquinNUDT/Three-SNN-learning-algorithms-in-Brian2", "max_stars_repo_head_hexsha": "b7a5b0aba03172cdc04e738f02a... |
# Autogenerated wrapper script for PAPI_jll for armv7l-linux-musleabihf
export libpapi
JLLWrappers.@generate_wrapper_header("PAPI")
JLLWrappers.@declare_library_product(libpapi, "libpapi.so.6.0")
function __init__()
JLLWrappers.@generate_init_header()
JLLWrappers.@init_library_product(
libpapi,
... | {"hexsha": "f95f61ea33f9493160c829032a10304d0b18b19b", "size": 439, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/wrappers/armv7l-linux-musleabihf.jl", "max_stars_repo_name": "JuliaBinaryWrappers/PAPI_jll.jl", "max_stars_repo_head_hexsha": "810d606290f6669648c0051042b4dd66930ec217", "max_stars_repo_licenses... |
"""Compute class probabilities for raw IFCB data"""
import shutil
from collections import namedtuple
from configparser import ConfigParser
from pathlib import Path
import numpy as np
import torch
from torch.nn import functional as F
from torch.utils.data import DataLoader
from tqdm import tqdm
from sykepic.train.con... | {"hexsha": "e4b3799f560ce5541083bc600d4b3da070ae70d4", "size": 4825, "ext": "py", "lang": "Python", "max_stars_repo_path": "sykepic/compute/probability.py", "max_stars_repo_name": "veot/syke-pic", "max_stars_repo_head_hexsha": "c2bbf5f87b64348122fb7014ab4e19294ee90009", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
import numpy as np
from PyQt5.QtWidgets import (QAction, QLabel, QWidget, QVBoxLayout, QHBoxLayout, QMenu, QPushButton, QGridLayout, QApplication)
from PyQt5.QtCore import Qt
from PyQt5.QtGui import (QImage, QPixmap)
from PyQt5.QtTest import QTest
import utils
import time
class Application(QWidget):
def __init_... | {"hexsha": "86759170186dc98885cb5ba51042173eda516d4e", "size": 12498, "ext": "py", "lang": "Python", "max_stars_repo_path": "deocc_app.py", "max_stars_repo_name": "XiaohangZhan/deocclusion-demo", "max_stars_repo_head_hexsha": "c8da2e914a8106599fbd998af7f052117091fe97", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
import matplotlib.pyplot as plt
import numpy as np
__all__ = [
'ImgWatcher', 'SImgWatcher'
]
try:
from IPython import display
except ImportError:
display = None
class ImgWatcher(object):
def __init__(self,
n_rows=3, img_size=(128, 128), cmap1=plt.cm.gray_r, cmap2=plt.cm.gray_r, fig_size=3,
... | {"hexsha": "0390d074ab92039e7c94aa4f970f5ee7af468623", "size": 2714, "ext": "py", "lang": "Python", "max_stars_repo_path": "craynn/viz/img_watcher.py", "max_stars_repo_name": "maxim-borisyak/craynn", "max_stars_repo_head_hexsha": "fceabd33f5969033fb3605f894778c42c42f3e08", "max_stars_repo_licenses": ["MIT"], "max_stars... |
"""
Routines for working with rotation matrices
"""
"""
comment
author : Thomas Haslwanter
date : April-2018
"""
import numpy as np
import sympy
from collections import namedtuple
# The following construct is required since I want to run the module as a script
# inside the skinematics-directory
import os
import ... | {"hexsha": "97c0ac741fae0e768e4ec47d68440dce2b4e7105", "size": 19311, "ext": "py", "lang": "Python", "max_stars_repo_path": "skinematics/rotmat.py", "max_stars_repo_name": "stes/scikit-kinematics", "max_stars_repo_head_hexsha": "1a4d7212c8fff93428cb1d56ac6d77faa32e6bc5", "max_stars_repo_licenses": ["BSD-3-Clause"], "ma... |
\section*{Abkürzungsverzeichnis}
\begin{acronym}[HTTP]
\acro{ACM}{Association for Computing Machinery}
\acro{API}{Application Programming Interface}
\acro{AV}{Autonomous Vehicles}
\acro{A}{Artikel}
\acro{B}{Blockchain}
\acro{CEA}{Cognitive Expert Advisors}
\acro{CH}{Connected Home}
\acro{CSV}{Comma-Seperated Va... | {"hexsha": "67c59da1736f530f3173908dced8f7a5ece3ec4b", "size": 1011, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "Thesis/main/y_acronyms.tex", "max_stars_repo_name": "sako3334/fom", "max_stars_repo_head_hexsha": "5138a3ad67137bfd67927858f729b6bf1728eb0d", "max_stars_repo_licenses": ["MIT"], "max_stars_count": n... |
module Ch01.Broadcast where
import Numeric.LinearAlgebra
as = matrix 2 [1,2,3,4]
bs = matrix 2 [10,20]
r = as * bs
xs :: Matrix Int
xs = (3><2) [51,55,14,19,0,4]
| {"hexsha": "95c4f5e9a5b7236964ce12effc01990d87217268", "size": 166, "ext": "hs", "lang": "Haskell", "max_stars_repo_path": "src/Ch01/Bloadcast.hs", "max_stars_repo_name": "knih/deep-learning-from-scratch-in-haskell", "max_stars_repo_head_hexsha": "1a92a98a958744da2a04d534319b599d25225dd4", "max_stars_repo_licenses": ["... |
# Eric S. Tellez <eric.tellez@infotec.mx>
#Pkg.add("GZip")
#Pkg.add("Glob")
#Pkg.add("JSON")
using ArgParse
import JSON
import Glob
include("textmodel.jl")
include("io.jl")
function delete(fun, input, keys)
itertweets(input) do tweet
for key in keys
delete!(tweet, key)
end
f... | {"hexsha": "b97f16121a8c1fc7644a9ef717c64efea6d1a170", "size": 1272, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "jsonclean.jl", "max_stars_repo_name": "INGEOTEC/PatternAnalysisTools", "max_stars_repo_head_hexsha": "cae1a9a0c4ee84efea13d095c2eca0f0f56fe09c", "max_stars_repo_licenses": ["Apache-2.0"], "max_star... |
import os
import sys
import numpy as np
from bokeh.io import curdoc
from bokeh.models import ColumnDataSource, Span, Label, Slider
from bokeh.models.widgets import Div
from bokeh.models.glyphs import Circle
from bokeh.plotting import figure
from bokeh.layouts import row, column, widgetbox
BASE_DIR = os.path.dirname(
... | {"hexsha": "708a8ca052342edd172f09f97f70d8da0d8a0bec", "size": 6147, "ext": "py", "lang": "Python", "max_stars_repo_path": "squash/dashboard/viz/AMx/main.py", "max_stars_repo_name": "jhoblitt/qa-dashboard", "max_stars_repo_head_hexsha": "480c7a1084097354dc15c0190870beb2b2065b1c", "max_stars_repo_licenses": ["MIT"], "ma... |
#' mph2ms
#'
#' Conversion miles per hour in meters per second.
#'
#' @param mph numeric Speed miles per hour.
#' @return meters per second
#'
#'
#' @author Istituto per la Bioeconomia CNR Firenze Italy Alfonso Crisci \email{alfonso.crisci@@ibe.cnr.it}
#' @keywords mph2ms
#'
#' @export
#'
#'
#'
#'
mph2ms=function... | {"hexsha": "da6abdc998b510e544905af8840f74403b959eb2", "size": 605, "ext": "r", "lang": "R", "max_stars_repo_path": "R/mph2ms.r", "max_stars_repo_name": "alfcrisci/rBiometeo", "max_stars_repo_head_hexsha": "1fe0113d017372393de2ced18b884f356c76049b", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 2, "max_stars_r... |
#!/usr/bin/env python
#
# Copyright (c) 2017 Yunhai Luo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, ... | {"hexsha": "80f92112806dd65010d535d06ce380833cc818bd", "size": 28964, "ext": "py", "lang": "Python", "max_stars_repo_path": "guidescount.py", "max_stars_repo_name": "yunhailuo/CRISPR_scr", "max_stars_repo_head_hexsha": "7049a650c8f4fcb00814a8d13d85f08fd9254fec", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 1,... |
#redirect Joseph Harney
| {"hexsha": "628b54e5102014eab9a3c5bbbe7c2b20e36c068e", "size": 24, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "lab/davisWiki/Joe_Harney.f", "max_stars_repo_name": "voflo/Search", "max_stars_repo_head_hexsha": "55088b2fe6a9d6c90590f090542e0c0e3c188c7d", "max_stars_repo_licenses": ["MIT"], "max_stars_count": n... |
#include <iostream>
#include <boost/lexical_cast.hpp>
#include <boost/fusion/adapted.hpp>
#define BOOST_LOG_DYN_LINK 1
#include <boost/log/trivial.hpp>
#include <boost/log/utility/setup.hpp>
#include <restc-cpp/restc-cpp.h>
#include <restc-cpp/RequestBuilder.h>
using namespace std;
using namespace restc_cpp;
namesp... | {"hexsha": "8e3a1d40115cad5c6b862e96705b60022235d0c9", "size": 1913, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "tests/client-test.cpp", "max_stars_repo_name": "JugglerRick/studiolights", "max_stars_repo_head_hexsha": "5bbd3f6f715338d59942b24c4cda24ca4fdb3d71", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
# ctsb init file
import os
import sys
import warnings
import ctsb
from ctsb import error
from ctsb.problems import Problem, problem, problem_registry, problem_spec
from ctsb.models import Model, model, CustomModel, model_registry, model_spec
from ctsb.help import help
from ctsb.utils import tests, set_key
# initiali... | {"hexsha": "833cc4877508201b165664369bb0bd2a86855f4b", "size": 592, "ext": "py", "lang": "Python", "max_stars_repo_path": "ctsb/__init__.py", "max_stars_repo_name": "paula-gradu/ctsb", "max_stars_repo_head_hexsha": "fdc00acb798949ce1120778ad4725faf170f80c3", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_count":... |
# -*- coding: utf-8 -*-
"""
decode: decodes binary logged sensor data from crazyflie2 with uSD-Card-Deck
createConfig: create config file which has to placed on µSD-Card
@author: jsschell
"""
from zlib import crc32
import struct
import numpy as np
import os
# lookup dictionary to determine size of data type... | {"hexsha": "e7aad900312336842cc48d76228c00176c7d1202", "size": 4152, "ext": "py", "lang": "Python", "max_stars_repo_path": "crazyflie-firmware/tools/usdlog/CF_functions.py", "max_stars_repo_name": "chengque/iswarm", "max_stars_repo_head_hexsha": "098b8670481cbecd2fe4088377c96b7eae729357", "max_stars_repo_licenses": ["M... |
import faiss #faiss
import tensorflow as tf
import numpy as np
import pandas as pd
from voctor.dataset import convert_text_to_feature
from voctor.models import MedicalQAModelwithBert
from voctor.tokenization import FullTokenizer
from tf_bert.loader import checkpoint_loader
from collections import defaultdict
from multi... | {"hexsha": "cad8c7aca215cd41099ab04f3dd89d4be72af520", "size": 11674, "ext": "py", "lang": "Python", "max_stars_repo_path": "ML/Voctor/voctor/predictor.py", "max_stars_repo_name": "Technocrats-nitw/Care", "max_stars_repo_head_hexsha": "8649e874340339b9ada089702343919fe557c26e", "max_stars_repo_licenses": ["CC0-1.0"], "... |
# Copyright (c) 2021, The Board of Trustees of the Leland Stanford Junior University
"""Test functions for complex.py."""
import numpy as np
import torch
from util import complex
class TestComplex:
"""A test cllass for complex.py."""
def setup(self):
"""Set up input arrays."""
n = 3
... | {"hexsha": "e3017ef08c8b8debaaefb2b32dadcf451e775cdf", "size": 2063, "ext": "py", "lang": "Python", "max_stars_repo_path": "test/test_complex.py", "max_stars_repo_name": "computational-imaging/multishot-localization-microscopy", "max_stars_repo_head_hexsha": "573fb35d376a13ce6c91b40833189cab3b5dc630", "max_stars_repo_l... |
push!(LOAD_PATH, "./src")
using DelimitedFiles
using WriteVTK
using CommonUtils
using Basis1D
using Basis2DQuad
using UniformQuadMesh
using UnPack
using SetupDG
using SparseArrays
N = 2
K1D = 400
CFL = 0.9
Np = (N+1)*(N+1)
"Mesh related variables"
VX, VY, EToV = uniform_quad_mesh(2*K1D,K1D)
@. VX = (VX+1)/2
@. VY ... | {"hexsha": "13856b78d87cc4f504c30db04ac364b52094c974", "size": 3284, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "output_to_vtk_shocktube2.jl", "max_stars_repo_name": "yiminllin/ESDG-PosLimit", "max_stars_repo_head_hexsha": "f135a9b16fd3d818ac8b71711bb7f8300035d4b1", "max_stars_repo_licenses": ["MIT"], "max_st... |
# NLP written by GAMS Convert at 04/21/18 13:54:00
#
# Equation counts
# Total E G L N X C B
# 36 1 8 27 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc ... | {"hexsha": "2b9d188234eab995b7607f68dcffcfb78c6fb271", "size": 3945, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/examples/minlplib/prob07.py", "max_stars_repo_name": "ouyang-w-19/decogo", "max_stars_repo_head_hexsha": "52546480e49776251d4d27856e18a46f40c824a1", "max_stars_repo_licenses": ["MIT"], "max_... |
!> @brief allocate derivative objects (used for AD)
!> @param[in] ismpl local sample index
SUBROUTINE alloc_arrays_ad(ismpl)
use arrays
USE arrays_ad
USE mod_genrl
USE mod_data
USE mod_time
USE mod_conc
IMPLICIT NONE
integer :: ismpl
IN... | {"hexsha": "9b83d2cf319bb62ccbac4a2332e83493343219a2", "size": 2707, "ext": "f90", "lang": "FORTRAN", "max_stars_repo_path": "inverse/ad_tap/alloc_arrays_ad.f90", "max_stars_repo_name": "arielthomas1/SHEMAT-Suite-Open", "max_stars_repo_head_hexsha": "f46bd3f8a9a24faea9fc7e48ea9ea88438e20d78", "max_stars_repo_licenses":... |
import numpy as np
import pandas as pd
class EconomicDataModel:
def __init__(self, data):
self.data = {}
for year in data.time.unique():
self.data[year] = EconomicDataModel.make_dataframe(data, year)
def rca(self):
if not hasattr(self, 'rca_data'):
self.rca_da... | {"hexsha": "5216595d546b1e6123134350a57ecc2ecf1aaea4", "size": 5346, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/models/ecomplexity_model.py", "max_stars_repo_name": "hydrophis-spiralis/regional_economics_complexity", "max_stars_repo_head_hexsha": "c507c7307c068dd3e1f6b846b5c25641a5dd507b", "max_stars_re... |
import dynet as dy
import numpy as np
import time
def run(filename):
startTime = time.time()
# read word embedding
word_embedding_size = 300
word_embedding_file = "small_glove.txt"
word_embedding = []
with open(word_embedding_file, 'r') as f:
for (counter, line) in enumerate(f):
if counter =... | {"hexsha": "7c3f4332a17aa8c482f536d2699c5bd4dfb8e54c", "size": 5716, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/out/NIPS18evaluation/evaluationTreeLSTM/Dynet/treernnDynet.py", "max_stars_repo_name": "supunab/Lantern", "max_stars_repo_head_hexsha": "932a031816617d71c46653f3b2245129a6a8a7c8", "max_stars_r... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu May 30 08:21:48 2019
@author: paco
"""
from marketsimulator.orderbook import Orderbook
from marketsimulator.gateway import Gateway
import numpy as np
import matplotlib.pyplot as plt
import time
from datetime import datetime
import pandas as pd
import ... | {"hexsha": "592e634cb9dcf5509bbced35369afdb204a6ef28", "size": 1026, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/performance.py", "max_stars_repo_name": "Surbeivol/PythonMatchingEngine", "max_stars_repo_head_hexsha": "f94150294a85d7b415ca4518590b5a661d6f9958", "max_stars_repo_licenses": ["MIT"], "max_s... |
# raster
# holoviews
import numpy as np
import holoviews as hv
hv.extension('bokeh')
xvals = np.linspace(0,4,202)
ys,xs = np.meshgrid(xvals, -xvals[::-1])
hv.Raster(np.sin(((ys)**3)*xs))
| {"hexsha": "7c1dc0c2885c7b791bc5729d4e6b5db8cbe83338", "size": 189, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/rasters/raster.py", "max_stars_repo_name": "Ellon-M/visualizations", "max_stars_repo_head_hexsha": "5a42c213ea8fd0597e2035778d9ae6460eb9e821", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
include("../src/simulation.jl")
using .TennisRanking
# Test Run for Simulation
simulation(
player_count = 700,
seasons = 1,
number_of_replications = 1,
burn_in_seasons = 1,
ranking_schemes = Vector{String}(["ATP"]),
metrics = Vector{String}(["Spearman Correlation"]),
verbose = true,
st... | {"hexsha": "94277f96c4d0a7a597f908119484a7887b944790", "size": 342, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "example/run.jl", "max_stars_repo_name": "Danial-Hussain/Tennis-Ranking", "max_stars_repo_head_hexsha": "0cc4f4e27158c0690a9d70b4bdb1ec621e11d212", "max_stars_repo_licenses": ["MIT"], "max_stars_coun... |
# To minimize poverty, should UBI be provided for adults, children, or both?
*By Nate Golden, 2020-07-07*
While [US GDP per capita has more than doubled in the past 50 years](https://fred.stlouisfed.org/series/A939RX0Q048SBEA), many Americans still remain in poverty. According to the Census Bureau's 2018 Supplemental... | {"hexsha": "6e71d34dd7df0b96d1f988c419b186f24a4b6e56", "size": 13120, "ext": "py", "lang": "Python", "max_stars_repo_path": "jb/_build/jupyter_execute/20200707/adult_child_ubi.py", "max_stars_repo_name": "ngpsu22/blog", "max_stars_repo_head_hexsha": "7bf69f3954d26759e47d09925ea0586acc3f923b", "max_stars_repo_licenses":... |
# Not transforming Data into supervised learning paradigm
# --> keeping the time series format
# Firstly not doing an oop version -> TODO: will reformat to oop later on for automation and management
import os
from pathlib import Path
import numpy as np
from pandas import read_csv
from sklearn.preprocessing import MinM... | {"hexsha": "2690cf4416f72d6d2be19f4787538cd4ee02a165", "size": 8512, "ext": "py", "lang": "Python", "max_stars_repo_path": "Model/Multivariate_LSTM.py", "max_stars_repo_name": "kimdanny/Quant", "max_stars_repo_head_hexsha": "69d66f98bd2882f7fc54d3408aa1f943d487bcb9", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
"""
Tutorial 2: Customizing and aligning gradients
=================================================
In this tutorial you’ll learn about the methods available within the
GradientMaps class. The flexible usage of this class allows for the
customization of gradient computation with different kernels and dimensionality
re... | {"hexsha": "c2db73b7d4e4b41c677156ccdccdf3648a2aec5f", "size": 10005, "ext": "py", "lang": "Python", "max_stars_repo_path": "docs/python_doc/auto_examples/plot_tutorial2.py", "max_stars_repo_name": "vinferrer/BrainSpace", "max_stars_repo_head_hexsha": "349d47cb0649a74c3e5f8c7714f246504a2cb01a", "max_stars_repo_licenses... |
[STATEMENT]
lemma omit_redundant_points:
assumes "point p"
shows "p \<sqinter> x\<^sup>\<star> = (p \<sqinter> 1) \<squnion> (p \<sqinter> x) * (-p \<sqinter> x)\<^sup>\<star>"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. p \<sqinter> x\<^sup>\<star> = p \<sqinter> (1::'a) \<squnion> (p \<sqinter> x) * (- p \<... | {"llama_tokens": 8646, "file": "Relational_Disjoint_Set_Forests_Disjoint_Set_Forests", "length": 45} |
#helper fns
import pickle
import pdb
import numpy as np
from screen_conf import *
def round_tup(t, dec=5):
#decimal = 5
return tuple(map(lambda x: isinstance(x, float) and round(x, dec) or x, t))
def dump_dict(save_name, items, item_name, savepath=None):
# THIS FN DUMPS ALL THE RECORDED DATA
param... | {"hexsha": "60d1cd455844522a9014fdd6a29ff8493b2fc4c4", "size": 3745, "ext": "py", "lang": "Python", "max_stars_repo_path": "helper_fn.py", "max_stars_repo_name": "kenkyusha/eyeGazeToScreen", "max_stars_repo_head_hexsha": "7fc5abb2e51eb52f05e786221deac0a6d3a49643", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
function horner(coeff, x0)
degree = length(coeff) - 1
y = coeff[1]
z = 0
for j ∈ 2:(degree + 1)
println(coeff[j])
z = y + x0 * z
y = x0 * y + coeff[j]
end
return (y, z)
end
println(horner([5, 2, 1], 5)[1])
| {"hexsha": "0a65558ac1cbb4e611a1b0f8b5ece42ce1d20679", "size": 219, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "chapter2/horner.jl", "max_stars_repo_name": "Matt8898/julia-numerical", "max_stars_repo_head_hexsha": "ad9ec5ab109aaacbdce9c3ec88adc5446f65419e", "max_stars_repo_licenses": ["Unlicense"], "max_stars... |
#
# Helper module for working with command line arguments.
# It understands arguments separated by space(s) symbol.
# Argument itself may consist of name and value. e.g.:
# "file:test.txt" or "arg=value" or "param/val". You may
# use different separators between name and the value.
# See ARG_VAL_SEPARATORS constant for... | {"hexsha": "01bbe7e1af00d8fbfe0cd83cc253667f3130bcad", "size": 3181, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/util/CommandLine.jl", "max_stars_repo_name": "tmptrash/jevo", "max_stars_repo_head_hexsha": "6d862fac11efd939666d3ac59aa8c7d62843458e", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 48... |
module helloworld where
open import IO
main = run (putStrLn "Hello, World!")
| {"hexsha": "fff21fdc0caa7402a852794895e111c69a1953f7", "size": 81, "ext": "agda", "lang": "Agda", "max_stars_repo_path": "archive/a/agda/HelloWorld.agda", "max_stars_repo_name": "Ayush7-BIT/sample-programs", "max_stars_repo_head_hexsha": "827d8961d3a548daf8fe3b674642a1562daaa5c4", "max_stars_repo_licenses": ["MIT"], "m... |
------------------------------------------------------------------------------
-- The gcd program is correct
------------------------------------------------------------------------------
{-# OPTIONS --exact-split #-}
{-# OPTIONS --no-sized-types #-}
{-# OPTIONS --no-universe-polymorphism #-}
{-... | {"hexsha": "110bec4a351baac0a38d018e55018f1824f5e6f1", "size": 1527, "ext": "agda", "lang": "Agda", "max_stars_repo_path": "src/fot/FOTC/Program/GCD/Partial/CorrectnessProofATP.agda", "max_stars_repo_name": "asr/fotc", "max_stars_repo_head_hexsha": "2fc9f2b81052a2e0822669f02036c5750371b72d", "max_stars_repo_licenses": ... |
"""
Train a CP3D model.
"""
import os
import argparse
import json
import sys
import copy
import pickle
import torch
from torch.utils.data import DataLoader
from torch.optim import Adam
from nff.data import Dataset, split_train_validation_test, collate_dicts
from nff.data.loader import ImbalancedDatasetSampler
from nf... | {"hexsha": "6477c836518632f13c8cfab936db1a3e52d0e986", "size": 33905, "ext": "py", "lang": "Python", "max_stars_repo_path": "scripts/cp3d/train/train_single.py", "max_stars_repo_name": "jkaraguesian/NeuralForceField", "max_stars_repo_head_hexsha": "4ca4f4c7edc0ed1f70952db9e42d8ef9bbe109d8", "max_stars_repo_licenses": [... |
######### Jags batch program example ###########
using StatsPlots, Jags, Statistics
cd(ProjDir) do
dyes = "
model {
for (i in 1:BATCHES) {
for (j in 1:SAMPLES) {
y[i,j] ~ dnorm(mu[i], tau.within);
}
mu[i] ~ dnorm(theta, tau.between);
}
theta ~ dnorm(0.0, 1.0E-1... | {"hexsha": "8a6f1c23d29d98c37478c3dc39b22837c4689cf2", "size": 1551, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "Examples/Dyes/jdyes.jl", "max_stars_repo_name": "itsdfish/Jags.jl", "max_stars_repo_head_hexsha": "95a092d12c60eaeeb1d2e1be3c04c6e27b366775", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
/*==============================================================================
Copyright (c) 2001-2010 Joel de Guzman
Copyright (c) 2010 Eric Niebler
Copyright (c) 2015 John Fletcher
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http:... | {"hexsha": "28d8a8eead59557f77f296df258ec44aeeab1b43", "size": 5447, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "external/boost_1_60_0/qsboost/phoenix/function/function.hpp", "max_stars_repo_name": "wouterboomsma/quickstep", "max_stars_repo_head_hexsha": "a33447562eca1350c626883f21c68125bd9f776c", "max_stars_r... |
import os
import numpy as np
convert = {"0":"0", "1":"0", "2":"0", "3":"1"}
train_f = open("loveu_wide_train_2s_30fps_annotation_valid.txt")
val_f = open("loveu_wide_val_2s_30fps_annotation_valid.txt")
train_f_w = open("loveu_wide_train_2cls_2s_30fps_annotation_valid.txt", 'w')
val_f_w = open("loveu_wide_val_2cls... | {"hexsha": "754517cdd455aaee5bdf801adbe09782b9e51cb4", "size": 619, "ext": "py", "lang": "Python", "max_stars_repo_path": "data_preprocess/clsnum_convert.py", "max_stars_repo_name": "VisualAnalysisOfHumans/LOVEU_TRACK1_TOP3_SUBMISSION", "max_stars_repo_head_hexsha": "6f4d1c7e6883d6b0664fcd04265f437247afab54", "max_star... |
"""
Code adapted from the rendering code by Maxim Tatarchenko
Original version here: https://github.com/lmb-freiburg/ogn/blob/master/python/rendering/render_model.py
"""
import sys
import os.path
import argparse
import math
import numpy as np
import bmesh
import bpy
particle_materials = []
particle_prototypes = []
... | {"hexsha": "657aa1f2b73c47100a67ed31a63c2e5c2cf4db5f", "size": 8650, "ext": "py", "lang": "Python", "max_stars_repo_path": "2Dpm/render/render_point_cloud_blender.py", "max_stars_repo_name": "Sirish07/2D_projection_matching", "max_stars_repo_head_hexsha": "11c8ea81e3cbf5ecd3daba602cde0b7a9efcc15d", "max_stars_repo_lice... |
/-
Copyright (c) 2021 Yaël Dillies, Bhavik Mehta. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yaël Dillies, Bhavik Mehta
-/
import combinatorics.simplicial_complex.pure
open_locale classical affine big_operators
open set
namespace affine
variables {m n : ℕ} {E : ... | {"author": "mmasdeu", "repo": "brouwerfixedpoint", "sha": "548270f79ecf12d7e20a256806ccb9fcf57b87e2", "save_path": "github-repos/lean/mmasdeu-brouwerfixedpoint", "path": "github-repos/lean/mmasdeu-brouwerfixedpoint/brouwerfixedpoint-548270f79ecf12d7e20a256806ccb9fcf57b87e2/src/combinatorics/simplicial_complex/closure.l... |
import numpy as np
import string
import pickle
import math
from wordsegment import load, segment
import emoji
import ark_tweet.CMUTweetTagger as ct
from nltk.stem import WordNetLemmatizer
from langdetect import detect
import random
import re
# %%
load() # loads word segment
wordnet_lemmatizer = WordNetLemmatizer()
ma... | {"hexsha": "b50f9a66c55e8a3d13d0e8c78406523db8c14510", "size": 21472, "ext": "py", "lang": "Python", "max_stars_repo_path": "Utils/preprocess.py", "max_stars_repo_name": "JRC1995/Tweet-Disaster-Keyphrase", "max_stars_repo_head_hexsha": "4fb28dd1e16068adef6cf83c8adf11cc75c3091f", "max_stars_repo_licenses": ["Apache-2.0"... |
#
# Implements the localization and detection metrics proposed in the paper
#
# Joint Measurement of Localization and Detection of Sound Events
# Annamaria Mesaros, Sharath Adavanne, Archontis Politis, Toni Heittola, Tuomas Virtanen
# WASPAA 2019
#
#
# This script has MIT license
#
import numpy as np
from IPython impo... | {"hexsha": "150a708b7bf996f65b98e13927439235cddc23c1", "size": 13559, "ext": "py", "lang": "Python", "max_stars_repo_path": "seld/methods/utils/SELD_evaluation_metrics_2020.py", "max_stars_repo_name": "marwanelshantaly/EIN-SELD", "max_stars_repo_head_hexsha": "2e794006630b65940216559b31f9c0048fe4d2da", "max_stars_repo_... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#... | {"hexsha": "f60c905ab7909d97fe59d248879205a5efa98a8f", "size": 2941, "ext": "py", "lang": "Python", "max_stars_repo_path": "python/gated_wav_record_f.py", "max_stars_repo_name": "jbkcrash/gated_wav_record", "max_stars_repo_head_hexsha": "ffe9e5f10a268775ea9d8d25fa9237de13ef0dff", "max_stars_repo_licenses": ["MIT"], "ma... |
% Jean-Baptiste Carré's paper for Purdue 2016
%
% This conference manuscript template is prepared for: Kim Stockment,
% Conference Coordinator, Ray W. Herrick Laboratories, Purdue
% University, West Lafayette, IN, USA.
% Latest revision = 2016-02-23
\documentclass[10pt]{extarticle}
\usepackage{amssymb,amsmath,multic... | {"hexsha": "86529f45ed1f91662a678cca8308c61b7d821e51", "size": 29378, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "carre-purdue-2016.tex", "max_stars_repo_name": "speredenn/purdue-2016-conf-paper", "max_stars_repo_head_hexsha": "46f420865361274bb929ee862d78d73847ac1904", "max_stars_repo_licenses": ["CC-BY-4.0"]... |
from typing import Dict, List, Optional
import numpy as np
import gym
from gym import spaces
from gym.utils import seeding
import logging
import copy
logger = logging.getLogger(__name__)
from symbolic_behaviour_benchmark.symbolic_continuous_stimulus_dataset import SymbolicContinuousStimulusDataset
from symboli... | {"hexsha": "d76c5a661fb3a60515778f58e18367bea92c5397", "size": 21444, "ext": "py", "lang": "Python", "max_stars_repo_path": "symbolic_behaviour_benchmark/envs/symbolic_behaviour_benchmark_recall_test_env.py", "max_stars_repo_name": "Near32/SymbolicBehaviourBenchmark", "max_stars_repo_head_hexsha": "d1f9f14ed186292e2280... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from numpy import linspace, zeros, exp
import matplotlib.pyplot as plt
N_0 = int(input('Give initial population size N_0: '))
r = float(input('Give net growth rate r: '))
dt = float(input('Give time step size: '))
N_t = int(input('Give number of steps: '))
t = linspace... | {"hexsha": "4808c56594666455cac0a5a719d66b32fe32b83a", "size": 705, "ext": "py", "lang": "Python", "max_stars_repo_path": "Computations/growth1.py", "max_stars_repo_name": "Fernal73/LearnPython3", "max_stars_repo_head_hexsha": "5288017c0dbf95633b84f1e6324f00dec6982d36", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
# This file is a part of TypeDBClient. License is MIT: https://github.com/Humans-of-Julia/TypeDBClient.jl/blob/main/LICENSE
abstract type AbstractTypeDBStub end
mutable struct Core_TypeDBStub <: AbstractTypeDBStub
blockingStub::Proto.TypeDBBlockingStub
asyncStub::Proto.TypeDBStub
end
function Core_TypeDBStub... | {"hexsha": "155e74af330dc6e287fd058c61f91e43b96b596a", "size": 920, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/common/rpc/TypeDBStub.jl", "max_stars_repo_name": "mkschulze/GraknClient.jl", "max_stars_repo_head_hexsha": "1d5843545c84db386be2e834c07d22e08c44130e", "max_stars_repo_licenses": ["MIT"], "max_s... |
"""
getAmountOfTrades
calculates amount of trades from a given array of trades
"""
function getAmountOfTrades(arr::Array{Float64})::Real
amountOfTrades=length(arr)
return amountOfTrades
end
| {"hexsha": "4b5bec1dccc4b8a673a71d4ad56946bc3116f662", "size": 202, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "simpleStatistics.jl", "max_stars_repo_name": "RainerNeu/tradeStatistics", "max_stars_repo_head_hexsha": "76e879f365cac0f6b88bfbfc4d1b7cf4582237fd", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
# -*- coding: utf-8 -*-
"""
Created on Mon May 16 10:42:18 2016
@author: hossam
"""
import math
import random
import time
import numpy
from solution import solution
def MFO(objf, lb, ub, dim, n, maxIteration):
# maxIteration = 1000
# lb = -100
# ub = 100
# dim = 30
n = 50 # Number of sear... | {"hexsha": "052bdbff65d9200227d64253b0a46f61e31f9b23", "size": 5595, "ext": "py", "lang": "Python", "max_stars_repo_path": "optimizers/MFO.py", "max_stars_repo_name": "Veltys/GWO", "max_stars_repo_head_hexsha": "9fcf4fe9a6d1c0c5603a0b1903cae57a7953ecc3", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_count": nul... |
"""Is matrix a permutation matrix."""
import numpy as np
def is_permutation(mat: np.ndarray) -> bool:
r"""
Determine if a matrix is a permutation matrix [WikiPermutation]_.
A matrix is a permutation matrix if each row and column has a
single element of 1 and all others are 0.
Examples... | {"hexsha": "50223566c0d915cfe320e05e11537f4386bc0840", "size": 1776, "ext": "py", "lang": "Python", "max_stars_repo_path": "toqito/matrix_props/is_permutation.py", "max_stars_repo_name": "paniash/toqito", "max_stars_repo_head_hexsha": "ab67c2a3fca77b3827be11d1e79531042ea62b82", "max_stars_repo_licenses": ["MIT"], "max_... |
import math
import numpy as np
from cellfinder.detect.filters.plane_filters.base_tile_filter import (
OutOfBrainTileFilter,
)
class TileWalker(object):
def __init__(self, img, soma_diameter):
self.img = img
self.thresholded_img = img.copy()
self.img_width, self.img_height = img.shape... | {"hexsha": "6250ee67a63b82f8cbd2217260d3cd0815c5b0f2", "size": 2196, "ext": "py", "lang": "Python", "max_stars_repo_path": "cellfinder/detect/filters/plane_filters/tile_walker.py", "max_stars_repo_name": "nickdelgrosso/cellfinder", "max_stars_repo_head_hexsha": "5577c08d7641f377a36b81c1cde5d6645bc783d3", "max_stars_rep... |
[STATEMENT]
lemma lsl_comp_closed_var [simp]: "\<nu> (\<nu> x \<cdot> \<nu> (y::'a::unital_quantale)) = \<nu> x \<cdot> \<nu> y"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<nu> (\<nu> x \<cdot> \<nu> y) = \<nu> x \<cdot> \<nu> y
[PROOF STEP]
by (metis Fix_lsl_iff lsl_def lsl_range_fix mult.assoc rangeI) | {"llama_tokens": 141, "file": "Quantales_Quantale_Left_Sided", "length": 1} |
import matplotlib.pyplot as pl
import numpy as np
from scipy import signal
import os
if __name__ == "__main__":
Z = np.fromfile("USRP_polyphase_filter_window.dat", dtype=np.complex64)
w, h = signal.freqz(Z, fs = 1e6, worN = int(1e5))
pl.plot(w,np.abs(h))
pl.figure()
pl.plot(Z.real, label = "real pa... | {"hexsha": "5ec3ad41b150413c6c4167acbbbf5f54a58bba45", "size": 396, "ext": "py", "lang": "Python", "max_stars_repo_path": "tools/read_filter_window.py", "max_stars_repo_name": "zjc263/GPU_SDR", "max_stars_repo_head_hexsha": "a57a29925b915c6eb995eb3c76c4ec34f96c3a0b", "max_stars_repo_licenses": ["Apache-2.0"], "max_star... |
using SIAN
println("Setting up the problem")
ode = @ODEmodel(
S'(t) = -b * S(t) * In(t) / N(t),
E'(t) = b * S(t) * In(t) / N(t) - nu * E(t),
In'(t) = nu * E(t) - a * In(t),
N'(t) = 0,
Cu'(t) = nu * E(t),
y1(t) = Cu(t),
y2(t) = N(t)
)
res = identifiability_ode(ode, get_parameters(ode); p=0.99,... | {"hexsha": "a75af362dce66b9144b087e7a193d9f6fa8b1f69", "size": 354, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "examples/SEIR2.jl", "max_stars_repo_name": "iliailmer/SIAN-Julia", "max_stars_repo_head_hexsha": "b7e88d70d7495770d9669a9f340a1ddf30536b99", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 17... |
import glob
import logging
import os
from pprint import pprint
import coloredlogs
from utoolbox.data import SPIMDataset
from events import SpatialGraph
from utils import find_dataset_dir
logging.getLogger("pandas").setLevel(logging.ERROR)
logging.getLogger("tifffile").setLevel(logging.ERROR)
coloredlogs.install(
... | {"hexsha": "ab8cbe33a6d53a7db2d97d914044baa4d64470d8", "size": 1682, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/pipeline/fusion_fission.py", "max_stars_repo_name": "liuyenting/mito-analysis", "max_stars_repo_head_hexsha": "34d58d34a9b6a47d0c1c7dfff12edf58e25fd8e3", "max_stars_repo_licenses": ["Apache-2.... |
import argparse
import numpy as np
import torch
from utils import get_dataset, get_net, get_strategy
from pprint import pprint
import os
parser = argparse.ArgumentParser()
parser.add_argument('--seed', type=int, default=1, help="random seed")
parser.add_argument('--n_init_labeled', type=int, default=10000, help="numbe... | {"hexsha": "31212a063bc9e124b3600150ebe4cb0c27dfc167", "size": 3681, "ext": "py", "lang": "Python", "max_stars_repo_path": "demo.py", "max_stars_repo_name": "k-kirihara-mame/deep-active-learning", "max_stars_repo_head_hexsha": "6b79017bfcc8236dd737ae329bcc6da4674c8cfd", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
'''
CVE Compare
Version 1.2
Functionality:
Scans software in Windows and compares against the
NIST Vulnerability Database (NVD) to identify present vulnerabilities.
Includes optional scan for Microsoft hotfixes and patches.
Identifies:
* Vendor Name
* Vulnerable Software
* Software Version
... | {"hexsha": "74cae29a3d51266cdbfdbdd600464576242cc13f", "size": 11970, "ext": "py", "lang": "Python", "max_stars_repo_path": "python/v1.2/cve_compare.py", "max_stars_repo_name": "Jean13/CVE_Compare", "max_stars_repo_head_hexsha": "5c302dd501556d806b15a716b28c1cb78e13510d", "max_stars_repo_licenses": ["Apache-2.0"], "max... |
!> @file
!! Include fortran file for memory remapping
!! @author
!! Copyright (C) 2012-2013 BigDFT group
!! This file is distributed under the terms of the
!! GNU General Public License, see ~/COPYING file
!! or http://www.gnu.org/copyleft/gpl.txt .
!! For the list of contributors, see ~/AUTHORS
intege... | {"hexsha": "be2eb76f6d5fc13eae990c2b983a21e39682d230", "size": 860, "ext": "f90", "lang": "FORTRAN", "max_stars_repo_path": "aiida_bigdft/futile/flib/f_map-inc.f90", "max_stars_repo_name": "adegomme/aiida-bigdft-plugin", "max_stars_repo_head_hexsha": "dfd17f166a8cd547d3e581c7c3c9f4eb32bd2aab", "max_stars_repo_licenses"... |
Set Warnings "-notation-overridden,-parsing,-deprecated-hint-without-locality".
From LF Require Export IndProp.
(* a playground module that helps me to recall the refined reflection *)
Module Silly.
Inductive reflect (P : Prop) : bool -> Set :=
| ReflectT : P -> reflect P true
| ReflectF : ~ P -> reflect P false... | {"author": "duinomaker", "repo": "LearningStuff", "sha": "73410047a0d9ee36e54580ee9d22460037b7459c", "save_path": "github-repos/coq/duinomaker-LearningStuff", "path": "github-repos/coq/duinomaker-LearningStuff/LearningStuff-73410047a0d9ee36e54580ee9d22460037b7459c/LogicalFoundations/ch9.v"} |
import numpy as np
import math
import cv2
def flow_stack_oversample(flow_stack, crop_dims):
"""
This function performs oversampling on flow stacks.
Adapted from pyCaffe's oversample function
:param flow_stack:
:param crop_dims:
:return:
"""
im_shape = np.array(flow_stack.shape[1:])
... | {"hexsha": "a2dd30f9dd981256bd931f6edd589b9babfe3b65", "size": 10846, "ext": "py", "lang": "Python", "max_stars_repo_path": "pyActionRecog/utils/io.py", "max_stars_repo_name": "kurenaifi/Flow-Guided2", "max_stars_repo_head_hexsha": "bb95a9d177ad101f5bd8e10e3177af1069d68362", "max_stars_repo_licenses": ["MIT"], "max_sta... |
"""
Generating Wild Bootstrap samples.
Author: Chao Huang (chaohuang.stat@gmail.com)
Last update: 2017-08-14
"""
import numpy as np
"""
installed all the libraries above
"""
def grs(efity_design, efit_eta, res_eta):
"""
Generating Wild Bootstrap samples.
Args:
efity_design (matrix): fitted res... | {"hexsha": "54aad394e93de97d8e106aa9be14426718d8afcf", "size": 950, "ext": "py", "lang": "Python", "max_stars_repo_path": "MFSDA/Resources/Libraries/stat_grs.py", "max_stars_repo_name": "bpaniagua/MFSDA_Python", "max_stars_repo_head_hexsha": "d7e439fe670d5e2731c9ec722919a74f67b01e30", "max_stars_repo_licenses": ["Apach... |
import numpy as np
import pandas as pd
class CellHppcData:
"""
Battery cell data from HPPC test.
"""
def __init__(self, path):
"""
Initialize with path to HPPC data file.
Parameters
----------
path : str
Path to HPPC data file.
Attributes
... | {"hexsha": "3b1f12fa709aca67a0b2a2231ba4d4bbed58a797", "size": 3592, "ext": "py", "lang": "Python", "max_stars_repo_path": "ecm/cell_hppc_data.py", "max_stars_repo_name": "sratgh/equiv-circ-model", "max_stars_repo_head_hexsha": "0c8c30814b819e893f49a810eae090a6dabe39e9", "max_stars_repo_licenses": ["MIT"], "max_stars_c... |
import numpy as np
class Kmeans(object):
def __init__(self, n_cluster=5, initCent='random', max_iter=300):
"""
:param n_cluster: 聚类的个数
:param initCent: 质心初始化
:param max_iter: 最大迭代次数
"""
if getattr(initCent, '__array__'):
n_cluster = initCent.shape[0]... | {"hexsha": "b59e64217ef0134b3a399d9acd80cbcfb37ef0bc", "size": 3360, "ext": "py", "lang": "Python", "max_stars_repo_path": "Tutorials/2_Models/KMeans/python/kmeans.py", "max_stars_repo_name": "chenghuiyu/MachineLearning-Tutorials", "max_stars_repo_head_hexsha": "7dd570f2fce57decd5722d163befe7407c194872", "max_stars_rep... |
from __future__ import unicode_literals
from collections import Counter
import numpy
from django.contrib.gis.geos import MultiPolygon, Polygon
from django.core.exceptions import ObjectDoesNotExist
from raster.algebra.parser import FormulaParser, RasterAlgebraParser
from raster.exceptions import RasterAggregationExce... | {"hexsha": "8510ed0f2310e6840d5b251c087a407c00d0227b", "size": 11695, "ext": "py", "lang": "Python", "max_stars_repo_path": "raster/valuecount.py", "max_stars_repo_name": "bpneumann/django-raster", "max_stars_repo_head_hexsha": "74daf9d396f2332a2cd83723b7330e6b10d73b1c", "max_stars_repo_licenses": ["BSD-3-Clause"], "ma... |
#ifndef ALTERNATIVE_ROUTING_LIB_TEST_TYPES_HPP
#define ALTERNATIVE_ROUTING_LIB_TEST_TYPES_HPP
#include <boost/graph/adjacency_list.hpp>
namespace arlib {
namespace test {
using Graph = boost::adjacency_list<boost::vecS, boost::vecS,
boost::bidirectionalS, boost::no_property,
... | {"hexsha": "46c66b76ee71aa931e2190012935187607d82079", "size": 736, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "test/test_types.hpp", "max_stars_repo_name": "ashishkashinath/arlib", "max_stars_repo_head_hexsha": "891aa8603a6e07a16aec5700e7129a0d14a40b84", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
@testset "serialize" begin
@testset "MVHistory" begin
expected = MVHistory()
push!(expected, :a, 1, 1.2)
push!(expected, :a, 2, 2.2)
push!(expected, :b, 1, 3)
push!(expected, :b, 2, 4)
push!(expected, :c, 1, OneClassActiveLearning.ConfusionMatrix(1,1,1,1))
p... | {"hexsha": "c085c27cf39eb45f9e4074a84036e3923ea12a5d", "size": 3214, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "test/serialize_test.jl", "max_stars_repo_name": "uncoded-namer/OneClassActiveLearning.jl", "max_stars_repo_head_hexsha": "28eafe49f3bbd32f4ed6f9a10d2dfc8ae752532e", "max_stars_repo_licenses": ["MIT... |
import unittest
import numpy as np
from utilities.NumpyHelper import NumpyDynamic
class NumpyDynamicTests(unittest.TestCase):
def empty_test(self):
data = NumpyDynamic(np.int32)
result = data.finalize()
self.assertEquals(0, len(result))
def add_test(self):
data = NumpyDynamic... | {"hexsha": "dbb10ec9b57113ece8db353120d2a261bcaf3ce5", "size": 738, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/utilities/tests/NumpyHelperTests.py", "max_stars_repo_name": "AndMu/Unsupervised-Domain-Specific-Sentiment-Analysis", "max_stars_repo_head_hexsha": "b5a81f5d23419789a027403b4d72d53ed760e091", "... |
import ASTInterpreter2
import DebuggerFramework: DebuggerState, execute_command, print_status, locinfo, eval_code, dummy_state
import ..Atom: fullpath, handle, @msg, wsitem, Inline, EvalError, Console
import Juno: Row, ProgressBar, Tree
import REPL
using Media
using MacroTools
chan = nothing
state = nothing
isdebuggi... | {"hexsha": "d1b5c010d43e307a190519afb9578cbaf161f7a2", "size": 7571, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/debugger/stepper.jl", "max_stars_repo_name": "SimonDanisch/Atom.jl", "max_stars_repo_head_hexsha": "55e105006dddf0dad8b75654daf0c2998e9609bc", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
import torch
import numpy as np
from scipy import fft
import random
import math
from typing import Callable, Union, Optional, Tuple, List, Any, Dict
from SurFree.utils import *
class SurFree(object):
def __init__(
self,
model,
steps: int = 100,
norm :str = "l2",
... | {"hexsha": "d76e97e39ca241786370617c70ad12238ac4838c", "size": 19282, "ext": "py", "lang": "Python", "max_stars_repo_path": "SurFree/attack.py", "max_stars_repo_name": "machanic/TangentAttack", "max_stars_repo_head_hexsha": "17c1a8e93f9bbd03e209e8650631af744a0ff6b8", "max_stars_repo_licenses": ["Apache-2.0"], "max_star... |
[STATEMENT]
lemma ccBinds_strict[simp]: "ccBinds \<Gamma>\<cdot>\<bottom>=\<bottom>"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. ccBinds \<Gamma>\<cdot>\<bottom> = \<bottom>
[PROOF STEP]
unfolding ccBinds_eq
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (\<Squnion>v\<mapsto>e\<in>map_of \<Gamma>. ccBind v e\<... | {"llama_tokens": 163, "file": "Call_Arity_CoCallAnalysisBinds", "length": 2} |
"""This module provides a class for describing and manipulating a
zeolite structure.
"""
__author__ = "Daniel Schwalbe-Koda"
__version__ = "1.0"
__email__ = "dskoda [at] mit [dot] edu"
__date__ = "Oct 7, 2019"
import numpy as np
import networkx as nx
from pymatgen.core import Structure
DEFAULT_RADIUS = 2
class Z... | {"hexsha": "1170dbbbfc669d29b42208de51df8eeeb211cbfb", "size": 6676, "ext": "py", "lang": "Python", "max_stars_repo_path": "zeograph/structure.py", "max_stars_repo_name": "learningmatter-mit/Zeolite-Graph-Similarity", "max_stars_repo_head_hexsha": "a42dccf787763f38a36a5bff4afd89216e23dd7d", "max_stars_repo_licenses": [... |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{\label{sec:tcp-collector-update}Using TCP to Send Updates to
the \Condor{collector}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{TCP}
\index{TCP!sending updates}
\index{UDP}
\index{UDP!lost dat... | {"hexsha": "81b7c652d0397197b9ad1274204bc099abec9a42", "size": 3179, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "doc/admin-man/tcp-update.tex", "max_stars_repo_name": "neurodebian/htcondor", "max_stars_repo_head_hexsha": "113a5c9921a4fce8a21e3ab96b2c1ba47441bf39", "max_stars_repo_licenses": ["Apache-2.0"], "ma... |
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from future.utils import python_2_unicode_compatible
import uuid
import logging
import networkx
import itertools
from indra.util import fast_deepcopy
from indra.statements import *
from indra.preassembler.hierarchy_m... | {"hexsha": "46b1f5d30d9912a9970e18dee53ac3b4b8c82487", "size": 32680, "ext": "py", "lang": "Python", "max_stars_repo_path": "indra/mechlinker/__init__.py", "max_stars_repo_name": "pupster90/indra", "max_stars_repo_head_hexsha": "e90b0bc1016fc2d210a9b46fb160b78a7d6a5ab9", "max_stars_repo_licenses": ["BSD-2-Clause"], "ma... |
abstract type AbstractRunningVariable{T,C,VT} <: AbstractVector{T} end
"""
RunningVariable(Zs; cutoff = 0.0, treated = :≥)
Represents the running variable values for data in a regression
discontinuity setting. The discontinuity is at `cutoff`, and `treated` is one of
`[:>; :>=; :≥; :≧; :<; :<=; :≤; :≦ ]' and det... | {"hexsha": "2d6f380b8b253b4463fc0059dc977e77d3207a1f", "size": 8419, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/running_variable.jl", "max_stars_repo_name": "evanmunro/RegressionDiscontinuity.jl", "max_stars_repo_head_hexsha": "43fb7d99219502b58393cb8270fd32024ab61c9e", "max_stars_repo_licenses": ["MIT"]... |
"""
Module for building a complete dataset from local directory with csv files.
"""
import os
import sys
from os.path import isfile, join
import pandas as pd
import json
import numpy as np
from logbook import Logger, StreamHandler
from pandas import read_csv, Timedelta
from zipline.utils.calendars import deregister_c... | {"hexsha": "fe392bdc3297dee3a02b3195e7a973022f0aec41", "size": 13444, "ext": "py", "lang": "Python", "max_stars_repo_path": "zipline/data/bundles/XNSE.py", "max_stars_repo_name": "prodipta/zipline", "max_stars_repo_head_hexsha": "d9755f51293792eef84e4da164e31f99e7806f04", "max_stars_repo_licenses": ["Apache-2.0"], "max... |
import pandas as pd
import os.path as op
import numpy.testing as npt
import nazarkav as nk
data_path = op.join(nk.__path__[0], 'data')
def test_Cleaner():
# hotel_pol = pd.read_csv(op.join(data_path, 'hotel-polarity.tsv'), sep='\t').head()
# sample = hotel_pol['comment'].tolist()
sample = ['این هتل خیلی ... | {"hexsha": "2398f50bab52f54f49248f394ef17a5d93bc7ea5", "size": 1557, "ext": "py", "lang": "Python", "max_stars_repo_path": "nazarkav/tests/test_cleaner.py", "max_stars_repo_name": "amsjavan/nazarkav", "max_stars_repo_head_hexsha": "b796cad10349009d5af734965b1257bbb9c0f678", "max_stars_repo_licenses": ["MIT"], "max_star... |
from __future__ import print_function, absolute_import
import os
import numpy as np
import json
import random
import math
import torch
import torch.utils.data as data
import random
# from utils.imutils2 import *
# from utils.transforms import *
import torchvision.transforms as transforms
import scipy.io as sio
impo... | {"hexsha": "cdb315281a79587977e96903b81575604aebd9a6", "size": 10173, "ext": "py", "lang": "Python", "max_stars_repo_path": "references/video_classification/davis_test.2.py", "max_stars_repo_name": "ajabri/vision", "max_stars_repo_head_hexsha": "f69847cc761937891be77e1780bc4ddea46dbe68", "max_stars_repo_licenses": ["BS... |
import numpy as np
class ReplayBuffer(object):
"""
A basic replay buffer :)
"""
def __init__(self, state_dim, action_dim, length, batch_size):
self.state_buf = np.zeros([length, state_dim], dtype=np.float32)
self.action_buf = np.zeros([length, action_dim], dtype=np.float32)
... | {"hexsha": "2f16a06ded1d1c5168bc5898509d40d48422a72c", "size": 1659, "ext": "py", "lang": "Python", "max_stars_repo_path": "algorithms/ddpg/replay_buffer.py", "max_stars_repo_name": "Fluidy/twc2020", "max_stars_repo_head_hexsha": "0c65ab3508675a81e3edc831e45d59729dab159d", "max_stars_repo_licenses": ["MIT"], "max_stars... |
"""
Basic character model.
Stolen from keras: https://github.com/keras-team/keras/blob/master/examples/lstm_text_generation.py
"""
from __future__ import print_function
from keras.callbacks import LambdaCallback
from keras.callbacks import TensorBoard
from keras.models import load_model
from keras.models import Sequen... | {"hexsha": "380abeefc994beb6203e3d2bfbc2722aa3cc4b66", "size": 7306, "ext": "py", "lang": "Python", "max_stars_repo_path": "charmodel.py", "max_stars_repo_name": "MaxStrange/scifi-titles", "max_stars_repo_head_hexsha": "f9803de37dd22d9ddddea3bcd5c448c26d56b0a5", "max_stars_repo_licenses": ["MIT"], "max_stars_count": nu... |
import numpy as np
def dictionary_to_vector(parameters):
keys = []
count = 0
for key in ["W1", "B1", "W2", "B2", "W3", "B3", "W4", "B4"]:
# flatten parameter
new_vector = np.reshape(parameters[key], (-1, 1))
keys = keys + [key] * new_vector.shape[0]
if count == 0:
... | {"hexsha": "f54b046f492301eae85e9691772303bd408fc6de", "size": 1799, "ext": "py", "lang": "Python", "max_stars_repo_path": "utils/grad_check_utils.py", "max_stars_repo_name": "ash-R2D2/Deep-Neural-networks", "max_stars_repo_head_hexsha": "3eb8e98783ed5a8bbde3e2a566bf1d457c5ee14e", "max_stars_repo_licenses": ["MIT"], "m... |
\section{Application to synthetic data}
\label{sec:synt_tests}
We applied the proposed method to three synthetic data sets simulating different geological scenarios. The first one is generated by a model containing a set of multiple sources with different geometries, all of them with the same magnetization direction. ... | {"hexsha": "9e139529b31a45172af85fb2aabaddb91c8017d9", "size": 6952, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "manuscript/simulations.tex", "max_stars_repo_name": "pinga-lab/eqlayer-magnetization-direction", "max_stars_repo_head_hexsha": "dd929120b22bbd8d638c8bc5924d15f41831dce2", "max_stars_repo_licenses": ... |
// (C) Copyright Mac Murrett 2001.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org for most recent version.
#include "init.hpp"
#include "remote_ca... | {"hexsha": "82006ae3877fa0f87813f5234e5ed41d68e5a634", "size": 977, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "Source/boost_1_33_1/libs/thread/src/mac/init.cpp", "max_stars_repo_name": "spxuw/RFIM", "max_stars_repo_head_hexsha": "32b78fbb90c7008b1106b0cff4f8023ae83c9b6d", "max_stars_repo_licenses": ["MIT"], "... |
subroutine import_2el_dalton(oplist,fname_inp,
& mode,scaling,anti,str_info,orb_info)
*-----------------------------------------------------------------------
* Routine to read in and reorder 2-electron integrals required for
* R12 calculations. Integrals are reordered to be in type order.
*----... | {"hexsha": "c3c863454c6b7e6221f889bd3876048346f20cd8", "size": 8822, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "input/import_2el_dalton.f", "max_stars_repo_name": "ak-ustutt/GeCCo-public", "max_stars_repo_head_hexsha": "8d43a6c9323aeba7eb54625b95553bfd4b2418c6", "max_stars_repo_licenses": ["MIT"], "max_star... |
from __future__ import division
import os
import os.path as osp
import chainer
import cv2
import numpy as np
import PIL.Image
import skimage.io
class DepthPredictionDataset(chainer.dataset.DatasetMixin):
class_names = np.array([
'_background_',
'mirror',
], dtype=np.str)
class_names.set... | {"hexsha": "eea1ed6b13fb3cef620a6430e5ee9c080bec6111", "size": 7257, "ext": "py", "lang": "Python", "max_stars_repo_path": "jsk_recognition/jsk_recognition_utils/python/jsk_recognition_utils/datasets/depth_prediction.py", "max_stars_repo_name": "VT-ASIM-LAB/autoware.ai", "max_stars_repo_head_hexsha": "211dff3bee2d2782c... |
import cv2
import numpy as np
import tensorflow as tf
from object_detection.utils import label_map_util
class ObjectDetector:
def __init__(
self, path_to_checkpoint, path_to_labelmap, class_id=None, threshold=0.5
):
# class_id is list of ids for desired classes, or None for all classes in the ... | {"hexsha": "71a4362d129a3cfc6a4f76c842f36841ec5b966a", "size": 3315, "ext": "py", "lang": "Python", "max_stars_repo_path": "object_tracker/dl/odapi/detector.py", "max_stars_repo_name": "aidoop/object-tracker-python", "max_stars_repo_head_hexsha": "283195a82076450d00d1f56f15c45895df69f6ee", "max_stars_repo_licenses": ["... |
using JuMP, EAGO
m = Model()
EAGO.register_eago_operators!(m)
@variable(m, -1 <= x[i=1:4] <= 1)
@variable(m, -2.9922365365158297 <= q <= 1.3959867501971397)
add_NL_constraint(m, :(sigmoid(-0.7172866311917714 + ... | {"hexsha": "2b52ce9e2506ec4a878dff8481cc61cca333d5c9", "size": 3767, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "solver_benchmarking/MINLPLib.jl/instances/ANN_Env/13_sigmoid_4_4_2.jl", "max_stars_repo_name": "PSORLab/RSActivationFunctions", "max_stars_repo_head_hexsha": "0bf8b4500b21144c076ea958ce93dbdd19a533... |
# Top-level package module: wraps MathLink with higher-level helpers.
module Mathematica
# This package is based on MathLink
import MathLink
# WExpr/WSymbol are the wire-level Wolfram expression types used throughout.
import MathLink: WExpr, WSymbol
import IterTools: takewhile
# Public API: `weval` evaluates Wolfram expressions, `@importsymbol`
# binds a Wolfram symbol into the current Julia scope.
export weval, @importsymbol
# NOTE: include order matters — later files use definitions from earlier ones.
include("types.jl")
include("operators.jl")
include("show.jl")
include("eval.jl")
end # module
| {"hexsha": "efa2d42ae5e46ccd3c55779498048100316b34ad", "size": 258, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/Mathematica.jl", "max_stars_repo_name": "peng1999/Mathematica.jl", "max_stars_repo_head_hexsha": "4ef37350cd8733cd5b5d7cabb6fcf7329d51763d", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    """Logistic function 1 / (1 + e^-x).

    Inputs are clamped to [-500, 500] so np.exp never overflows;
    beyond that range the sigmoid is saturated anyway.
    """
    clamped = np.clip(x, -500, 500)
    return 1.0 / (1.0 + np.exp(-clamped))
def sigmoid_derivative(x):
    """Derivative of the logistic sigmoid: s(x) * (1 - s(x)).

    Evaluates sigmoid(x) once and reuses it, instead of calling it
    twice per invocation as the original did (same result, half the
    exp work).
    """
    s = sigmoid(x)
    return s * (1.0 - s)
def softmax(z):
    """Row-wise softmax of a 2-D array of logits.

    Subtracts the per-row maximum before exponentiating so that large
    logits do not overflow np.exp; the shift cancels exactly in the
    ratio, so results are unchanged for inputs the original handled.
    """
    shifted = np.exp(z - np.max(z, axis=1, keepdims=True))
    return shifted / np.sum(shifted, axis=1, keepdims=True)
def relu(a, alpha=0.01):
retu... | {"hexsha": "4e43b994e826eadf9612644f2beb6b816efd719e", "size": 4020, "ext": "py", "lang": "Python", "max_stars_repo_path": "neural_network.py", "max_stars_repo_name": "daniilpastukhov/feed-forward-neural-network", "max_stars_repo_head_hexsha": "690a41259fc83db939a173b359772d35a64085b0", "max_stars_repo_licenses": ["MIT... |
/**
* @copyright Copyright 2016 The J-PET Framework Authors. All rights reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may find a copy of the License in the LICENCE file.
*
* Unless required by applicable la... | {"hexsha": "a371552c271310ce59d7d69ceaadbb832e406cee", "size": 4457, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "Tasks/JPetSimplePhysSignalReco/JPetSimplePhysSignalReco.cpp", "max_stars_repo_name": "Alvarness/j-pet-framework", "max_stars_repo_head_hexsha": "899ab32bf9a7f4daecaf8ed2dd7c8bc8922e73bd", "max_stars... |
SUBROUTINE UPDATE(IPARAM, IELMNT, PARAM, KFN)
IMPLICIT DOUBLE PRECISION (A-H,O-Z)
************************************************************************
*
* UPDATE UPDATES THE COMMON BLOCKS WHICH HOLD ALL THE PARAMETERS FOR
* RUNNING MNDO.
* IPARAM REFERS TO THE TYPE OF PARAMETER,
* ... | {"hexsha": "16f1d2750e737732d193a9d9f42ad8759b2ef334", "size": 2812, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "1993_MOPAC7/update.f", "max_stars_repo_name": "openmopac/MOPAC-archive", "max_stars_repo_head_hexsha": "01510e44246de34a991529297a10bcf831336038", "max_stars_repo_licenses": ["BSD-3-Clause"], "max... |
\documentclass[a4paper]{article}
\usepackage[utf8]{inputenc}
\usepackage{url}
\title{
Tutorial on the \tlaplus{} Method and Tools\\
for Modeling and Verifying Algorithms
}
\author{
Stephan Merz and Hernán Vanzetto\\
Inria Nancy Grand-Est and LORIA\\
MSR-Inria Joint Centre Saclay
}
\date{}
\newcommand{\tlap... | {"hexsha": "5b28670520b471d6fc0a22c891389e56d7a4163f", "size": 5053, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "doc/presentations/2013-afadl/abstract.tex", "max_stars_repo_name": "damiendoligez/tlapm", "max_stars_repo_head_hexsha": "13a1993263642092a521ac046c11e3cb5fbcbc8b", "max_stars_repo_licenses": ["BSD-2... |
"""
change_sense(objective_sense)
Change the objective sense of optimization.
Possible arguments are `MOI.MAX_SENSE` and `MOI.MIN_SENSE`.
If you want to change the objective and sense at the same time, use
[`change_objective`](@ref) instead to do both at once.
"""
function change_sense(objective_sense)
(mode... | {"hexsha": "183f89456a445093aaf2423aad6d0374c6dd6e7d", "size": 3298, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/analysis/modifications/optimizer.jl", "max_stars_repo_name": "htpusa/COBREXA.jl", "max_stars_repo_head_hexsha": "a1987d7d3081cf58c56df4bd50c4e235ecad20ae", "max_stars_repo_licenses": ["Apache-2... |
import os
import pandas as pd
import numpy as np
import json
import urllib.request
from datetime import datetime
# --------------------------------------------- Create contents ----------------------------------------------------- #
# 10:30 a.m. (Eastern Time) on Thursday. Delayed by one day if holiday.
def generate_h... | {"hexsha": "48c3a4d679a3e8d483cf908493b87db59a679959", "size": 2343, "ext": "py", "lang": "Python", "max_stars_repo_path": "report/eia_ng.py", "max_stars_repo_name": "jingmouren/QuantResearch", "max_stars_repo_head_hexsha": "7a17e567b0e95481894ed37524c041b30155b6cb", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
import os
import numpy as np
import matplotlib.pyplot as plt
from examples.utils.config import Config
config = Config()
def plot_attention_weights(encoder_inputs, attention_weights, en_id2word, fr_id2word, filename=None):
"""
Plots attention weights
:param encoder_inputs: Sequence of word ids (list/numpy.... | {"hexsha": "48fcf8129ed36e8f2b3270b95a20161dd5c92b8d", "size": 1655, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/examples/utils/model_helper.py", "max_stars_repo_name": "feperessim/attention_keras", "max_stars_repo_head_hexsha": "322a16ee147122026b63305aaa5e899d9e5de883", "max_stars_repo_licenses": ["MIT... |
# import libraries
from sqlalchemy import create_engine
import sys
import pickle
import nltk
nltk.download(['punkt', 'wordnet', 'averaged_perceptron_tagger','stopwords'])
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.corpus import stopwords
import re
import numpy as np
import... | {"hexsha": "26ed20f6d8443b61ea2906dbc1a94b4d5bac9a1f", "size": 8964, "ext": "py", "lang": "Python", "max_stars_repo_path": "models/train_classifier.py", "max_stars_repo_name": "ensst6/Udacity_DSND_Project3", "max_stars_repo_head_hexsha": "b41753ffa30dbb0bb2f086653b61aa07f8525956", "max_stars_repo_licenses": ["MIT"], "m... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.