text stringlengths 0 1.25M | meta stringlengths 47 1.89k |
|---|---|
import numpy as np
def levenshtein_distance(source, target, sub_cost=1, ins_cost=1, del_cost=1):
target = np.array(target)
dist = np.arange(len(target) + 1) * ins_cost
for s in source:
dist[1:] = np.minimum(dist[1:] + del_cost, dist[:-1] + (target != s) * sub_cost)
dist[0] += del_cost
... | {"hexsha": "b6718365165733efafa1f882f95dbb01d5ba5e6a", "size": 2941, "ext": "py", "lang": "Python", "max_stars_repo_path": "pero_ocr/sequence_alignment.py", "max_stars_repo_name": "DavidHribek/pero-ocr", "max_stars_repo_head_hexsha": "8d274282813878b3e31dd560563a36b3f02e5c33", "max_stars_repo_licenses": ["BSD-3-Clause"... |
C*************************************************************
C PLEASE KEEP THIS NOTE OF MODEL-DEVELOPMENT HISTORY
C Matrix solve uses Thomas algorithm, 10/1991, Jinlun Zhang
C Spherical coordinate system, 10/27/93, Jinlun Zhang
C Latest finite differencing scheme for treatment of NP,
C 9/9/1996,Jinlun Zhang
C Alterna... | {"hexsha": "599b66f6b61dc18e3d615c481ae6bec3a77ec52b", "size": 31011, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "source/model/ICEDYN.f", "max_stars_repo_name": "Climostatistics/giss_model_e", "max_stars_repo_head_hexsha": "2ec1e4fd6ef009fe894a98955ef6e1586753ff79", "max_stars_repo_licenses": ["CC0-1.0"], "m... |
[STATEMENT]
lemma cltn2_compose_right_abs:
assumes "invertible M"
shows "cltn2_compose A (cltn2_abs M) = cltn2_abs (cltn2_rep A ** M)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. cltn2_compose A (cltn2_abs M) = cltn2_abs (cltn2_rep A ** M)
[PROOF STEP]
proof -
[PROOF STATE]
proof (state)
goal (1 subgoal):
1.... | {"llama_tokens": 860, "file": "Tarskis_Geometry_Projective", "length": 7} |
include("../common/ortho_gram_schmidt.jl")
include("../common/wrappers_fft.jl")
include("../common/gen_lattice_pwscf.jl")
include("gen_dr.jl")
include("init_pot_harm_3d.jl")
include("op_K.jl")
include("op_Vpot.jl")
include("op_H.jl")
include("calc_rho.jl")
include("calc_grad.jl")
include("calc_Etot.jl")
include("schso... | {"hexsha": "2999700d07c6aa2ffbd7e3d46ff253f5efba0f26", "size": 2541, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "PW/sch_02/driver_harm.jl", "max_stars_repo_name": "f-fathurrahman/ffr-ElectronicStructure.jl", "max_stars_repo_head_hexsha": "35dca9831bfc6a3e49bb0f3a5872558ffce4b211", "max_stars_repo_licenses": [... |
[STATEMENT]
lemma [simp]:
"P \<turnstile> C sees M,b: Ts\<rightarrow>T = m in D \<Longrightarrow>
method (compP f P) C M = (D,b,Ts,T,f b m)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. P \<turnstile> C sees M, b : Ts\<rightarrow>T = m in D \<Longrightarrow> method (compP f P) C M = (D, b, Ts, T, f b m)
[PROO... | {"llama_tokens": 2946, "file": "JinjaDCI_Compiler_PCompiler", "length": 24} |
import numpy as np
import SimpleITK as sitk
def load_itk(filename):
itkimage = sitk.ReadImage(filename)
image = np.transpose(sitk.GetArrayFromImage(itkimage))
origin = np.array(itkimage.GetOrigin())
spacing = np.array(itkimage.GetSpacing())
return image, origin, spacing
def world_2_voxel(world_coo... | {"hexsha": "6e20bc7a3c2697a8f0aee589cec805e2694bbe33", "size": 910, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/data_processing/xyz_utils.py", "max_stars_repo_name": "DataForces/CV_LUNA", "max_stars_repo_head_hexsha": "adc76fdc580807742fee4c6453c728a2d6d76ed3", "max_stars_repo_licenses": ["BSD-2-Clause"]... |
From Equations Require Import Equations.
Require Import Psatz.
Require Import Coq.Strings.String.
Require Import Coq.Lists.List.
Require Export SystemFR.OpenTOpen.
Require Export SystemFR.EqualWithRelation.
Require Export SystemFR.ReducibilityCandidate.
Require Export SystemFR.ReducibilityDefinition.
Require Export ... | {"author": "epfl-lara", "repo": "SystemFR", "sha": "a68d12d6360f395958506deea66112c46be492a0", "save_path": "github-repos/coq/epfl-lara-SystemFR", "path": "github-repos/coq/epfl-lara-SystemFR/SystemFR-a68d12d6360f395958506deea66112c46be492a0/PolarityErase.v"} |
#
# Copyright (c) 2019-2020 StephLin.
#
# This file is part of wipctv
# (see https://gitea.mcl.math.ncu.edu.tw/StephLin/wipctv).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regardi... | {"hexsha": "3581ea44d33febc6a8616a80da2656b7440433ee", "size": 5559, "ext": "py", "lang": "Python", "max_stars_repo_path": "wipctv/restft.py", "max_stars_repo_name": "StephLin/wipctv", "max_stars_repo_head_hexsha": "f289e306a9ea51c17853c58fd7e0af84dd8764cc", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_count":... |
[STATEMENT]
theorem completeTA_is_ta: "complete_tree_automaton completeTA A"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. complete_tree_automaton completeTA A
[PROOF STEP]
proof (standard, goal_cases)
[PROOF STATE]
proof (state)
goal (5 subgoals):
1. finite (ta_rules completeTA)
2. finite (ta_initial completeTA)... | {"llama_tokens": 8076, "file": "Tree-Automata_Ta", "length": 56} |
# -*- coding: utf-8 -*-
"""
Honest election simulations comparing IRV, plurality, and score voting
"""
import votesim
from votesim.models import spatial
from votesim.utilities.write import StringTable
import matplotlib.pyplot as plt
#import seaborn as sns
import numpy as np
import pandas as pd
#votesim.logconfig.... | {"hexsha": "507b4ce57a984c71d693b16d38c53de444c4c6ee", "size": 2201, "ext": "py", "lang": "Python", "max_stars_repo_path": "archive/examples/center_squeeze_3way/center_squeeze.py", "max_stars_repo_name": "johnh865/election_sim", "max_stars_repo_head_hexsha": "b73b7e65f1bb22abb82cbe8442fcf02b0c20894e", "max_stars_repo_l... |
#include <boost/pool/object_pool.hpp>
int main()
{
boost::object_pool<int> pool;
int *i = pool.malloc();
*i = 1;
int *j = pool.construct(2);
pool.destroy(i);
pool.destroy(j);
} | {"hexsha": "a5b78307c0b198160a865dc560ae2eb9d5e81122", "size": 192, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "Example/pool_02/main.cpp", "max_stars_repo_name": "KwangjoJeong/Boost", "max_stars_repo_head_hexsha": "29c4e2422feded66a689e3aef73086c5cf95b6fe", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
/**
* @file visitor.hpp
* @author Leonardo Arcari (leonardo1.arcari@gmail.com)
* @version 1.0.0
* @date 2018-10-28
*
* @copyright Copyright (c) 2018 Leonardo Arcari
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation... | {"hexsha": "323f8ec119e08745b68cc01bd96197ceaf1462bc", "size": 9411, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "include/arlib/routing_kernels/visitor.hpp", "max_stars_repo_name": "ashishkashinath/arlib", "max_stars_repo_head_hexsha": "891aa8603a6e07a16aec5700e7129a0d14a40b84", "max_stars_repo_licenses": ["MIT... |
#!/usr/bin/python -Wall
# ================================================================
# Tom loredo
# loredo at spacenet dot tn dot cornell dot edu
# ================================================================
# Howdy -
#
# Below are some pure Python special functions, some adapted from Numerical
# Recipes, s... | {"hexsha": "e6f16c066af12cab30acbafa10488e845bccc1bf", "size": 6756, "ext": "py", "lang": "Python", "max_stars_repo_path": "pythonlib/sp_funcs_m.py", "max_stars_repo_name": "johnkerl/scripts-math", "max_stars_repo_head_hexsha": "71b6c6617728290ce9643c80a75a402707ad1134", "max_stars_repo_licenses": ["BSD-2-Clause"], "ma... |
[STATEMENT]
lemma loan_zero [simp]: "\<delta> n 0 = 0"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<delta> n 0 = 0
[PROOF STEP]
unfolding loan_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<iota> (\<lambda>m. if n = m then 0 else 0) = 0
[PROOF STEP]
using zero_account_def
[PROOF STATE]
proof (prove)
usi... | {"llama_tokens": 210, "file": "Risk_Free_Lending_Risk_Free_Lending", "length": 3} |
default_datastore() = joinpath("data", "$VERSION")
default_benchmarkstore() = joinpath(default_datastore(), "benchmark")
| {"hexsha": "148ddd83fa8442a6d6666eeb1bea176e43927708", "size": 121, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/core.jl", "max_stars_repo_name": "tkf/ThreadsAPIBenchmarks.jl", "max_stars_repo_head_hexsha": "4e6b8795334a45fd3ffa687a70bb68efec0c4839", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 2... |
"""
pyrad.prod.process_product
==========================
Functions for obtaining Pyrad products from the datasets
.. autosummary::
:toctree: generated/
generate_occurrence_products
generate_cosmo_coord_products
generate_cosmo_to_radar_products
generate_sun_hits_products
generate_qvp_products... | {"hexsha": "e40d2ebe00295e45e96df9afd2dc2bcc075371b3", "size": 29763, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/pyrad_proc/pyrad/prod/process_product.py", "max_stars_repo_name": "jfigui/pyrad", "max_stars_repo_head_hexsha": "7811d593bb09a7f8a621c0e8ae3f32c2b85a0254", "max_stars_repo_licenses": ["BSD-3-... |
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in wr... | {"hexsha": "b5ae4137986fe36881df6af12a5f95c09cea27dd", "size": 1625, "ext": "py", "lang": "Python", "max_stars_repo_path": "numba_dppy/dpctl_iface/__init__.py", "max_stars_repo_name": "akharche/numba-dppy", "max_stars_repo_head_hexsha": "f12dac64b149bd72f305f341ff64b796bbb648c1", "max_stars_repo_licenses": ["Apache-2.0... |
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicab... | {"hexsha": "07866c3e096662e08459e47001ec045fe371f7a2", "size": 5019, "ext": "py", "lang": "Python", "max_stars_repo_path": "stochastic_polyak/spsdiag_solver.py", "max_stars_repo_name": "xxdreck/google-research", "max_stars_repo_head_hexsha": "dac724bc2b9362d65c26747a8754504fe4c615f8", "max_stars_repo_licenses": ["Apach... |
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import glob
from models.densenet import densenet121, densenet161
from data_util import *
import models_old.tf_to_pytorch_inception_v1 as inception
import models_old.tf_to_pytorch_resnet_v1_50 as resnet
import models_old.tf_to... | {"hexsha": "657017c177f727c48d7420ce4f5623f41e456aa2", "size": 24083, "ext": "py", "lang": "Python", "max_stars_repo_path": "attack_tijiao2.py", "max_stars_repo_name": "jiangyangzhou/Non-targeted-Attack-IJCAI2019-ColdRiver", "max_stars_repo_head_hexsha": "f9f26b4e00241c7831a2e46a0a2c965457fe99e5", "max_stars_repo_licen... |
"""
Module providing handling of the LAT effective area.
$Header: /nfs/slac/g/glast/ground/cvs/pointlike/python/uw/irfs/effective_area.py,v 1.1 2016/06/22 17:02:51 wallacee Exp $
Author: Eric Wallace
"""
__version__ = "$Revision: 1.1 $"
import os
import numpy as np
from astropy.io import fits
from scipy import in... | {"hexsha": "c529b22ac3333db7730974c9c26b6c1cea77d905", "size": 2637, "ext": "py", "lang": "Python", "max_stars_repo_path": "python/uw/irfs/effective_area.py", "max_stars_repo_name": "coclar/pointlike", "max_stars_repo_head_hexsha": "7088724b5a40cf787371aff69e64c9bec701f578", "max_stars_repo_licenses": ["BSD-3-Clause"],... |
include("StructEquality.jl")
include("ImportACounter.jl")
b_counter = CounterX(1)
println(a_counter.id)
println(b_counter.id)
println(a_counter == b_counter) # False
| {"hexsha": "d471ae3f698bdf7edfcbb917a2296d8e221479d4", "size": 176, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/Scripts/ImportStructEquality.jl", "max_stars_repo_name": "Sohva/oolong", "max_stars_repo_head_hexsha": "68b45cfa0b20b6d511db79f4aeacff358ed9b897", "max_stars_repo_licenses": ["MIT"], "max_stars_... |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | {"hexsha": "d4d1602cfc5622ca2515e8ae25f12e648a95335f", "size": 13098, "ext": "py", "lang": "Python", "max_stars_repo_path": "keras/distribute/keras_dnn_correctness_test.py", "max_stars_repo_name": "RakeshJarupula/keras", "max_stars_repo_head_hexsha": "2ac6638e91d5aff77c22b45e9c8c84fb05a9e477", "max_stars_repo_licenses"... |
import hashlib
import hmac
import base64
import requests
import urllib.parse as urlparse
import time
import os
import numpy as np
import pandas as pd
from tqdm import tqdm
import json
# Load Maps Platform API and signing secret from JSON file
with open("api_key.json", "r") as f:
credentials = json.load(f)
api_... | {"hexsha": "7f64ad8e40d2db3ac35590ab653c2b9172c5ac63", "size": 4546, "ext": "py", "lang": "Python", "max_stars_repo_path": "download_dataset_copy.py", "max_stars_repo_name": "aidan-fitz/SolarTracer", "max_stars_repo_head_hexsha": "31cc77ca974640be277d00c6ca23d82292f178c1", "max_stars_repo_licenses": ["Apache-2.0"], "ma... |
[STATEMENT]
lemma (in Corps) t_gen_vp:"\<lbrakk>valuation K v; t \<in> carrier K; v t = 1\<rbrakk> \<Longrightarrow>
(Vr K v) \<diamondsuit>\<^sub>p t = vp K v"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>valuation K v; t \<in> carrier K; v t = 1\<rbrakk> \<Longrightarrow> Vr K v ... | {"llama_tokens": 4171, "file": "Valuation_Valuation1", "length": 32} |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: Régis Gbenou
@email: regis.gbenou@outlook.fr
"""
###############################################################################
# PACKAGES
###############################################################################
import... | {"hexsha": "402d83fcea44631810d08867483cdb82daef6118", "size": 40585, "ext": "py", "lang": "Python", "max_stars_repo_path": "1_scripts/analyticalFunctions.py", "max_stars_repo_name": "rgb250/MostValuableFilmGenre", "max_stars_repo_head_hexsha": "cafd0a0db0ec592b29479d36831097b6dc1e5fda", "max_stars_repo_licenses": ["MI... |
[STATEMENT]
lemma the_riesz_rep[simp]: \<open>the_riesz_rep f \<bullet>\<^sub>C x = f *\<^sub>V x\<close>
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. the_riesz_rep f \<bullet>\<^sub>C x = f *\<^sub>V x
[PROOF STEP]
unfolding the_riesz_rep_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (SOME t. \<forall>x. ... | {"llama_tokens": 328, "file": "Complex_Bounded_Operators_Complex_Bounded_Linear_Function", "length": 3} |
import numpy as np
"""
Basic Neural Network example with 1 hidden layer.
Input layer: 3 neurons
Hidden layer: 4 neurons
Output layer: 1 neuron
"""
# nonlinear function which is on the end of the Neurons
def nonlin(x,deriv=False):
if(deriv==True):
return x*(1-x)
return 1/(1+np.exp(-x))
# ... | {"hexsha": "311971bae1ee31a8d73bf7f1d496e929cc988902", "size": 2667, "ext": "py", "lang": "Python", "max_stars_repo_path": "neuron_3.py", "max_stars_repo_name": "dallaszkorben/python-neuralnetwork-demo", "max_stars_repo_head_hexsha": "b5aea2bdc25163a88d17e5a5640cd54f65bc16ce", "max_stars_repo_licenses": ["MIT"], "max_s... |
"""
References:
https://github.com/TheAlgorithms/Python/blob/master/computer_vision/harris_corner.py
https://github.com/scikit-image/scikit-image/blob/main/skimage/feature/corner.py
"""
import cv2
import numpy as np
from .GaussianFilter import gaussian_filter
import skimage.feature
def img2col(image, block_size):
... | {"hexsha": "e2daaba966bc2518a7543a05373b5624347357a3", "size": 4061, "ext": "py", "lang": "Python", "max_stars_repo_path": "HarrisCorner.py", "max_stars_repo_name": "chcorophyll/general_image_process_python", "max_stars_repo_head_hexsha": "0ab3b3da246808c36822d31fa0fd226f8d4079ab", "max_stars_repo_licenses": ["Apache-2... |
import datetime
import serial
import csv
import re
import numpy as np
import matplotlib.pyplot as plt
SERIAL_PATH = 'COM6'
SERIAL_BAUD = 115200
ELEMENT_COUNT = 8
WINDOW = 60 * 60 # in seconds
OUT_PREFIX = "toaster_"
TEXT_ALPHA = 0.75
# for testing only
TEST_LINE = "0: 73 2: 79 3: 80 4: 75 5: 81 6: 80 7: 82"
if... | {"hexsha": "4eabd2286f5c5586bcb09908d83150a7136be2dd", "size": 3368, "ext": "py", "lang": "Python", "max_stars_repo_path": "toaster.py", "max_stars_repo_name": "CalSol/toaster-visualizer", "max_stars_repo_head_hexsha": "c381bc1d11e81aa0a162c12e5a272e7972e9c4aa", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_stars_c... |
import h5py
import numpy as np
import silx.math.fit
import silx.math.fit.peaks
# fileRead = '/home/esrf/slim/data/ihme10/id15/TiC_Calib/ihme10_TiC_calib.h5'
# filesave = '/home/esrf/slim/easistrain/easistrain/EDD/Results_ihme10_TiC_calib.h5'
# sample = 'TiC_calib'
# dataset = '0001'
# scanNumber = '4'
# horizontalDet... | {"hexsha": "995ad727062c721ba44a035d48089aea936c5a10", "size": 13388, "ext": "py", "lang": "Python", "max_stars_repo_path": "easistrain/EDD/angleCalibEDD_OLD.py", "max_stars_repo_name": "woutdenolf/easistrain", "max_stars_repo_head_hexsha": "0484168e33e548af01a5cc649abf815c45b182f1", "max_stars_repo_licenses": ["MIT"],... |
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing... | {"hexsha": "3349d687dcd911cb984dcb469fdca4fb155a0b52", "size": 11596, "ext": "py", "lang": "Python", "max_stars_repo_path": "alveo/examples/caffe/ssd-detect/run_ssd.py", "max_stars_repo_name": "abid-k/Vitis-AI", "max_stars_repo_head_hexsha": "80d327afe3b9bf76dbbb384d0da4d2603d256bc3", "max_stars_repo_licenses": ["Apach... |
[STATEMENT]
lemma subspace_inter: "subspace A \<Longrightarrow> subspace B \<Longrightarrow> subspace (A \<inter> B)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>subspace A; subspace B\<rbrakk> \<Longrightarrow> subspace (A \<inter> B)
[PROOF STEP]
by (simp add: subspace_def) | {"llama_tokens": 105, "file": null, "length": 1} |
"""Generate iset (interpolated set) from a given correlation matrix or list. iset can then be used as a guide for aligning texts."""
from typing import List, Tuple, Union
import numpy as np
import pandas as pd
# import joblib
# import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
# from absl impor... | {"hexsha": "5cb4323d5fe5c9e3b7be3bfa7ca3089e6c4c8918", "size": 3377, "ext": "py", "lang": "Python", "max_stars_repo_path": "tinybee/gen_iset.py", "max_stars_repo_name": "ffreemt/tinybee-aligner", "max_stars_repo_head_hexsha": "5a38b0634df12a197e50ba21efb017d58e4cd02d", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
using Bridge
import Base.valtype
abstract type DiffusionDomain end
struct UnboundedDomain <: DiffusionDomain end
bound_satisfied(::UnboundedDomain, x) = true
struct LowerBoundedDomain{T,N} <: DiffusionDomain
bounds::NTuple{N,T}
coords::NTuple{N,Integer}
function LowerBoundedDomain(bounds::NTuple{N,T},
... | {"hexsha": "16ca34bbc56904433c8737413ee955f9fe0f19e8", "size": 4504, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/stochastic_process/bounded_diffusion_domain.jl", "max_stars_repo_name": "mmider/BridgeSDEInference.jl", "max_stars_repo_head_hexsha": "c18dbe9c45bba9ef1d19e70deec8754df2c05293", "max_stars_repo... |
[STATEMENT]
lemma length_append [simp]: "length (xs @ ys) = length xs + length ys"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. length (xs @ ys) = length xs + length ys
[PROOF STEP]
by (induct xs) auto | {"llama_tokens": 82, "file": null, "length": 1} |
@testset "GrpAbFinGen" begin
@testset "Type stuff" begin
@test elem_type(GrpAbFinGen) == GrpAbFinGenElem
@test parent_type(GrpAbFinGenElem) == GrpAbFinGen
end
@testset "Constructor" begin
M1 = matrix(FlintZZ, 2, 3, [1, 2, 3, 4, 5, 6])
G = @inferred AbelianGroup(M1)
@test isa(G, GrpAbFinGen)
... | {"hexsha": "b5bea786d5084ff465cf135050036cf863407352", "size": 8341, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "test/GrpAb/GrpAbFinGen.jl", "max_stars_repo_name": "edgarcosta/Hecke.jl", "max_stars_repo_head_hexsha": "3ba4c63908eaa256150a055491a6387a45b081ec", "max_stars_repo_licenses": ["BSD-2-Clause"], "max... |
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from scipy.io import savemat, loadmat
import numpy as np
import drawdown as dd
from pyPCGA import PCGA
import math
if __name__ == '__main__': # for windows application
# model domain and discretization
import pdb
pdb.set_trace()
... | {"hexsha": "7640263fc7b109bcd678dac24804066e1d4b8bc6", "size": 5614, "ext": "py", "lang": "Python", "max_stars_repo_path": "pumping_history_identification/example_linear_inversion_testing.py", "max_stars_repo_name": "offglitch/GUI-pyPCGA", "max_stars_repo_head_hexsha": "f76ae8f4e97273ebacf8f0dbd8491e00dc900f75", "max_s... |
import cv2
import numpy as np
import torch
import torch.tensor as Tensor
import torch.nn.functional as F
piece_count = 3
batch_size = 2
padding = 6
def imwrite(filename: str, image: Tensor):
cv2.imwrite(filename, convert_to_cv_8bit(image))
def convert_to_cv_8bit(image: Tensor):
if len(image.shape) == 4:
... | {"hexsha": "dc019bbc344977d9272c86636e6df231650cfaf9", "size": 3591, "ext": "py", "lang": "Python", "max_stars_repo_path": "cm_modules/utils.py", "max_stars_repo_name": "SergejVolkov/SR_base", "max_stars_repo_head_hexsha": "285b40c0bbe9dc46f2bd660dc80ff255b4dc65a0", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
#! -*- coding:utf-8 -*-
from __future__ import print_function
import numpy
def read(inp_file):
f_in = open(inp_file, 'r')
lines = f_in.readlines()
words_map = {} #マッピング用
char_map = {} #マッピング用
word_cnt = 0 #単語の種類数
char_cnt = 0 #文字の種類数
k_chr = 3 #文字コンテクストウィンドウ
k_wrd = 5 #単語コンテク... | {"hexsha": "8a3f45a5df3efb9067542192337d40e69ddf7345", "size": 2122, "ext": "py", "lang": "Python", "max_stars_repo_path": "char_load.py", "max_stars_repo_name": "satopirka/CharSCNN-theano", "max_stars_repo_head_hexsha": "2b142b9e494e5c5f29bfd3a5b3b5fb4d24f065de", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
#!/usr/bin/env python3
# imports go here
import time as time
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from sklearn.feature_extraction.image import grid_to_graph
from sklearn.cluster import AgglomerativeClustering
#
# Free Coding session for 2015-06-09
# Written by Matt Warren
#
lena = sp... | {"hexsha": "2ed6ffc3d7cd8d78362ae851e58d4a8706ff894b", "size": 849, "ext": "py", "lang": "Python", "max_stars_repo_path": "2015/06/fc_2015_06_09.py", "max_stars_repo_name": "mfwarren/FreeCoding", "max_stars_repo_head_hexsha": "58ac87f35ad2004a3514782556762ee0ed72c39a", "max_stars_repo_licenses": ["MIT"], "max_stars_cou... |
import sys
sys.path.append('core')
import argparse
import os
import cv2
import glob
import numpy as np
import torch
from PIL import Image
from raft import RAFT
from utils import flow_viz
from utils.utils import InputPadder
from io_list import save_list
DEVICE = torch.device('cuda' if torch.cuda.is_available() else ... | {"hexsha": "44145f78e5a98ab5ea950f63bd6b06fdf769f6d9", "size": 5402, "ext": "py", "lang": "Python", "max_stars_repo_path": "demo.py", "max_stars_repo_name": "skylook/RAFT", "max_stars_repo_head_hexsha": "e0a4af12f76b4bba34a8bc1628a820dd5117c88a", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_stars_count": null, "ma... |
[STATEMENT]
lemma set_restriction_fun_empty_conv: "
((set_restriction_fun P) A = {}) = (\<forall>x\<in>A. \<not> P x)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (set_restriction_fun P A = {}) = (\<forall>x\<in>A. \<not> P x)
[PROOF STEP]
unfolding set_restriction_fun_def
[PROOF STATE]
proof (prove)
goal (1 su... | {"llama_tokens": 186, "file": "List-Infinite_CommonSet_SetIntervalCut", "length": 2} |
#!/usr/bin/env python3
# This script is used to estimate an accuracy of different face detection models.
# COCO evaluation tool is used to compute an accuracy metrics (Average Precision).
# Script works with different face detection datasets.
import os
import json
from fnmatch import fnmatch
from math import pi
import ... | {"hexsha": "6a6b091021f994cb4f642deb7e880791f9ee5351", "size": 8959, "ext": "py", "lang": "Python", "max_stars_repo_path": "python/cvi_toolkit/eval/eval_ssd300_face.py", "max_stars_repo_name": "sophgo/tpu_compiler", "max_stars_repo_head_hexsha": "6299ea0a3adae1e5c206bcb9bedf225d16e636db", "max_stars_repo_licenses": ["A... |
#include <boost/timer.hpp>
| {"hexsha": "294211880af67e4bbc3227a77a510645ef179ba8", "size": 27, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "src/boost_timer.hpp", "max_stars_repo_name": "miathedev/BoostForArduino", "max_stars_repo_head_hexsha": "919621dcd0c157094bed4df752b583ba6ea6409e", "max_stars_repo_licenses": ["BSL-1.0"], "max_stars_c... |
#ifndef REF_DESCRIPTORS_IMPL_IPP
#define REF_DESCRIPTORS_IMPL_IPP
#include <ref/Class.hpp>
#include <ref/DescriptorsImpl.hpp>
#include <ref/Holder.hpp>
#include <ref/detail/Name.hpp>
#include <iterator>
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <boost/mpl/for_each.hpp>
#include <boost/lexical... | {"hexsha": "60ac3f38a1210e5311fd3ec61d6d12aa5451384b", "size": 18959, "ext": "ipp", "lang": "C++", "max_stars_repo_path": "ref/DescriptorsImpl.ipp", "max_stars_repo_name": "asenac/refcpp", "max_stars_repo_head_hexsha": "6b1ab20e65b3e5159fb2c7dd3b351dcc047516cd", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 7.... |
from __future__ import division, print_function, absolute_import
import sklearn
from . import matplotlibhelpers as mplh
import numpy as np
def get_tsne_embedding(affinity_mat, aff_to_dist_mat, perplexity, **kwargs):
from sklearn import manifold
tsne = sklearn.manifold.TSNE(metric='precomputed', perplexity=perp... | {"hexsha": "06c07ab48e7ad2866bbe563350a405ec5085ea77", "size": 703, "ext": "py", "lang": "Python", "max_stars_repo_path": "modisco/visualization/tsne.py", "max_stars_repo_name": "XiaotingChen/tfmodisco", "max_stars_repo_head_hexsha": "17cbafe806942304a02e8134fe10224bdff38b0c", "max_stars_repo_licenses": ["MIT"], "max_s... |
# This file is part of GridCal.
#
# GridCal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GridCal is distributed in the hope that... | {"hexsha": "4c3693dd2c62eefd5b842d77d4df0c1627c14c07", "size": 31249, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/GridCal/Engine/Devices/branch.py", "max_stars_repo_name": "vineetjnair9/GridCal", "max_stars_repo_head_hexsha": "5b63cbae45cbe176b015e5e99164a593f450fe71", "max_stars_repo_licenses": ["BSD-3-... |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, s... | {"hexsha": "3adedf3d5c6631908e61c91553ba220eb45ad442", "size": 18209, "ext": "py", "lang": "Python", "max_stars_repo_path": "response_model/python/metric_learning/end_to_end/encoding_models_experimental.py", "max_stars_repo_name": "googlearchive/rgc-models", "max_stars_repo_head_hexsha": "0dea94bbd54f591d82d95169e33d40... |
import cv2, pyrebase, socket,struct
import numpy as np
from tracker import CentroidTracker
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(33,GPIO.IN)
GPIO.setup(34,GPIO.IN)
GPIO.setup(36,GPIO.OUT)
GPIO.setup(37,GPIO.OUT)
RollOut = GPIO.PWM(36,300)
PitchOut = GPIO.PW... | {"hexsha": "39c7f672d824973d4640413f3bd44ca0e3ab17a8", "size": 6922, "ext": "py", "lang": "Python", "max_stars_repo_path": "Raspberry Pi Code/TrackAI main.py", "max_stars_repo_name": "phoenixlamarsacs9/DroneTrackingAI", "max_stars_repo_head_hexsha": "f5babad1cc05622af153c40a46d02c9bb41e2e71", "max_stars_repo_licenses":... |
import pickle
import os
import matplotlib.pyplot as plt
import numpy as np
varying_rollout_folder = 'code/varying_rollout'
clone_results_subfolder = 'clone_results'
dagger_results_subfolder = 'dagger_results'
expert_results_folder = 'code/expert_results'
out_folder = '3_1_graphs'
envnames = ['Ant-v2','HalfCheetah-v... | {"hexsha": "fd468615c586b9bb3147989ddb3891f611f333e7", "size": 2521, "ext": "py", "lang": "Python", "max_stars_repo_path": "hw1/3_1_draw_graph.py", "max_stars_repo_name": "hsilva664/Reinforcement_Learning_Course", "max_stars_repo_head_hexsha": "6a250bc017965bec76b6fe909068e40127e62fa7", "max_stars_repo_licenses": ["MIT... |
"""Utility functions module"""
import numpy as np
def distance(p1, p2):
"""Distance between two points"""
return np.sqrt(((p1[0] - p2[0]) ** 2) + ((p1[1] - p2[1]) ** 2))
def xamtfos(x, sig):
aux = (1 / (np.sqrt(2 * np.pi * sig ** 2)))
return -aux * (np.e ** -(x ** 2 / (2 * (sig ** 2)))) + aux + 1
... | {"hexsha": "a1d15d29761885e1fc2388aff1c79073f941d839", "size": 607, "ext": "py", "lang": "Python", "max_stars_repo_path": "tracer/utils.py", "max_stars_repo_name": "leopiney/tscf", "max_stars_repo_head_hexsha": "d98fbfe06abbf1d29458ddd147b7f1d99118e4ed", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "max... |
#########################################################################
#
# iterators.py - This file is part of the Spectral Python (SPy) package.
#
# Copyright (C) 2014 Thomas Boggs
#
# Spectral Python is free software; you can redistribute it and/
# or modify it under the terms of the GNU General Public Lic... | {"hexsha": "8ce82f84c98b924d603f7f7d185c64650e7ff050", "size": 5195, "ext": "py", "lang": "Python", "max_stars_repo_path": "ensemble_detectors/src/Algorithm_1_matchfilter/spectral_lib/spectral/tests/iterators.py", "max_stars_repo_name": "satish1901/Methane-detection-from-hyperspectral-imagery", "max_stars_repo_head_hex... |
"""
Here the structure of the network is made in pytorch
"""
from typing import List, Union, Optional
import torch
import os
from logger import logger
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from scipy.stats import norm
class Encoder(nn.Module):
"""
Encodes the data using a CN... | {"hexsha": "5d85349d2e309e5adea8b4afbf0f677a6d087990", "size": 11616, "ext": "py", "lang": "Python", "max_stars_repo_path": "code/vae_model.py", "max_stars_repo_name": "JMitnik/FacialDebiasing", "max_stars_repo_head_hexsha": "9003ab484ef7bdeb372d7e5cdcbe0b162e76dc39", "max_stars_repo_licenses": ["MIT"], "max_stars_coun... |
import numpy
import math
import random
random.seed(1337)
def next_poisson(rate):
return -math.log(1.0 - random.random()) / rate
def next_gaussian(mean, dev):
return int(numpy.random.normal(mean, dev, 1))
| {"hexsha": "3a672dfb0f12a7c191bd57ac299378de1ff216d8", "size": 214, "ext": "py", "lang": "Python", "max_stars_repo_path": "utils/math.py", "max_stars_repo_name": "uzum/cran-simulator", "max_stars_repo_head_hexsha": "eb9544ddf7e4312df8d88616cea2252feb270525", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, ... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This script visualize the semantic segmentation of ENet.
"""
import os
import numpy as np
from argparse import ArgumentParser
from os.path import join
import argparse
import sys
caffe_root = '/home/czm/CapStone/Capstone/ENet/caffe-enet/' # Change this to the absolute d... | {"hexsha": "2f00352d38997a5f274625fe06de872f7d17e163", "size": 3100, "ext": "py", "lang": "Python", "max_stars_repo_path": "ENet/scripts/test_segmentation.py", "max_stars_repo_name": "kuonangzhe/Visional_AutoDrive", "max_stars_repo_head_hexsha": "539d9858cbbcc8551343e2d067200c353175e7b2", "max_stars_repo_licenses": ["B... |
import numpy as np
import cv2
from hand_tracker import HandTracker
det = HandTracker('models/palm_detection_without_custom_op.tflite',
'models/hand_landmark_3d.tflite',
'data/anchors.csv',
box_shift=-0.5, box_enlarge=2.6)
in_bgr = cv2.imread('data/test_img1.jpg')
i... | {"hexsha": "ce3e4276e30942beb50a47aa0d08457709b7c8c5", "size": 1058, "ext": "py", "lang": "Python", "max_stars_repo_path": "test.py", "max_stars_repo_name": "yin-zhang/hand_tracking", "max_stars_repo_head_hexsha": "a9495c2639963a22fe6dbe1989c12c6d1f629198", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_count": ... |
from pylab import *
from numpy import NaN
def m(a, max_iter=100):
    """Mandelbrot escape time for the parameter ``a``.

    Iterates z -> z**2 + a starting from z = 0 and returns the first
    iteration count n (1 <= n < max_iter) at which |z| exceeds 2, or NaN
    when the orbit stays bounded for the whole budget (point treated as
    inside the set).

    Args:
        a: complex (or real) parameter of the quadratic map.
        max_iter: iteration budget; defaults to 100, matching the
            original hard-coded limit.
    """
    z = 0
    for n in range(1, max_iter):
        z = z**2 + a
        if abs(z) > 2:
            return n
    return NaN
X = arange(-2, .5, .002)
Y = arange(-1, 1, .002)
Z = zeros((len(Y), len(X)))
for iy, y in enumerate(Y):
print (iy, "of", len(Y))
for ix, x in enumerate(X):
Z[iy,ix] = m(x +... | {"hexsha": "d99e405c1614446538ba570f34f62094396175b2", "size": 504, "ext": "py", "lang": "Python", "max_stars_repo_path": "Task/Mandelbrot-set/Python/mandelbrot-set-3.py", "max_stars_repo_name": "LaudateCorpus1/RosettaCodeData", "max_stars_repo_head_hexsha": "9ad63ea473a958506c041077f1d810c0c7c8c18d", "max_stars_repo_l... |
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2019 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from numbers import Number
import numpy as np
from pymor.tools import mpi
from pymor.vectorarray... | {"hexsha": "db8f500401757d8989c42fdb1f1ba821c6cea39c", "size": 917, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/pymor/playground/vectorarrays/mpi.py", "max_stars_repo_name": "ManuelMBaumann/pymor", "max_stars_repo_head_hexsha": "9ad226a0a46c7ba30a18bdab27b8bbbfe8f83a31", "max_stars_repo_licenses": ["Unli... |
function cm = getColorMap(num_colors)
%GETCOLORMAP Return default k-Wave color map.
%
% DESCRIPTION:
% getColorMap returns the default color map used for display and
% visualisation across the k-Wave Toolbox. Zero values are displayed
% as white, positive values are displayed as yellow through red ... | {"author": "wme7", "repo": "Aero-matlab", "sha": "9430008f2e3b84f28633775a44dff534e780fbac", "save_path": "github-repos/MATLAB/wme7-Aero-matlab", "path": "github-repos/MATLAB/wme7-Aero-matlab/Aero-matlab-9430008f2e3b84f28633775a44dff534e780fbac/K-wave/k-Wave/getColorMap.m"} |
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 8 13:17:12 2018
@author: Raj
"""
from ffta.hdf_utils import hdf_utils
from matplotlib import pyplot as plt
def test_pixel(h5_file, param_changes={}, pxls = 1, showplots = True,
verbose=True, clear_filter = False):
"""
Takes a random pixel and does standard pro... | {"hexsha": "927602feda4e222a55a54b802da89270eac36339", "size": 1610, "ext": "py", "lang": "Python", "max_stars_repo_path": "ffta/analysis/test_pixel.py", "max_stars_repo_name": "GingerLabUW/FFTA", "max_stars_repo_head_hexsha": "576591d6ba23731c26f7dfa90591e94795f1b288", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
# Digital Signal Processing - Lab 1 - Part 2
# Dimitris Dimos - 03117165
# Christos Dimopoulos - 03117037
import numpy as np
from numpy import random
import librosa
import matplotlib.pyplot as plt
import pywt
import sounddevice as sd
from scipy import signal as sg
import math
plt.close('all')
# 2.1
# QUESTION (a)
... | {"hexsha": "7f670f92a83ebbb571f9b75f21c9b90f4bbe1cbe", "size": 3994, "ext": "py", "lang": "Python", "max_stars_repo_path": "labs/Lab1/source/part2.py", "max_stars_repo_name": "d-dimos/dsp_ntua", "max_stars_repo_head_hexsha": "5702d3607682d6bfe6904a290d57ea0caed00b21", "max_stars_repo_licenses": ["MIT"], "max_stars_coun... |
import numpy as np
import sys,os
import readgadget
import MAS_library as MASL
import Pk_library as PKL
def generatePk(simroot, outroot, dims, ptypes, MAS, do_RSD, axis, threads, bigbox=True):
for i in xrange(0, 50):
for prefix in ["","NCV_0_","NCV_1_"]:
if ((prefix == "NCV_0_") ... | {"hexsha": "e0cf405f8a1068ffe0b55cf059447f6de688e443", "size": 2524, "ext": "py", "lang": "Python", "max_stars_repo_path": "Pk.py", "max_stars_repo_name": "andersdot/LyA-InvertPhase", "max_stars_repo_head_hexsha": "142a3c8e11be04c82d1cf61020e8fd54536ecc18", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "... |
import cv2
import numpy as np
class BlackBoard():
def __init__(self,img,i,cor,board):
self.cor=cor
self.frame=img
self.range=np.load('ohyea.npy')
self.board=board
self.flag=1
self.i=i
self.draw()
def draw(self):
self.frame=cv2.flip(self.frame,1)
... | {"hexsha": "7c66ed9ba3a969c51f4b0ab67abe2bdbb3dba0ea", "size": 2213, "ext": "py", "lang": "Python", "max_stars_repo_path": "T194/Backend/OpenCV/Process.py", "max_stars_repo_name": "krish232002/Hackathon_5.0", "max_stars_repo_head_hexsha": "78006c04aa902297649487d351c1ae33bac88b00", "max_stars_repo_licenses": ["MIT"], "... |
#! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
# Copyright Google 2016
# Copyright 2019 The BioNLP-HZAU Kaiyin Zhou
# Time:2019/04/08
"""
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import ... | {"hexsha": "02e3df71d3c6cb25bd6fe0856d84242d7fd41ac9", "size": 6791, "ext": "py", "lang": "Python", "max_stars_repo_path": "tfmetrics/metrics.py", "max_stars_repo_name": "kyzhouhzau/tfmetrics", "max_stars_repo_head_hexsha": "e7111ede6937dc4b27d0c3d1ab3497806940cbd8", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
"""shell
pip install -r https://raw.githubusercontent.com/datamllab/automl-in-action-notebooks/master/requirements.txt
"""
import tensorflow as tf
tf.random.set_seed(42)
"""
## Load data
"""
# Import the dataset loading function from sklearn
from sklearn.datasets import fetch_california_housing
# Load the Californ... | {"hexsha": "2bbbabbd28babf26281e3a7d6aa9608742901af9", "size": 5299, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/py/3.1-House-Price-Prediction-MLP.py", "max_stars_repo_name": "saibaldas/automl-in-action-notebooks", "max_stars_repo_head_hexsha": "4ddd105c7fccf2382091afaf05884ab816ad4b38", "max_stars_repo_... |
import argparse
import os
import nibabel as nib
import numpy as np
import pandas as pd
def find_sample(path):
"""
Args:
path (str): path to mri images
"""
labels_data = {"images": [], "labels": []}
t = 0
for case in os.listdir(path):
if case.startswith("."):
contin... | {"hexsha": "1de40bfa3a7690dbdbeaba45e7d3f2941f38a88c", "size": 4129, "ext": "py", "lang": "Python", "max_stars_repo_path": "neuro/scripts/prepare_data.py", "max_stars_repo_name": "ssktotoro/neuro", "max_stars_repo_head_hexsha": "2d6d4cd060ee368d67c3437cac2b64eeb7d7180c", "max_stars_repo_licenses": ["Apache-2.0"], "max_... |
/*=============================================================================
Copyright (c) 2002-2015 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
====================================... | {"hexsha": "02e5089b4ec032ba172551c89bd1844193dfdf1d", "size": 3294, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "REDSI_1160929_1161573/boost_1_67_0/libs/spirit/example/x3/complex_number.cpp", "max_stars_repo_name": "Wultyc/ISEP_1718_2A2S_REDSI_TrabalhoGrupo", "max_stars_repo_head_hexsha": "eb0f7ef64e188fe871f4... |
import unittest
import import_ipynb
import pandas as pd
import pandas.testing as pd_testing
import numpy as np
import numpy.testing as np_testing
from sklearn.cluster import KMeans
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
class Test(unittest.TestCase):
def setUp(... | {"hexsha": "91acd8d2c5568ae13258a8a9b75bc58cbb79f598", "size": 1747, "ext": "py", "lang": "Python", "max_stars_repo_path": "Chapter05/Exercise5.06/Test5.06.py", "max_stars_repo_name": "pmayd/The-Data-Science-Workshop", "max_stars_repo_head_hexsha": "a712f1fdbdf839c8b9288f4d4cdebcf5ebe146c4", "max_stars_repo_licenses": ... |
"""Basic Unit tests for Classes in Helper_functions"""
import unittest
import numpy as np
import random
from random import randint, sample
import helper_functions as thehelp
class NewDataFrameTests(unittest.TestCase):
"""Tests the NewDataFrame class within help_functions.py"""
def setUp(self):
"""Se... | {"hexsha": "c5006c9c89de5e11e24ab456000aa265c2f941df", "size": 897, "ext": "py", "lang": "Python", "max_stars_repo_path": "lambdata_Lopez_John/test_helper_functions.py", "max_stars_repo_name": "Lopez-John/lambdata-Lopez-John", "max_stars_repo_head_hexsha": "97f127d16a6f6304b62e9c13b5c8086ca98525bc", "max_stars_repo_lic... |
import dgl
import numpy as np
from openhgnn.models.MAGNN import mp_instance_sampler, mini_mp_instance_sampler
class MAGNN_sampler():
'''
Description
-----------
MAGNN sampler based on the implementation of author. The author only implements one sampling 1-layer subgraphs. Here
we implement the MAGN... | {"hexsha": "06b607582d2a09e54ba8c1e565d5eb1d7c99dc7c", "size": 8703, "ext": "py", "lang": "Python", "max_stars_repo_path": "openhgnn/sampler/MAGNN_sampler.py", "max_stars_repo_name": "clearhanhui/OpenHGNN", "max_stars_repo_head_hexsha": "9c3b7e7a4bc9b3df38ee6dc7154f950340ceec20", "max_stars_repo_licenses": ["Apache-2.0... |
"""
==============
Load converter
==============
This example demonstrates passing a custom converter to `numpy.genfromtxt` to
extract dates from a CSV file.
"""
import dateutil.parser
from matplotlib import cbook
import matplotlib.pyplot as plt
import numpy as np
datafile = cbook.get_sample_data('msft.csv', asfile... | {"hexsha": "793de7dc92649e76e437b4458e5147cb0df96644", "size": 575, "ext": "py", "lang": "Python", "max_stars_repo_path": "matplotlib-3.4.3/matplotlib-3.4.3/examples/misc/load_converter.py", "max_stars_repo_name": "JohnLauFoo/clc_packages_Yu", "max_stars_repo_head_hexsha": "259f01d9b5c02154ce258734d519ae8995cd0991", "m... |
//
// Created by janw on 02.06.2020.
//
// STL
#include <iostream>
#include <algorithm>
#include <sstream>
#include <string>
#include <boost/iostreams/copy.hpp>
#include <boost/iostreams/filtering_streambuf.hpp>
#include <boost/iostreams/filter/zlib.hpp>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#in... | {"hexsha": "e782765a8d11648ae0195df8e7e2e64dd9ccf4bc", "size": 27026, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "laser_slam_ros/src/visual_view.cpp", "max_stars_repo_name": "LRMPUT/laser_slam", "max_stars_repo_head_hexsha": "99c69859ac26bbe13009474f461d14637b89cc4a", "max_stars_repo_licenses": ["BSD-3-Clause"... |
using MAT
using PyPlot
using ADCME
using DelimitedFiles
data = matread("data.mat")["data"]
kid = 3
tid = 1
if length(ARGS)==2
global kid = parse(Int64, ARGS[1])
global tid = parse(Int64, ARGS[2])
end
@info kid, tid
d = data[kid]
n = div(size(d, 1), 5)
H = Variable(1.0)^2
ε = d[1:n, 1]
σ = d[1:n, 2]
func... | {"hexsha": "8d9739e66111c367bd305f7fd1f9ed42d24c8d27", "size": 1729, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "AdFemResearch/viscoelasticity_experimental_data/nnfit.jl", "max_stars_repo_name": "ADCMEMarket/ADCMEImages", "max_stars_repo_head_hexsha": "d89df7050f53e56f3c509ff737199068410dbbc2", "max_stars_rep... |
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import os
import sys
import tensorflow as tf
import cv2
import numpy as np
import json
import math
from tqdm import tqdm
from multiprocessing import Queue, Process
import argparse
sys.pa... | {"hexsha": "13d4ac7a40126b091e63f4633ecb7dba8905745d", "size": 8183, "ext": "py", "lang": "Python", "max_stars_repo_path": "tools/eval_coco_multiprocessing.py", "max_stars_repo_name": "DetectionTeamUCAS/RetinaNet_Tensorflow", "max_stars_repo_head_hexsha": "bf05e25a11b420649f1c3f8bc58af66ba7bdf5c8", "max_stars_repo_lice... |
from scipy.stats import multivariate_normal
from scipy.signal import convolve2d
import matplotlib
try:
matplotlib.pyplot.figure()
matplotlib.pyplot.close()
except Exception:
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import os
# the colormap should assign light colors to low ... | {"hexsha": "5caf03c472b109f9c4a09de43fd631678b177ace", "size": 3671, "ext": "py", "lang": "Python", "max_stars_repo_path": "rllab/envs/mujoco/hill/terrain.py", "max_stars_repo_name": "RussellM2020/maml_gps", "max_stars_repo_head_hexsha": "631560dfd4e23dc2da9bfbbd2e3c5252aa9775c5", "max_stars_repo_licenses": ["MIT"], "m... |
import argparse
import datetime
import gym
import numpy as np
import itertools
import torch
from sac import SAC
from dctrain import DiscOptimizer
from tensorboardX import SummaryWriter
from replay_memory import ReplayMemory
from gym_navigation.envs.navigation import ContinuousNavigation2DEnv, ContinuousNavigation2DNREn... | {"hexsha": "321c1c2b4fa489561092a4d38a5f2715ceb1f72c", "size": 11459, "ext": "py", "lang": "Python", "max_stars_repo_path": "main.py", "max_stars_repo_name": "Steven-Ho/diayn-cont", "max_stars_repo_head_hexsha": "4f30e5593ae89ed70c78066022049bf40c4c005e", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "ma... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
import random
import string
import sys
import ctypes
import time
import json
import os
import multiprocessing
import yaml
import hashlib
from core.load_modules import load_all_languages
from core.time import now
from core.color import color
def process_condit... | {"hexsha": "022bb5a04c4baaf5ef97fbb76cc54e61a1bb46fe", "size": 18186, "ext": "py", "lang": "Python", "max_stars_repo_path": "core/utility.py", "max_stars_repo_name": "cbrunnkvist/Nettacker", "max_stars_repo_head_hexsha": "de2b0f9c64c48105eb2f7150ece9b44ecb8644ff", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_c... |
#pragma once
#include <cstddef>
#include <boost/iterator/counting_iterator.hpp>
#include <boost/iterator/transform_iterator.hpp>
#include <boost/iterator/iterator_facade.hpp>
#include "nifty/graph/detail/andres/grid-graph.hxx"
#include "nifty/tools/runtime_check.hxx"
#include "nifty/tools/for_each_coordinate.hxx"
#i... | {"hexsha": "57cce2d39b569d8deee6bc31bd5d95d21855f624", "size": 17237, "ext": "hxx", "lang": "C++", "max_stars_repo_path": "include/nifty/graph/undirected_grid_graph.hxx", "max_stars_repo_name": "konopczynski/nifty", "max_stars_repo_head_hexsha": "dc02ac60febaabfaf9b2ee5a854bb61436ebdc97", "max_stars_repo_licenses": ["M... |
import numpy as np
from ..hardware import Hardware
from ..world import World
async def drive_arc(world: "World", hardware: "Hardware", *,
                    target_x: float = 2.0, linear: float = 1.0,
                    angular_deg: float = 25.0):
    """Drive along an arc until the robot's predicted x reaches target_x, then stop.

    Args:
        world: provides the pose estimate read as ``world.robot.prediction.x``.
        hardware: drive interface; ``drive`` and ``stop`` are awaited.
        target_x: x coordinate at which to stop (same frame as the prediction).
        linear: first argument to ``hardware.drive`` — presumably linear
            speed; confirm against the Hardware implementation.
        angular_deg: steering value in degrees, converted to radians for
            ``hardware.drive``.

    Defaults reproduce the original hard-coded behavior (x < 2, drive(1, 25°)).
    """
    # Hoist the loop-invariant degree->radian conversion out of the loop.
    angular = np.deg2rad(angular_deg)
    while world.robot.prediction.x < target_x:
        await hardware.drive(linear, angular)
    await hardware.stop()
| {"hexsha": "afcfdad0276798e91e725317666e3d86004ea84b", "size": 249, "ext": "py", "lang": "Python", "max_stars_repo_path": "rosys/automations/arc.py", "max_stars_repo_name": "zauberzeug/rosys", "max_stars_repo_head_hexsha": "10271c88ffd5dcc4fb8eec93d46fe4144a9e40d8", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
from jax import random, vmap, numpy as jnp, value_and_grad
from jax.scipy.special import logsumexp
from jaxns.gaussian_process.utils import squared_norm
def log_tomographic_weight_function_stochastic(key, u, x1, p1, x2, p2):
"""
int w(x) f(x) dx = sum_i w(dx * i) f(dx * i) dx
where,
int w(x) dx = sum... | {"hexsha": "5afac086abdd97d43e642c2eea8fc1509dff3791", "size": 11283, "ext": "py", "lang": "Python", "max_stars_repo_path": "jaxns/gaussian_process/tomographic_kernel/tomographic_kernel_utils.py", "max_stars_repo_name": "fehiepsi/jaxns", "max_stars_repo_head_hexsha": "9cf9366f11ace564e21f938edf4d090fb5de137d", "max_sta... |
import xarray
import os
import numpy
import progressbar
import gsw
from ismip6_ocean_forcing.remap.interp1d import remap_vertical
from ismip6_ocean_forcing.remap.descriptor import get_antarctic_descriptor
from ismip6_ocean_forcing.remap.grid import LatLonGridDescriptor, \
LatLon2DGridDescriptor
from ismip6_ocean_... | {"hexsha": "3f6540638cc7751e043d6bc9206f5a922a2f2fb2", "size": 8990, "ext": "py", "lang": "Python", "max_stars_repo_path": "ismip6_ocean_forcing/model/remap.py", "max_stars_repo_name": "nicojourdain/ismip6-ocean-forcing", "max_stars_repo_head_hexsha": "2c28be2ce9c0e1e94db275a6a3e668ad4f871343", "max_stars_repo_licenses... |
# -*- coding: utf-8 -*-
"""
PEC cylinder
============
An example of scattering from a perfectly conducting cylinder
"""
import matplotlib.pyplot as plt
import numpy as np
import gyptis as gy
import gyptis.utils.data_download as dd
##############################################################################
# Refe... | {"hexsha": "606f3b20a7f94f2dc69227a7b7b60bbdee3e44c3", "size": 2344, "ext": "py", "lang": "Python", "max_stars_repo_path": "examples/scattering/plot_scattering2d_pec_cylinder.py", "max_stars_repo_name": "benvial/gyptis", "max_stars_repo_head_hexsha": "3f197fe33d0a0bac72be05a474baff4e450af0ab", "max_stars_repo_licenses"... |
from numpy import log
def compute_1d_array_entropy(_1d_array):
    """Compute the Shannon entropy (in nats) of a 1-D array of nonnegative weights.

    The array is normalized into a probability distribution first. Zero-weight
    entries are dropped, following the convention 0 * log(0) == 0; the
    previous expression returned NaN whenever any entry was zero.

    Args:
        _1d_array: 1-D numpy array of nonnegative values with a positive sum
            (assumed nonnegative — values are not validated here).

    Returns:
        -sum(p * log(p)) over the nonzero probabilities, as a float.
    """
    probabilities = _1d_array / _1d_array.sum()
    # Keep only nonzero probabilities so log() never sees 0.
    probabilities = probabilities[probabilities > 0]
    return -(probabilities * log(probabilities)).sum()
| {"hexsha": "a7c805c885022bdd274470e5a7f862662639c073", "size": 170, "ext": "py", "lang": "Python", "max_stars_repo_path": "ccal/compute_1d_array_entropy.py", "max_stars_repo_name": "alex-wenzel/ccal", "max_stars_repo_head_hexsha": "74dfc604d93e6ce9e12f34a828b601618df51faa", "max_stars_repo_licenses": ["MIT"], "max_star... |
import textwrap
from pathlib import Path
import pyexasol
import pytest
from exasol_udf_mock_python.column import Column
from exasol_udf_mock_python.connection import Connection
from exasol_udf_mock_python.group import Group
from exasol_udf_mock_python.mock_exa_environment import MockExaEnvironment
from exasol_udf_mock... | {"hexsha": "3db123e46456ae189a23df426bd125b5a0aff05a", "size": 19875, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/integration_tests/test_train_udf.py", "max_stars_repo_name": "exasol/data-science-utils-python", "max_stars_repo_head_hexsha": "44f4019b30f4945e14a8ee19c1a4f6bed68692d9", "max_stars_repo_li... |
# Probabilidad I:
Valor esperado e indicadores. Teorema de Bayes. Estimación Bayesiana.
# 0. PMFs y PDFs conjuntas.
- Una PMF conjunta $p_{X,Y}$ de las variables $X$ y $Y$ está definida como
\begin{equation}
p_{X,Y}(x,y)=P(X=x,Y=y)
\end{equation}
- La PMF marginal de X y Y puede ser obtenida a partir de la PMF c... | {"hexsha": "cd1e5f8003001db5f3d5c28f3a595b756c429cfe", "size": 67897, "ext": "ipynb", "lang": "Jupyter Notebook", "max_stars_repo_path": "7Estadistica/3_ProbabilidadI.ipynb", "max_stars_repo_name": "sergiogaitan/Study_Guides", "max_stars_repo_head_hexsha": "083acd23f5faa6c6bc404d4d53df562096478e7c", "max_stars_repo_lic... |
[STATEMENT]
lemma in_synth_GuardK: "[| X \<in> synth G; GuardK n Ks G |] ==> X \<in> guardK n Ks"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>X \<in> synth G; GuardK n Ks G\<rbrakk> \<Longrightarrow> X \<in> guardK n Ks
[PROOF STEP]
by (drule GuardK_synth, auto) | {"llama_tokens": 123, "file": null, "length": 1} |
/**********************************************************************
* Copyright (c) 2008-2015, Alliance for Sustainable Energy.
* All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published ... | {"hexsha": "cd98abb27addc1f51225eb7ca899a82cae297490", "size": 7945, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "openstudiocore/src/openstudio_lib/HVACSystemsController.hpp", "max_stars_repo_name": "pepsi7959/OpenstudioThai", "max_stars_repo_head_hexsha": "fb18afb8b983f71dd5eb171e753dac7d9a4b811b", "max_stars_... |
"""Code for simulations in [1].
[1] Håvard Kvamme and Ørnulf Borgan. Continuous and Discrete-Time Survival Prediction
with Neural Networks. arXiv preprint arXiv:1910.06724, 2019.
https://arxiv.org/pdf/1910.06724.pdf
"""
import numpy as np
import pandas as pd
import torchtuples as tt
from pycox.simulations imp... | {"hexsha": "bea01f63f9d2c932769aeef0de988ba77fb1be69", "size": 11784, "ext": "py", "lang": "Python", "max_stars_repo_path": "pycox/simulations/discrete_logit_hazard.py", "max_stars_repo_name": "nikolase90/pycox", "max_stars_repo_head_hexsha": "1c780253da7bab7eba0dc02e1436a68a9b812a66", "max_stars_repo_licenses": ["BSD-... |
"""
File contains everything required to load, provide expert trajectories to the discriminator.
"""
import random
import glob
import os
import math
import itertools
import numpy as np
import torch
from torch.utils.data import Dataset, DataLoader
from torch.nn.utils.rnn import pad_sequence
def pad_collate(traj_ba... | {"hexsha": "b1a5a9929f2da123257a8dbcdcf3138770969fb2", "size": 1943, "ext": "py", "lang": "Python", "max_stars_repo_path": "utils/expert_data_loader.py", "max_stars_repo_name": "sen-pai/Reccurent-GAIL", "max_stars_repo_head_hexsha": "67a1938fe584171d95cada5de58c8a516aa3f65f", "max_stars_repo_licenses": ["MIT"], "max_st... |
import json
from datetime import datetime
from decimal import Decimal
import numpy as np
class CustomEncoder(json.JSONEncoder):
def default(self, obj):
"""If input object is an ndarray it will be converted into a dict
holding dtype, shape and the data, base64 encoded.
"""
numpy_ty... | {"hexsha": "a4f44c39b8d139b23b4989536cc4a3f8c8a6896f", "size": 1738, "ext": "py", "lang": "Python", "max_stars_repo_path": "analysis/webservice/webmodel/CustomEncoder.py", "max_stars_repo_name": "ngachung/incubator-sdap-nexus", "max_stars_repo_head_hexsha": "38e768694fcc142e2d88283cb1e44e05f88da847", "max_stars_repo_li... |
import numpy as np
from pandas.core.dtypes.common import is_extension_array_dtype
from pandas.core.dtypes.dtypes import ExtensionDtype
import pandas as pd
import pandas._testing as tm
from .base import BaseExtensionTests
class BaseInterfaceTests(BaseExtensionTests):
"""Tests that the basic interface is satisfi... | {"hexsha": "6a4ff68b4580f119dabdb09aad427196ff2ff601", "size": 3932, "ext": "py", "lang": "Python", "max_stars_repo_path": "pandas/tests/extension/base/interface.py", "max_stars_repo_name": "CJL89/pandas", "max_stars_repo_head_hexsha": "6210077d32a9e9675526ea896e6d1f9189629d4a", "max_stars_repo_licenses": ["BSD-3-Claus... |
module MixedModelsSim
using LinearAlgebra
using MixedModels
using PooledArrays
using PrettyTables
using Random
using Statistics
using Tables
using MixedModels: replicate
export
create_re,
create_theta,
createθ,
cyclicshift,
factorproduct,
flatlowertri,
nlevels,
nlevstbl,
#withinit... | {"hexsha": "71e891e14d39ab900ed917c94e669272f3131ac6", "size": 505, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/MixedModelsSim.jl", "max_stars_repo_name": "RePsychLing/MixedModelsSim.jl", "max_stars_repo_head_hexsha": "0f39f5b088996da40290727744014405420f2953", "max_stars_repo_licenses": ["MIT"], "max_sta... |
%%
%% $Id$
%%
%% Copyright 1989-2014 MINES ParisTech
%%
%% This file is part of PIPS.
%%
%% PIPS is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by
%% the Free Software Foundation, either version 3 of the License, or
%% any later version.
%%
%... | {"hexsha": "74dba79b0d3854ed13a64236d1a6da62efca03ed", "size": 1414, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "packages/PIPS/pips/src/Documentation/newgen/c_parser_private.tex", "max_stars_repo_name": "DVSR1966/par4all", "max_stars_repo_head_hexsha": "86b33ca9da736e832b568c5637a2381f360f1996", "max_stars_rep... |
#%%
from os import listdir
from os.path import isfile, join
import cv2;
from pathlib import Path
from dotenv import find_dotenv, load_dotenv
# not used in this stub but often useful for finding various files
#project_dir = Path(__file__).resolve().parents[2]
# find .env automagically by walking up directories unti... | {"hexsha": "e3a501cb86c283cec902e599dc2511d5000db576", "size": 2377, "ext": "py", "lang": "Python", "max_stars_repo_path": "notebooks/team-detection.py", "max_stars_repo_name": "helldragger/MetaWatch", "max_stars_repo_head_hexsha": "3ef0d7bcf5603b57402f79fff90c84b20516fe82", "max_stars_repo_licenses": ["Unlicense"], "m... |
from __future__ import absolute_import, division, print_function
import os
import sys
import h5py
import numpy as np
from cctbx import factor_ev_angstrom
from scitbx.array_family import flex
from dxtbx.format.FormatHDF5 import FormatHDF5
from dxtbx.format.FormatStill import FormatStill
class FormatHDF5SaclaRayoni... | {"hexsha": "6712b80305aacfa138f8ba600f72bc546e03fd4f", "size": 4587, "ext": "py", "lang": "Python", "max_stars_repo_path": "format/FormatHDF5SaclaRayonix.py", "max_stars_repo_name": "dials/dx2", "max_stars_repo_head_hexsha": "94a5e2fd048b4133241c5a528ffaf7c504f22f8b", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_s... |
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
#%%
DAT = pd.read_fwf("table2", header=None)#, dtype="str")
#%%
DAT.columns = ["0", "1", "2", "3", "intensi", "long", "6", "7", "8", "9", "10", "11", "12", "13", "14"]
DAT
DAT_1 = DAT.apply(pd.to_numeric, errors="coerce")
DAT_1
type(DAT_1["inten... | {"hexsha": "2ed517746ac1e812e6cd37b0b46b28c6be9dff31", "size": 1338, "ext": "py", "lang": "Python", "max_stars_repo_path": "Nave.py", "max_stars_repo_name": "juanitopereza/Granulacion", "max_stars_repo_head_hexsha": "07e455fee338f86bf4dc5eae068b677e87a534d0", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null,... |
#ifndef _UTHHFF_CODEPY_BPL_HPP_INCLUDED
#define _UTHHFF_CODEPY_BPL_HPP_INCLUDED
#include <boost/python.hpp>
#include <boost/python/suite/indexing/vector_indexing_suite.hpp>
#define CODEPY_PYTHON_ERROR(TYPE, REASON) \
{ \
PyErr_SetString(PyExc_##TYPE, REASON); \
throw boost::python::error_already_set(); \
}
... | {"hexsha": "07ca6305cdf848e0fac209ab4eec7404318d2da2", "size": 931, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "codepy/include/codepy/bpl.hpp", "max_stars_repo_name": "reguly/codepy", "max_stars_repo_head_hexsha": "17801c99622a46bc6ca7308135f8e9235ba3c7f3", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
subroutine tmulta(
$ np,x,px,y,py,z,g,dv,sx,sy,sz,al,ak0,phi,
$ psi1,psi2,bz,
1 dx,dy,theta,dtheta,
$ eps0,krad,fb1,fb2,mfring,fringe)
use ffs_flag, only:ndivrad,calpol,photons
use tmacro
use multa
use tbendcom, only:tbrot,tbshift
use kradlib, onl... | {"hexsha": "d253a0a24a55310c4c33b4a44a9f173bf0e3c1bf", "size": 11849, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "src/tmulta.f", "max_stars_repo_name": "noboruatkek/SAD", "max_stars_repo_head_hexsha": "3881a190af8903537774c57b56963ffc94cfc039", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_stars_count": ... |
'''
Functions to select features based on various feature selection settings
Code by Carl Rietschel
https://github.com/carlr67/deephitplus
'''
import pandas as pd
import numpy as np
def apply_features(full_feat_list, feat_list, tr_data, va_data, te_data):
# Takes as input feat_list, an array of feature lists (le... | {"hexsha": "26ed2b7e836781ed4ceb8b8b3671735af3eaf53a", "size": 7297, "ext": "py", "lang": "Python", "max_stars_repo_path": "get_features.py", "max_stars_repo_name": "carlr67/deephitplus", "max_stars_repo_head_hexsha": "0d527b235cfa471a4d97853c599f3ef46837e78a", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_stars_co... |
from __future__ import print_function
from keras.models import Sequential, Model, load_model
from keras.layers import Dense, Activation, Embedding
from keras.layers import LSTM, Input
from keras.optimizers import RMSprop, Adam
from keras.utils.data_utils import get_file
from keras.layers.normalization import BatchNorma... | {"hexsha": "25c4383e36fe02b9902d840f8a40139be467d8c0", "size": 2213, "ext": "py", "lang": "Python", "max_stars_repo_path": "text_generator_keras_sample.py", "max_stars_repo_name": "tienthanhdhcn/char-embeddings", "max_stars_repo_head_hexsha": "860c92a0af3b13c525c33d7257ef0204aaf80e1c", "max_stars_repo_licenses": ["MIT"... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.