text stringlengths 0 1.25M | meta stringlengths 47 1.89k |
|---|---|
-- Type-level function: the result *type* depends on the Bool index.
-- True maps to a single Nat; False maps to a list of Nats.
isSingleton : Bool -> Type
isSingleton True = Nat
isSingleton False = List Nat
-- Construct a default value of whichever type `isSingleton x` computes:
-- 0 in the Nat case, the empty list in the List Nat case.
mkSingle : (x : Bool) -> isSingleton x
mkSingle True = 0
mkSingle False = []
-- Sum a dependently-typed value: a bare Nat is returned unchanged,
-- a List Nat is folded with (+) by structural recursion on the list.
sum : (single : Bool) -> isSingleton single -> Nat
sum True x = x
sum False [] = 0
sum False (x::xs) = x + sum False xs
| {"hexsha": "de1a12455033ba2b47eb40c74685ddea523944f6", "size": 291, "ext": "idr", "lang": "Idris", "max_stars_repo_path": "src/types.idr", "max_stars_repo_name": "0918nobita/idris", "max_stars_repo_head_hexsha": "e4ae7fbb95d7cd580b366e93747a069ccbeae31d", "max_stars_repo_licenses": ["CC0-1.0"], "max_stars_count": 1, "m... |
# Note that this script can accept some limited command-line arguments, run
# `julia build_tarballs.jl --help` to see a usage message.
using BinaryBuilder
name = "IpoptBuilder"
version = v"3.12.10"
# Collection of sources required to build IpoptBuilder
sources = [
"https://github.com/coin-or/Ipopt/archive/release... | {"hexsha": "9f9905aad611ed887579be4ab82335b1f6d06ffe", "size": 4863, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "build_tarballs.jl", "max_stars_repo_name": "juan-pablo-vielma/IpoptBuilder", "max_stars_repo_head_hexsha": "ae1aea31c8a3e0bc1b42c4f5e260de5da845b4af", "max_stars_repo_licenses": ["MIT"], "max_stars... |
from unittest.mock import MagicMock, patch
import numpy as np
from tensorflow_serving.apis.prediction_service_pb2_grpc import PredictionServiceStub
from chitra.serve.tf_serving.client import GrpcClient, create_grpc_stub, grpc_request
def test_create_grpc_stub():
assert isinstance(create_grpc_stub(), PredictionS... | {"hexsha": "7e5b6aaed5f1bdb2b16cb957fafd26de49637fab", "size": 1049, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/serve/test_tf_serving_client.py", "max_stars_repo_name": "aniketmaurya/Chitra", "max_stars_repo_head_hexsha": "e040311c25ccf2e101df5596662450ae532bee08", "max_stars_repo_licenses": ["Apache-... |
[STATEMENT]
lemma Bernstein_nonneg: "\<lbrakk>0 \<le> x; x \<le> 1\<rbrakk> \<Longrightarrow> 0 \<le> Bernstein n k x"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>0 \<le> x; x \<le> 1\<rbrakk> \<Longrightarrow> 0 \<le> Bernstein n k x
[PROOF STEP]
by (simp add: Bernstein_def) | {"llama_tokens": 127, "file": null, "length": 1} |
// auto-generated header by CodeFromTemplate
// CodeFromTemplate Version: 0.3 alpha
//
// NEVER TOUCH this file!
#include <exception>
#include <boost/assign/list_of.hpp>
#include "Store_Manager_Ringbuffer_Stub_UnitTest_GeneratorTestCode.h"
// --> Do NOT EDIT <--
namespace ConnectedVision {
namespace UnitTest {
name... | {"hexsha": "70f11950312285c3c37c0054ce798236ceea0749", "size": 1273, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "test/UnitTest/generated/stubs/Store_Manager_Ringbuffer_Stub_UnitTest_GeneratorTestCode.cpp", "max_stars_repo_name": "ConnectedVision/ConnectedVision", "max_stars_repo_head_hexsha": "210e49205ca50f73... |
(* Author: Tobias Nipkow *)
subsection "Collecting Semantics of Commands"
theory Collecting
imports Complete_Lattice Big_Step ACom
begin
subsubsection "The generic Step function"
notation
sup (infixl "\<squnion>" 65) and
inf (infixl "\<sqinter>" 70) and
bot ("\<bottom>") and
top ("\<top>")
context
fixes ... | {"author": "seL4", "repo": "isabelle", "sha": "e1ab32a3bb41728cd19541063283e37919978a4c", "save_path": "github-repos/isabelle/seL4-isabelle", "path": "github-repos/isabelle/seL4-isabelle/isabelle-e1ab32a3bb41728cd19541063283e37919978a4c/src/HOL/IMP/Collecting.thy"} |
# FileManager
# Zero-argument convenience constructor: wraps a freshly created
# `clang::FileManager` pointer obtained from `create_file_manager()`.
FileManager() = FileManager(create_file_manager())
"""
create_file_manager() -> CXFileManager
Return a pointer to a `clang::FileManager` object.
For now, `FileSystemOptions` is set to nothing and `llvm::vfs::FileSystem` defaults to the
"real" file system, as seen by the operating system.
TODO: supp... | {"hexsha": "21ae372c58a10940be94ecfe209c46a405d32af3", "size": 1529, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/clang/api/Basic/FileManager.jl", "max_stars_repo_name": "vchuravy/ClangCompiler.jl", "max_stars_repo_head_hexsha": "47080072b059465f8176349c6e67bc678fa238d2", "max_stars_repo_licenses": ["MIT"]... |
@testset "sample.jl" begin
@testset "Basic sampling" begin
@testset "REPL" begin
empty!(LOGGERS)
Random.seed!(1234)
N = 1_000
chain = sample(MyModel(), MySampler(), N; sleepy = true, loggers = true)
@test length(LOGGERS) == 1
logger =... | {"hexsha": "6e876d48c98e3d1af1982b5fc596bbc62fa5c30e", "size": 14479, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "test/sample.jl", "max_stars_repo_name": "kaandocal/AbstractMCMC.jl", "max_stars_repo_head_hexsha": "349799146c2cfe85d24a1efbfbc6b162f8850ca4", "max_stars_repo_licenses": ["MIT"], "max_stars_count"... |
import damselfly as df
import numpy as np
import pickle as pkl
import os
import matplotlib.pyplot as plt
temp = 10.0
result_date = '210607'
result_train_dset = '210607_df1_multiclass_ch3'
result_test_dset = '210607_df2_multiclass_test_ch3'
result_model = 'df_conv6_fc2_multiclass_3ch'
result_domain = 'freq'
result_epoc... | {"hexsha": "9cb136c60f59a551fd2956a0e41b20f978350ef2", "size": 3994, "ext": "py", "lang": "Python", "max_stars_repo_path": "analysis/plotting/scripts/2021415_plot_classification.py", "max_stars_repo_name": "zieglerad/damselfly", "max_stars_repo_head_hexsha": "d0ee9b13aa5981a91f62765cba6a263b584e7f25", "max_stars_repo_l... |
import numpy as np
import tensorflow as tf
import random
from dataloader import Gen_Data_loader, Dis_dataloader
from generator import Generator
from discriminator import Discriminator
from rollout import ROLLOUT
import pickle
import time
#################################################################################... | {"hexsha": "4355ceee07077432a2201e0010fe0e1685d55583", "size": 9255, "ext": "py", "lang": "Python", "max_stars_repo_path": "2_seqgan/sequence_gan.py", "max_stars_repo_name": "557mp/pk_story", "max_stars_repo_head_hexsha": "90399709a1b75c40867316beec12e32168bf9c34", "max_stars_repo_licenses": ["MIT"], "max_stars_count":... |
# Copyright (c) 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation, writers
from mpl_toolkits.mplot3d import Axes3D... | {"hexsha": "bfba40fba95fb2760b7f1e7858c2807d50348fff", "size": 4122, "ext": "py", "lang": "Python", "max_stars_repo_path": "common/visualization.py", "max_stars_repo_name": "kamisoel/kinematic_pose_estimation", "max_stars_repo_head_hexsha": "fd0fa7ce87b8b690e86572b2689604763c283d73", "max_stars_repo_licenses": ["MIT"],... |
from aocd import get_data, submit
import numpy as np
DAY = 7
YEAR = 2021
def part1(data: str) -> str:
    """Solve part 1: minimal total fuel to align all positions at one point.

    With a linear cost (1 fuel per step), the sum of absolute deviations is
    minimized at the median of the positions.

    :param data: comma-separated integer positions, e.g. "16,1,2,0,4".
    :return: the minimal total fuel, as a string.
    """
    positions = [int(token) for token in data.split(',')]
    target = int(np.median(positions))
    # Total fuel is the sum of distances from every position to the target.
    return str(sum(abs(pos - target) for pos in positions))
def part2(data: str) -> str:
nums = [int(n) for ... | {"hexsha": "fe114127c28a057b9d98e93c4d13d7483ebaddbc", "size": 723, "ext": "py", "lang": "Python", "max_stars_repo_path": "days/day7.py", "max_stars_repo_name": "vanHavel/AdventOfCode2021", "max_stars_repo_head_hexsha": "a83ee21cffff56ba3f49de7af5113bf0b11fea7a", "max_stars_repo_licenses": ["MIT"], "max_stars_count": n... |
# Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in wr... | {"hexsha": "b40f5c2fc1d703b51ccf14605a2288d616148615", "size": 1268, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/pandas/sr_unary_datetime_with_null.py", "max_stars_repo_name": "marcinz/legate.pandas", "max_stars_repo_head_hexsha": "94c21c436f59c06cfba454c6569e9f5d7109d839", "max_stars_repo_licenses": [... |
import cv2
import numpy as np
import glob
img_array = []
for filename in glob.glob('DATA/baseline/results/highway_MOG/*jpg'):
img = cv2.imread(filename)
height, width, layers = img.shape
size = (width, height)
img_array.append(img)
out = cv2.VideoWriter('resultTest.avi', cv2.VideoWriter_fourcc(*'DIVX')... | {"hexsha": "e6931289acfc4c7f29cd48deedd10251e3a3db0f", "size": 546, "ext": "py", "lang": "Python", "max_stars_repo_path": "BackgroundSubtraction/test_files/im_to_vid.py", "max_stars_repo_name": "JanuszJSzturo/ImageAnalysis2020", "max_stars_repo_head_hexsha": "8ef432ecdc9e5c04834acd4752848302d75d7856", "max_stars_repo_l... |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Train the model.
Usage:
train.py [<output>] [--ckpt=<ckpt>] [--batch_size=<batch_size>]
Options:
-h --help Show this help.
<batch_size> Batch size to train on
<output> Output folder. By default: ./outputs/
<ckpt> Path to the checkpoints to rest... | {"hexsha": "40f6e3a74728126b754da66973f55aa51fc3fba7", "size": 5004, "ext": "py", "lang": "Python", "max_stars_repo_path": "train_for_faces.py", "max_stars_repo_name": "krishnr/CapsNet4Faces", "max_stars_repo_head_hexsha": "1863b9d9524f6659e08625402053332ed6ea1415", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars... |
# Utility functions
# Fixed permutation of 1:24, written in 3-wide rows: four groups of six
# indices, each counting down in steps of 4 from 21, 22, 23 and 24.
# NOTE(review): presumably maps physical sensor positions to channel
# order for the pressure mat — confirm against callers in this package.
ord = [
21, 17, 13,
9, 5, 1,
22, 18, 14,
10, 6, 2,
23, 19, 15,
11, 7, 3,
24, 20, 16,
12, 8, 4]
function estimate_snr(x::AbstractVector; fs=10)
# Estimate SNR for a 30-102.4s segment
pow = power(periodogram(x, nfft=1024, window=hanning))
ps = argmax(pow)
n = mean(pow[Not... | {"hexsha": "70b8f2825ac30351eb08a1c4f576d37672e583cb", "size": 3744, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/utils.jl", "max_stars_repo_name": "carterjgreen/PressureSensitiveMats.jl", "max_stars_repo_head_hexsha": "5efe25c27bddd92291d1f90238c4105c603a68e5", "max_stars_repo_licenses": ["MIT"], "max_sta... |
from __future__ import annotations
import numpy as np
import pyvista as pv
from .._doc import doc
from ._mesh import Mesh
@doc(Mesh,
prefix='Data class for tetrahedral meshes',
dim_points='3',
dim_cells='4')
class TetraMesh(Mesh, cell_dim=4):
cell_type = 'tetra'
def to_open3d(self):
... | {"hexsha": "5458feb01547def64728ce66754f0856c2278ad3", "size": 3369, "ext": "py", "lang": "Python", "max_stars_repo_path": "nanomesh/mesh/_tetra.py", "max_stars_repo_name": "hpgem/nanomesher", "max_stars_repo_head_hexsha": "06e7648ff8b9ecf4cc1faa967469db6270c0ba5d", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars... |
import numpy as np
import pandas as pd
from PIL import Image
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import os
from shutil import copyfile
from tqdm import tqdm
import argparse
from QataCovDataset import QataCovDataset
from model.unet import UNet
import gc
def create_predict_data... | {"hexsha": "b762d539e7e30da83f46c07de07add1e75ee7da2", "size": 5694, "ext": "py", "lang": "Python", "max_stars_repo_path": "croped_data.py", "max_stars_repo_name": "salem-devloper/COVID-Lung-Segment", "max_stars_repo_head_hexsha": "6896f6b0c56dac6d32e005afd4a94d59b1917b44", "max_stars_repo_licenses": ["MIT"], "max_star... |
C$Procedure EKFIND ( EK, find data )
SUBROUTINE EKFIND ( QUERY, NMROWS, ERROR, ERRMSG )
C$ Abstract
C
C Find E-kernel data that satisfy a set of constraints.
C
C$ Disclaimer
C
C THIS SOFTWARE AND ANY RELATED MATERIALS WERE CREATED BY THE
C CALIFORNIA INSTITUTE OF TECHNOLOGY (CALTECH) UNDER A ... | {"hexsha": "b7413fbc9d38c276aeaf15eb7d6e398f06d84538", "size": 27081, "ext": "f", "lang": "FORTRAN", "max_stars_repo_path": "source/nasa_f/ekfind.f", "max_stars_repo_name": "agforero/FTFramework", "max_stars_repo_head_hexsha": "6caf0bc7bae8dc54a62da62df37e852625f0427d", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests that relate to using quantities/units on parameters of models.
"""
import numpy as np
import pytest
from ..core import Model, Fittable1DModel, InputParameterError
from ..parameters import Parameter, ParameterDefinitionError
from ..models impo... | {"hexsha": "ab4ebe8fae9fbc1df4df1c19fec308552ec239b8", "size": 11909, "ext": "py", "lang": "Python", "max_stars_repo_path": "astropy/modeling/tests/test_quantities_parameters.py", "max_stars_repo_name": "jbkalmbach/astropy", "max_stars_repo_head_hexsha": "88ae8c615533efd1e60de4aded204943f66f881c", "max_stars_repo_licen... |
__author__ = "Tomasz Rybotycki"
"""
A script containing some utilities that could be used in general QC simulaitons.
"""
# TODO TR: Consider releasing this file as a separate package.
from typing import List, Union
from numpy import (
abs,
linalg,
log,
ndarray,
sqrt,
pi,
exp,
asar... | {"hexsha": "93d4c9b4480c00dfb1f4477f97573c5986bcc40f", "size": 4293, "ext": "py", "lang": "Python", "max_stars_repo_path": "theboss/quantum_computations_utilities.py", "max_stars_repo_name": "Tomev/BoSS", "max_stars_repo_head_hexsha": "45db090345650741c85b39b47cbc7b391d6daa33", "max_stars_repo_licenses": ["Apache-2.0"]... |
import numpy as np
# convert block format to linkedlist
# adjacent format graph
def block2adja(box, size):
ops = [(1, 0), (-1, 0), (0, 1), (0, -1)]
height, width = size
vertices = {i: [] for i in range(width * height)}
for i in range(width):
for j in range(height):
vertex = (j * w... | {"hexsha": "21b67539bb98eb3ad413de47d22599b83946dd64", "size": 851, "ext": "py", "lang": "Python", "max_stars_repo_path": "graph/graph.py", "max_stars_repo_name": "HackerTon/astarviz", "max_stars_repo_head_hexsha": "ff63159206fc9f27374862ab29e010e82ce69369", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_count":... |
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import pickle
import argparse
import pathlib as path
layouts = {
"circular": nx.circular_layout,
"kamada_kawai": nx.kamada_kawai_layout,
"random": nx.random_layout,
"shell": nx.shell_layout,
"spring": nx.s... | {"hexsha": "baf0a4e046160270ca10fb49de5fb305739a8050", "size": 1978, "ext": "py", "lang": "Python", "max_stars_repo_path": "graph_vis.py", "max_stars_repo_name": "hpi-sam/GNN-TiborMaxTiago", "max_stars_repo_head_hexsha": "986b3cf1e15328f6a03aa1e7f979b3435fc98910", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
[STATEMENT]
lemma left_total_rel_resumption [transfer_rule]:
"\<lbrakk> left_total R1; left_total R2 \<rbrakk> \<Longrightarrow> left_total (rel_resumption R1 R2)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>left_total R1; left_total R2\<rbrakk> \<Longrightarrow> left_total (Resumption.resumption.rel_r... | {"llama_tokens": 184, "file": "CryptHOL_Resumption", "length": 1} |
import csv
import numpy as np
############## Get Species Dictionary #################
filename = './data/alltrain.csv'
species = {}
LONG_KEY = 'long'
LAT_KEY = 'lat'
with open(filename) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
line_count = 0
for row in csv_reader:
if line_co... | {"hexsha": "d93d559a8fed83414afd267aefde714b73e8a012", "size": 1470, "ext": "py", "lang": "Python", "max_stars_repo_path": "distribution.py", "max_stars_repo_name": "upupming/dragon", "max_stars_repo_head_hexsha": "245f71996004b386ae764eb8f76603233d8a6763", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 1, "max... |
module activations
implicit none
contains
pure function sigmoid(z)
double precision, intent(in) :: z(:)
double precision, dimension(size(z)) :: sigmoid
sigmoid = 1.0 / (1.0 + exp(-z))
end function sigmoid
pure function sigmoid_prime(z)
double precision, intent(in) :: z(:)
double precision, dimension(size(z)) :... | {"hexsha": "325f7b1c0f9719926d4f24522dd314406e503c3d", "size": 687, "ext": "f90", "lang": "FORTRAN", "max_stars_repo_path": "src/activations.f90", "max_stars_repo_name": "alexmconn/dl4TRAN", "max_stars_repo_head_hexsha": "4fc0ccfa3e738615a9037061ac775ef7bf87ad5e", "max_stars_repo_licenses": ["MIT"], "max_stars_count": ... |
#pragma once
#include <boost/dynamic_bitset.hpp>
#include "../Variable.hpp"
#include "Counter.hpp"
namespace mist {
namespace it {
using Bitset = boost::dynamic_bitset<unsigned long long>;
using BitsetVariable = std::vector<Bitset>;
using BitsetTable = std::vector<BitsetVariable>;
/** Generates a ProbabilityDistr... | {"hexsha": "958e1a0d31b8bfcfea7f8586e8e15a4c3571ebe0", "size": 1253, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "include/mist/it/BitsetCounter.hpp", "max_stars_repo_name": "andbanman/mist", "max_stars_repo_head_hexsha": "2546fb41bccea1f89a43dbdbed7ce3a257926b54", "max_stars_repo_licenses": ["MIT"], "max_stars_... |
#include "gtest/gtest.h"
#include <boost/bimap.hpp>
#include "opencv2/opencv.hpp"
#include "utils/utils.hpp"
#include <unordered_map>
using namespace cv;
using namespace std;
Mat formatTransformationMat2(const Mat transformation_matrix)
{
cv::Mat m = cv::Mat::ones(2, 3, CV_64F);
m.at<double>(0, 0) = transfo... | {"hexsha": "55a3a8161a62fc4405123851f213b3a66905c20a", "size": 10686, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "c_src/test/fullKeypointAccuracyTest.cpp", "max_stars_repo_name": "pippy360/imageHashTesting", "max_stars_repo_head_hexsha": "230aa5adfda4e984475261e0d33453f4d9644f96", "max_stars_repo_licenses": ["... |
[STATEMENT]
lemma lemma_LIMSEQ_powrat_diff_inverse:
assumes "1 \<le> a"
and "(\<lambda>n. a pow\<^sub>\<rat> (s n))\<longlonglongrightarrow> y"
shows "(\<lambda>n. a pow\<^sub>\<rat> (s n - 1/of_nat(Suc n))) \<longlonglongrightarrow> y"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (\<lambda>n. a pow\<^sub>... | {"llama_tokens": 1599, "file": "Real_Power_RealPower", "length": 13} |
cat("Hello world!") | {"hexsha": "b08f00cde113899c1e40bf9fbdcf9ee34f097ca7", "size": 19, "ext": "r", "lang": "R", "max_stars_repo_path": "hello-world.r", "max_stars_repo_name": "In-All-Programming-Languages/helloWorld-in-all-programming-languages", "max_stars_repo_head_hexsha": "469939963e436635b9556267b9888743835a1914", "max_stars_repo_lic... |
#####################################################################
##### IMPORT STANDARD MODULES
#####################################################################
from __future__ import print_function
from ..data import DataBlock
from ..preprocess import PreProcess
import pandas as pd
import numpy as np
from... | {"hexsha": "201c13266070803088b5eee33e10562fdb79a614", "size": 1475, "ext": "py", "lang": "Python", "max_stars_repo_path": "easyML/tests/test_preprocess.py", "max_stars_repo_name": "aarshayj/easyML", "max_stars_repo_head_hexsha": "d65d4776704c4e417374ff8fb0266b066da51757", "max_stars_repo_licenses": ["BSD-3-Clause"], "... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import math
#################################
# Linear algebra helpers #
#################################
def LU_solve(b, LU):
"""Finds a solution to the linear system Ax=b
with A factorized into LU."""
rhs = np.asarray(b)
mat ... | {"hexsha": "d6eb95a2082d2388999f65e42d1e5c2eedd90e6c", "size": 1755, "ext": "py", "lang": "Python", "max_stars_repo_path": "_algebra.py", "max_stars_repo_name": "NklasF/splcurve", "max_stars_repo_head_hexsha": "09505f8cd1ae97eaa9583cfd19f02b319547341f", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "max_... |
#include <boost/mpl/aux_/preprocessed/bcc/list.hpp>
| {"hexsha": "2459ea5c1a50ab8d9c6c5f6640aa8e874575f980", "size": 52, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "src/boost_mpl_aux__preprocessed_bcc_list.hpp", "max_stars_repo_name": "miathedev/BoostForArduino", "max_stars_repo_head_hexsha": "919621dcd0c157094bed4df752b583ba6ea6409e", "max_stars_repo_licenses": ... |
import datetime
from .data_factory import create_message_lines
import pytest
from ..parser import Message, Chat
import numpy as np
# parsing a single line into 3 objects: date, author and message
def test_parse_line():
l = '27/04/2021, 08:18 - Kyle: Number gas poor nothing will statement.'
# action
messa... | {"hexsha": "f5e0f9a397a00518b55933319b530452e4d49c0e", "size": 7522, "ext": "py", "lang": "Python", "max_stars_repo_path": "wassap/tests/test_parser.py", "max_stars_repo_name": "nicelgueta/whatsapp-utility", "max_stars_repo_head_hexsha": "2568ebe7fcbc071f642631836622f3ba7bdb43ab", "max_stars_repo_licenses": ["MIT"], "m... |
# ----------------------------------------------------------------------------
# File name: HolidayFea.py
#
# Created on: Aug. 11 2020
#
# by Julia Hu
#
# Description:
#
# 1) This module adds holidays for different sites
#
#
#
# -----------------------------------------------------------------------------
... | {"hexsha": "c923864c6a88999afc02155f096536c856f37f72", "size": 2024, "ext": "py", "lang": "Python", "max_stars_repo_path": "sagemaker-notebook/HolidayFea.py", "max_stars_repo_name": "Julia-Bobo-Hu/IoTAnalytics-Realtime-Ingestion-Inference", "max_stars_repo_head_hexsha": "401dc9ac7f885a0b9924667a13291500f1159c79", "max_... |
using Printf
function output_result(stepnum, Qbase, cellxmax, cellymax, specific_heat_ratio, out_file_front, out_ext, out_dir, Rd, nval)
stepnum = string(stepnum)
while length(stepnum) < 6
stepnum = "0"*stepnum
end
fff = out_dir*"/"*out_file_front*stepnum*out_ext
open(fff,"w") do ... | {"hexsha": "4e0592f48cc4a37b3139470b259607c21642164a", "size": 904, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src_c/output.jl", "max_stars_repo_name": "hide-dog/general_2d_NS_Chemical-N2-N-", "max_stars_repo_head_hexsha": "e5e32ce510b91c82ffd216b4f2e5d1f97a624b12", "max_stars_repo_licenses": ["MIT"], "max_s... |
\documentclass[5p,authoryear]{elsarticle}
\makeatletter
\def\ps@pprintTitle{%
\let\@oddhead\@empty
\let\@evenhead\@empty
\let\@evenfoot\@oddfoot} % Supprimer le bas de page ELSEVIER
\makeatother
\usepackage[utf8]{inputenc} % En unicode
\usepackage[T1]{fontenc}
\usepackage[english]{babel}
\usepackage[babel=true]{csq... | {"hexsha": "bc8789bb70ec48dcd8baff842fb9d7656d47869f", "size": 21752, "ext": "tex", "lang": "TeX", "max_stars_repo_path": "Academic Paper/main.tex", "max_stars_repo_name": "papagorgio23/NBA_News_Spiders", "max_stars_repo_head_hexsha": "ca5c12bf50e1a8b422b0afc315a6b61ba3b67588", "max_stars_repo_licenses": ["MIT"], "max_... |
# -*- coding: utf-8 -*-
"""inter_annotator_agreement and data composition.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/19rbuLH97L8OesYayqaCXrrE0Y0I_ndTJ
This pipeline extracts annotated entities and labels from training data for every annotato... | {"hexsha": "aa3d17f37069bb4ecc8bdee08e4e997f4f1cd199", "size": 12484, "ext": "py", "lang": "Python", "max_stars_repo_path": "pipelines/ner_spacy_final/inter_annotator_agreement_and_data_composition.py", "max_stars_repo_name": "almazhankapan/opengulf.github.io", "max_stars_repo_head_hexsha": "70771e84025a11a6afc39a4d506... |
#include <iostream>
#include <unistd.h>
// The following lines only serve to verify that compilation with SFML works
#include <SFML/Graphics.hpp>
void testSFML() {
    // Instantiating an SFML type forces the headers (and link step) to be exercised.
    sf::Texture texture;
}
// End of SFML test
#include <state.h>
#include <render.h>
#include <engine.h>
#include <ai.h>
#include <client.h>
#... | {"hexsha": "e8e4c9cbd8e8620faf4c9709fe6e4ef10c3b2ca6", "size": 2189, "ext": "cpp", "lang": "C++", "max_stars_repo_path": "src/client/main.cpp", "max_stars_repo_name": "Kuga23/Projet-M2", "max_stars_repo_head_hexsha": "85c879b8fd1ed4fdf89eedd9f89841cbd7a1e433", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null... |
"""Provides analysis of a probe set for targeted genomes.
This computes the number of bp across each target genome that the
probes cover, as well as the percentage of each target genome that
the probes cover. It computes a percentage against the full
length of the target genome, as well as a percentage against the
len... | {"hexsha": "3b4e6b4a2709ee6c4006fc76dbd41fc6bda4b9f6", "size": 27168, "ext": "py", "lang": "Python", "max_stars_repo_path": "catch/coverage_analysis.py", "max_stars_repo_name": "broadinstitute/catch", "max_stars_repo_head_hexsha": "2fedca15f921116f580de8b2ae7ac9972932e59e", "max_stars_repo_licenses": ["MIT"], "max_star... |
from abc import ABCMeta, abstractmethod
from types import FunctionType
from tqdm import tqdm
from torch.utils.data import random_split
import traceback
import shutil
from typing import Union
from jdit.dataset import DataLoadersFactory
from jdit.model import Model
from jdit.optimizer import Optimizer
import torch
impor... | {"hexsha": "128b50f9e2eb95fe62d8e168e07de1640d571f19", "size": 31540, "ext": "py", "lang": "Python", "max_stars_repo_path": "jdit/trainer/super.py", "max_stars_repo_name": "dingguanglei/jdit", "max_stars_repo_head_hexsha": "ef878e696c9e2fad5069f106496289d4e4cc6154", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars... |
using RetroSignalModel
using RetroSignalModel: RtgMTK
using Documenter
makedocs(;
modules=[RetroSignalModel],
authors="stevengogogo <stevengogogo4321@gmail.com> and contributors",
repo="https://github.com/ntumitolab/RetroSignalModel.jl/blob/{commit}{path}#L{line}",
sitename="RetroSignalModel.jl",
f... | {"hexsha": "bdbe9d93a46561e21608ee84b0d5c74f82b54d58", "size": 667, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "docs/make.jl", "max_stars_repo_name": "NTUMitoLab/RetroSignalModel.jl", "max_stars_repo_head_hexsha": "c39b22a312dccfc174d3a4db4c7dc6af8aca76b2", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
"""HiddenFootprints Core Functions
Read from Waymo records.
Project labels in a sequence to reference frames.
"""
import numpy as np
import tensorflow as tf
from .utils import get_global_box, box_label_to_corners, global_box_to_camera_image_matmul, convert_camera_gc
def read_single_frame(frame_record, open_dataset,... | {"hexsha": "4122e054794241c0f7d5c63ee0cbee61c23c77fa", "size": 3609, "ext": "py", "lang": "Python", "max_stars_repo_path": "hiddenfootprints/core.py", "max_stars_repo_name": "jinsungit/hiddenfootprints", "max_stars_repo_head_hexsha": "13ae322cc77435809a408152fd2406dbe16ce9a6", "max_stars_repo_licenses": ["Apache-2.0"],... |
#pragma once
#include <mtlog/badbotlogger.hpp>
#include <sharedstorage/sharedstrings.hpp>
#include <atomic>
#include <thread>
#include <boost/signals2.hpp>
using namespace std;
using namespace boost;
class ControllerBase : protected BadBotLogger
{
public:
typedef boost::signals2::signal<void (std::strin... | {"hexsha": "7beefe375f273a31d9cca4ee5b9a18073f1cf476", "size": 2284, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "fuse/controllers/controllerbase.hpp", "max_stars_repo_name": "badbot-v1/motion-controller", "max_stars_repo_head_hexsha": "560a8dfc19d628a7f474d26d0cbdf0426f683020", "max_stars_repo_licenses": ["Apa... |
"""
Evaluate a plain number against `target_dimensions` by first lifting it into
a `FieldExpr` and delegating to `evaluate`.

The `seed` keyword is accepted for interface compatibility and is unused here.
"""
function evaluate_to_array(backend::ArrayBackend, x::Number, target_dimensions; seed=nothing)
    # Lift the scalar into the expression type expected by `evaluate`.
    fexpr = convert(FieldExpr, x)
    evaluate(backend, fexpr, target_dimensions)
end
function evaluate_to_array(backend::ArrayBackend, fexpr::FieldExpr, target_dimensions; seed=nothing)
af = wrapped_array_function([fexpr => target_dimensi... | {"hexsha": "09e8a09337d51d5fb48a975830149e9b19adda10", "size": 1460, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/evaluation.jl", "max_stars_repo_name": "fluxion/FluxionFields.jl", "max_stars_repo_head_hexsha": "663b5e0f73ae2396fa9e33abc0f3742a58d57469", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
"""
These functions have no direct analog in the standard python data analytics
stack, or require information about the internal state of the system beyond
what is present in the function call. We provide them in a structure that
makes it easy for the model elements to call.
"""
import inspect
import os
import re
impo... | {"hexsha": "1c3db062c4dbf77b79b85474ac206aec6b78f65b", "size": 72095, "ext": "py", "lang": "Python", "max_stars_repo_path": "pysd/py_backend/functions.py", "max_stars_repo_name": "JamesPHoughton/pysd", "max_stars_repo_head_hexsha": "5885d622144dd81af96e3c875bac74c51ddba62f", "max_stars_repo_licenses": ["MIT"], "max_sta... |
[STATEMENT]
lemma Vars_indeps_foldr:
assumes "set xs \<subseteq> set Vars"
shows "foldr (\<squnion>\<^sub>S) xs \<bottom>\<^sub>S \<bowtie>\<^sub>S foldr (\<squnion>\<^sub>S) (filter (\<lambda>x. x \<notin> set xs) Vars) \<bottom>\<^sub>S"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<Squnion>\<^sub>S xs \<bo... | {"llama_tokens": 732, "file": "Optics_Scene_Spaces", "length": 7} |
"""
vtki plotting module
"""
import collections
import ctypes
import logging
import os
import time
from threading import Thread
from subprocess import PIPE, Popen
import imageio
import numpy as np
import vtk
from vtk.util import numpy_support as VN
import vtki
from vtki.export import export_plotter_vtkjs
from vtki.ut... | {"hexsha": "36f0658bd4d3b351a93af03bc322ffc9a5176b8d", "size": 103054, "ext": "py", "lang": "Python", "max_stars_repo_path": "vtki/plotting.py", "max_stars_repo_name": "GuillaumeFavelier/pyvista", "max_stars_repo_head_hexsha": "dd2cb22464d0e96d8c92d91106283ee59b8b5041", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
import numpy as np
import pandas as pd
# house list of lists
house = [["hallway", 11.25],
["kitchen", 18.0],
["living room", 20.0],
["bedroom", 10.75],
["bathroom", 9.50]]
# Build a for loop from scratch
for a, [x, y] in enumerate(house):
print("the " + str(x) + " is " + str(y)... | {"hexsha": "f1c1998e04db1d70d45b375e2a594d6cf54761f5", "size": 1461, "ext": "py", "lang": "Python", "max_stars_repo_path": "Day 15 - Practice, Study Case/1 - Practice.py", "max_stars_repo_name": "ServerCetin/hello_python3", "max_stars_repo_head_hexsha": "7cf0807e09c819c690f28ee30758f22355c79115", "max_stars_repo_licens... |
-- Module defining the Occupation record type.
module Occupation

-- A plain data record holding an occupation entry as three strings.
public export
record Occupation where
  constructor CreateOccupation
  type : String  -- category tag of the occupation — TODO confirm allowed values
  id : String    -- identifier, kept as a String — TODO confirm source format
  name : String  -- human-readable occupation name
| {"hexsha": "30e7cab8f6975eb33e3e00f256ad0d155b2edae9", "size": 135, "ext": "idr", "lang": "Idris", "max_stars_repo_path": "src/Objects/Occupation.idr", "max_stars_repo_name": "GrandArchTemplar/IdrisVKAPI", "max_stars_repo_head_hexsha": "bcbb27d2591588c04709d83808deb864fe9e4bb6", "max_stars_repo_licenses": ["MIT"], "max... |
import random
import hashlib
import glob
import os, sys
import pickle
import json
import itertools
import torch
import numpy as np
from codae.dataset import ConcatenatedEmbeddingDataset
def get_mask_transformation(observation_mask, loss_mask):
"""
Create a boolean transformation matrix T to go from an obse... | {"hexsha": "a57f2ab94e6138333b7158187a2d92f41a8f9cb0", "size": 8169, "ext": "py", "lang": "Python", "max_stars_repo_path": "codae/tool/data_tool.py", "max_stars_repo_name": "victordeleau/MUI-DeepAutoEncoder", "max_stars_repo_head_hexsha": "d5b8351334df64b30b3f6929c934a936b66ed963", "max_stars_repo_licenses": ["BSD-3-Cl... |
import pytorch_lightning as pl
from torch.utils.data import DataLoader, random_split, ConcatDataset, Subset
from transformers import T5ForConditionalGeneration, Adafactor, T5Tokenizer
from utils import *
import dataloading as dl
import warnings
import datasets
import torch
import numpy as np
# set seeds
pl.seed_everyt... | {"hexsha": "7b96e887c52d02aededc79c55a6c522332582e10", "size": 19388, "ext": "py", "lang": "Python", "max_stars_repo_path": "litT5.py", "max_stars_repo_name": "SebOchs/KN1-baseline", "max_stars_repo_head_hexsha": "17b9bb724ac2d372b04ee6f629d213ff930220ac", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "m... |
!
! AtmProfile_netCDF_IO
!
! Module containing routines to read and write AtmProfile netCDF
! format files.
!
!
! CREATION HISTORY:
! Written by: Paul van Delst, 08-Jul-2002
! paul.vandelst@noaa.gov
!
MODULE AtmProfile_netCDF_IO
! -----------------
! Environment setup
! --------... | {"hexsha": "848530aec7eac537d3f68bf47964451655109e92", "size": 51877, "ext": "f90", "lang": "FORTRAN", "max_stars_repo_path": "src/TauProd/AtmProfile/AtmProfile_netCDF_IO.f90", "max_stars_repo_name": "hsbadr/crtm", "max_stars_repo_head_hexsha": "bfeb9955637f361fc69fa0b7af0e8d92d40718b1", "max_stars_repo_licenses": ["CC... |
(*
Copyright 2018
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed und... | {"author": "ssrg-vt", "repo": "Luce-src", "sha": "f7f1ef0fd07bba48bcb3d5e32404db6013a5f1bc", "save_path": "github-repos/isabelle/ssrg-vt-Luce-src", "path": "github-repos/isabelle/ssrg-vt-Luce-src/Luce-src-f7f1ef0fd07bba48bcb3d5e32404db6013a5f1bc/safecomp2019_artifact/current_work/examples/hermitcore/spinlock/spinlock_l... |
import os, cv2
import pickle
import torch
import torchvision
from libs.Loader import Dataset
import numpy as np
from Networks.StyleNet import StyleAugmentation
from torchvision import transforms
if __name__ == "__main__":
os.environ["CUDA_VISIBLE_DEVICES"]="0"
print_idx = True
batch_size = 8
num_examp... | {"hexsha": "26d6086ee03a4827a4073a6d8cc76cc2b28679d8", "size": 3451, "ext": "py", "lang": "Python", "max_stars_repo_path": "AugmentedTest.py", "max_stars_repo_name": "emedinac/UnderstandingSA", "max_stars_repo_head_hexsha": "a234631e99f2979396fef9e24f54865e63147ef4", "max_stars_repo_licenses": ["MIT"], "max_stars_count... |
//==============================================================================
// Copyright 2003 - 2011 LASMEA UMR 6602 CNRS/Univ. Clermont II
// Copyright 2009 - 2011 LRI UMR 8623 CNRS/Univ Paris Sud XI
//
// Distributed under the Boost Software License, Version 1.0.
// S... | {"hexsha": "2919004c08480725092d8e3dec7ad65dbce9c6e6", "size": 2480, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "modules/boost/simd/ieee/include/boost/simd/toolbox/ieee/functions/simd/common/eps.hpp", "max_stars_repo_name": "timblechmann/nt2", "max_stars_repo_head_hexsha": "6c71f7063ca4e5975c9c019877e6b2fe07c9... |
[STATEMENT]
theorem prover_complete_refutation: "prover N \<longleftrightarrow> satisfiable (RP.grounded_N0 N)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. prover N = satisfiable (RP.grounded_N0 N)
[PROOF STEP]
unfolding prover_def St0_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (case deterministic_RP (... | {"llama_tokens": 448, "file": "Functional_Ordered_Resolution_Prover_Executable_FO_Ordered_Resolution_Prover", "length": 3} |
import numpy
from clpy.creation import basic
from clpy.creation import from_data
from clpy.creation import ranges
from clpy.math import trigonometric
def blackman(M):
"""Returns the Blackman window.
The Blackman window is defined as
.. math::
w(n) = 0.42 - 0.5 \\cos\\left(\\frac{2\\pi{n}}{M-1}\... | {"hexsha": "8fb2a2737ce6eb38f4a0f8d37fd264006ef6e7c5", "size": 2247, "ext": "py", "lang": "Python", "max_stars_repo_path": "clpy/math/window.py", "max_stars_repo_name": "fixstars/clpy", "max_stars_repo_head_hexsha": "693485f85397cc110fa45803c36c30c24c297df0", "max_stars_repo_licenses": ["BSD-3-Clause"], "max_stars_coun... |
# -*- encoding: utf-8 -*-
import numpy as np
def rmse(prof_ref,prof_seg):
dif_curv = []
for shift in range(prof_seg.shape[1]):
dif_curv.append(np.abs(np.sum((prof_ref[0] - np.roll(prof_seg[0],shift))**2)))
prof_seg_shift = np.apply_along_axis(np.roll, 1, prof_seg, np.argmin(dif_curv))
return n... | {"hexsha": "aac9465f6f2fd6788492f65d860377cf66980269", "size": 395, "ext": "py", "lang": "Python", "max_stars_repo_path": "deliver/functions_will/rmse.py", "max_stars_repo_name": "mariecpereira/Extracao-de-Caracteristicas-Corpo-Caloso", "max_stars_repo_head_hexsha": "f094c706db815f91cf61d1d501c2a9030b9b54d3", "max_star... |
# Copyright 2020 Novartis Institutes for BioMedical Research Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | {"hexsha": "b4ed671691940d14196af4e43195bd9388eef56d", "size": 26396, "ext": "py", "lang": "Python", "max_stars_repo_path": "evaluation.py", "max_stars_repo_name": "andrew-xu-monash/UMM-Modified", "max_stars_repo_head_hexsha": "18729dc34733c203e8cd3873fec2b9f7d0b56dba", "max_stars_repo_licenses": ["Apache-2.0"], "max_s... |
#!/usr/bin/env python3
# coding: utf-8
import os.path as osp
import numpy as np
from .io import _load
def make_abs_path(d):
return osp.join(osp.dirname(osp.realpath(__file__)), d)
d = make_abs_path('../train.configs')
keypoints = _load(osp.join(d, 'keypoints_sim.npy'))
w_shp = _load(osp.join(d, 'w_shp_sim.npy'... | {"hexsha": "43d67be08c6888005852ea01e6f72e3f25cc9ccd", "size": 1211, "ext": "py", "lang": "Python", "max_stars_repo_path": "utils/params.py", "max_stars_repo_name": "xqterry/3DDFA", "max_stars_repo_head_hexsha": "3b8f7bb4cfafa349e628d6433d7a4edc55627243", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "ma... |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# ## Read data into dataframe
data_name = 'metr-la'
data_path = '../data/' + data_name + '.h5'
df = pd.read_hdf(data_path)
print(df.shape)
print(df)
# ## Get the critical info
sensor_id_index = 0
sensor_id = list(df.ke... | {"hexsha": "38df9f3e28219657e1a75bbaef763073ae716278", "size": 1129, "ext": "py", "lang": "Python", "max_stars_repo_path": "display_data.py", "max_stars_repo_name": "KarlDenken/Traffic-YX", "max_stars_repo_head_hexsha": "086346350ccc7a8c6751790f263a0dc73a5d0b78", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_co... |
"""
@alias new = original
Define `const new = original` and attach `original`'s docstring to `new`.
"""
macro alias(expr)
expr.head == :(=) || error("must be an assignment expression")
new, original = expr.args
return quote
@doc (@doc $original)
const $(esc(new)) = $(esc(original))
... | {"hexsha": "0c832fc3467c62efc9245daa6eec0ced3980316e", "size": 783, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/macros.jl", "max_stars_repo_name": "tfiers/MyToolbox.jl", "max_stars_repo_head_hexsha": "c96c0eb2e93d98c6e8a6fe11729bb737cef3d963", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "... |
import numpy as np
import cv2
import matplotlib.pyplot as plt
import torch
from tqdm import tqdm
from pathlib import Path
import os
import warnings
import argparse
import torch
import sys
import json
sys.path.append('./src/utils')
from openpose_utils import create_label_full, create_face_label
from matplotlib import p... | {"hexsha": "2a9f0416736fceb9b76d747ef6768bddd9a4ef7b", "size": 5239, "ext": "py", "lang": "Python", "max_stars_repo_path": "source.py", "max_stars_repo_name": "kjzju/EverybodyDanceNow-Temporal-FaceGAN", "max_stars_repo_head_hexsha": "b61e628ac3f4be351dc0bc80445049394ce500a5", "max_stars_repo_licenses": ["MIT"], "max_st... |
[STATEMENT]
lemma order_bal_nonempty_lasttreebal: "\<lbrakk>k > 0; root_order k t; bal t\<rbrakk> \<Longrightarrow> nonempty_lasttreebal t"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>0 < k; root_order k t; bal t\<rbrakk> \<Longrightarrow> nonempty_lasttreebal t
[PROOF STEP]
proof(induction k t rule: ord... | {"llama_tokens": 3083, "file": "BTree_BTree_Set", "length": 20} |
"""
Implementation of DDPG - Deep Deterministic Policy Gradient
Algorithm and hyperparameter details can be found here:
http://arxiv.org/pdf/1509.02971v2.pdf
The algorithm is tested on the Pendulum-v0 OpenAI gym task
and developed with tflearn + Tensorflow
Author: Patrick Emami
"""
import tensorflow as tf
imp... | {"hexsha": "28ce5e00c6e845acf5974c84f46764cbb334ba14", "size": 5695, "ext": "py", "lang": "Python", "max_stars_repo_path": "python_code/ddpg.py", "max_stars_repo_name": "ssriramana93/EnvClassify", "max_stars_repo_head_hexsha": "56c1b1965403d08fd3c1213a2bc67a27a9c1ebf8", "max_stars_repo_licenses": ["MIT"], "max_stars_co... |
from random import randint as rand
import numpy as np
from scipy.io import wavfile as wf
import function_melody_generator as fmg
samplerate = 44100 #Frequecy in Hz
tempo = fmg.tempo_ke_detik(200)
ketukan = 8
scale=[2,1,2,2,1,3,1]
nada = 0
chord = [0,3,7]
#chordx = fmg.to_chord(0,chord)
#scalex = fmg.to_s... | {"hexsha": "aa24870c54f68e41cb88748233e247a45cbd7dc0", "size": 852, "ext": "py", "lang": "Python", "max_stars_repo_path": "app 2.py", "max_stars_repo_name": "zemetia/random-automatic-melody-generator", "max_stars_repo_head_hexsha": "4ffaa855d7bf98b63e077209726abdba70d9de1f", "max_stars_repo_licenses": ["MIT"], "max_sta... |
import matplotlib.pyplot as plt
import numpy as np
import scipy.io as scio
from skimage import io
from skimage import img_as_float
import runkMeans as km
import findClosestCentroids as fc
import computeCentroids as cc
import kMeansInitCentroids as kmic
plt.ion()
np.set_printoptions(formatter={'float': '{: 0.6f}'.form... | {"hexsha": "8926759096ef0fa88172e5b419c1e0a78e1001f5", "size": 5077, "ext": "py", "lang": "Python", "max_stars_repo_path": "machine-learning-ex7/ex7/ex7.py", "max_stars_repo_name": "ShawnT4ever/coursera-ml-py", "max_stars_repo_head_hexsha": "ede0f259ed5ac6ed0c0d7b4d6f999cad5c07aafb", "max_stars_repo_licenses": ["MIT"],... |
from PIL import Image
import numpy as np
from paddle.io import Dataset, DataLoader
import scipy.io
import os
# A = TrainDataset('../Data/benchmark_RELEASE/dataset')
class TrainDataset(Dataset):
def __init__(self,dataset_path, img_folder='img', gt_folder='cls',threshold=128,ignore_label=None):
self... | {"hexsha": "df8e996d016881ac71aadfd6cdd5e1bcb6ea1262", "size": 1144, "ext": "py", "lang": "Python", "max_stars_repo_path": "fcanet/fcanet/train.py", "max_stars_repo_name": "PaddleEdu/Segmentation-models-PaddlePaddle", "max_stars_repo_head_hexsha": "478b00f260128642d5db60bd5ac0485de0e341bc", "max_stars_repo_licenses": [... |
# 求一个三维的椭圆体
import numpy as np
from skimage.draw import ellipsoid
spacing = (1., 10 / 6., 16 / 6.)
ellipsoid_anisotropic = ellipsoid(6, 10, 16, spacing=spacing, levelset=True)
print(ellipsoid_anisotropic)
print(ellipsoid_anisotropic.shape) | {"hexsha": "568ac87233e1e0990847c16fa55eef8b5a886894", "size": 239, "ext": "py", "lang": "Python", "max_stars_repo_path": "Draw/ellipsoid.py", "max_stars_repo_name": "Joevaen/Scikit-image_On_CT", "max_stars_repo_head_hexsha": "e3bf0eeadc50691041b4b7c44a19d07546a85001", "max_stars_repo_licenses": ["Apache-2.0"], "max_st... |
#!/usr/bin/env python
from pathlib import Path
import numpy as np
import pytest
from histutils.rawDMCreader import goRead
R = Path(__file__).parent
def test_rawread():
bigfn = R / "testframes.DMCdata"
params = {
"xy_pixel": (512, 512),
"xy_bin": (1, 1),
"frame_request": (1, 2, 1),
... | {"hexsha": "2733f9f05d11c08a97b817e7f377e6a255f77a03", "size": 961, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/histutils/tests/test_all.py", "max_stars_repo_name": "space-physics/histutils", "max_stars_repo_head_hexsha": "f2add29c73be5c62d8675139cc58250ece92f477", "max_stars_repo_licenses": ["MIT"], "ma... |
FUNCTION zbrent(func,x1,x2,tol)
INTEGER ITMAX
REAL zbrent,tol,x1,x2,func,EPS
EXTERNAL func
PARAMETER (ITMAX=100,EPS=3.e-8)
INTEGER iter
REAL a,b,c,d,e,fa,fb,fc,p,q,r,s,tol1,xm
a=x1
b=x2
fa=func(a)
fb=func(b)
if((fa.gt.0..and.fb.gt.0.).or... | {"hexsha": "d7f4ef4d48b77c549fa7e5e5fbafaab135d23cb0", "size": 1743, "ext": "for", "lang": "FORTRAN", "max_stars_repo_path": "NR-Functions/Numerical Recipes- Example & Functions/Functions/zbrent.for", "max_stars_repo_name": "DingdingLuan/nrfunctions_fortran", "max_stars_repo_head_hexsha": "37e376dab8d6b99e63f6f1398d0c3... |
import os
import gc
from itertools import islice
from typing import Generator, List, Tuple
import numpy as np
import pandas as pd
from tqdm import tqdm
import regex as re
from networks.classes.centernet.utils.BBoxesVisualizer import BBoxesVisualizer
class SubmissionHandler:
def __init__(self, dict_cat, log):
... | {"hexsha": "0f7770a211f53bd282a210189d06fb47b0fa71c6", "size": 8277, "ext": "py", "lang": "Python", "max_stars_repo_path": "networks/classes/centernet/pipeline/SubmissionHandler.py", "max_stars_repo_name": "ALIENK9/Kuzushiji-recognition", "max_stars_repo_head_hexsha": "a18c1fbfa72b6bbbcfe4004148cd0e90531acf6b", "max_st... |
import numpy as np
from opytimizer.optimizers.science import aig
from opytimizer.spaces import search
def test_aig_params():
params = {
'alpha': np.pi,
'beta': np.pi
}
new_aig = aig.AIG(params=params)
assert new_aig.alpha == np.pi
assert new_aig.beta == np.pi
def test_aig_par... | {"hexsha": "36d07c24550ac1368db42c77202416e116ed69f6", "size": 1100, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/opytimizer/optimizers/science/test_aig.py", "max_stars_repo_name": "anukaal/opytimizer", "max_stars_repo_head_hexsha": "5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9", "max_stars_repo_licenses": ... |
theory NimFullProofs
imports NimFull
begin
(*************************************************************************)
subsection \<open> Proving function and operation satisfiability POs \<close>
text \<open>
Next, we illustrate the general PO setup for all auxiliary functions.
After the translation is complet... | {"author": "leouk", "repo": "VDM_Toolkit", "sha": "791013909961d45949fcd96d937ae18f0174c7ec", "save_path": "github-repos/isabelle/leouk-VDM_Toolkit", "path": "github-repos/isabelle/leouk-VDM_Toolkit/VDM_Toolkit-791013909961d45949fcd96d937ae18f0174c7ec/experiments/isa/Nim/NimFullProofs.thy"} |
import numpyro
numpyro.enable_x64()
import sys
import argparse
import mechbayes.util as util
import numpy as onp
from run_util import load_config, get_method
import data_cleaning
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run forecast model for one location.')
parser.add_argume... | {"hexsha": "f558463e228eda27d37fee47baf051b7c77362d1", "size": 1959, "ext": "py", "lang": "Python", "max_stars_repo_path": "scripts/run_model.py", "max_stars_repo_name": "dsheldon/mechbayes", "max_stars_repo_head_hexsha": "dc1b857e5bee6429aa18233d4f4890b2892a2e4b", "max_stars_repo_licenses": ["MIT"], "max_stars_count":... |
'''
use the neural network from nn.py to find evidence in a file
uses cleaned files from clean_hslld.py
'''
import numpy as np
import pickle
from bertinator import get_bert_vector
def transcript_to_chunks(transcript, radius=1):
transcript_lines = transcript.splitlines()
chunks = [None] * len(transcript_lines... | {"hexsha": "d12c20c6d3eba6a598bdb50573740089b84d81a0", "size": 1217, "ext": "py", "lang": "Python", "max_stars_repo_path": "clf_evidence_finder.py", "max_stars_repo_name": "chrisraff/conversation2foods", "max_stars_repo_head_hexsha": "76948d7b302d7d9aa22d8405d13f0e63e22e5706", "max_stars_repo_licenses": ["MIT"], "max_s... |
"""
$(TYPEDEF)
Structure to contain the density values obtained from the calculation.
$(TYPEDFIELDS)
"""
@with_kw mutable struct Density
solute::Float64 = 0.0
solvent::Float64 = 0.0
solvent_bulk::Float64 = 0.0
end
function reset!(d::Density)
d.solute = 0.0
d.solvent = 0.0
d.solvent_bulk = 0... | {"hexsha": "eb8279b1eef7124eaf8edc0c95523d6d20040e78", "size": 555, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/Density.jl", "max_stars_repo_name": "m3g/MDDF", "max_stars_repo_head_hexsha": "efbc8e0dcf426c9b2246217eb9edaf4605318e84", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 1, "max_stars_rep... |
# Copyright 2019 Xilinx, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in... | {"hexsha": "932fc0bbc8b659421081be8dbe3edbd080841105", "size": 4611, "ext": "py", "lang": "Python", "max_stars_repo_path": "gemx/MLsuite_MLP/examples/keras/mnist/mlp_mnist.py", "max_stars_repo_name": "mihnea-chirila/gemx", "max_stars_repo_head_hexsha": "81e1075975908744d905fdb00175e00849b90630", "max_stars_repo_license... |
import sys
sys.path.append(".")
from PyQt5.QtCore import QObject, pyqtSignal, QBasicTimer
import numpy as np
import cv2
import os
class VideoRecorder(QObject):
imageData = pyqtSignal(np.ndarray)
def __init__(self, camera_port=0, parent=None):
super().__init__(parent)
self.camera = cv2.VideoC... | {"hexsha": "808f9741bbf9bf60742171589bfb9b1d74322ab4", "size": 644, "ext": "py", "lang": "Python", "max_stars_repo_path": "App/utils/video_recorder.py", "max_stars_repo_name": "yonycherkos/automatic-classroom-attendance-system-using-face-recognition", "max_stars_repo_head_hexsha": "95925d9906a0b76cadb6abac778dd3bd18f01... |
From aneris.aneris_lang.lib Require Export
assert_proof
network_util_proof set_proof map_proof nodup_proof coin_flip_proof inject.
From aneris_examples.transaction_commit Require Import two_phase_prelude.
Section transaction_manager.
Context `{!network_topo}.
Context `{!anerisG (TC_model RMs) Σ, !tcG Σ}.... | {"author": "fresheed", "repo": "trillium-experiments", "sha": "a9c38a9e9566fb8057ae97ecb8d1a0c09c799aef", "save_path": "github-repos/coq/fresheed-trillium-experiments", "path": "github-repos/coq/fresheed-trillium-experiments/trillium-experiments-a9c38a9e9566fb8057ae97ecb8d1a0c09c799aef/theories/transaction_commit/two_p... |
__copyright__ = "Copyright (c) Microsoft Corporation and Mila - Quebec AI Institute"
__license__ = "MIT"
"""Metrics used to compare environemnts, sets of factors, etc
"""
import numpy as np
import ot
def wasserstein_distance(
x1: np.ndarray, x2: np.ndarray, p: int = 2, seed: int = 0, n_projections: int = 50,
) ... | {"hexsha": "cc549b0d37b9310d5b90a93fd5df50aeb76d5d92", "size": 906, "ext": "py", "lang": "Python", "max_stars_repo_path": "segar/metrics.py", "max_stars_repo_name": "fgolemo/segar", "max_stars_repo_head_hexsha": "8e21f8ee01bc72adb84dec7998b014d11d2b1fbe", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 19, "max_... |
"""
This file contains fixtures that are used at multiple points in the tests.
"""
import pytest
import numpy as np
import pandas as pd
from mokapot import LinearPsmDataset
@pytest.fixture(scope="session")
def psm_df_6():
"""A DataFrame containing 6 PSMs"""
data = {
"target": [True, True, True, False,... | {"hexsha": "c490a6175952ad5a79f7015d0bc8af19d1a06ed9", "size": 5109, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/conftest.py", "max_stars_repo_name": "mobiusklein/mokapot", "max_stars_repo_head_hexsha": "5148da1c09cfc92f9b776ae8306619429d0656cb", "max_stars_repo_licenses": ["Apache-2.0"], "max_stars_co... |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as init
import os
import random
import numpy as np
## Adapted from https://github.com/joaomonteirof/e2e_antispoofing
class SelfAttention(nn.Module):
def __init__(self, hidden_size, mean_only=False):
super(SelfAttention... | {"hexsha": "ccda2be0e277d40ccd7cfaa006347b7b29ac0dba", "size": 7381, "ext": "py", "lang": "Python", "max_stars_repo_path": "resnet.py", "max_stars_repo_name": "AirLabUR/ASVspoof2021_AIR", "max_stars_repo_head_hexsha": "e63ce99ceb1827d81e306b75d09999be28a042c6", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 40,... |
#ifndef STAN_MCMC_HMC_HAMILTONIANS_UNIT_E_METRIC_HPP
#define STAN_MCMC_HMC_HAMILTONIANS_UNIT_E_METRIC_HPP
#include <stan/mcmc/hmc/hamiltonians/base_hamiltonian.hpp>
#include <stan/mcmc/hmc/hamiltonians/unit_e_point.hpp>
#include <boost/random/variate_generator.hpp>
#include <boost/random/normal_distribution.hpp>... | {"hexsha": "d0e914bd643a299c6703838068d15a488da3ee99", "size": 1704, "ext": "hpp", "lang": "C++", "max_stars_repo_path": "archive/stan/src/stan/mcmc/hmc/hamiltonians/unit_e_metric.hpp", "max_stars_repo_name": "alashworth/stan-monorepo", "max_stars_repo_head_hexsha": "75596bc1f860ededd7b3e9ae9002aea97ee1cd46", "max_star... |
import torch
import torch.utils.data as data
from glob import glob
from os.path import join, basename, exists
import numpy as np
import pickle as pkl
from random import random
np.random.seed(123)
class TTSDataset(data.Dataset):
def __init__(self, which_set='train', datapath='./samples'):
# Load vocabulary
... | {"hexsha": "084ed86d7ae7071ba4426d9ee4a95746c999711c", "size": 3212, "ext": "py", "lang": "Python", "max_stars_repo_path": "dataset.py", "max_stars_repo_name": "ktho22/vctts", "max_stars_repo_head_hexsha": "84e8bc6c4b5586aa319c7c21c4325f879f2cd3ba", "max_stars_repo_licenses": ["MIT"], "max_stars_count": 17, "max_stars_... |
(* Title: List2.thy
Date: Oct 2006
Author: David Trachtenherz
*)
header {* Additional definitions and results for lists *}
theory List2
imports "../CommonSet/SetIntervalCut"
begin
subsection {* Additional definitions and results for lists *}
text {*
Infix syntactical abbreviations for op... | {"author": "Josh-Tilles", "repo": "AFP", "sha": "f4bf1d502bde2a3469d482b62c531f1c3af3e881", "save_path": "github-repos/isabelle/Josh-Tilles-AFP", "path": "github-repos/isabelle/Josh-Tilles-AFP/AFP-f4bf1d502bde2a3469d482b62c531f1c3af3e881/thys/List-Infinite/ListInf/List2.thy"} |
function alpha = complexNormalAngle(varargin)
%COMPLEXNORMALANGLE compute normal angle of a vertex of a cellular complex
%
% ALPHA = complexNormalAngle(NODES, EDGES, FACES, INDEX)
% ALPHA = complexNormalAngle(NODES, EDGES, FACES, CELLS, INDEX)
% Compute the nortmal angle of the polyhedral reconstruction defined b... | {"author": "mattools", "repo": "matImage", "sha": "94d892c7beac0db32daadf2646ce37f58e894caf", "save_path": "github-repos/MATLAB/mattools-matImage", "path": "github-repos/MATLAB/mattools-matImage/matImage-94d892c7beac0db32daadf2646ce37f58e894caf/matImage/imMinkowski/private/complexNormalAngle.m"} |
import argparse
from torch.utils.data.sampler import SequentialSampler
import sys
import numpy as np
import os
import sys
import pandas as pd
import pickle
from apex import amp
sys.path.insert(1,'./')
from train.zoo.models import *
from train.zoo.surgery import *
from train.datafeeding.retriever import *
from train.too... | {"hexsha": "99b7b9281e0b8cf6c01c5e0d94b9d5f3a31b8252", "size": 6273, "ext": "py", "lang": "Python", "max_stars_repo_path": "abba/train/train_pytorch.py", "max_stars_repo_name": "simphide/Kaggle-2020-Alaska2", "max_stars_repo_head_hexsha": "3c1f5e8e564c9f04423beef69244fc74168f88ca", "max_stars_repo_licenses": ["MIT"], "... |
"""
File: create_grid.py
Author: David Solanas Sanz
TFG
"""
import argparse
import csv
import os
import keras
import numpy as np
from scipy.ndimage import rotate, measurements
from skimage.transform import resize
def create_grid(src_image):
"""
Creates 16 brain sections from center of mass
Pa... | {"hexsha": "070a9f98228d272ca8ec6331819ee62668257878", "size": 11380, "ext": "py", "lang": "Python", "max_stars_repo_path": "src/preprocessing/create_grid.py", "max_stars_repo_name": "DavidSolanas/TFG", "max_stars_repo_head_hexsha": "dc84a8b11cfcc53e13ba6080793e3550bea6a48b", "max_stars_repo_licenses": ["MIT"], "max_st... |
[STATEMENT]
lemma comparator_of_rep [simp]: "comparator_of (rep_nat x) (rep_nat y) = comparator_of x y"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. comparator_of (rep_nat x) (rep_nat y) = comparator_of x y
[PROOF STEP]
by (simp add: comparator_of_def linorder_class.comparator_of_def ord_iff rep_inj) | {"llama_tokens": 126, "file": "Polynomials_Term_Order", "length": 1} |
[STATEMENT]
lemma card_of_empty4:
"|{}::'b set| <o |A::'a set| = (A \<noteq> {})"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (|{}| <o |A|) = (A \<noteq> {})
[PROOF STEP]
proof(intro iffI notI)
[PROOF STATE]
proof (state)
goal (2 subgoals):
1. \<lbrakk>|{}| <o |A|; A = {}\<rbrakk> \<Longrightarrow> False
2. A \... | {"llama_tokens": 1867, "file": null, "length": 22} |
"""
Unit tests for computing density, baroclinic head and the internal pressure
gradient from a temperature field.
Runs MES convergence tests against a non-trivial analytical solution in a
deformed geometry.
NOTE currently only linear equation of state is tested
TODO test full nonlinear equation of state
"""
from the... | {"hexsha": "1ce4b1f374333614de010d4dfb6d9af39f0d9392", "size": 10277, "ext": "py", "lang": "Python", "max_stars_repo_path": "test/pressure_grad/test_pg-stack_mes.py", "max_stars_repo_name": "LawrenceDior/thetis", "max_stars_repo_head_hexsha": "fa4b14eeac1063f922ba24f03ebf7ecdf80b82ff", "max_stars_repo_licenses": ["MIT"... |
import numpy as np
import pytest
from scipy.spatial.transform import Rotation
from nao_gestures.nao_kinematics import InverseKinematics, ForwardKinematics, isclose_angles
def test_right_shoulder_forward_kinematics_zero():
np.random.seed(42)
position_right_shoulder_standard = np.random.random([3])
rotati... | {"hexsha": "ef529feeb028fade4865b038c8908b8b8a7aad05", "size": 20857, "ext": "py", "lang": "Python", "max_stars_repo_path": "nao_gestures/test_nao_kinematics.py", "max_stars_repo_name": "TomKingsfordUoA/NaoGestures", "max_stars_repo_head_hexsha": "ba1cfbb2a376c02ab7bb51264e7504d6bb255c28", "max_stars_repo_licenses": ["... |
"""
The MIT License (MIT)
Copyright (c) 2017 Eduardo Henrique Vieira dos Santos
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, ... | {"hexsha": "2927e3f19c035f12a26dabe97c3e201ed2b76d09", "size": 4969, "ext": "py", "lang": "Python", "max_stars_repo_path": "AnnyBee.py", "max_stars_repo_name": "EdVieira/AnnyBee", "max_stars_repo_head_hexsha": "250f5193c9242a27bf31d76dbdf6a1902c4534bf", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "max_... |
#
# Class Enhancement
from scipy.signal import lfilter
from spectrum import pmtm
from Universal import *
from VAD import *
class Enhancement:
def simplesubspec(self, signal, wlen, inc, NIS, a, b):
"""
simple spectrum subtraction
:param signal: noisy speech
:param wlen: window length
:param inc: frame shi... | {"hexsha": "9bd543b2e9a144bdf68645b88e2f82e008aad06d", "size": 14130, "ext": "py", "lang": "Python", "max_stars_repo_path": "Chapter8_PitchDetection/Enhancement.py", "max_stars_repo_name": "SeventeenChen/Python_Speech_SZY", "max_stars_repo_head_hexsha": "0074ad1d519387a75d5eca42c77f4d6966eb0a0e", "max_stars_repo_licens... |
import datetime
import random
import numpy as np
from sklearn.metrics import roc_auc_score as roc_auc
from cases.credit_scoring.credit_scoring_problem import get_scoring_data
from fedot.core.composer.gp_composer.gp_composer import GPComposerBuilder, GPComposerRequirements
from fedot.core.data.data import InputData
fr... | {"hexsha": "5dff8ea6f886dccd89c1bc9c5c5d990a69429590", "size": 4943, "ext": "py", "lang": "Python", "max_stars_repo_path": "cases/credit_scoring/credit_scoring_problem_multiobj.py", "max_stars_repo_name": "rozlana-g/FEDOT", "max_stars_repo_head_hexsha": "a909d6c0ef481cc1cf7a5f10f7b1292d8d2def5c", "max_stars_repo_licens... |
import basevcstest
import numpy
import vcs
class TestVCSNoXtraElts(basevcstest.VCSBaseTest):
def testNoXtraElements(self):
data = numpy.sin(numpy.arange(100))
data.shape = (10, 10)
orig = {}
new = {}
for k in list(vcs.elements.keys()):
new[k] = []
or... | {"hexsha": "41b3f026ff55fe3d0fe2fe45797cbae31979d6aa", "size": 1099, "ext": "py", "lang": "Python", "max_stars_repo_path": "tests/test_vcs_no_extra_elements.py", "max_stars_repo_name": "scottwittenburg/vcs", "max_stars_repo_head_hexsha": "5b9f17fb78f7ab186fc0132ab81ada043a7ba348", "max_stars_repo_licenses": ["BSD-3-Cla... |
[STATEMENT]
lemma eeqButPID_F_cong:
assumes "eeqButPID_F sw sw1"
and "PID = PID \<Longrightarrow> eqButF uu uu1"
and "pid \<noteq> PID \<Longrightarrow> uu = uu1"
shows "eeqButPID_F (sw (pid := uu)) (sw1(pid := uu1))"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. eeqButPID_F (sw(pid := uu)) (sw1(pid := uu1))
[PROOF... | {"llama_tokens": 461, "file": "CoSMeDis_Post_Confidentiality_Post_Unwinding_Helper_ISSUER", "length": 3} |
fun(x,y) = x + y | {"hexsha": "a232acffd714d555e2598fe2e9ff40dd5c29f505", "size": 16, "ext": "jl", "lang": "Julia", "max_stars_repo_path": "src/lib.jl", "max_stars_repo_name": "waseemssaeed/TetsRepo", "max_stars_repo_head_hexsha": "e8cec1b167b5b9eea113a4882452106955c3e57e", "max_stars_repo_licenses": ["MIT"], "max_stars_count": null, "ma... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.