| code (string, 31 to 1.05M chars) | apis (list of strings) | extract_api (string, 97 to 1.91M chars) |
|---|---|---|
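Each row below pairs a (truncated) source snippet with the list of fully qualified APIs it calls and an extract_api cell recording, for each call, its character span, argument repr, and the import it originates from. A minimal sketch of how one such row could be consumed, assuming the untruncated apis cell is a JSON list and the extract_api cell is a Python literal; the row dict and helper below are hypothetical, not part of any loader API:

import ast
import json

def parse_row(row):
    # row: hypothetical dict keyed by the three column names in the header above
    code = row["code"]                 # raw source text of the snippet
    apis = row["apis"]
    if isinstance(apis, str):          # some exports keep the list as a JSON string
        apis = json.loads(apis)
    # extract_api looks like a Python literal: a list of tuples such as
    # ((start, end), 'qualified.name', 'name_as_called', (args, kwargs), '(...)', (inner_start, inner_end), ..., 'import ...\n')
    calls = ast.literal_eval(row["extract_api"])
    for entry in calls:
        span, qualified = entry[0], entry[1]
        print(f"{qualified} at chars {span[0]}..{span[1]}")
    return code, apis, calls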
import torch
from torch.nn.functional import leaky_relu
from rational.torch import Rational
import numpy as np
t = torch.tensor([-2., -1, 0., 1., 2.])
expected_res = np.array(leaky_relu(t))
inp = torch.from_numpy(np.array(t)).reshape(-1)
cuda_inp = torch.tensor(np.array(t), dtype=torch.float, device="cuda").reshape(-... | [
"torch.nn.functional.leaky_relu",
"numpy.isclose",
"torch.tensor",
"numpy.array",
"rational.torch.Rational"
] | [((117, 156), 'torch.tensor', 'torch.tensor', (['[-2.0, -1, 0.0, 1.0, 2.0]'], {}), '([-2.0, -1, 0.0, 1.0, 2.0])\n', (129, 156), False, 'import torch\n'), ((177, 190), 'torch.nn.functional.leaky_relu', 'leaky_relu', (['t'], {}), '(t)\n', (187, 190), False, 'from torch.nn.functional import leaky_relu\n'), ((793, 849), 'n... |
"""
The :mod:`sportsbed.datasets._soccer` includes functions
to fetch soccer historical and fixtures data.
"""
import numpy as np
HOME_WIN = lambda outputs, col1, col2, offset: outputs[col1] - outputs[col2] > offset
AWAY_WIN = lambda outputs, col1, col2, offset: outputs[col1] - outputs[col2] < -offset
DRAW = lambda o... | [
"numpy.abs"
] | [((348, 385), 'numpy.abs', 'np.abs', (['(outputs[col1] - outputs[col2])'], {}), '(outputs[col1] - outputs[col2])\n', (354, 385), True, 'import numpy as np\n')] |
import face_recognition
import cv2
import numpy as np
# getMouthImage (from TLR Teeth Appearance Calculation.ipynb)
def getMouthImage(faceImage,margin=0):
# face_locations = face_recognition.face_locations(faceImage)
face_landmarks_list = face_recognition.face_landmarks(faceImage)
if len(face_landmarks_list) == ... | [
"numpy.copy",
"cv2.polylines",
"face_recognition.face_landmarks",
"numpy.array",
"cv2.cvtColor",
"numpy.concatenate"
] | [((244, 286), 'face_recognition.face_landmarks', 'face_recognition.face_landmarks', (['faceImage'], {}), '(faceImage)\n', (275, 286), False, 'import face_recognition\n'), ((1845, 1898), 'numpy.concatenate', 'np.concatenate', (['(top_lip[6:], bottom_lip[6:])'], {'axis': '(0)'}), '((top_lip[6:], bottom_lip[6:]), axis=0)\... |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 08 21:24:18 2014
@author: Derrick
Module containing import detex classes
"""
# python 2 and 3 compatibility imports
from __future__ import print_function, absolute_import, unicode_literals, division
import json
import numbers
import os
import matplotlib as mpl
import ma... | [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.hist",
"numpy.random.rand",
"obspy.core.Stream",
"matplotlib.pyplot.ylabel",
"pandas.read_csv",
"detex.construct.fast_normcorr",
"obspy.core.UTCDateTime",
"numpy.array",
"detex.util.saveSQLite",
"copy.deepcopy",
"numpy.linalg.norm",
"scipy.dot",
... | [((10480, 10528), 'detex.log', 'detex.log', (['__name__', 'msg'], {'level': '"""info"""', 'pri': '(True)'}), "(__name__, msg, level='info', pri=True)\n", (10489, 10528), False, 'import detex\n'), ((13698, 13746), 'detex.log', 'detex.log', (['__name__', 'msg'], {'level': '"""info"""', 'pri': '(True)'}), "(__name__, msg,... |
from torchvision import models
import numpy as np
import torch
import os
from moviepy.editor import VideoFileClip
SKIP_FRAME_RATE = 10
MINIMAX_FRAME = 4
# Read the function's documentation
model = models.detection.fasterrcnn_resnet50_fpn(pretrained=True)
model.eval()
os.environ['KMP_DUPLICATE_LIB_OK']='True'
def extract_boxes(re... | [
"numpy.float",
"torch.from_numpy",
"numpy.max",
"numpy.array",
"torchvision.models.detection.fasterrcnn_resnet50_fpn",
"numpy.transpose"
] | [((186, 243), 'torchvision.models.detection.fasterrcnn_resnet50_fpn', 'models.detection.fasterrcnn_resnet50_fpn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (226, 243), False, 'from torchvision import models\n'), ((3155, 3170), 'numpy.float', 'np.float', (['"""Inf"""'], {}), "('Inf')\n", (3163, 3170), True,... |
import numpy
#Variables
PLAYERS= 2
boardW = 5
boardH = 5
board = numpy.zeros((boardW,boardH))
step = 0
winLength = 3
#Functions
def drawBoard():
global step
print("\n Step:", step, "\n")
for i in range(0,len(board)):
for j in numpy.flipud(board)[i]:
print('{:>4}'.format(getSym(j)), end... | [
"numpy.full",
"numpy.zeros",
"numpy.diagonal",
"numpy.flipud"
] | [((66, 95), 'numpy.zeros', 'numpy.zeros', (['(boardW, boardH)'], {}), '((boardW, boardH))\n', (77, 95), False, 'import numpy\n'), ((2058, 2077), 'numpy.zeros', 'numpy.zeros', (['(5, 5)'], {}), '((5, 5))\n', (2069, 2077), False, 'import numpy\n'), ((2313, 2342), 'numpy.zeros', 'numpy.zeros', (['(boardW, boardH)'], {}), ... |
"""
Author: <NAME>
Created: 3/11/2020 9:04 AM
"""
from Climate_Shocks.vcsn_pull import vcsn_pull_single_site
from Climate_Shocks.note_worthy_events.simple_soil_moisture_pet import calc_sma_smd_historical, calc_smd_monthly
from Climate_Shocks.get_past_record import get_restriction_record, get_vcsn_record
from Pastur... | [
"Climate_Shocks.get_past_record.get_restriction_record",
"pandas.to_datetime",
"os.path.exists",
"Climate_Shocks.get_past_record.get_vcsn_record",
"pandas.MultiIndex.from_product",
"itertools.product",
"pandas.DataFrame",
"Climate_Shocks.note_worthy_events.simple_soil_moisture_pet.calc_sma_smd_histori... | [((844, 873), 'os.path.exists', 'os.path.exists', (['event_def_dir'], {}), '(event_def_dir)\n', (858, 873), False, 'import os\n'), ((879, 905), 'os.makedirs', 'os.makedirs', (['event_def_dir'], {}), '(event_def_dir)\n', (890, 905), False, 'import os\n'), ((1731, 1828), 'pandas.merge', 'pd.merge', (['irr_temp2', 'dry_te... |
import argparse
import numpy as np
import numpy_net as npn
parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, help='Learning rate', default=0.1)
parser.add_argument('--epochs', type=int, help='Number of epochs', default=10)
parser.add_argument('--batch-size', type=int, help='Batch size', defaul... | [
"numpy.mean",
"numpy.eye",
"argparse.ArgumentParser",
"numpy_net.load_mnist",
"numpy_net.CrossEntropy",
"numpy.random.shuffle"
] | [((69, 94), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (92, 94), False, 'import argparse\n'), ((927, 943), 'numpy_net.load_mnist', 'npn.load_mnist', ([], {}), '()\n', (941, 943), True, 'import numpy_net as npn\n'), ((1552, 1572), 'numpy.random.shuffle', 'np.random.shuffle', (['i'], {}), '(i... |
import os
def configuration(parent_package="", top_path=None):
import numpy
from numpy.distutils.misc_util import Configuration
config = Configuration("simulator", parent_package, top_path)
libraries = []
if os.name == "posix":
libraries.append("m")
# cpp_args = ['-stdlib=libc++', '... | [
"numpy.distutils.misc_util.Configuration",
"numpy.get_include"
] | [((152, 204), 'numpy.distutils.misc_util.Configuration', 'Configuration', (['"""simulator"""', 'parent_package', 'top_path'], {}), "('simulator', parent_package, top_path)\n", (165, 204), False, 'from numpy.distutils.misc_util import Configuration\n'), ((473, 492), 'numpy.get_include', 'numpy.get_include', ([], {}), '(... |
import numpy as np
import pandas as pd
import skfuzzy as fuzz
from skfuzzy import control as ctrl
x = ctrl.Antecedent(np.arange(0.0, 2.0), "X")
y = ctrl.Consequent(np.arange(0.0, 2), "Y")
x.automf(names=["pequeno", "médio", "grande"])
y.automf(names=["baixo", "alto"])
regra_1 = ctrl.Rule(antecedent=x["pequeno"], con... | [
"skfuzzy.control.ControlSystemSimulation",
"numpy.random.random",
"skfuzzy.control.ControlSystem",
"skfuzzy.control.Rule",
"numpy.arange"
] | [((282, 356), 'skfuzzy.control.Rule', 'ctrl.Rule', ([], {'antecedent': "x['pequeno']", 'consequent': "y['baixo']", 'label': '"""regra_1"""'}), "(antecedent=x['pequeno'], consequent=y['baixo'], label='regra_1')\n", (291, 356), True, 'from skfuzzy import control as ctrl\n'), ((367, 439), 'skfuzzy.control.Rule', 'ctrl.Rul... |
import glob, os
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
def quick_plot(results_file, gauss_width, start, stop, step):
with open(results_file, "r") as results:
results = results.read().split('\n')
results = [float(res) for res in results[:-1]]
eigenenergies = results
gau... | [
"matplotlib.pyplot.grid",
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.fill_between",
"numpy.arange",
"seaborn.set",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"numpy.exp",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.ylim",
"glob.glob"... | [((360, 388), 'numpy.arange', 'np.arange', (['start', 'stop', 'step'], {}), '(start, stop, step)\n', (369, 388), True, 'import numpy as np\n'), ((640, 673), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(13.66, 7.68)'}), '(figsize=(13.66, 7.68))\n', (650, 673), True, 'import matplotlib.pyplot as plt\n'), ... |
from ..tweet_sentiment_classifier import Classifier, tokenizer_filter
import pickle as pkl
import numpy as np
import json
import os
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.model_selection i... | [
"pickle.dump",
"os.makedirs",
"numpy.hstack",
"sklearn.model_selection.train_test_split",
"sklearn.linear_model.LogisticRegression",
"sklearn.feature_extraction.text.TfidfVectorizer",
"sklearn.utils.resample",
"json.dump"
] | [((2678, 2743), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {'analyzer': 'str.split', 'max_features': 'self.vocab_size'}), '(analyzer=str.split, max_features=self.vocab_size)\n', (2693, 2743), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((2908, 2975), 'sklearn... |
# Owen's experiment to convert a CSDS to the HF data structure
import datasets
import numpy as np
from transformers import AutoModelForSequenceClassification, AutoTokenizer, Trainer, TrainingArguments
from datasets import Dataset, DatasetDict, ClassLabel, load_metric
# create a CSDS as dict
# First create a mapping ... | [
"datasets.load_metric",
"transformers.TrainingArguments",
"datasets.Dataset.from_dict",
"numpy.argmax",
"transformers.AutoModelForSequenceClassification.from_pretrained",
"datasets.DatasetDict",
"datasets.ClassLabel",
"transformers.AutoTokenizer.from_pretrained",
"transformers.Trainer"
] | [((357, 409), 'datasets.ClassLabel', 'ClassLabel', ([], {'num_classes': '(3)', 'names': "['CB', 'NCB', 'NA']"}), "(num_classes=3, names=['CB', 'NCB', 'NA'])\n", (367, 409), False, 'from datasets import Dataset, DatasetDict, ClassLabel, load_metric\n'), ((3639, 3673), 'datasets.Dataset.from_dict', 'Dataset.from_dict', (... |
"""
Test integrators with simple ODE
dy/dx = 3x^2y given x0 = 1, y0 = 2
ANALYTIC SOLUTION:
y = e^{x^3 + c}, c = ln(2) - 1
y(1,1.1,1.2,1.3,1.4) = [2,2.78471958461639,4.141869187709196,6.6203429951303265,11.440356871885081]
"""
# Import package, test suite, and other packages as needed
import numpy as np
from pycc.rt i... | [
"numpy.array",
"numpy.allclose"
] | [((611, 641), 'numpy.array', 'np.array', (['[y0, y1, y2, y3, y4]'], {}), '([y0, y1, y2, y3, y4])\n', (619, 641), True, 'import numpy as np\n'), ((730, 826), 'numpy.array', 'np.array', (['[2, 2.7846419118859376, 4.141490537335979, 6.618844434974082, \n 11.434686303979237]'], {}), '([2, 2.7846419118859376, 4.141490537... |
# system
import os
from enum import Enum
# lib
import numpy as np
class GloVeSize(Enum):
tiny = 50
small = 100
medium = 200
large = 300
__DEFAULT_SIZE = GloVeSize.small
def get_pretrained_embedding_matrix(word_to_index,
vocab_size=10000,
... | [
"numpy.random.normal",
"os.path.exists",
"os.makedirs",
"os.path.join",
"numpy.asarray",
"os.path.isfile",
"numpy.load",
"numpy.save"
] | [((1503, 1572), 'os.path.join', 'os.path.join', (['cache_dir', "('glove_%d_embedding_matrix.npy' % size.value)"], {}), "(cache_dir, 'glove_%d_embedding_matrix.npy' % size.value)\n", (1515, 1572), False, 'import os\n'), ((1427, 1452), 'os.path.exists', 'os.path.exists', (['cache_dir'], {}), '(cache_dir)\n', (1441, 1452)... |
import numpy as np
arr = np.array([[2, 5], [1, 3]])
arr_inv = np.linalg.inv(arr)
print(arr_inv)
# [[ 3. -5.]
# [-1. 2.]]
mat = np.matrix([[2, 5], [1, 3]])
mat_inv = np.linalg.inv(mat)
print(mat_inv)
# [[ 3. -5.]
# [-1. 2.]]
mat_inv = mat**-1
print(mat_inv)
# [[ 3. -5.]
# [-1. 2.]]
mat_inv = mat.I
print(mat_... | [
"numpy.mat",
"numpy.linalg.pinv",
"numpy.array",
"numpy.linalg.inv",
"numpy.matrix"
] | [((26, 52), 'numpy.array', 'np.array', (['[[2, 5], [1, 3]]'], {}), '([[2, 5], [1, 3]])\n', (34, 52), True, 'import numpy as np\n'), ((64, 82), 'numpy.linalg.inv', 'np.linalg.inv', (['arr'], {}), '(arr)\n', (77, 82), True, 'import numpy as np\n'), ((132, 159), 'numpy.matrix', 'np.matrix', (['[[2, 5], [1, 3]]'], {}), '([... |
"""
This script was modified from https://github.com/ZhaoJ9014/face.evoLVe.PyTorch
"""
import os
import cv2
import bcolz
import numpy as np
import tqdm
from sklearn.model_selection import KFold
from scipy import interpolate
import math
from .utils import l2_norm
def get_val_pair(path, name):
carray = bcolz.carray... | [
"numpy.arccos",
"numpy.logical_not",
"scipy.interpolate.interp1d",
"numpy.linalg.norm",
"sklearn.model_selection.KFold",
"numpy.arange",
"numpy.mean",
"numpy.less",
"numpy.multiply",
"numpy.asarray",
"numpy.subtract",
"numpy.max",
"numpy.argmax",
"numpy.square",
"numpy.std",
"cv2.resiz... | [((1952, 1976), 'numpy.less', 'np.less', (['dist', 'threshold'], {}), '(dist, threshold)\n', (1959, 1976), True, 'import numpy as np\n'), ((2867, 2908), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'nrof_folds', 'shuffle': '(False)'}), '(n_splits=nrof_folds, shuffle=False)\n', (2872, 2908), False, 'from ... |
import os, random, numpy as np, copy
from torch.utils.data import Dataset
import torch
def seq_collate(data):
(past_traj, future_traj) = zip(*data)
past_traj = torch.stack(past_traj,dim=0)
future_traj = torch.stack(future_traj,dim=0)
data = {
'past_traj': past_traj,
'future_traj': fu... | [
"torch.stack",
"torch.from_numpy",
"numpy.load"
] | [((172, 201), 'torch.stack', 'torch.stack', (['past_traj'], {'dim': '(0)'}), '(past_traj, dim=0)\n', (183, 201), False, 'import torch\n'), ((219, 250), 'torch.stack', 'torch.stack', (['future_traj'], {'dim': '(0)'}), '(future_traj, dim=0)\n', (230, 250), False, 'import torch\n'), ((843, 861), 'numpy.load', 'np.load', (... |
#! /usr/bin/python
# -*- coding: utf8 -*-
import os, time, random
import numpy as np
import scipy
import tensorflow as tf
import tensorlayer as tl
from model import *
from utils import *
from config import *
###====================== HYPER-PARAMETERS ===========================###
batch_size = config.train.batch_siz... | [
"tensorflow.image.resize_images",
"numpy.sqrt",
"tensorlayer.layers.initialize_global_variables",
"tensorlayer.files.load_and_assign_npz",
"os.listdir",
"argparse.ArgumentParser",
"tensorflow.placeholder",
"tensorlayer.layers.get_variables_with_name",
"tensorflow.assign",
"tensorlayer.vis.save_ima... | [((371, 403), 'numpy.sqrt', 'np.sqrt', (['config.train.batch_size'], {}), '(config.train.batch_size)\n', (378, 403), True, 'import numpy as np\n'), ((2259, 2308), 'numpy.empty', 'np.empty', (['[batch_size, patch_size, patch_size, 3]'], {}), '([batch_size, patch_size, patch_size, 3])\n', (2267, 2308), True, 'import nump... |
import pandas as pd
import numpy as np
from typing import Tuple
from itertools import product
from tqdm import tqdm
from sklearn.ensemble import RandomForestClassifier
from sklearn.multiclass import OneVsRestClassifier
from sklearn.metrics import confusion_matrix, f1_score
from sklearn.model_selection import train_tes... | [
"sklearn.model_selection.train_test_split",
"itertools.product",
"sklearn.ensemble.RandomForestClassifier",
"numpy.array",
"numpy.linspace",
"sklearn.multiclass.OneVsRestClassifier"
] | [((924, 944), 'numpy.array', 'np.array', (['f1_mac_lst'], {}), '(f1_mac_lst)\n', (932, 944), True, 'import numpy as np\n'), ((1045, 1065), 'numpy.array', 'np.array', (['f1_mic_lst'], {}), '(f1_mic_lst)\n', (1053, 1065), True, 'import numpy as np\n'), ((1544, 1605), 'sklearn.ensemble.RandomForestClassifier', 'RandomFore... |
"""
`icclim.models.frequency` wraps the concept of pandas frequency in order to resample
time series. `slice_mode` parameter of `icclim.index` is always converted to a
`Frequency`.
"""
import datetime
from enum import Enum
from typing import Any, Callable, List, Optional, Tuple, Union
import cftime
impor... | [
"xarray.coding.cftime_offsets.to_offset",
"xarray.core.dataarray.DataArray",
"numpy.unique",
"pandas.Timedelta",
"xarray.concat",
"pandas.tseries.frequencies.to_offset",
"cftime.datetime",
"icclim.icclim_exceptions.InvalidIcclimArgumentError",
"datetime.timedelta",
"pandas.to_datetime"
] | [((7101, 7163), 'icclim.icclim_exceptions.InvalidIcclimArgumentError', 'InvalidIcclimArgumentError', (['f"""Unknown frequency {slice_mode}."""'], {}), "(f'Unknown frequency {slice_mode}.')\n", (7127, 7163), False, 'from icclim.icclim_exceptions import InvalidIcclimArgumentError\n'), ((1198, 1224), 'numpy.unique', 'np.u... |
# coding=utf8
"""
@author: <NAME>
@date: 09/26/2019
@code description: It is a Python3 file to implement cosine similarity with TF-IDF and Word Embedding methods.
"""
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise im... | [
"numpy.mean",
"nltk.corpus.stopwords.words",
"sklearn.metrics.pairwise.cosine_similarity",
"nltk.stem.WordNetLemmatizer",
"numpy.array",
"sklearn.feature_extraction.text.TfidfVectorizer"
] | [((1062, 1090), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['vec', 'args'], {}), '(vec, args)\n', (1079, 1090), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((4364, 4381), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '()\n', (4379, 4381)... |
# -*- coding: utf-8 -*-
"""
This file contains implementations of the functions used to train a CNN model:
    train_cnn - Function used to facilitate the training of the Convolutional Neural Network model.
    test_cnn - Function used to facilitate the testing of the Convolutional Neural Network model.
"""
# Built-i... | [
"model.Classifier",
"torch.nn.functional.softmax",
"numpy.mean",
"torch.cuda.amp.GradScaler",
"torch.optim.lr_scheduler.CyclicLR",
"torch.cuda.amp.autocast",
"utils.log",
"torch.nn.functional.log_softmax",
"time.time",
"torch.cat",
"torch.device",
"os.makedirs",
"torch.stack",
"os.path.joi... | [((1630, 1653), 'dataset.get_datasets', 'get_datasets', (['arguments'], {}), '(arguments)\n', (1642, 1653), False, 'from dataset import get_datasets\n'), ((1748, 1892), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'batch_size': 'arguments.batch_size', 'shuffle': '(True)', 'num_workers': 'arguments.dat... |
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 27 12:47:00 2017
@author: sakurai
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import AffinityPropagation
from sklearn.metrics import f1_score
from sklearn.metrics import normalized_mutual_info_score
from sklearn.preprocessing import LabelE... | [
"numpy.random.normal",
"sklearn.preprocessing.LabelEncoder",
"sklearn.cluster.AffinityPropagation",
"numpy.vstack",
"numpy.random.uniform",
"sklearn.metrics.normalized_mutual_info_score",
"numpy.load",
"matplotlib.pyplot.show"
] | [((4137, 4159), 'numpy.load', 'np.load', (['"""y_train.npy"""'], {}), "('y_train.npy')\n", (4144, 4159), True, 'import numpy as np\n'), ((4218, 4239), 'numpy.load', 'np.load', (['"""y_test.npy"""'], {}), "('y_test.npy')\n", (4225, 4239), True, 'import numpy as np\n'), ((4757, 4767), 'matplotlib.pyplot.show', 'plt.show'... |
import sys
sys.path.append('../../')
import keras2caffe
DATA_DIR='../../data/'
import caffe
import cv2
import numpy as np
import sys
sys.path.append('/media/toshiba_ml/models/keras-models/keras-squeezenet')
from keras_squeezenet import SqueezeNet
#TensorFlow backend uses all GPU memory by default, so we need limit... | [
"keras_squeezenet.SqueezeNet",
"tensorflow.Session",
"caffe.set_mode_gpu",
"keras2caffe.convert",
"numpy.argmax",
"numpy.array",
"caffe.Net",
"tensorflow.ConfigProto",
"cv2.resize",
"sys.path.append",
"cv2.imread"
] | [((11, 36), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (26, 36), False, 'import sys\n'), ((136, 209), 'sys.path.append', 'sys.path.append', (['"""/media/toshiba_ml/models/keras-models/keras-squeezenet"""'], {}), "('/media/toshiba_ml/models/keras-models/keras-squeezenet')\n", (151, 209... |
import argparse
import numpy as np
from scipy import sparse
from scipy.optimize import linprog
import matplotlib.pyplot as plt
import networkx as nx
import torch
import torch.nn as nn
import torch.nn.functional as func
import torch.optim as optim
from torch.optim.lr_scheduler import StepLR
from torch.utils.data impor... | [
"networkx.draw_networkx_edge_labels",
"torch.nn.MSELoss",
"numpy.array",
"torch.cuda.is_available",
"numpy.genfromtxt",
"networkx.draw_networkx_edges",
"numpy.mean",
"scipy.sparse.lil_matrix",
"argparse.ArgumentParser",
"matplotlib.pyplot.Normalize",
"networkx.DiGraph",
"captum.attr.Integrated... | [((924, 944), 'numpy.full', 'np.full', (['time', 'c_buy'], {}), '(time, c_buy)\n', (931, 944), True, 'import numpy as np\n'), ((959, 977), 'numpy.zeros', 'np.zeros', (['(time * 5)'], {}), '(time * 5)\n', (967, 977), True, 'import numpy as np\n'), ((1453, 1489), 'scipy.sparse.lil_matrix', 'sparse.lil_matrix', (['(2 * ti... |
# -*- coding: utf-8 -*-
"""
@Author: <NAME>
For Citibike rebalancing simulation
"""
import simpy
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as stats
order_threshold = 2.0
order_up_to = 5.0
delivery_delay = 20 # in minutes
SIM_RUN = 1000 #number of simulation runs
initial_bikes = 15
operat... | [
"numpy.mean",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"scipy.stats.norm.ppf",
"numpy.random.exponential",
"simpy.Environment",
"matplotlib.pyplot.figure",
"numpy.random.seed",
"scipy.stats.sem",
"matplotlib.pyplot.step",
... | [((4533, 4573), 'scipy.stats.norm.ppf', 'stats.norm.ppf', (['(1 - confidence_level / 2)'], {}), '(1 - confidence_level / 2)\n', (4547, 4573), True, 'import scipy.stats as stats\n'), ((684, 720), 'numpy.random.exponential', 'np.random.exponential', (['(1.0 / _lambda)'], {}), '(1.0 / _lambda)\n', (705, 720), True, 'impor... |
import pandas as pd
import numpy as np
from sklearn.preprocessing import Imputer, LabelBinarizer, StandardScaler
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.model_selection import StratifiedShuffleSplit, GridSearchCV, train_test_split, c... | [
"sklearn.pipeline.FeatureUnion",
"sklearn.preprocessing.LabelBinarizer",
"sklearn.metrics.f1_score",
"sklearn.neural_network.MLPClassifier",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.preprocessing.Imputer",
"sklearn.linear_model.LogisticRegression",
"sklearn.preprocessi... | [((445, 463), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (459, 463), True, 'import numpy as np\n'), ((1583, 1683), 'sklearn.pipeline.FeatureUnion', 'FeatureUnion', ([], {'transformer_list': "[('num_pipeline', num_pipeline), ('cat_pipeline', cat_pipeline)]"}), "(transformer_list=[('num_pipeline', n... |
import librosa
from librosa import cqt, icqt
import numpy as np
def gl_cqt(S, n_iter=32, sr=22050, hop_length=512, bins_per_octave=12, fmin=None, window='hann',
dtype=np.float32, length=None, momentum=0.99, random_state=None, res_type='kaiser_fast'):
if fmin is None:
fmin = librosa.note_to_hz('C1')
... | [
"numpy.abs",
"librosa.cqt",
"numpy.random.RandomState",
"librosa.icqt"
] | [((1807, 1956), 'librosa.icqt', 'icqt', (['(S * angles)'], {'sr': 'sr', 'hop_length': 'hop_length', 'bins_per_octave': 'bins_per_octave', 'fmin': 'fmin', 'window': 'window', 'length': 'length', 'res_type': 'res_type'}), '(S * angles, sr=sr, hop_length=hop_length, bins_per_octave=\n bins_per_octave, fmin=fmin, window... |
import sys
sys.path.append('C:/Python37/Lib/site-packages')
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
import random
from pyOpenBCI import OpenBCICyton
import threading
import time
import numpy as np
from scipy import signal
import random
import numpy as np
from PIL import Image
im... | [
"pyOpenBCI.OpenBCICyton",
"PIL.Image.open",
"pyqtgraph.Qt.QtGui.QApplication.instance",
"numpy.average",
"numpy.fft.fftfreq",
"numpy.nditer",
"pyqtgraph.ImageItem",
"scipy.signal.butter",
"numpy.fft.fft",
"numpy.array",
"pyqtgraph.Qt.QtGui.QApplication",
"scipy.signal.lfilter",
"numpy.std",
... | [((12, 60), 'sys.path.append', 'sys.path.append', (['"""C:/Python37/Lib/site-packages"""'], {}), "('C:/Python37/Lib/site-packages')\n", (27, 60), False, 'import sys\n'), ((373, 386), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (381, 386), True, 'import numpy as np\n'), ((447, 461), 'numpy.array', 'np.array', (... |
# [Round 1] Friends 4 Block
import numpy as np
def new_borad(m, n, board):
remove = np.array([[True for _ in range(m)] for _ in range(n)])
count = 0
for i in range(n - 1):
for j in range(m - 1):
cur = board[i,j]
if cur == "-1":
break
if cur == board[i,j+1] and ... | [
"numpy.array",
"numpy.transpose"
] | [((915, 930), 'numpy.transpose', 'np.transpose', (['b'], {}), '(b)\n', (927, 930), True, 'import numpy as np\n'), ((1101, 1116), 'numpy.array', 'np.array', (['new_b'], {}), '(new_b)\n', (1109, 1116), True, 'import numpy as np\n')] |
import math
import numpy
def hill_chart_parametrisation(h, turbine_specs):
"""
Calculates power and flow rate through bulb turbines based on Aggidis and Feather (2012)
f_g = grid frequency, g_p = generator poles,
t_cap = Turbine capacity, h = head difference, dens = water density
"""
turb_sp ... | [
"math.sqrt",
"numpy.sign"
] | [((484, 496), 'math.sqrt', 'math.sqrt', (['h'], {}), '(h)\n', (493, 496), False, 'import math\n'), ((631, 643), 'math.sqrt', 'math.sqrt', (['h'], {}), '(h)\n', (640, 643), False, 'import math\n'), ((1346, 1383), 'math.sqrt', 'math.sqrt', (["(2 * turbine_specs['g'] * h)"], {}), "(2 * turbine_specs['g'] * h)\n", (1355, 1... |
# standard library imports
import io
import logging
import struct
import warnings
# 3rd party library imports
import numpy as np
from uuid import UUID
# local imports
from glymur import Jp2k
from .lib import tiff as libtiff
from .jp2box import UUIDBox
# Map the numeric TIFF datatypes to the format string used by th... | [
"logging.getLogger",
"numpy.ceil",
"logging.StreamHandler",
"uuid.UUID",
"numpy.flipud",
"glymur.Jp2k",
"io.BytesIO",
"struct.pack",
"numpy.zeros",
"struct.unpack",
"warnings.warn"
] | [((1982, 2011), 'logging.getLogger', 'logging.getLogger', (['"""tiff2jp2"""'], {}), "('tiff2jp2')\n", (1999, 2011), False, 'import logging\n'), ((2065, 2088), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (2086, 2088), False, 'import logging\n'), ((2672, 2684), 'io.BytesIO', 'io.BytesIO', ([], {})... |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from config_settings import Args
# %load_ext autoreload
# %autoreload 2
args=Args()
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("device=",device)
def hidden_init(layer):
fan_in = layer.weight.d... | [
"torch.manual_seed",
"numpy.sqrt",
"config_settings.Args",
"torch.from_numpy",
"torch.cuda.is_available",
"torch.nn.Linear",
"torch.cat"
] | [((165, 171), 'config_settings.Args', 'Args', ([], {}), '()\n', (169, 171), False, 'from config_settings import Args\n'), ((206, 231), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (229, 231), False, 'import torch\n'), ((349, 364), 'numpy.sqrt', 'np.sqrt', (['fan_in'], {}), '(fan_in)\n', (356,... |
from math import sqrt
from numpy import arange
from universal_constants import MARS_RADIUS
from universal_functions import mars_density
class Simulation:
@property
def xs(self):
return [v.x for v in self.ps]
@property
def ys(self):
return [v.y for v in self.ps]
@property
def... | [
"csv.writer",
"universal_functions.mars_density",
"numpy.arange"
] | [((871, 890), 'numpy.arange', 'arange', (['(0)', 'time', 'dt'], {}), '(0, time, dt)\n', (877, 890), False, 'from numpy import arange\n'), ((2073, 2092), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (2083, 2092), False, 'import csv\n'), ((3587, 3606), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csv... |
import numpy as np
import cv2
import matplotlib.pyplot as plt
from collections import deque
# Import configuration parameters
import config as cfg
# Define a class to receive the characteristics of each line detection
class Line:
def __init__(self, buf_len = 5):
# x values of the last n fits of the line
... | [
"cv2.rectangle",
"numpy.dstack",
"numpy.mean",
"numpy.abs",
"collections.deque",
"numpy.polyfit",
"numpy.hstack",
"numpy.zeros_like",
"numpy.argmax",
"numpy.max",
"numpy.sum",
"numpy.array",
"numpy.linspace",
"cv2.addWeighted",
"numpy.vstack",
"numpy.concatenate",
"numpy.int_",
"nu... | [((917, 979), 'numpy.sum', 'np.sum', (['binary_warped[binary_warped.shape[0] // 2:, :]'], {'axis': '(0)'}), '(binary_warped[binary_warped.shape[0] // 2:, :], axis=0)\n', (923, 979), True, 'import numpy as np\n'), ((1059, 1115), 'numpy.dstack', 'np.dstack', (['(binary_warped, binary_warped, binary_warped)'], {}), '((bin... |
# -*- coding: utf-8 -*-
"""
...
"""
import LibraryTT.txt2array as conversion
import numpy as np
from numpy import sqrt
import pandas as pd
import matplotlib.pyplot as plt
import random
import math
from mpl_toolkits.mplot3d import Axes3D
# import open3d as o3d
# %matplotlib inline
D = conversion.txt2... | [
"numpy.copy",
"LibraryTT.txt2array.usar",
"LibraryTT.txt2array.imprimir3D",
"LibraryTT.txt2array.centros",
"numpy.delete",
"LibraryTT.txt2array.imprimirObjetos",
"math.degrees",
"numpy.append",
"numpy.array",
"math.atan2",
"LibraryTT.txt2array.rnsc",
"LibraryTT.txt2array.RObjetos",
"LibraryT... | [((305, 327), 'LibraryTT.txt2array.txt2array', 'conversion.txt2array', ([], {}), '()\n', (325, 327), True, 'import LibraryTT.txt2array as conversion\n'), ((336, 346), 'numpy.copy', 'np.copy', (['D'], {}), '(D)\n', (343, 346), True, 'import numpy as np\n'), ((486, 526), 'LibraryTT.txt2array.RObjetos', 'conversion.RObjet... |
import sys
import os
import numpy as np
import pandas as pd
from Globals import *
#-------- Create directories ------------
os.makedirs(dir_data,exist_ok=True)
os.makedirs(dir_chain,exist_ok=True)
os.makedirs(dir_plots,exist_ok=True)
os.makedirs(dir_outs,exist_ok=True)
#------------- Load data -----------------------... | [
"numpy.repeat",
"os.makedirs",
"pandas.read_csv",
"numpy.where",
"numpy.floor",
"numpy.isnan",
"numpy.isfinite",
"numpy.concatenate"
] | [((125, 161), 'os.makedirs', 'os.makedirs', (['dir_data'], {'exist_ok': '(True)'}), '(dir_data, exist_ok=True)\n', (136, 161), False, 'import os\n'), ((161, 198), 'os.makedirs', 'os.makedirs', (['dir_chain'], {'exist_ok': '(True)'}), '(dir_chain, exist_ok=True)\n', (172, 198), False, 'import os\n'), ((198, 235), 'os.ma... |
from estimator_adaptative import EstimatorAdaptative
from mpl_toolkits.mplot3d import Axes3D
from grid_search import GridSearch
from sklearn import metrics
import matplotlib.pyplot as plt
import matplotlib as mpl
from utils import *
import numpy as np
import os
import sys
data_path = '../../databases'
PlotsDirectory =... | [
"os.path.exists",
"matplotlib.pyplot.savefig",
"estimator_adaptative.EstimatorAdaptative",
"os.makedirs",
"matplotlib.pyplot.ylabel",
"sklearn.metrics.auc",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.close",
"numpy.array",
"matplotlib.pyplot.figure"
] | [((353, 383), 'os.path.exists', 'os.path.exists', (['PlotsDirectory'], {}), '(PlotsDirectory)\n', (367, 383), False, 'import os\n'), ((389, 416), 'os.makedirs', 'os.makedirs', (['PlotsDirectory'], {}), '(PlotsDirectory)\n', (400, 416), False, 'import os\n'), ((501, 523), 'numpy.array', 'np.array', (['[1050, 1200]'], {}... |
import numpy as np
a = np.arange(15).reshape(3,5)
print(a)
print(a.shape) | [
"numpy.arange"
] | [((23, 36), 'numpy.arange', 'np.arange', (['(15)'], {}), '(15)\n', (32, 36), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
############################################################
## <NAME> ##
## Copyright (C) 2019-2020 Lauro Sumoy Lab, IGTP, Spain ##
############################################################
"""
Get frequence of reads for each type, variant, etc
"""
## ... | [
"random.sample",
"argparse.ArgumentParser",
"numpy.isnan",
"HCGB.functions.main_functions.get_data",
"pandas.DataFrame"
] | [((997, 1336), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""mod_freq.py"""', 'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': '"""\n\nmod_freq.py: Modified given frequencies and select isomiRs\n\nVersion: 0.1\nLicense: GPLv3\n\nUSAGE: python mod_freq.py --freq table.f... |
import os
import argparse
import torch
import numpy
import random
from datetime import datetime
def format_time():
now = datetime.now() # current date and time
date_time = now.strftime("%m-%d-%H:%M:%S")
return date_time
def ensure_dir(path):
if not os.path.exists(path):
os.makedirs(path)
... | [
"torch.manual_seed",
"os.path.exists",
"os.makedirs",
"random.seed",
"argparse.ArgumentTypeError",
"datetime.datetime.now",
"numpy.random.seed",
"torch.cuda.manual_seed"
] | [((127, 141), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (139, 141), False, 'from datetime import datetime\n'), ((668, 691), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (685, 691), False, 'import torch\n'), ((696, 719), 'numpy.random.seed', 'numpy.random.seed', (['seed'], {}), ... |
###############################################################################
# Copyright (c) 2019 Uber Technologies, Inc. #
# #
# Licensed under the Uber Non-Commercial License (the "License"); #
... | [
"numpy.ones",
"numpy.where",
"numpy.zeros",
"numpy.isfinite",
"numpy.vstack",
"math.fabs",
"copy.deepcopy",
"numpy.argmin",
"sys.stdout.flush"
] | [((3611, 3638), 'numpy.zeros', 'np.zeros', (['(0, 1)'], {'dtype': 'int'}), '((0, 1), dtype=int)\n', (3619, 3638), True, 'import numpy as np\n'), ((3727, 3768), 'numpy.zeros', 'np.zeros', (['self.n_trust_regions'], {'dtype': 'int'}), '(self.n_trust_regions, dtype=int)\n', (3735, 3768), True, 'import numpy as np\n'), ((3... |
#
# file: gd_1d.py
#
# 1D example of GD
#
# RTK, 14-Feb-2021
# Last update: 14-Feb-2021
#
################################################################
import sys
import os
import numpy as np
import matplotlib.pylab as plt
# The function and its derivative
def f(x):
return 6*x**2 - 12*x + 3
def d(x):
... | [
"matplotlib.pylab.savefig",
"matplotlib.pylab.tight_layout",
"matplotlib.pylab.xlabel",
"numpy.linspace",
"matplotlib.pylab.plot",
"matplotlib.pylab.close",
"matplotlib.pylab.ylabel"
] | [((392, 416), 'numpy.linspace', 'np.linspace', (['(-1)', '(3)', '(1000)'], {}), '(-1, 3, 1000)\n', (403, 416), True, 'import numpy as np\n'), ((424, 455), 'matplotlib.pylab.plot', 'plt.plot', (['x', 'y'], {'color': '"""#1f77b4"""'}), "(x, y, color='#1f77b4')\n", (432, 455), True, 'import matplotlib.pylab as plt\n'), ((... |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# *****************************************************************************/
# * Authors: <NAME>, <NAME>
# *****************************************************************************/
from __future__ import absolute_import, division, print_function, unicode_literals... | [
"os.path.exists",
"random.shuffle",
"os.path.dirname",
"numpy.array",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.callbacks.ModelCheckpoint",
"json.load",
"tensorflow.keras.layers.Flatten"
] | [((703, 732), 'os.path.exists', 'os.path.exists', (['"""QDist_2.txt"""'], {}), "('QDist_2.txt')\n", (717, 732), False, 'import os, json, random\n'), ((737, 767), 'os.path.exists', 'os.path.exists', (['"""labels_1.txt"""'], {}), "('labels_1.txt')\n", (751, 767), False, 'import os, json, random\n'), ((1089, 1113), 'rando... |
import sep
import numpy as np
import scarlet
from scarlet.wavelet import mad_wavelet, Starlet
from .utils import extract_obj, image_gaia_stars
from astropy.table import Table, Column
from astropy import units as u
from astropy.units import Quantity
from astropy.coordinates import SkyCoord
from kuaizi.mock import Data... | [
"scarlet.Frame",
"scarlet.interpolation.sinc_interp",
"numpy.size",
"astropy.coordinates.SkyCoord",
"numpy.argsort",
"scarlet.resampling.convert_coordinates",
"numpy.array",
"numpy.sum",
"scarlet.wavelet.mad_wavelet",
"astropy.table.Column",
"astropy.convolution.Gaussian2DKernel",
"scarlet.wav... | [((677, 756), 'scarlet.Frame', 'scarlet.Frame', (['data_lr.images.shape'], {'wcs': 'data_lr.wcs', 'channels': 'data_lr.channels'}), '(data_lr.images.shape, wcs=data_lr.wcs, channels=data_lr.channels)\n', (690, 756), False, 'import scarlet\n'), ((801, 880), 'scarlet.Frame', 'scarlet.Frame', (['data_hr.images.shape'], {'... |
import numpy as np
import cv2
from keras.layers import Input
from keras.models import Model
from keras.models import load_model
decoder = load_model('roses_decoder.h5')
perceptron = load_model('decoder-perceptron.h5')
path = 'dataset/rose'
id=25 # sample code
param0 = np.loadtxt(path+'{:04d}.txt'.format(id))
id=26 ... | [
"numpy.copy",
"keras.models.load_model",
"numpy.asarray",
"cv2.imshow",
"numpy.zeros",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.resize",
"cv2.createTrackbar",
"cv2.namedWindow"
] | [((139, 169), 'keras.models.load_model', 'load_model', (['"""roses_decoder.h5"""'], {}), "('roses_decoder.h5')\n", (149, 169), False, 'from keras.models import load_model\n'), ((183, 218), 'keras.models.load_model', 'load_model', (['"""decoder-perceptron.h5"""'], {}), "('decoder-perceptron.h5')\n", (193, 218), False, '... |
# -*- coding: utf-8 -*-
# distrib.py
import pandas as pd
import numpy as np
import scipy.integrate
import scipy.interpolate
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
from .utils import grouper
from .plotting import plotVertBar
def integrate(xvec, yvec):
return abs(scipy.integ... | [
"numpy.abs",
"matplotlib.pyplot.grid",
"numpy.sqrt",
"numpy.median",
"matplotlib.pyplot.ylabel",
"numpy.average",
"matplotlib.pyplot.gca",
"matplotlib.font_manager.FontProperties",
"numpy.log",
"matplotlib.pyplot.plot",
"numpy.diff",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.linspace... | [((2684, 2711), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 5)'}), '(figsize=(15, 5))\n', (2694, 2711), True, 'import matplotlib.pyplot as plt\n'), ((6911, 6935), 'numpy.sqrt', 'np.sqrt', (['(var + mean ** 2)'], {}), '(var + mean ** 2)\n', (6918, 6935), True, 'import numpy as np\n'), ((6954, 6985),... |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by appl... | [
"onnx.helper.make_graph",
"onnx.helper.make_node",
"numpy.ones",
"os.makedirs",
"paddle.fluid.global_scope",
"onnx.helper.make_tensor_value_info",
"os.path.join",
"onnx.helper.make_model",
"os.path.isdir",
"math.fabs",
"onnx.helper.make_tensor",
"onnx.checker.check_model"
] | [((1961, 2030), 'onnx.helper.make_tensor', 'helper.make_tensor', ([], {'name': 'name', 'data_type': 'dtype', 'dims': 'dims', 'vals': 'value'}), '(name=name, data_type=dtype, dims=dims, vals=value)\n', (1979, 2030), False, 'from onnx import helper, onnx_pb\n'), ((2059, 2128), 'onnx.helper.make_node', 'helper.make_node',... |
"""
glucoseDataFrame.py
Creates a dataframe of glucose related statistics
in diabetics for predictive analysis.
"""
import sys
import os
import math
from datetime import *
from dateutil.parser import parse
import pandas as pd
import numpy as np
sys.path.append("..") # proper file path for importing local modules
from... | [
"dateutil.parser.parse",
"pandas.read_csv",
"pythonScripts.jsonToCsv.convertToCsv",
"os.path.join",
"os.getcwd",
"numpy.array",
"pandas.concat",
"os.path.basename",
"pandas.DataFrame",
"pandas.notnull",
"sys.path.append"
] | [((247, 268), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (262, 268), False, 'import sys\n'), ((2268, 2282), 'pythonScripts.jsonToCsv.convertToCsv', 'convertToCsv', ([], {}), '()\n', (2280, 2282), False, 'from pythonScripts.jsonToCsv import convertToCsv\n'), ((2302, 2337), 'os.path.basename', ... |
# Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/... | [
"collections.namedtuple",
"salishsea_tools.wind_tools.wind_speed_dir",
"salishsea_tools.stormtools.storm_surge_risk_level",
"nowcast.figures.shared.plot_risk_level_marker",
"nowcast.figures.shared.get_tides",
"nowcast.figures.shared.find_ssh_max",
"matplotlib.pyplot.figure",
"matplotlib.gridspec.GridS... | [((4396, 4517), 'collections.namedtuple', 'namedtuple', (['"""PlotData"""', '"""ssh_ts, max_ssh, max_ssh_time, risk_levels, u_wind_4h_avg, v_wind_4h_avg, max_wind_avg"""'], {}), "('PlotData',\n 'ssh_ts, max_ssh, max_ssh_time, risk_levels, u_wind_4h_avg, v_wind_4h_avg, max_wind_avg'\n )\n", (4406, 4517), False, 'f... |
#!/usr/bin/env python
import numpy as np
dim = 3
A = np.ones(shape=(dim, dim))
B = A.copy()
b = np.empty(dim)
for i in range(dim):
b[i] = i + 2
print('A')
print(A)
print('b')
print(b)
for j in range(dim):
A[:, j] *= b[j]
print('% (1)')
print(A)
print('% (2)')
print(B * b)
| [
"numpy.empty",
"numpy.ones"
] | [((56, 81), 'numpy.ones', 'np.ones', ([], {'shape': '(dim, dim)'}), '(shape=(dim, dim))\n', (63, 81), True, 'import numpy as np\n'), ((99, 112), 'numpy.empty', 'np.empty', (['dim'], {}), '(dim)\n', (107, 112), True, 'import numpy as np\n')] |
#
# python_grabber
#
import cv2
import numpy as np
def save_image(filename, img):
cv2.imwrite(filename, img)
def sepia(img):
kernel = np.float32([
[0.272, 0.534, 0.131],
[0.349, 0.686, 0.168],
[0.393, 0.769, 0.189]])
return cv2.transform(img, kernel)
def edge_preserving(img):
... | [
"cv2.imwrite",
"cv2.transform",
"cv2.pencilSketch",
"cv2.edgePreservingFilter",
"cv2.stylization",
"numpy.float32"
] | [((89, 115), 'cv2.imwrite', 'cv2.imwrite', (['filename', 'img'], {}), '(filename, img)\n', (100, 115), False, 'import cv2\n'), ((147, 233), 'numpy.float32', 'np.float32', (['[[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]]'], {}), '([[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, \n ... |
from collections import defaultdict
import itertools
from skorch.net import NeuralNet
from skorch.dataset import Dataset
import pandas as pd
from sklearn import preprocessing
from tqdm import tqdm
import more_itertools as mit
import numpy as np
import skorch
import torch
import logging
logging.getLogger('matplotlib').... | [
"logging.getLogger",
"sklearn.preprocessing.LabelEncoder",
"logging.debug",
"numpy.where",
"tqdm.tqdm",
"torch.Tensor",
"numpy.stack",
"torch.tensor",
"torch.cuda.is_available",
"collections.defaultdict",
"numpy.isnan",
"skorch.dataset.Dataset",
"torch.no_grad",
"pandas.concat"
] | [((288, 319), 'logging.getLogger', 'logging.getLogger', (['"""matplotlib"""'], {}), "('matplotlib')\n", (305, 319), False, 'import logging\n'), ((1754, 1771), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1765, 1771), False, 'from collections import defaultdict\n'), ((1843, 1895), 'logging.debu... |
import os
import numpy as np
import torch
import matplotlib.pyplot as plt
import cv2
import rlkit.torch.sac.diayn
from .mode_actions_sampler import ModeActionSampler
from network import ModeDisentanglingNetwork
from env import DmControlEnvForPytorchBothObstype
class DisentanglingTester:
def __init__(self,
... | [
"torch.manual_seed",
"os.makedirs",
"torch.load",
"os.path.join",
"torch.from_numpy",
"torch.cuda.is_available",
"numpy.random.seed",
"cv2.VideoWriter_fourcc",
"torch.no_grad"
] | [((990, 1013), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1007, 1013), False, 'import torch\n'), ((1022, 1042), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1036, 1042), True, 'import numpy as np\n'), ((1260, 1297), 'os.makedirs', 'os.makedirs', (['video_dir'], {'exist... |
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import numpy as np
import matplotlib.pyplot as plt
from splane import pzmap, grpDelay, bodePlot, convert2SOS
from scipy import signal
# Esta es una liberia tomada de la comunidad [https://stackoverflow.com/questions/35304245/multiply-scipy... | [
"numpy.roots"
] | [((420, 497), 'numpy.roots', 'np.roots', (['[-256 * e, 0, -640 * e, 0, -560 * e, 0, -200 * e, 0, -25 * e, 0, 1]'], {}), '([-256 * e, 0, -640 * e, 0, -560 * e, 0, -200 * e, 0, -25 * e, 0, 1])\n', (428, 497), True, 'import numpy as np\n')] |
"""Функции проверки статуса дивидендов"""
from urllib.error import URLError
import numpy as np
import pandas as pd
from local.dividends import comony_ru
from local.dividends import dohod_ru
from local.dividends import smart_lab_ru
from local.dividends.sqlite import DividendsDataManager
from local.dividends.sqlite imp... | [
"numpy.isclose",
"local.dividends.sqlite.DividendsDataManager",
"local.dividends.smart_lab_ru.dividends_smart_lab",
"pandas.Timestamp",
"pandas.concat"
] | [((1072, 1106), 'local.dividends.smart_lab_ru.dividends_smart_lab', 'smart_lab_ru.dividends_smart_lab', ([], {}), '()\n', (1104, 1106), False, 'from local.dividends import smart_lab_ru\n'), ((1928, 1956), 'local.dividends.sqlite.DividendsDataManager', 'DividendsDataManager', (['ticker'], {}), '(ticker)\n', (1948, 1956)... |
import matplotlib.pyplot as plt
import numpy as np
import os
from scipy import stats
from transposonmapper.statistics import dataframe_from_pergenefile
def make_datafile(path_a,filelist_a,path_b,filelist_b):
"""Assembly the datafile name to analyze
Parameters
----------
path_a : str
Path o... | [
"numpy.mean",
"numpy.log10",
"os.path.join",
"os.path.isfile",
"transposonmapper.statistics.dataframe_from_pergenefile",
"scipy.stats.ttest_ind"
] | [((1143, 1170), 'os.path.join', 'os.path.join', (['path_a', 'files'], {}), '(path_a, files)\n', (1155, 1170), False, 'import os\n'), ((1186, 1210), 'os.path.isfile', 'os.path.isfile', (['datafile'], {}), '(datafile)\n', (1200, 1210), False, 'import os\n'), ((1337, 1364), 'os.path.join', 'os.path.join', (['path_b', 'fil... |
__version__ = '0.1.13'
import logging
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.WARNING)
from cartesian_explorer import lazy_imports
from cartesian_explorer.lib.lru_cache import lru_cache
from cartesian_explorer.lib.lru_cache_mproc import lru_cache as lru_cache_mproc
from cartesian_expl... | [
"logging.getLogger",
"numpy.exp",
"numpy.log",
"cartesian_explorer.Explorer.Explorer"
] | [((51, 82), 'logging.getLogger', 'logging.getLogger', (['"""matplotlib"""'], {}), "('matplotlib')\n", (68, 82), False, 'import logging\n'), ((837, 847), 'cartesian_explorer.Explorer.Explorer', 'Explorer', ([], {}), '()\n', (845, 847), False, 'from cartesian_explorer.Explorer import Explorer\n'), ((973, 994), 'numpy.exp... |
import torch
import argparse
import scipy
import numpy as np
import pickle
from deeprobust.graph.targeted_attack import Nettack
from deeprobust.graph.utils import *
from deeprobust.graph.data import Dataset
from deeprobust.graph.defense import *
from sklearn.preprocessing import normalize
from tqdm import tqdm
from sc... | [
"torch.LongTensor",
"torch.exp",
"torch.cuda.is_available",
"deeprobust.graph.targeted_attack.Nettack",
"numpy.linalg.norm",
"numpy.mean",
"argparse.ArgumentParser",
"numpy.dot",
"numpy.random.seed",
"numpy.random.choice",
"numpy.set_printoptions",
"torch.device",
"torch.manual_seed",
"dee... | [((442, 467), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (465, 467), False, 'import argparse\n'), ((1148, 1173), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1171, 1173), False, 'import torch\n'), ((1183, 1227), 'torch.device', 'torch.device', (["('cuda' if args.... |
import numpy as np
import json
from os.path import join
from tqdm import tqdm
from scipy.optimize import least_squares
from pose_optimize.multiview_geo import reproject_error
DEBUG=False
def reproject_error_loss(p3d, p4, p6, cam_proj_4, cam_proj_6, num_kpt=23):
'''
Return:
kp4_e, kp6_e: error array, ... | [
"scipy.optimize.least_squares",
"tqdm.tqdm",
"numpy.square",
"numpy.array",
"numpy.dot",
"numpy.zeros",
"pose_optimize.multiview_geo.reproject_error",
"numpy.concatenate",
"sys.exit",
"json.load",
"json.dump"
] | [((2154, 2194), 'numpy.concatenate', 'np.concatenate', (['(l1 * kp4_e, l1 * kp6_e)'], {}), '((l1 * kp4_e, l1 * kp6_e))\n', (2168, 2194), True, 'import numpy as np\n'), ((2567, 2585), 'numpy.zeros', 'np.zeros', (['num_bone'], {}), '(num_bone)\n', (2575, 2585), True, 'import numpy as np\n'), ((2604, 2622), 'numpy.zeros',... |
import pseudopol.ppseudopol as p_pp
import numpy as np
import sys
max_val=int(sys.argv[1])
vals=list(np.random.randint(1,500000,5000, dtype=np.uint32))
print(p_pp.find_max_subsum(max_val, vals))
| [
"numpy.random.randint",
"pseudopol.ppseudopol.find_max_subsum"
] | [((103, 154), 'numpy.random.randint', 'np.random.randint', (['(1)', '(500000)', '(5000)'], {'dtype': 'np.uint32'}), '(1, 500000, 5000, dtype=np.uint32)\n', (120, 154), True, 'import numpy as np\n'), ((160, 195), 'pseudopol.ppseudopol.find_max_subsum', 'p_pp.find_max_subsum', (['max_val', 'vals'], {}), '(max_val, vals)\... |
# INFO
__author__ = "<NAME>"
__date__ = "26 Mar 2022"
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Definitive version"
__copyright__ = "© 2022"
# SCRIPT
import numpy as np
from logistic_regression import *
from other_functions import *
print('\033[92m\033[1m\... | [
"numpy.append"
] | [((2421, 2436), 'numpy.append', 'np.append', (['w', 'b'], {}), '(w, b)\n', (2430, 2436), True, 'import numpy as np\n')] |
#!/usr/bin/python3.8
"""
Genetic Algorithm to maximize surveillance over a population for AI Assignment.
Author: Sam (eremus-dev)
Repo: https://github.com/eremus-dev
"""
import math
from collections import Counter
from typing import List, Dict
import numpy as np
import matplotlib.pyplot as plt
from test_pop import te... | [
"matplotlib.pyplot.grid",
"math.sqrt",
"numpy.array",
"numpy.mean",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.cla",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.Circle",
"numpy.random.choice",
"matplotlib.... | [((1327, 1351), 'numpy.array', 'np.array', (['obs'], {'copy': '(True)'}), '(obs, copy=True)\n', (1335, 1351), True, 'import numpy as np\n'), ((4549, 4558), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (4556, 4558), True, 'import matplotlib.pyplot as plt\n'), ((4776, 4793), 'matplotlib.pyplot.axis', 'plt.axis',... |
r"""
Echelle Spectrum
----------------
An abstract base class for a high resolution spectrum, for some echelle order :math:`m` out of :math:`M` total orders, each with vectors for wavelength, flux, and uncertainty, e.g. :math:`F_m(\lambda)`. This class is a subclass of specutils' Spectrum1D and is intended to ha... | [
"logging.getLogger",
"numpy.sqrt",
"celerite2.GaussianProcess",
"numpy.log",
"scipy.signal.savgol_filter",
"scipy.interpolate.interp1d",
"numpy.isfinite",
"specutils.spectra.spectral_region.SpectralRegion",
"copy.deepcopy",
"numpy.arange",
"numpy.mean",
"numpy.exp",
"astropy.nddata.StdDevUnc... | [((1464, 1491), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1481, 1491), False, 'import logging\n'), ((1543, 1598), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'VerifyWarning'}), "('ignore', category=VerifyWarning)\n", (1564, 1598), False, 'import... |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Hillas shower parametrization.
TODO:
-----
- Should have a separate function or option to compute 3rd order
moments + asymmetry (which are not always needed)
- remove alpha calculation (which is only about (0,0), and make a get
alpha function tha... | [
"numpy.abs",
"collections.namedtuple",
"numpy.sqrt",
"numpy.asanyarray",
"numpy.sum",
"numpy.arctan2",
"numpy.row_stack",
"numpy.hypot"
] | [((608, 686), 'collections.namedtuple', 'namedtuple', (['"""MomentParameters"""', '"""size,cen_x,cen_y,length,width,r,phi,psi,miss"""'], {}), "('MomentParameters', 'size,cen_x,cen_y,length,width,r,phi,psi,miss')\n", (618, 686), False, 'from collections import namedtuple\n'), ((863, 933), 'collections.namedtuple', 'name... |
import pandas as pd
from matplotlib import pyplot as plt
from scipy.interpolate import spline
from scipy.ndimage.filters import gaussian_filter1d
import numpy as np
#%%
# TZ numbers via https://www.fangraphs.com/leaders.aspx?pos=3b&stats=fld&lg=all&qual=y&type=0&season=2017&month=0&season1=1961&ind=1&team=0&rost=0&ag... | [
"numpy.ones",
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"scipy.ndimage.filters.gaussian_filter1d",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"pandas.concat"
] | [((347, 368), 'pandas.read_csv', 'pd.read_csv', (['"""TZ.csv"""'], {}), "('TZ.csv')\n", (358, 368), True, 'import pandas as pd\n'), ((666, 688), 'pandas.read_csv', 'pd.read_csv', (['"""UZR.csv"""'], {}), "('UZR.csv')\n", (677, 688), True, 'import pandas as pd\n'), ((791, 811), 'pandas.concat', 'pd.concat', (['[df, df2]... |
# -*- python -*-
import math
import numpy
import Shadow
from Shadow.ShadowPreprocessorsXraylib import prerefl, pre_mlayer, bragg
from srxraylib.sources import srfunc
from sirepo.template import transfer_mat_bl
from pykern.pkcollections import PKDict
from pykern import pkjson
sigmax = 0.0045000000000000005
sigdix = 2.... | [
"pykern.pkjson.dump_pretty",
"Shadow.OE",
"Shadow.ShadowTools.plotxy",
"sirepo.template.transfer_mat_bl.create_mat_rays",
"math.sqrt",
"numpy.matrix",
"numpy.transpose"
] | [((418, 458), 'sirepo.template.transfer_mat_bl.create_mat_rays', 'transfer_mat_bl.create_mat_rays', (['epsilon'], {}), '(epsilon)\n', (449, 458), False, 'from sirepo.template import transfer_mat_bl\n'), ((472, 586), 'numpy.matrix', 'numpy.matrix', (['[[sigmax ** 2, 0, 0, 0], [0, sigdix ** 2, 0, 0], [0, 0, sigmaz ** 2, ... |
import json
import logging
import os
import re
from collections import namedtuple
from copy import deepcopy
from typing import Any, Dict, List, Tuple
import numpy as np
import pandas as pd
import spacy
from scirex_utilities.analyse_pwc_entity_results import *
from scirex_utilities.entity_utils import *
from spacy.toke... | [
"logging.basicConfig",
"json.loads",
"collections.namedtuple",
"os.listdir",
"argparse.ArgumentParser",
"spacy.load",
"tqdm.tqdm",
"json.dumps",
"spacy.tokens.Doc",
"os.path.join",
"os.path.isdir",
"pandas.concat",
"re.finditer",
"copy.deepcopy",
"pandas.DataFrame",
"numpy.cumsum",
"... | [((357, 370), 'tqdm.tqdm.pandas', 'tqdm.pandas', ([], {}), '()\n', (368, 370), False, 'from tqdm import tqdm\n'), ((383, 482), 'collections.namedtuple', 'namedtuple', (['"""Span"""', "['start', 'end', 'token_start', 'token_end', 'entity', 'links', 'modified']"], {}), "('Span', ['start', 'end', 'token_start', 'token_end... |
# Copyright (C) 2018 Innoviz Technologies
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD 3-Clause license. See the LICENSE file for details.
import pandas as pd
import os
import numpy as np
from utilities.math_utils import RotationTranslationData
from visualizatio... | [
"utilities.data_utils.enumerate_frames",
"os.path.join",
"os.getcwd",
"utilities.math_utils.RotationTranslationData",
"numpy.concatenate",
"numpy.linalg.norm",
"utilities.data_utils.read_all_data",
"utilities.data_utils.frame_to_filename",
"visualizations.vis.pcshow"
] | [((463, 516), 'os.path.join', 'os.path.join', (['base_dir', '"""data_examples"""', '"""test_video"""'], {}), "(base_dir, 'data_examples', 'test_video')\n", (475, 516), False, 'import os\n'), ((642, 680), 'utilities.data_utils.enumerate_frames', 'data_utils.enumerate_frames', (['video_dir'], {}), '(video_dir)\n', (669, ... |
from typing import Sequence, Optional
import pandas as pd
import numpy as np
def formatted_corr_df(df: pd.DataFrame, cols: Optional[Sequence[str]] = None) -> pd.DataFrame:
"""
Calculates correlations on a DataFrame and displays only the lower triangular of the
resulting correlation DataFrame.
:param... | [
"numpy.tril"
] | [((738, 749), 'numpy.tril', 'np.tril', (['df'], {}), '(df)\n', (745, 749), True, 'import numpy as np\n')] |
import torch
import get_data
import numpy as np
import torchaudio
def number_of_correct(pred, target):
return pred.squeeze().eq(target).sum().item()
def get_likely_index(tensor):
return tensor.argmax(dim=-1)
def compute_accuracy(model, data_loader, device):
model.eval()
correct = 0
for data, t... | [
"torch.nn.Softmax",
"get_data.prepare_wav",
"torchaudio.load",
"torch.unsqueeze",
"get_data.idx_to_label",
"numpy.argsort"
] | [((707, 750), 'get_data.prepare_wav', 'get_data.prepare_wav', (['waveform', 'sample_rate'], {}), '(waveform, sample_rate)\n', (727, 750), False, 'import get_data\n'), ((938, 956), 'numpy.argsort', 'np.argsort', (['(-probs)'], {}), '(-probs)\n', (948, 956), True, 'import numpy as np\n'), ((1156, 1181), 'torchaudio.load'... |
import cv2
import numpy as np
from plantcv.plantcv.transform import nonuniform_illumination
def test_nonuniform_illumination_rgb(transform_test_data):
"""Test for PlantCV."""
# Load rgb image
rgb_img = cv2.imread(transform_test_data.small_rgb_img)
corrected = nonuniform_illumination(img=rgb_img, ksize... | [
"numpy.mean",
"plantcv.plantcv.transform.nonuniform_illumination",
"cv2.imread"
] | [((216, 261), 'cv2.imread', 'cv2.imread', (['transform_test_data.small_rgb_img'], {}), '(transform_test_data.small_rgb_img)\n', (226, 261), False, 'import cv2\n'), ((278, 324), 'plantcv.plantcv.transform.nonuniform_illumination', 'nonuniform_illumination', ([], {'img': 'rgb_img', 'ksize': '(11)'}), '(img=rgb_img, ksize... |
def main():
import numpy as np
import matplotlib.pyplot as plt
import torchvision
from torch.autograd import Variable
import torch.nn as nn
import pickle
from random import randint, randrange
import sys
from tqdm import tqdm
import cv2
print("CUDA available: {}".format(torch... | [
"models.DSCLRCN_OldContext.DSCLRCN",
"cv2.resize",
"tqdm.tqdm.write",
"torch.load",
"util.data_utils.get_SALICON_datasets",
"tqdm.tqdm",
"models.CoSADUV_NoTemporal.CoSADUV_NoTemporal",
"util.data_utils.get_video_datasets",
"torch.from_numpy",
"models.CoSADUV.CoSADUV",
"torch.cuda.is_available",
... | [((12055, 12107), 'torch.multiprocessing.set_start_method', 'torch.multiprocessing.set_start_method', (['"""forkserver"""'], {}), "('forkserver')\n", (12093, 12107), False, 'import torch\n'), ((1562, 1627), 'util.data_utils.get_SALICON_datasets', 'get_SALICON_datasets', (['dataset_root_dir', 'mean_image_name', 'img_siz... |
import copy as _copy
import math as _math
import os as _os
import cv2 as _cv2
import numpy as _np
from PIL import Image as _IMG
from easytorch.utils.logger import *
"""
##################################################################################################
Very useful image related utilities
##############... | [
"PIL.Image.open",
"copy.deepcopy",
"numpy.ones",
"numpy.where",
"scipy.ndimage.measurements.label",
"os.path.join",
"math.sqrt",
"numpy.max",
"cv2.createCLAHE",
"numpy.array",
"numpy.zeros",
"numpy.pad",
"numpy.bincount",
"numpy.min",
"copy.copy",
"numpy.zeros_like"
] | [((3470, 3535), 'numpy.zeros', '_np.zeros', (['[arr_2d.shape[0], arr_2d.shape[1], 3]'], {'dtype': '_np.uint8'}), '([arr_2d.shape[0], arr_2d.shape[1], 3], dtype=_np.uint8)\n', (3479, 3535), True, 'import numpy as _np\n'), ((4755, 4767), 'numpy.max', '_np.max', (['arr'], {}), '(arr)\n', (4762, 4767), True, 'import numpy ... |
'''
Created on July 7, 2019
@author: Terry
@email:<EMAIL>
'''
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier, plot_tree
# Parameter
n_classes = 3
plot_colors = "ryb"
plot_step = 0.02
# Load data
iris = load_... | [
"sklearn.datasets.load_iris",
"matplotlib.pyplot.contourf",
"matplotlib.pyplot.ylabel",
"numpy.arange",
"numpy.where",
"matplotlib.pyplot.xlabel",
"sklearn.tree.DecisionTreeClassifier",
"matplotlib.pyplot.figure",
"sklearn.tree.plot_tree",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.sca... | [((315, 326), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (324, 326), False, 'from sklearn.datasets import load_iris\n'), ((1404, 1477), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['"""Decision surface of a decision tree using paired features"""'], {}), "('Decision surface of a decision tree using ... |
""" Intermediate Factors
@author: <NAME>
This module computes the interpolated features between the principal vectors -- the one
linking source to target following the geodesics on the Grassmannian. We use the
equivalent formulation derived in [1] and represent this geodesics for each pair
of principal components.
E... | [
"numpy.block",
"numpy.diag",
"joblib.Parallel",
"numpy.array",
"numpy.linspace",
"numpy.isnan",
"numpy.cos",
"precise.principal_vectors.PVComputation",
"numpy.min",
"numpy.sin",
"joblib.delayed",
"numpy.isinf"
] | [((2327, 2403), 'numpy.min', 'np.min', (['[self.source_components_.shape[0], self.target_components_.shape[0]]'], {}), '([self.source_components_.shape[0], self.target_components_.shape[0]])\n', (2333, 2403), True, 'import numpy as np\n'), ((2602, 2632), 'precise.principal_vectors.PVComputation', 'PVComputation', (['n_... |
import numpy as np
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.cross_validation import train_test_split
import theanets
import climate
climate.enable_default_logging()
X_orig = np.load('/Users/bzamecnik/Documents/music-processing/music-processing-experiments/c-scale-piano_spectrogram_2... | [
"theanets.Experiment",
"climate.enable_default_logging",
"sklearn.preprocessing.StandardScaler",
"matplotlib.animation.ArtistAnimation",
"numpy.zeros",
"sklearn.cross_validation.train_test_split",
"numpy.load",
"sklearn.preprocessing.MinMaxScaler"
] | [((168, 200), 'climate.enable_default_logging', 'climate.enable_default_logging', ([], {}), '()\n', (198, 200), False, 'import climate\n'), ((211, 347), 'numpy.load', 'np.load', (['"""/Users/bzamecnik/Documents/music-processing/music-processing-experiments/c-scale-piano_spectrogram_2048_hamming.npy"""'], {}), "(\n '... |
import os
import glob
import argparse
import numpy as np
def parse_args():
parser = argparse.ArgumentParser(description='Display datas')
parser.add_argument('--data-dir', default='C:/Users/junya/Documents/plant_segmentation_data',
help='dataset directory')
parser.add_argument('--va... | [
"numpy.random.choice",
"numpy.savetxt",
"os.path.join",
"argparse.ArgumentParser"
] | [((90, 142), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Display datas"""'}), "(description='Display datas')\n", (113, 142), False, 'import argparse\n'), ((548, 582), 'os.path.join', 'os.path.join', (['args.data_dir', 'phase'], {}), '(args.data_dir, phase)\n', (560, 582), False, 'impo... |
# python3
# Copyright 2021 InstaDeep Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applic... | [
"copy.deepcopy",
"acme.specs.BoundedArray",
"dm_env.TimeStep",
"numpy.asarray",
"mava.utils.wrapper_utils.apply_env_wrapper_preprocessers",
"acme.specs.Array",
"mava.utils.wrapper_utils.convert_np_type",
"acme.wrappers.gym_wrapper._convert_to_spec",
"mava.utils.wrapper_utils.parameterized_restart",
... | [((2552, 2610), 'mava.utils.wrapper_utils.parameterized_restart', 'parameterized_restart', (['reward', 'self._discount', 'observation'], {}), '(reward, self._discount, observation)\n', (2573, 2610), False, 'from mava.utils.wrapper_utils import apply_env_wrapper_preprocessers, convert_np_type, parameterized_restart\n'),... |
import os
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits import mplot3d
from mpl_toolkits.mplot3d import Axes3D
from sklearn.decomposition import IncrementalPCA
## set paths
inputpath = './input/'
## Toggle PCA for better visualization of clusters
pca_flag = int(input("\nPerform ... | [
"os.path.exists",
"numpy.shape",
"numpy.unique",
"numpy.where",
"os.path.realpath",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.axes",
"matplotlib.pyplot.title",
"sklearn.decomposition.IncrementalPCA",
"numpy.genfromtxt",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((935, 971), 'os.path.exists', 'os.path.exists', (["(inputpath + 'kx.txt')"], {}), "(inputpath + 'kx.txt')\n", (949, 971), False, 'import os\n'), ((1027, 1062), 'numpy.genfromtxt', 'np.genfromtxt', (["(inputpath + 'kx.txt')"], {}), "(inputpath + 'kx.txt')\n", (1040, 1062), True, 'import numpy as np\n'), ((1071, 1107),... |
import numpy as np
import matplotlib.pyplot as plt
import pickle
import pandas as pd
import cv2 as cv
import os
def read_pkl(file_path):
obj = pd.read_pickle(file_path)
return obj
def show_results(obj , folder):
for im_num in obj.keys():
data = obj[im_num]
file = data['fileName'][12:]
... | [
"pandas.read_pickle",
"cv2.imwrite",
"cv2.rectangle",
"matplotlib.pyplot.imshow",
"os.path.join",
"numpy.asarray",
"matplotlib.pyplot.pause",
"cv2.imread"
] | [((149, 174), 'pandas.read_pickle', 'pd.read_pickle', (['file_path'], {}), '(file_path)\n', (163, 174), True, 'import pandas as pd\n'), ((1781, 1828), 'os.path.join', 'os.path.join', (['folder', '"""YoloV3_res\\\\res_pkl.pkl"""'], {}), "(folder, 'YoloV3_res\\\\res_pkl.pkl')\n", (1793, 1828), False, 'import os\n'), ((33... |
import cv2
import numpy as np
def class_name(classid):
id_dict = {1:'Scratch', 2:'Dent', 3:'Shatter', 4:'Dislocation'}
return id_dict[classid]
def damage_cost(classid):
# cost_dict = {1: [800, 1400], 2:[1200, 3000],3:19000, 4:17000}
cost_dict = {1: 900, 2:1600, 3:19000, 4:17000}
return... | [
"numpy.float32"
] | [((500, 521), 'numpy.float32', 'np.float32', (['crop_mask'], {}), '(crop_mask)\n', (510, 521), True, 'import numpy as np\n')] |
import re
import numpy as np
def compounddic2atomsfraction(compounds):
def createNewDic(dic, multiplyby):
values = list(dic.values())
keys = dic.keys()
newValues = np.array(values)*multiplyby
newDic = dict(zip(keys, newValues))
return newDic
def composition2atoms(cstr)... | [
"numpy.array",
"re.findall"
] | [((336, 383), 're.findall', 're.findall', (['"""([A-Z][a-z]?)(\\\\d*\\\\.?\\\\d*)"""', 'cstr'], {}), "('([A-Z][a-z]?)(\\\\d*\\\\.?\\\\d*)', cstr)\n", (346, 383), False, 'import re\n'), ((194, 210), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (202, 210), True, 'import numpy as np\n')] |
"""utilities to speedup calculations with jit
Author: <NAME>
Affiliation: TokyoTech & OSX
"""
import numpy as np
import torch
from numba import f8, jit
@jit(f8[:, :](f8[:, :]), nopython=True)
def get_normed_vec_mag(arr_vec: np.ndarray) -> np.ndarray:
"""compute
from [[x1, y1], [x2, y2], ...]
to [... | [
"numpy.hstack",
"numpy.where",
"numpy.sum",
"numpy.zeros",
"numpy.empty"
] | [((669, 703), 'numpy.where', 'np.where', (['(vec_mag == 0)', '(1)', 'vec_mag'], {}), '(vec_mag == 0, 1, vec_mag)\n', (677, 703), True, 'import numpy as np\n'), ((758, 787), 'numpy.hstack', 'np.hstack', (['(arr_vec, vec_mag)'], {}), '((arr_vec, vec_mag))\n', (767, 787), True, 'import numpy as np\n'), ((1706, 1752), 'num... |
import os
import os.path as op
import numpy as np
from numpy.testing import assert_almost_equal
from ..core import ShootingPoint, find_and_replace
def test_read_cv_values():
test_file_loc = op.join(op.dirname(op.abspath(__file__)),
'test_data', 'COLVAR2')
sp = ShootingPoint(name=... | [
"numpy.testing.assert_almost_equal",
"numpy.array",
"os.path.abspath",
"os.remove"
] | [((432, 457), 'numpy.array', 'np.array', (['[1.0, 2.0, 3.0]'], {}), '([1.0, 2.0, 3.0])\n', (440, 457), True, 'import numpy as np\n'), ((1616, 1637), 'os.remove', 'os.remove', (['"""test.log"""'], {}), "('test.log')\n", (1625, 1637), False, 'import os\n'), ((534, 565), 'numpy.testing.assert_almost_equal', 'assert_almost... |
from tqdm import tqdm
import pandas as pd
import numpy as np
from pathlib import Path
from hashlib import md5
from sklearn.feature_extraction.text import TfidfVectorizer
from scipy import sparse as sp
import argparse
def break_text(raw):
return np.array([ i for i, t in enumerate(raw) if t == '¶' ][::2])
def ma... | [
"pandas.Series",
"argparse.ArgumentParser",
"pathlib.Path",
"numpy.array",
"sklearn.feature_extraction.text.TfidfVectorizer"
] | [((616, 634), 'pathlib.Path', 'Path', (['args.dataset'], {}), '(args.dataset)\n', (620, 634), False, 'from pathlib import Path\n'), ((2296, 2321), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2319, 2321), False, 'import argparse\n'), ((874, 887), 'numpy.array', 'np.array', (['idx'], {}), '(i... |
import torch
import os
from glob import glob
import numpy as np
from torch.nn import functional as F
import time
class Generator(object):
def __init__(self, model, exp_name, threshold = 0.1, checkpoint = None, device = torch.device("cuda")):
self.model = model.to(device)
self.model.eval()
s... | [
"torch.rand",
"torch.load",
"numpy.sort",
"torch.nn.functional.normalize",
"os.path.dirname",
"numpy.zeros",
"torch.randint",
"numpy.array",
"os.path.basename",
"time.time",
"torch.randn",
"glob.glob",
"torch.device"
] | [((224, 244), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (236, 244), False, 'import torch\n'), ((645, 656), 'time.time', 'time.time', ([], {}), '()\n', (654, 656), False, 'import time\n'), ((844, 860), 'numpy.zeros', 'np.zeros', (['(0, 3)'], {}), '((0, 3))\n', (852, 860), True, 'import numpy as... |
import numpy as np
def process_actions(actions, l_action):
n_steps = len(actions)
actions_1hot = np.zeros([n_steps, l_action], dtype=int)
actions_1hot[np.arange(n_steps), actions] = 1
return actions_1hot
def get_action_others_1hot(action_all, agent_id, l_action):
action_all = list(a... | [
"numpy.reshape",
"numpy.ones",
"numpy.delete",
"numpy.indices",
"numpy.stack",
"numpy.zeros",
"numpy.cumsum",
"numpy.arange"
] | [((112, 152), 'numpy.zeros', 'np.zeros', (['[n_steps, l_action]'], {'dtype': 'int'}), '([n_steps, l_action], dtype=int)\n', (120, 152), True, 'import numpy as np\n'), ((415, 458), 'numpy.zeros', 'np.zeros', (['[num_others, l_action]'], {'dtype': 'int'}), '([num_others, l_action], dtype=int)\n', (423, 458), True, 'impor... |
# import the necessary packages
from imutils.video import VideoStream
from imutils import face_utils
import argparse
import imutils
import time
import dlib
import cv2
import tensorflow as tf
from tensorflow.keras.models import load_model
import numpy as np
from matplotlib import pyplot as plt
import os
... | [
"argparse.ArgumentParser",
"dlib.shape_predictor",
"time.perf_counter",
"time.sleep",
"cv2.putText",
"dlib.get_frontal_face_detector",
"imutils.resize",
"cv2.imshow",
"tensorflow.keras.models.load_model",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"imutils.face_utils.shape_to_np",
"numpy.a... | [((421, 446), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (444, 446), False, 'import argparse\n'), ((863, 895), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (893, 895), False, 'import dlib\n'), ((909, 954), 'dlib.shape_predictor', 'dlib.shape_predicto... |
import pandas as pd
import os, sys
from pandas.tseries.holiday import USFederalHolidayCalendar
from pandas.tseries.offsets import CustomBusinessDay
from sklearn.utils import check_array
import numpy as np
from datetime import timedelta
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))+'/'
def mean_absolute_pe... | [
"numpy.fabs",
"pandas.tseries.holiday.USFederalHolidayCalendar",
"pandas.DatetimeIndex",
"pandas.to_datetime",
"os.path.join",
"os.path.dirname",
"datetime.timedelta",
"pandas.date_range"
] | [((800, 850), 'pandas.DatetimeIndex', 'pd.DatetimeIndex', ([], {'start': 'start', 'end': 'end', 'freq': 'us_bd'}), '(start=start, end=end, freq=us_bd)\n', (816, 850), True, 'import pandas as pd\n'), ((1011, 1075), 'pandas.date_range', 'pd.date_range', ([], {'start': 'date', 'periods': '(2)', 'freq': '"""1d"""', 'tz': '... |
"""
UKPDS
See:
"""
import numpy as np
from cvdm.score import BaseRisk
from cvdm.score import clean_age, clean_hba1c, clean_bp, clean_tchdl
# coefficients for survival
BETA = np.array([ 1.059, # age at diagnosis of diabetes
0.525, # risk for females
0.390, # Afro-Carribean ethnic... | [
"numpy.power",
"cvdm.score.clean_age",
"numpy.log",
"numpy.exp",
"numpy.array",
"cvdm.score.clean_hba1c",
"cvdm.score.clean_tchdl",
"cvdm.score.clean_bp"
] | [((179, 236), 'numpy.array', 'np.array', (['[1.059, 0.525, 0.39, 1.35, 1.183, 1.088, 3.845]'], {}), '([1.059, 0.525, 0.39, 1.35, 1.183, 1.088, 3.845])\n', (187, 236), True, 'import numpy as np\n'), ((1134, 1196), 'numpy.exp', 'np.exp', (['(-q * D ** (age - ageDiab) * (1 - D ** tYear) / (1 - D))'], {}), '(-q * D ** (age... |
"""
Module containing the classes used for graph-based modelling:
Sommet, Arc, Graphe
author: cmarichal
"""
from typing import Tuple, List
from math import floor
import numpy as np
from classes_traitement_plan import CouleurSpeciale, Plan
class Sommet:
"""Sommet ayant une position et un numéro""... | [
"numpy.array",
"numpy.sqrt"
] | [((895, 907), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (903, 907), True, 'import numpy as np\n'), ((2004, 2016), 'numpy.array', 'np.array', (['Gr'], {}), '(Gr)\n', (2012, 2016), True, 'import numpy as np\n'), ((1730, 1754), 'numpy.sqrt', 'np.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (1737, 17... |
"""Support routines for the qdyn_prop_gate utility"""
import re
import numpy as np
from .units import UnitFloat
def _isqrt(n):
"""Integer square root of n > 0
>>> _isqrt(1024**2)
1024
>>> _isqrt(10)
3
"""
assert n >= 0
x = n
y = (x + 1) // 2
while y < x:
x = y
... | [
"numpy.reshape",
"re.search"
] | [((1834, 1874), 'numpy.reshape', 'np.reshape', (['vals[1::2]', 'shape'], {'order': '"""F"""'}), "(vals[1::2], shape, order='F')\n", (1844, 1874), True, 'import numpy as np\n'), ((1920, 1960), 'numpy.reshape', 'np.reshape', (['vals[2::2]', 'shape'], {'order': '"""F"""'}), "(vals[2::2], shape, order='F')\n", (1930, 1960)... |
import logging
import numpy as np
from collections import Counter
from imblearn.base import SamplerMixin
from imblearn.utils import check_target_type, hash_X_y
from sklearn.utils import check_X_y, check_random_state, safe_indexing
__all__ = ['RandomUnderSampler']
def check_ratio(ratio, y):
"""check and returns ... | [
"logging.getLogger",
"sklearn.utils.check_random_state",
"numpy.unique",
"sklearn.utils.check_X_y",
"numpy.hstack",
"numpy.flatnonzero",
"sklearn.utils.safe_indexing",
"collections.Counter",
"imblearn.utils.check_target_type",
"imblearn.utils.hash_X_y"
] | [((365, 375), 'collections.Counter', 'Counter', (['y'], {}), '(y)\n', (372, 375), False, 'from collections import Counter\n'), ((1916, 1943), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1933, 1943), False, 'import logging\n'), ((2390, 2435), 'sklearn.utils.check_X_y', 'check_X_y', (['... |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, ... | [
"data_utils.generate_training_points",
"data_utils.read_data",
"trainer.validated",
"trainer.OUTPUTS_SPEC.keys",
"os.listdir",
"data_utils.read_labels",
"trainer.deserialize",
"tensorflow.TensorSpec",
"pytest.raises",
"trainer.run",
"tempfile.TemporaryDirectory",
"trainer.INPUTS_SPEC.keys",
... | [((2461, 2501), 'unittest.mock.patch.object', 'mock.patch.object', (['trainer', '"""PADDING"""', '(2)'], {}), "(trainer, 'PADDING', 2)\n", (2478, 2501), False, 'from unittest import mock\n'), ((1674, 1717), 'trainer.validated', 'trainer.validated', (['tensor_dict', 'values_spec'], {}), '(tensor_dict, values_spec)\n', (... |
'''
<NAME>
simple ray trace - tools and classes to specify and instantiate rays
'''
import numpy as np
from srt_modules.useful_math import euler1232C
class Ray:
def __init__(self, pos=None, dirs=None):
self.X = pos # 3 x N position vectors of rays
self.d = dirs # direction vectors of rays in same... | [
"numpy.array",
"numpy.dot",
"numpy.cos",
"numpy.sin",
"srt_modules.useful_math.euler1232C",
"numpy.shape"
] | [((2083, 2104), 'numpy.dot', 'np.dot', (['DCM', 'ray_dirs'], {}), '(DCM, ray_dirs)\n', (2089, 2104), True, 'import numpy as np\n'), ((2585, 2604), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (2593, 2604), True, 'import numpy as np\n'), ((1869, 1883), 'numpy.array', 'np.array', (['rays'], {}), '(ray... |
import os
import torch
import time
import numpy as np
from tqdm import tqdm
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
from generator import generator
from utils import get_model
sns.set_style("whitegrid")
font = {'family': 'serif',
'style': 'normal',
'size': 10}
matplotlib.... | [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"seaborn.set_style",
"numpy.array",
"matplotlib.ticker.ScalarFormatter",
"torch.cuda.is_available",
"matplotlib.rc",
"matplotlib.pyplot.semilogy",
"generator.generator",
"matplotlib.pyplot.xlabel",
"torch.set_default_tensor_type",
"matplotl... | [((208, 234), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (221, 234), True, 'import seaborn as sns\n'), ((309, 338), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {}), "('font', **font)\n", (322, 338), False, 'import matplotlib\n'), ((346, 397), 'matplotlib.ticker.ScalarForm... |
import numpy as np
from gym.envs.mujoco import mujoco_env
from gym import utils
import os
from scipy.spatial.distance import euclidean
from meta_mb.meta_envs.base import RandomEnv
#from mujoco-py.mujoco_py.pxd.mujoco import local
import mujoco_py
class PegFullBlueEnv(RandomEnv, utils.EzPickle):
def __init__(self, ... | [
"numpy.clip",
"numpy.square",
"numpy.array",
"numpy.zeros",
"scipy.spatial.distance.euclidean",
"os.path.dirname",
"numpy.random.uniform",
"meta_mb.meta_envs.base.RandomEnv.__init__"
] | [((571, 597), 'numpy.array', 'np.array', (['[x, y, z + 0.15]'], {}), '([x, y, z + 0.15])\n', (579, 597), True, 'import numpy as np\n'), ((623, 642), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (631, 642), True, 'import numpy as np\n'), ((669, 695), 'numpy.array', 'np.array', (['[x, y, z - 0.15]'], ... |
import os
import numpy as np
import tensorflow as tf
import math
from PIL import Image
#import pdb
F = tf.app.flags.FLAGS
"""
Save tensorflow model
Parameters:
* checkpoint_dir - name of the directory where model is to be saved
* sess - current tensorflow session
* saver - tensorflow saver
"""
def save_model(checkpo... | [
"os.path.exists",
"PIL.Image.fromarray",
"os.makedirs",
"os.path.join",
"numpy.diag",
"tensorflow.train.get_checkpoint_state",
"numpy.sum",
"numpy.zeros",
"numpy.around",
"os.path.basename",
"numpy.min"
] | [((838, 883), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (867, 883), True, 'import tensorflow as tf\n'), ((1990, 2034), 'numpy.zeros', 'np.zeros', (['(N_full_imgs, img_h, img_w, img_d)'], {}), '((N_full_imgs, img_h, img_w, img_d))\n', (1998,... |