| code (string, lengths 31 to 1.05M) | apis (list) | extract_api (string, lengths 97 to 1.91M) |
|---|---|---|
"""
Data access functions
---------------------
"""
from __future__ import absolute_import
from os.path import join as pjoin, basename, dirname
import subprocess
import tempfile
import logging
import numpy as np
import h5py
import rasterio
from rasterio.crs import CRS
from rasterio.warp import reproject
from rasterio... | [
"numpy.uint8",
"tempfile.TemporaryDirectory",
"rasterio.open",
"rasterio.band",
"rasterio.warp.reproject",
"wagl.geobox.GriddedGeoBox",
"os.path.dirname",
"numpy.zeros",
"wagl.geobox.GriddedGeoBox.from_dataset",
"numpy.empty",
"wagl.tiling.generate_tiles",
"os.path.basename",
"logging.error"... | [((2564, 2594), 'numpy.empty', 'np.empty', (['stack_shape', 'a.dtype'], {}), '(stack_shape, a.dtype)\n', (2572, 2594), True, 'import numpy as np\n'), ((13165, 13259), 'wagl.geobox.GriddedGeoBox', 'GriddedGeoBox', ([], {'shape': 'subs.shape', 'origin': '(ul_x, ul_y)', 'pixelsize': 'geobox.pixelsize', 'crs': 'prj'}), '(s... |
from fluxrgnn import dataloader, utils
from fluxrgnn.models import *
import torch
from torch.utils.data import random_split, Subset
from torch.optim import lr_scheduler
from torch_geometric.data import DataLoader, DataListLoader
from torch_geometric.utils import to_dense_adj
from omegaconf import DictConfig, OmegaConf
... | [
"torch.utils.data.ConcatDataset",
"torch.pow",
"torch.cuda.is_available",
"fluxrgnn.dataloader.load_dataset",
"numpy.arange",
"fluxrgnn.utils.finalize_results",
"fluxrgnn.utils.plot_training_curves",
"torch.autograd.set_detect_anomaly",
"numpy.ones",
"torch_geometric.utils.to_dense_adj",
"os.pat... | [((2464, 2500), 'torch.utils.data.ConcatDataset', 'torch.utils.data.ConcatDataset', (['data'], {}), '(data)\n', (2494, 2500), False, 'import torch\n'), ((3248, 3317), 'torch_geometric.data.DataLoader', 'DataLoader', (['train_data'], {'batch_size': 'cfg.model.batch_size', 'shuffle': '(True)'}), '(train_data, batch_size=... |
import numpy as np
from typing import Callable
from .base_score import BaseScore
class BleiLaffertyScore(BaseScore):
"""
This score implements method described in 2009 paper
Blei, <NAME>., and <NAME>erty. "Topic models." Text Mining.
Chapman and Hall/CRC, 2009. 101-124.
At the core this score he... | [
"numpy.sum",
"numpy.log"
] | [((1537, 1559), 'numpy.log', 'np.log', (['(phi + blei_eps)'], {}), '(phi + blei_eps)\n', (1543, 1559), True, 'import numpy as np\n'), ((1580, 1603), 'numpy.sum', 'np.sum', (['log_phi'], {'axis': '(1)'}), '(log_phi, axis=1)\n', (1586, 1603), True, 'import numpy as np\n'), ((2180, 2229), 'numpy.sum', 'np.sum', (['modalit... |
import os
import sys
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import numpy as np
from astropy.io import fits
# ------------------------------------------------------------
# Input
opacity = os.path.join(sys.argv[1], "")
# ------------------------------------------------------------
cou... | [
"os.listdir",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"numpy.size",
"os.path.join",
"matplotlib.pyplot.plot",
"numpy.linspace",
"numpy.zeros",
"astropy.io.fits.getdata",
"astropy.io.fits.open",
"matplotlib.pyplot.yscale",
"matplotlib.pyplot.xscale"... | [((222, 251), 'os.path.join', 'os.path.join', (['sys.argv[1]', '""""""'], {}), "(sys.argv[1], '')\n", (234, 251), False, 'import os\n'), ((344, 363), 'os.listdir', 'os.listdir', (['opacity'], {}), '(opacity)\n', (354, 363), False, 'import os\n'), ((533, 552), 'os.listdir', 'os.listdir', (['opacity'], {}), '(opacity)\n'... |
import numpy as np
import pandas as pd
def sra(real, synth):
"""
SRA can be thought of as the (empirical) probability of a
    comparison on the synthetic data being "correct" (i.e. the same as
the comparison would be on the real data).
From "Measuring the quality of Synthetic data for use in competit... | [
"numpy.sum",
"numpy.array"
] | [((1060, 1093), 'numpy.sum', 'np.sum', (['(1 / (k * (k - 1)) * sum_I)'], {}), '(1 / (k * (k - 1)) * sum_I)\n', (1066, 1093), True, 'import numpy as np\n'), ((935, 949), 'numpy.array', 'np.array', (['None'], {}), '(None)\n', (943, 949), True, 'import numpy as np\n'), ((970, 984), 'numpy.array', 'np.array', (['None'], {}... |
""" Code for fitting circles, ellipses, planes, etc.
"""
import numpy as np
from numpy.linalg import eig, inv
from stentseg.utils.new_pointset import PointSet
def fit_circle(pp, warnIfIllDefined=True):
""" Fit a circle on the given 2D points
Returns a tuple (x, y, r).
In case the three points... | [
"numpy.sqrt",
"visvis.xlabel",
"stentseg.utils.new_pointset.PointSet",
"numpy.hstack",
"visvis.ylabel",
"visvis.plot",
"numpy.array",
"numpy.linalg.norm",
"numpy.sin",
"visvis.title",
"visvis.clf",
"numpy.dot",
"numpy.linspace",
"numpy.arctan",
"numpy.random.normal",
"visvis.subplot",
... | [((1926, 1942), 'stentseg.utils.new_pointset.PointSet', 'PointSet', (['[x, y]'], {}), '([x, y])\n', (1934, 1942), False, 'from stentseg.utils.new_pointset import PointSet\n'), ((3000, 3014), 'numpy.dot', 'np.dot', (['D.T', 'D'], {}), '(D.T, D)\n', (3006, 3014), True, 'import numpy as np\n'), ((3022, 3038), 'numpy.zeros... |
import numpy as np
from keras.utils import to_categorical
import copy
from common.utils import default_config, make_env, eligibility_traces, discount_rewards
from common.ppo_independant import PPOPolicyNetwork, ValueNetwork
render = False
normalize_inputs = True
config = default_config()
env = make_env(config, normal... | [
"numpy.mean",
"copy.deepcopy",
"numpy.std",
"keras.utils.to_categorical",
"numpy.array",
"common.ppo_independant.ValueNetwork",
"common.utils.discount_rewards",
"common.ppo_independant.PPOPolicyNetwork",
"common.utils.default_config",
"common.utils.make_env"
] | [((274, 290), 'common.utils.default_config', 'default_config', ([], {}), '()\n', (288, 290), False, 'from common.utils import default_config, make_env, eligibility_traces, discount_rewards\n'), ((297, 331), 'common.utils.make_env', 'make_env', (['config', 'normalize_inputs'], {}), '(config, normalize_inputs)\n', (305, ... |
import argparse
import baselineUtils
import torch
import torch.utils.data
import torch.nn as nn
import torch.nn.functional as F
import os
import time
from transformer.batch import subsequent_mask
from torch.optim import Adam,SGD,RMSprop,Adagrad
from transformer.noam_opt import NoamOpt
import numpy as np
import scipy.io... | [
"torch.utils.data.DataLoader",
"baselineUtils.create_dataset",
"torch.cuda.is_available",
"torch.utils.tensorboard.SummaryWriter",
"argparse.ArgumentParser",
"numpy.stack",
"os.mkdir",
"numpy.concatenate",
"baselineUtils.distance_metrics",
"torch.cat",
"torch.device",
"quantized_TF.QuantizedTF... | [((425, 502), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train the individual Transformer model"""'}), "(description='Train the individual Transformer model')\n", (448, 502), False, 'import argparse\n'), ((2344, 2381), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (["('lo... |
## This file is adopted from DVE's github repo: https://github.com/jamt9000/DVE
import torch.nn.functional as F
import torch.nn as nn
import time
import torch
from PIL import Image
import numpy as np
from pointcloud_utils import pointcloud_vis
import pointnet3.sinkhorn_approximate as sinkFunc
def save_data_as_image(f... | [
"pointnet3.sinkhorn_approximate.gumbel_sinkhorn",
"PIL.Image.fromarray",
"torch.nn.functional.l1_loss",
"numpy.amin",
"numpy.random.choice",
"torch.mean",
"torch.eye",
"torch.nn.functional.normalize",
"numpy.sum",
"torch.sum",
"torch.matmul",
"torch.no_grad",
"numpy.amax",
"torch.nn.functi... | [((350, 363), 'numpy.amin', 'np.amin', (['data'], {}), '(data)\n', (357, 363), True, 'import numpy as np\n'), ((378, 391), 'numpy.amax', 'np.amax', (['data'], {}), '(data)\n', (385, 391), True, 'import numpy as np\n'), ((520, 545), 'PIL.Image.fromarray', 'Image.fromarray', (['img', '"""P"""'], {}), "(img, 'P')\n", (535... |
import numpy as np
import unittest
from chainer import testing
from chainercv.experimental.links.model.pspnet import convolution_crop
class TestConvolutionCrop(unittest.TestCase):
def test_convolution_crop(self):
size = (8, 6)
stride = (8, 6)
n_channel = 3
img = np.random.uniform... | [
"chainercv.experimental.links.model.pspnet.convolution_crop",
"chainer.testing.run_module",
"numpy.random.uniform"
] | [((1282, 1320), 'chainer.testing.run_module', 'testing.run_module', (['__name__', '__file__'], {}), '(__name__, __file__)\n', (1300, 1320), False, 'from chainer import testing\n'), ((393, 447), 'chainercv.experimental.links.model.pspnet.convolution_crop', 'convolution_crop', (['img', 'size', 'stride'], {'return_param':... |
# -------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ----------------------------------------------------------------------... | [
"numpy.random.gamma",
"GPy.core.parameterization.priors.gammaln",
"numpy.log",
"psbutils.arrayshapes.Shapes"
] | [((1155, 1177), 'GPy.core.parameterization.priors.gammaln', 'priors.gammaln', (['self.a'], {}), '(self.a)\n', (1169, 1177), True, 'import GPy.core.parameterization.priors as priors\n'), ((1184, 1193), 'numpy.log', 'np.log', (['b'], {}), '(b)\n', (1190, 1193), True, 'import numpy as np\n'), ((1474, 1488), 'psbutils.arra... |
import numpy as np
from ..Delboeuf.delboeuf_parameters import _delboeuf_parameters_sizeinner, _delboeuf_parameters_sizeouter
def _ebbinghaus_parameters(illusion_strength=0, difference=0, size_min=0.25, distance=1, distance_auto=False):
# Size inner circles
parameters = _delboeuf_parameters_sizeinner(differe... | [
"numpy.max",
"numpy.linspace",
"numpy.cos",
"numpy.min",
"numpy.sin"
] | [((4327, 4369), 'numpy.linspace', 'np.linspace', (['(0)', '(360)'], {'num': 'n', 'endpoint': '(False)'}), '(0, 360, num=n, endpoint=False)\n', (4338, 4369), True, 'import numpy as np\n'), ((3354, 3397), 'numpy.min', 'np.min', (['[inner_size_left, inner_size_right]'], {}), '([inner_size_left, inner_size_right])\n', (336... |
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and rel... | [
"numpy.mean",
"tensorflow.device",
"tensorflow.tile",
"tensorflow.random_normal",
"dnnlib.util.open_url",
"numpy.log",
"pickle.load",
"dnnlib.tflib.convert_images_to_uint8",
"numpy.exp",
"numpy.sum",
"numpy.empty",
"numpy.std",
"dnnlib.tflib.run"
] | [((1319, 1391), 'numpy.empty', 'np.empty', (['[self.num_images, inception.output_shape[1]]'], {'dtype': 'np.float32'}), '([self.num_images, inception.output_shape[1]], dtype=np.float32)\n', (1327, 1391), True, 'import numpy as np\n'), ((1138, 1262), 'dnnlib.util.open_url', 'dnnlib.util.open_url', (['"""https://nvlabs-f... |
import numpy as np
from keras import Model
from keras.layers import Dense , GlobalAveragePooling2D
from PIL import Image, ImageDraw
from keras.applications import resnet
import numpy as np
def create_model(trainable=False):
#model = vgg16.VGG16(include_top=False, weights='imagenet',input_shape=(IMAGE_SIZE_H,IMAGE... | [
"PIL.Image.fromarray",
"keras.Model",
"PIL.ImageDraw.Draw",
"keras.applications.resnet.ResNet50",
"keras.layers.Dense",
"keras.layers.GlobalAveragePooling2D",
"numpy.load"
] | [((1233, 1304), 'numpy.load', 'np.load', (['"""C:\\\\users\\\\ateeb\\\\desktop\\\\localization\\\\X_cow_half_size.npy"""'], {}), "('C:\\\\users\\\\ateeb\\\\desktop\\\\localization\\\\X_cow_half_size.npy')\n", (1240, 1304), True, 'import numpy as np\n'), ((362, 448), 'keras.applications.resnet.ResNet50', 'resnet.ResNet5... |
import numpy as np
from numpy.testing import (assert_array_equal, assert_almost_equal,
assert_array_almost_equal, assert_equal, assert_)
from modules.scipy.special import VeroneseMap, VeroneseMapWithIdentity
def test_veronese_map():
x = np.random.randn(10)
n, m = len(x), 784
V... | [
"numpy.testing.assert_array_almost_equal",
"numpy.random.randn",
"modules.scipy.special.VeroneseMap",
"modules.scipy.special.VeroneseMapWithIdentity"
] | [((272, 291), 'numpy.random.randn', 'np.random.randn', (['(10)'], {}), '(10)\n', (287, 291), True, 'import numpy as np\n'), ((323, 362), 'modules.scipy.special.VeroneseMap', 'VeroneseMap', ([], {'shape': '(n, m)', 'shuffle': '(True)'}), '(shape=(n, m), shuffle=True)\n', (334, 362), False, 'from modules.scipy.special im... |
#!/usr/bin/env python
####################################################################
### This is the PYTHON version of program 3.4 from page 87 of #
### "Modeling Infectious Disease in humans and animals" #
### by Keeling & Rohani. #
### #
### It is the SEIR model wi... | [
"numpy.mean",
"pylab.ylim",
"pylab.subplot",
"numpy.hstack",
"pylab.plot",
"pylab.find",
"scipy.integrate.odeint",
"pylab.xlabel",
"matplotlib.font_manager.FontProperties",
"numpy.array",
"numpy.zeros",
"pylab.semilogy",
"numpy.vstack",
"pylab.xlim",
"pylab.ylabel",
"numpy.arange",
"... | [((766, 809), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 1.0 / (55 * 365)]'], {}), '([0.0, 0.0, 0.0, 1.0 / (55 * 365)])\n', (774, 809), True, 'import numpy as np\n'), ((815, 858), 'numpy.array', 'np.array', (['[1.0 / (55 * 365), 0.0, 0.0, 0.0]'], {}), '([1.0 / (55 * 365), 0.0, 0.0, 0.0])\n', (823, 858), True, 'impor... |
import numpy as np
def moving_average(a, n=3) :
"""
perform moving average, return a vector of same length as input
"""
a=a.ravel()
a = np.concatenate(([a[0]]*(n-1),a)) # repeating first values
ret = np.cumsum(a, dtype=float)
ret[n:] = ret[n:] - ret[:-n]
ret=ret[n - 1:] / n
return ... | [
"numpy.cumsum",
"numpy.concatenate"
] | [((158, 195), 'numpy.concatenate', 'np.concatenate', (['([a[0]] * (n - 1), a)'], {}), '(([a[0]] * (n - 1), a))\n', (172, 195), True, 'import numpy as np\n'), ((226, 251), 'numpy.cumsum', 'np.cumsum', (['a'], {'dtype': 'float'}), '(a, dtype=float)\n', (235, 251), True, 'import numpy as np\n')] |
import click
from train_anomaly_detection import main_func
import numpy as np
import os
# Define base parameters.
dataset_name = 'selfsupervised'
net_name = 'StackConvNet'
xp_path_base = 'log'
data_path = 'data/full'
train_folder = 'train'
val_pos_folder = 'val/wangen_sun_3_pos'
val_neg_folder = 'val/wangen_sun_3_neg'... | [
"numpy.mean",
"train_anomaly_detection.main_func",
"os.path.join",
"numpy.std"
] | [((4318, 4355), 'os.path.join', 'os.path.join', (['xp_path_base', '"""auc.npy"""'], {}), "(xp_path_base, 'auc.npy')\n", (4330, 4355), False, 'import os\n'), ((4385, 4409), 'numpy.mean', 'np.mean', (['auc_mat'], {'axis': '(0)'}), '(auc_mat, axis=0)\n', (4392, 4409), True, 'import numpy as np\n'), ((4430, 4453), 'numpy.s... |
from src.environments.slippery_grid import SlipperyGrid
import numpy as np
# A modified version of OpenAI Gym FrozenLake
# only the labelling function needs to be specified
sinks = []
for i in range(12, 16):
for j in range(15, 19):
sinks.append([i, j])
# create a SlipperyGrid object
FrozenLake = SlipperyG... | [
"src.environments.slippery_grid.SlipperyGrid",
"numpy.empty"
] | [((311, 407), 'src.environments.slippery_grid.SlipperyGrid', 'SlipperyGrid', ([], {'shape': '[20, 20]', 'initial_state': '[0, 10]', 'slip_probability': '(0.1)', 'sink_states': 'sinks'}), '(shape=[20, 20], initial_state=[0, 10], slip_probability=0.1,\n sink_states=sinks)\n', (323, 407), False, 'from src.environments.... |
"""
Loads the BVH files that make up the databases and processes them into the format required by our training algorithm.
It does NOT subdivide the clips into overlapping windows and does NOT split the data set into training and validation.
For this, use the script `extract_data_splits.py`.
This code is mostly copied ... | [
"numpy.mean",
"os.listdir",
"numpy.amin",
"numpy.minimum",
"Animation.positions_global",
"os.path.join",
"numpy.min",
"Quaternions.Quaternions.between",
"numpy.max",
"numpy.exp",
"numpy.array",
"scipy.ndimage.filters.gaussian_filter1d",
"numpy.concatenate",
"numpy.std",
"Pivots.Pivots.fr... | [((880, 898), 'BVH.load', 'BVH.load', (['filename'], {}), '(filename)\n', (888, 898), True, 'import BVH as BVH\n'), ((993, 1025), 'Animation.positions_global', 'Animation.positions_global', (['anim'], {}), '(anim)\n', (1019, 1025), True, 'import Animation as Animation\n'), ((1686, 1775), 'scipy.ndimage.filters.gaussian... |
#!/usr/bin/env python
# coding: utf-8
import os
import sys
import random
import math
import re
import time
import numpy as np
import cv2
import matplotlib
import matplotlib.pyplot as plt
import tensorflow as tf
from mrcnn.config import Config
# import utils
from mrcnn import model as modellib, utils
from mrcnn import ... | [
"mrcnn.model.MaskRCNN",
"os.path.exists",
"os.listdir",
"PIL.Image.open",
"mrcnn.utils.download_trained_weights",
"numpy.random.choice",
"mrcnn.visualize.display_top_masks",
"numpy.logical_not",
"os.path.join",
"numpy.max",
"numpy.zeros",
"os.path.abspath",
"sys.path.append",
"matplotlib.p... | [((436, 461), 'os.path.abspath', 'os.path.abspath', (['"""../../"""'], {}), "('../../')\n", (451, 461), False, 'import os\n'), ((482, 507), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (497, 507), False, 'import sys\n'), ((592, 622), 'os.path.join', 'os.path.join', (['ROOT_DIR', '"""logs"""... |
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, ... | [
"numpy.sqrt",
"tensorflow.compat.v2.keras.optimizers.Adam",
"tensorflow.compat.v2.zeros",
"dice_rl.utils.common.is_categorical_spec",
"dice_rl.data.dataset.convert_to_tfagents_timestep",
"tensorflow.compat.v2.nest.map_structure",
"tensorflow.compat.v2.square",
"tensorflow.compat.v2.einsum",
"tensorf... | [((3547, 3591), 'numpy.zeros', 'np.zeros', (['[self._dimension, self._dimension]'], {}), '([self._dimension, self._dimension])\n', (3555, 3591), True, 'import numpy as np\n'), ((3618, 3645), 'numpy.zeros', 'np.zeros', (['[self._dimension]'], {}), '([self._dimension])\n', (3626, 3645), True, 'import numpy as np\n'), ((3... |
import pandas as pd
import numpy as np
from collections import defaultdict
from itertools import combinations
# Alpha interval of .95
# corrected for multiple comparisons
Z_MULT = 2.98
def calculate_significance(array):
"""Calculate significance directly."""
return is_sig(*interval_from_values(array))
def... | [
"numpy.array",
"collections.defaultdict",
"pandas.read_csv"
] | [((776, 877), 'pandas.read_csv', 'pd.read_csv', (['"""/Users/stephantulkens/Google Drive/code/r/lrec/experiment_3_eng-uk_words.csv"""'], {}), "(\n '/Users/stephantulkens/Google Drive/code/r/lrec/experiment_3_eng-uk_words.csv'\n )\n", (787, 877), True, 'import pandas as pd\n'), ((882, 899), 'collections.defaultdic... |
"""<NAME>., 2019 - 2020. All rights reserved.
This file process the IO for the Text similarity """
import math
import os
import datetime
import shutil
import time
import pandas as pd
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
im... | [
"pandas.Series",
"os.path.exists",
"sklearn.metrics.pairwise.cosine_similarity",
"pandas.read_csv",
"numpy.arange",
"sklearn.feature_extraction.text.CountVectorizer",
"os.path.splitext",
"pandas.set_option",
"os.path.isfile",
"os.path.dirname",
"datetime.datetime.now",
"pandas.read_excel",
"... | [((368, 383), 'similarity.similarity_logging.get_logger', 'cl.get_logger', ([], {}), '()\n', (381, 383), True, 'import similarity.similarity_logging as cl\n'), ((2315, 2345), 'os.path.isfile', 'os.path.isfile', (['self.file_path'], {}), '(self.file_path)\n', (2329, 2345), False, 'import os\n'), ((2594, 2624), 'os.path.... |
"""
MesoNet
Authors: <NAME> and <NAME>, <NAME>
https://github.com/bf777/MesoNet
Licensed under the Creative Commons Attribution 4.0 International License (see LICENSE for details)
This file has been adapted from data.py in https://github.com/zhixuhao/unet
"""
from __future__ import print_function
from tensorflow.keras.... | [
"numpy.reshape",
"os.path.join",
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"numpy.max",
"numpy.zeros",
"skimage.transform.resize"
] | [((2002, 2032), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {}), '(**aug_dict)\n', (2020, 2032), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((2052, 2082), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([],... |
'''
Description: Build a dataset class that inherits from the official torch.utils.data.Dataset
Author: HCQ
Company(School): UCAS
Email: <EMAIL>
Date: 2021-06-05 11:19:36
LastEditTime: 2021-06-10 10:40:49
FilePath: /pointnet-simple/framework/dataset.py
'''
import torch
import os
import json
from torch.utils.data import Dataset  # official
from torch.utils.data impo... | [
"numpy.random.normal",
"numpy.mean",
"os.listdir",
"torch.utils.data.DataLoader",
"torch.Tensor",
"numpy.array",
"numpy.zeros",
"numpy.sum",
"numpy.cos",
"numpy.random.uniform",
"numpy.sin"
] | [((417, 428), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (425, 428), True, 'import numpy as np\n'), ((3954, 4004), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'batch_size': '(2)', 'shuffle': '(True)'}), '(train_data, batch_size=2, shuffle=True)\n', (3964, 4004), False, 'from torch.utils.data ... |
import numpy as np
import numpy.linalg as LA
import quaternion
from . import wind
from .launcher import Launcher
from .air import Air
class Enviroment:
def __init__(
self,
latitude,
longitude,
altitude=0
):
self.latitude = latitude
... | [
"numpy.dot",
"numpy.array",
"numpy.deg2rad",
"numpy.linalg.inv"
] | [((1043, 1171), 'numpy.array', 'np.array', (['[[-sinlon, -sinlat * coslon, coslat * coslon], [coslon, -sinlat * sinlon, \n coslat * sinlon], [0.0, coslat, sinlat]]'], {}), '([[-sinlon, -sinlat * coslon, coslat * coslon], [coslon, -sinlat *\n sinlon, coslat * sinlon], [0.0, coslat, sinlat]])\n', (1051, 1171), True... |
__author__ = 'sibirrer'
import numpy as np
from lenstronomy.LensModel.Profiles.base_profile import LensProfileBase
from lenstronomy.Util import derivative_util as calc_util
__all__ = ['CoredDensity']
class CoredDensity(LensProfileBase):
"""
class for a uniform cored density dropping steep in the outskirts
... | [
"lenstronomy.Util.derivative_util.d_x_diffr_dx",
"numpy.sqrt",
"lenstronomy.Util.derivative_util.d_r_dy",
"lenstronomy.Util.derivative_util.d_x_diffr_dy",
"numpy.log",
"lenstronomy.Util.derivative_util.d_r_dx",
"numpy.maximum",
"lenstronomy.Util.derivative_util.d_y_diffr_dy",
"numpy.arctan"
] | [((1301, 1327), 'numpy.sqrt', 'np.sqrt', (['(x_ ** 2 + y_ ** 2)'], {}), '(x_ ** 2 + y_ ** 2)\n', (1308, 1327), True, 'import numpy as np\n'), ((1340, 1362), 'numpy.maximum', 'np.maximum', (['r', 'self._s'], {}), '(r, self._s)\n', (1350, 1362), True, 'import numpy as np\n'), ((1996, 2022), 'numpy.sqrt', 'np.sqrt', (['(x... |
"""
============================================================================
Comparing anomaly detection algorithms for outlier detection on toy datasets
============================================================================
This example shows characteristics of different anomaly detection algorithms
on 2D d... | [
"numpy.array",
"numpy.random.RandomState",
"sklearn.datasets.make_blobs",
"matplotlib.pyplot.contour",
"numpy.linspace",
"matplotlib.pyplot.yticks",
"sklearn.neighbors.LocalOutlierFactor",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.ylim",
"sklearn.svm.OneClassSVM",
"sklearn.covariance.Ellip... | [((4773, 4870), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0.02)', 'right': '(0.98)', 'bottom': '(0.001)', 'top': '(0.96)', 'wspace': '(0.05)', 'hspace': '(0.01)'}), '(left=0.02, right=0.98, bottom=0.001, top=0.96, wspace=\n 0.05, hspace=0.01)\n', (4792, 4870), True, 'import matplotl... |
from numbers import Number
from typing import List
from typing import Union
import numpy as np
from error_propagation.core import Complex
def npv(
cash: Union[List[Number], List[Complex]],
discount_rate: Union[Number, Complex], # noqa
) -> Complex:
"""NPV accounts for the time value of money and can be... | [
"error_propagation.core.Complex",
"numpy.array"
] | [((1090, 1103), 'error_propagation.core.Complex', 'Complex', (['(1)', '(0)'], {}), '(1, 0)\n', (1097, 1103), False, 'from error_propagation.core import Complex\n'), ((1255, 1269), 'numpy.array', 'np.array', (['cash'], {}), '(cash)\n', (1263, 1269), True, 'import numpy as np\n'), ((1215, 1228), 'error_propagation.core.C... |
import numpy as np
import cPickle
import os
import pdb
import cv2
def unpickle(file):
fo = open(file, 'rb')
dict = cPickle.load(fo)
fo.close()
return dict
def load_data(train_path,order,nb_groups, nb_cl, nb_val,SubMean = False):
xs = []
ys = []
for j in range(1):
d = unpickle(train_p... | [
"numpy.mean",
"numpy.unique",
"numpy.random.random_integers",
"numpy.where",
"numpy.zeros",
"numpy.empty",
"numpy.concatenate",
"numpy.pad",
"cPickle.load",
"numpy.float32",
"numpy.random.shuffle"
] | [((124, 140), 'cPickle.load', 'cPickle.load', (['fo'], {}), '(fo)\n', (136, 140), False, 'import cPickle\n'), ((608, 626), 'numpy.concatenate', 'np.concatenate', (['ys'], {}), '(ys)\n', (622, 626), True, 'import numpy as np\n'), ((954, 981), 'numpy.mean', 'np.mean', (['x[0:50000]'], {'axis': '(0)'}), '(x[0:50000], axis... |
import gym
import numpy as np
import pytest
from gym.spaces.discrete import Discrete
from gym.utils import seeding
from tianshou.data import Batch, Collector, ReplayBuffer
from tianshou.env import DummyVectorEnv, SubprocVectorEnv
from tianshou.policy import BasePolicy
class SimpleEnv(gym.Env):
"""A simplest exam... | [
"numpy.ones",
"tianshou.data.ReplayBuffer",
"tianshou.data.Collector",
"gym.spaces.discrete.Discrete",
"pytest.main",
"numpy.random.randint",
"numpy.zeros",
"numpy.random.seed",
"pytest.fixture",
"gym.utils.seeding.np_random"
] | [((1526, 1556), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1540, 1556), False, 'import pytest\n'), ((1573, 1590), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (1587, 1590), True, 'import numpy as np\n'), ((2076, 2095), 'tianshou.data.ReplayBuffer', 'Rep... |
import os, sys
sys.path.insert(0, os.path.join(os.pardir, 'src'))
def sympy_solution():
from sympy import symbols, Rational, solve
C1, C3, C4 = symbols('C1 C3 C4')
s = solve([C1 - 1 - C3,
C1 - Rational(1,2) - C3 - C4,
2 + 2*C3 + C4], [C1,C3,C4])
return s
import numpy as np
import matplo... | [
"matplotlib.pyplot.ylabel",
"numpy.array",
"numpy.sin",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.linspace",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.gca",
"sympy.symbols",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"numpy.linalg.solve",
... | [((34, 64), 'os.path.join', 'os.path.join', (['os.pardir', '"""src"""'], {}), "(os.pardir, 'src')\n", (46, 64), False, 'import os, sys\n'), ((153, 172), 'sympy.symbols', 'symbols', (['"""C1 C3 C4"""'], {}), "('C1 C3 C4')\n", (160, 172), False, 'from sympy import symbols, Rational, solve\n'), ((375, 397), 'numpy.linspac... |
"""A script testing the extraction pipeline of RHEA
Steps
1) Initialise Format, Extractor and RadialVelocity
2) Define file paths for science, flat and dark frames
3) Extract/import spectra
4) Create/import reference spectra
5) Calculate radial velocities
6) Plot radial velocities
"""
import numpy as np
try... | [
"astropy.io.fits.getheader",
"numpy.where",
"pymfe.rhea.Format",
"pymfe.Extractor",
"astropy.io.fits.getdata",
"numpy.concatenate",
"astropy.io.fits.open",
"pymfe.rv.RadialVelocity",
"glob.glob"
] | [((850, 910), 'astropy.io.fits.getdata', 'pyfits.getdata', (['"""/priv/mulga1/jbento/rhea2_data/badpix.fits"""'], {}), "('/priv/mulga1/jbento/rhea2_data/badpix.fits')\n", (864, 910), True, 'import astropy.io.fits as pyfits\n'), ((919, 947), 'numpy.where', 'np.where', (['(badpixel_mask == 1)'], {}), '(badpixel_mask == 1... |
# %%
import numpy as np
import pathlib as pth
import matplotlib.pyplot as plt
import re
try:
from pileupplots_utils import *
except:
from .pileupplots_utils import *
# %%
if __name__ == "__main__":
parser = argparser()
args = parser.parse_args()
data_path = pth.Path(args.vial_fld)
fig_path... | [
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.subplot_mosaic",
"numpy.arange",
"pathlib.Path"
] | [((284, 307), 'pathlib.Path', 'pth.Path', (['args.vial_fld'], {}), '(args.vial_fld)\n', (292, 307), True, 'import pathlib as pth\n'), ((323, 345), 'pathlib.Path', 'pth.Path', (['args.fig_fld'], {}), '(args.fig_fld)\n', (331, 345), True, 'import pathlib as pth\n'), ((1065, 1109), 'matplotlib.pyplot.subplot_mosaic', 'plt... |
from typing import Union
import numpy as np
import talib
from jesse.helpers import get_candle_source
def efi(candles: np.ndarray, period: int = 13, source_type: str = "close", sequential: bool = False) -> Union[
float, np.ndarray]:
"""
EFI - Elders Force Index
:param candles: np.ndarray
:param ... | [
"numpy.full",
"talib.EMA",
"jesse.helpers.get_candle_source"
] | [((571, 622), 'jesse.helpers.get_candle_source', 'get_candle_source', (['candles'], {'source_type': 'source_type'}), '(candles, source_type=source_type)\n', (588, 622), False, 'from jesse.helpers import get_candle_source\n'), ((775, 808), 'talib.EMA', 'talib.EMA', (['dif'], {'timeperiod': 'period'}), '(dif, timeperiod=... |
from numpy import column_stack, savetxt
import os
lf = os.linesep  # determining the linefeed for the operating system ('\n' for Linux or '\r\n' for Windows)
def _dump_matrix(f, matrix, fmt='%0.10g', delim='\t'):
savetxt(f, matrix, fmt=fmt, delimiter=delim)
return f
def _dump_vectors(f, vectorlist, fmt='%0.1... | [
"numpy.column_stack",
"numpy.savetxt"
] | [((215, 259), 'numpy.savetxt', 'savetxt', (['f', 'matrix'], {'fmt': 'fmt', 'delimiter': 'delim'}), '(f, matrix, fmt=fmt, delimiter=delim)\n', (222, 259), False, 'from numpy import column_stack, savetxt\n'), ((353, 377), 'numpy.column_stack', 'column_stack', (['vectorlist'], {}), '(vectorlist)\n', (365, 377), False, 'fr... |
import numpy as np
import torch
from scipy.stats import entropy as sc_entropy
class MultipredictionEntropy:
def __int__(self):
"""
Computes the entropy on multiple predictions of the same batch.
"""
super(MultipredictionEntropy, self).__init__()
def __call__(self, y, device='... | [
"torch.tensor",
"numpy.unique",
"torch.argmax"
] | [((575, 816), 'torch.tensor', 'torch.tensor', (['[[[0.7, 0.3, 0.1], [0.7, 0.3, 0.1], [0.7, 0.3, 0.2]], [[0.4, 0.6, 0.3], [\n 0.4, 0.6, 0.4], [0.6, 0.4, 0.3]], [[0.4, 0.6, 0.2], [0.6, 0.4, 0.8], [\n 0.6, 0.4, 0.7]], [[0.1, 0.9, 0.3], [0.1, 0.9, 0.3], [0.1, 0.9, 0.3]]]'], {}), '([[[0.7, 0.3, 0.1], [0.7, 0.3, 0.1], ... |
import time, math, cmath
import numpy as np
from functools import reduce
from qiskit import *
from qiskit.quantum_info import Statevector
from circuit_builder import CircuitBuilder
from agent import Agent
class QRPS_Agent(Agent):
def __init__(self, backend):
self.backend = backend
self.memory =... | [
"circuit_builder.CircuitBuilder",
"numpy.abs",
"numpy.linalg.eig",
"qiskit.quantum_info.Statevector",
"functools.reduce",
"numpy.delete",
"math.sqrt",
"numpy.sum",
"numpy.array",
"numpy.linalg.eigvals",
"numpy.split",
"numpy.concatenate",
"time.time"
] | [((1171, 1186), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (1177, 1186), True, 'import numpy as np\n'), ((1202, 1248), 'numpy.array', 'np.array', (['[(h / sum_weights) for h in weights]'], {}), '([(h / sum_weights) for h in weights])\n', (1210, 1248), True, 'import numpy as np\n'), ((2751, 2768), 'numpy.s... |
import logging
import os
import numpy as np
import tensorflow as tf
import sys
def create_log(name):
"""Logging."""
if os.path.exists(name):
os.remove(name)
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
# handler for logger file
handler1 = logging.FileHandler(name)
... | [
"logging.getLogger",
"os.path.exists",
"tensorflow.contrib.learn.python.learn.datasets.mnist.read_data_sets",
"logging.StreamHandler",
"numpy.mean",
"logging.Formatter",
"tensorflow.global_variables_initializer",
"numpy.array",
"logging.FileHandler",
"os.mkdir",
"numpy.expand_dims",
"sys.exit"... | [((129, 149), 'os.path.exists', 'os.path.exists', (['name'], {}), '(name)\n', (143, 149), False, 'import os\n'), ((188, 211), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (205, 211), False, 'import logging\n'), ((292, 317), 'logging.FileHandler', 'logging.FileHandler', (['name'], {}), '(name)\n... |
import os
import random
import time
import torch
import torch.nn.functional as F
import torch.nn as nn
import numpy as np
import scipy.io.wavfile as wavfile
import matplotlib
from mir_eval.separation import bss_eval_sources
from arguments import ArgParser
from dataset import MUSICMixDataset
from models import ModelBu... | [
"numpy.clip",
"dataset.MUSICMixDataset",
"viz.plot_loss_loc_sep_acc_metrics",
"torch.cuda.synchronize",
"models.activate",
"torch.squeeze",
"os.path.exists",
"torch.nn.functional.grid_sample",
"utils.istft_reconstruction",
"models.ModelBuilder",
"numpy.asarray",
"time.perf_counter",
"torch.n... | [((3521, 3535), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (3533, 3535), False, 'from utils import AverageMeter, recover_rgb, magnitude2heatmap, istft_reconstruction, warpgrid, combine_video_audio, save_video, makedirs\n'), ((3552, 3566), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (3564, 3566)... |
import numpy as np
import cv2
import matplotlib.pyplot as plt
from random import *
def centroid_histogram(clt):
numLabels = np.arange(0, len(np.unique(clt.labels_)) + 1)
(hist, _) = np.histogram(clt.labels_, bins=numLabels)
hist = hist.astype("float")
hist /= hist.sum()
# Olusturulan his... | [
"matplotlib.pyplot.imshow",
"numpy.histogram",
"matplotlib.pyplot.savefig",
"numpy.unique",
"matplotlib.pyplot.figure",
"numpy.zeros",
"matplotlib.pyplot.axis"
] | [((199, 240), 'numpy.histogram', 'np.histogram', (['clt.labels_'], {'bins': 'numLabels'}), '(clt.labels_, bins=numLabels)\n', (211, 240), True, 'import numpy as np\n'), ((604, 642), 'numpy.zeros', 'np.zeros', (['(300, 300, 3)'], {'dtype': '"""uint8"""'}), "((300, 300, 3), dtype='uint8')\n", (612, 642), True, 'import nu... |
import numpy as np
from .parse import parse_xtekct_file
class Config(object):
def __init__(self):
""" Configuration object which contains all settings neccessary for the forward projection
and tomographic reconstruction using the axitom algorithm.
"""
self.n_voxels_x = 2000
... | [
"numpy.arange"
] | [((908, 945), 'numpy.arange', 'np.arange', (['(0.0)', '(360)', 'self.angular_inc'], {}), '(0.0, 360, self.angular_inc)\n', (917, 945), True, 'import numpy as np\n'), ((2126, 2163), 'numpy.arange', 'np.arange', (['(0.0)', '(360)', 'self.angular_inc'], {}), '(0.0, 360, self.angular_inc)\n', (2135, 2163), True, 'import nu... |
from mod_copeland_yateesh import sample_complexity
args = {}
# args['heuristic'] = 'random'
args['heuristic'] = 'greedy'
# args['heuristic'] = 'mod_dcb'
args['n_voters'] = 4639
args['alpha'] = 0.05
args['seed'] = 42
args['ques_limit'] = 5
args['gamma'] = 0.5
args['probs'] = [0.05, 0.1, 0.2, 0.4]
q_limits = [1, 2, 3, ... | [
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ylabel",
"seaborn.set_theme",
"matplotlib.pyplot.xlabel",
"numpy.array",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((2313, 2328), 'seaborn.set_theme', 'sns.set_theme', ([], {}), '()\n', (2326, 2328), True, 'import seaborn as sns\n'), ((2512, 2548), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Num of questions asked"""'], {}), "('Num of questions asked')\n", (2522, 2548), True, 'import matplotlib.pyplot as plt\n'), ((2549, 2584)... |
import numpy as np
import unittest
from convolution import conv2d, add_padding
class TestConvolution(unittest.TestCase):
def test_paddings_shape(self, N: int = 1000):
for _ in range(N):
m_h = np.random.randint(3, 100)
m_w = np.random.randint(3, 100)
random_matrix = np.... | [
"numpy.random.rand",
"numpy.random.choice",
"numpy.floor",
"numpy.array",
"numpy.random.randint",
"convolution.conv2d",
"unittest.main",
"convolution.add_padding"
] | [((19117, 19132), 'unittest.main', 'unittest.main', ([], {}), '()\n', (19130, 19132), False, 'import unittest\n'), ((3348, 3478), 'numpy.array', 'np.array', (['[[0, 4, 3, 2, 0, 1, 0], [4, 3, 0, 1, 0, 1, 0], [1, 3, 4, 2, 0, 1, 0], [3, 4,\n 2, 2, 0, 1, 0], [0, 0, 0, 0, 0, 1, 0]]'], {}), '([[0, 4, 3, 2, 0, 1, 0], [4, 3... |
#! /usr/bin/env python3
# coding=utf-8
""""""
"""
Author: <EMAIL>
"""
import datetime
import argparse
import sys, os
import gc
import subprocess
import traceback
import numpy as np
import toml
sys.path.append("..")
import trace_source
parser = argparse.ArgumentParser()
parser.add_argument('--station', help='stati... | [
"numpy.mean",
"os.listdir",
"argparse.ArgumentParser",
"trace_source.flexpart.plot_part_loc_map",
"datetime.datetime.strptime",
"trace_source.land_sfc.land_sfc",
"trace_source.flexpart.read_partpositions",
"subprocess.run",
"trace_source.flexpart.read_flexpart_traj_meta",
"os.getcwd",
"os.chdir"... | [((197, 218), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (212, 218), False, 'import sys, os\n'), ((250, 275), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (273, 275), False, 'import argparse\n'), ((836, 890), 'datetime.datetime.strptime', 'datetime.datetime.strptime... |
# Copyright 2020 DeepLearningResearch
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by... | [
"scipy.stats.mode",
"pickle.load",
"numpy.argmax",
"numpy.append",
"numpy.array",
"numpy.zeros"
] | [((2019, 2063), 'numpy.zeros', 'np.zeros', ([], {'shape': '(X_Pool_Dropout.shape[0], 1)'}), '(shape=(X_Pool_Dropout.shape[0], 1))\n', (2027, 2063), True, 'import numpy as np\n'), ((2582, 2621), 'numpy.zeros', 'np.zeros', ([], {'shape': 'X_Pool_Dropout.shape[0]'}), '(shape=X_Pool_Dropout.shape[0])\n', (2590, 2621), True... |
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parent))
import unittest
import nanopq
import numpy as np
class TestSuite(unittest.TestCase):
def setUp(self):
np.random.seed(123)
def test_property(self):
opq = nanopq.OPQ(M=4, Ks=256)
self.assertEqual... | [
"numpy.allclose",
"pathlib.Path",
"numpy.random.random",
"numpy.linalg.inv",
"numpy.random.seed",
"copy.deepcopy",
"unittest.main",
"nanopq.OPQ"
] | [((2202, 2217), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2215, 2217), False, 'import unittest\n'), ((208, 227), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (222, 227), True, 'import numpy as np\n'), ((272, 295), 'nanopq.OPQ', 'nanopq.OPQ', ([], {'M': '(4)', 'Ks': '(256)'}), '(M=4, Ks=... |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | [
"tensorflow.python.ops.array_ops.placeholder",
"numpy.matrix",
"tensorflow.python.platform.test.main",
"tensorflow.python.ops.parsing_ops.decode_raw"
] | [((4671, 4682), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (4680, 4682), False, 'from tensorflow.python.platform import test\n'), ((1163, 1210), 'tensorflow.python.ops.array_ops.placeholder', 'array_ops.placeholder', (['dtypes.string'], {'shape': '[2]'}), '(dtypes.string, shape=[2])\n', (118... |
"""This module contains the process that generates our regression test battery."""
import os
import json
import argparse
import numpy as np
from soepy.python.simulate.simulate_python import simulate
from soepy.python.soepy_config import TEST_RESOURCES_DIR
from soepy.test.random_init import random_init
from soepy.test... | [
"soepy.test.auxiliary.cleanup",
"numpy.testing.assert_array_almost_equal",
"soepy.test.random_init.random_init",
"argparse.ArgumentParser",
"soepy.test.random_init.print_dict",
"os.path.join",
"soepy.python.simulate.simulate_python.simulate",
"numpy.random.randint",
"numpy.random.seed"
] | [((934, 954), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (948, 954), True, 'import numpy as np\n'), ((967, 1008), 'numpy.random.randint', 'np.random.randint', (['(0)', '(1000)'], {'size': 'num_test'}), '(0, 1000, size=num_test)\n', (984, 1008), True, 'import numpy as np\n'), ((1024, 1087), 'os.p... |
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
import unittest
from generator import generator, generate
from extensions.ops.Cast import Cast
from mo.middle.passes.convert_data_type import packed_U4, packed_I4
from mo.middle.passes.infer import partial_infer
from ... | [
"mo.utils.unittest.graph.connect",
"generator.generate",
"mo.utils.unittest.graph.regular_op_with_empty_data",
"mo.utils.ir_engine.compare_graphs.compare_graphs",
"mo.middle.passes.infer.partial_infer",
"mo.utils.unittest.graph.result",
"numpy.array"
] | [((989, 2814), 'generator.generate', 'generate', (['*[([0], [0], packed_U4), ([1], [16], packed_U4), ([2], [32], packed_U4), ([\n 3], [48], packed_U4), ([4], [64], packed_U4), ([5], [80], packed_U4), (\n [6], [96], packed_U4), ([7], [112], packed_U4), ([8], [128], packed_U4),\n ([9], [144], packed_U4), ([10], ... |
import numpy as np
x = np.arange(18).reshape(6,3)
print(x)
y = np.array_split(x, 3)
y = np.delete(y, 1, axis=0).reshape(-1,3)
print(y)
print(x)
| [
"numpy.array_split",
"numpy.delete",
"numpy.arange"
] | [((65, 85), 'numpy.array_split', 'np.array_split', (['x', '(3)'], {}), '(x, 3)\n', (79, 85), True, 'import numpy as np\n'), ((24, 37), 'numpy.arange', 'np.arange', (['(18)'], {}), '(18)\n', (33, 37), True, 'import numpy as np\n'), ((90, 113), 'numpy.delete', 'np.delete', (['y', '(1)'], {'axis': '(0)'}), '(y, 1, axis=0)... |
"""
Module with reading functionalities for calibration spectra.
"""
import os
import configparser
from typing import Optional, Dict, Tuple
import h5py
import spectres
import numpy as np
from typeguard import typechecked
from scipy.optimize import curve_fit
from species.analysis import photometry
from species.core... | [
"numpy.mean",
"configparser.ConfigParser",
"numpy.ones",
"numpy.where",
"numpy.asarray",
"species.util.read_util.create_wavelengths",
"species.core.box.create_box",
"numpy.count_nonzero",
"spectres.spectres",
"os.getcwd",
"h5py.File",
"numpy.diag",
"numpy.append",
"species.analysis.photome... | [((1350, 1377), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1375, 1377), False, 'import configparser\n'), ((2588, 2710), 'spectres.spectres', 'spectres.spectres', (['wavel_points', 'calibbox.wavelength', 'calibbox.flux'], {'spec_errs': 'calibbox.error', 'fill': '(0.0)', 'verbose': '(Fal... |
# Copyright 2020 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in wri... | [
"jax.random.PRNGKey",
"flax.linen.Dense",
"jax.config.parse_flags_with_absl",
"flax.linen.module._get_suffix_value_pairs",
"flax.core.Scope",
"numpy.testing.assert_allclose",
"absl.testing.absltest.main",
"jax.numpy.array",
"jax.tree_map",
"numpy.array",
"flax.core.freeze",
"flax.linen.module.... | [((973, 1007), 'jax.config.parse_flags_with_absl', 'jax.config.parse_flags_with_absl', ([], {}), '()\n', (1005, 1007), False, 'import jax\n'), ((1040, 1071), 'jax.config.enable_omnistaging', 'jax.config.enable_omnistaging', ([], {}), '()\n', (1069, 1071), False, 'import jax\n'), ((21403, 21418), 'absl.testing.absltest.... |
import numpy as np
import argparse
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report
from torch.utils.tensorboard import SummaryWriter
import time
import torch
import random
import os
from transport import *
from models import *
import torch.nn.function... | [
"matplotlib.pyplot.ylabel",
"numpy.hstack",
"sklearn.metrics.classification_report",
"torch.softmax",
"numpy.array",
"matplotlib.rc",
"numpy.arange",
"matplotlib.pyplot.contourf",
"argparse.ArgumentParser",
"matplotlib.pyplot.xlabel",
"torch.set_num_threads",
"numpy.max",
"matplotlib.pyplot.... | [((462, 486), 'torch.set_num_threads', 'torch.set_num_threads', (['(8)'], {}), '(8)\n', (483, 486), False, 'import torch\n'), ((514, 531), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (525, 531), False, 'import random\n'), ((582, 602), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (596... |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 28 08:32:18 2018
@author: avanetten
Implement SP Metric
https://www.cv-foundation.org/openaccess/content_cvpr_2013/papers/Wegner_A_Higher-Order_CRF_2013_CVPR_paper.pdf
"""
import apls_utils
import apls
import os
import sys
import time
import nump... | [
"apls.create_edge_linestrings",
"osmnx_funcs.plot_graph",
"numpy.array",
"networkx.shortest_path",
"sys.path.append",
"networkx.has_path",
"os.path.exists",
"apls_utils.nodes_near_point",
"os.listdir",
"matplotlib.pyplot.close",
"matplotlib.patches.Circle",
"numpy.round",
"numpy.abs",
"os.... | [((565, 595), 'os.path.dirname', 'os.path.dirname', (['path_apls_src'], {}), '(path_apls_src)\n', (580, 595), False, 'import os\n'), ((596, 626), 'sys.path.append', 'sys.path.append', (['path_apls_src'], {}), '(path_apls_src)\n', (611, 626), False, 'import sys\n'), ((525, 551), 'os.path.realpath', 'os.path.realpath', (... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 18 17:57:38 2020
Copyright 2020 by <NAME>.
"""
# Standard library imports:
from math import exp, sqrt, pi
import numpy as np
# Learnpy imports:
from .RandVar import RandVar
from .RandVar2 import RandVar2
def mle(X, model):
"""
Compute the... | [
"math.sqrt",
"numpy.linalg.det",
"numpy.array",
"numpy.zeros",
"numpy.linalg.inv",
"numpy.outer",
"math.exp"
] | [((1551, 1586), 'numpy.array', 'np.array', (['[left_bound, right_bound]'], {}), '([left_bound, right_bound])\n', (1559, 1586), True, 'import numpy as np\n'), ((1818, 1834), 'numpy.zeros', 'np.zeros', (['[d, d]'], {}), '([d, d])\n', (1826, 1834), True, 'import numpy as np\n'), ((1960, 1980), 'numpy.linalg.inv', 'np.lina... |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six import with_metaclass
import numpy as np
import itertools
from slicerator import Slicerator, propagate_attr, index_attr
from .frame import Frame
from abc import ABCMeta, abstractmethod, abstr... | [
"numpy.prod",
"numpy.rollaxis",
"numpy.argsort",
"numpy.take",
"numpy.array",
"numpy.empty",
"six.with_metaclass",
"warnings.warn",
"numpy.cumprod",
"itertools.repeat"
] | [((379, 410), 'six.with_metaclass', 'with_metaclass', (['ABCMeta', 'object'], {}), '(ABCMeta, object)\n', (393, 410), False, 'from six import with_metaclass\n'), ((11374, 11398), 'numpy.argsort', 'np.argsort', (['to_drop_inds'], {}), '(to_drop_inds)\n', (11384, 11398), True, 'import numpy as np\n'), ((9848, 9876), 'num... |
from functools import wraps
import sys, time, os
import numpy as np
# Atomic weight
data = { "xx" : 1.00794, "H" : 1.00794, "He" : 4.00260, "Li" : 6.941, "Be" : 9.012187, "B" : 10.811,
"C" : 12.0107, "N" : 14.00674, "O" : 15.9994, "F" : 18.99840, "Ne" : 20.1797, "Na" : 22.98977,
"Mg" : 24.3050, "Al" : ... | [
"numpy.sqrt",
"os.path.join",
"sys._getframe",
"functools.wraps",
"numpy.exp",
"time.time"
] | [((2569, 2580), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2574, 2580), False, 'from functools import wraps\n'), ((3216, 3248), 'os.path.join', 'os.path.join', (['dir_name', 'filename'], {}), '(dir_name, filename)\n', (3228, 3248), False, 'import sys, time, os\n'), ((2630, 2641), 'time.time', 'time.time',... |
import json
import os
from os.path import join as pjoin
import nibabel as nib
import numpy as np
import torch
from deep_hilbert_inverse_3chan_sparse import DeepHilbertInverse, MyDataset
from spectral_blending import blend_method
from utils import HilbertPlane, NORMALIZATION
def dataset_prediction(plane: HilbertPlan... | [
"numpy.eye",
"os.listdir",
"os.path.join",
"numpy.zeros",
"json.load",
"spectral_blending.blend_method",
"deep_hilbert_inverse_3chan_sparse.MyDataset",
"torch.cat"
] | [((831, 906), 'deep_hilbert_inverse_3chan_sparse.MyDataset', 'MyDataset', ([], {'filename': 'test_files[0]', 'plane': 'plane', 'transform': 'model.trafo_valid'}), '(filename=test_files[0], plane=plane, transform=model.trafo_valid)\n', (840, 906), False, 'from deep_hilbert_inverse_3chan_sparse import DeepHilbertInverse,... |
import stp.play as play
import stp.tactic as tactic
from rj_gameplay.tactic import clear_tactic, nmark_tactic, goalie_tactic
import stp.skill as skill
import stp.role as role
from stp.role.assignment.naive import NaiveRoleAssignment
import stp.rc as rc
import numpy as np
from typing import Dict, List, Tuple
class D... | [
"stp.play.flatten_requests",
"rj_gameplay.tactic.goalie_tactic.GoalieTactic",
"stp.play.unflatten_results",
"rj_gameplay.tactic.nmark_tactic.NMarkTactic",
"numpy.array",
"stp.role.assignment.naive.NaiveRoleAssignment"
] | [((393, 421), 'rj_gameplay.tactic.goalie_tactic.GoalieTactic', 'goalie_tactic.GoalieTactic', ([], {}), '()\n', (419, 421), False, 'from rj_gameplay.tactic import clear_tactic, nmark_tactic, goalie_tactic\n'), ((446, 473), 'rj_gameplay.tactic.nmark_tactic.NMarkTactic', 'nmark_tactic.NMarkTactic', (['(2)'], {}), '(2)\n',... |
import numpy as np
from pandas import (
TimedeltaIndex,
timedelta_range,
)
import pandas._testing as tm
class TestRepeat:
def test_repeat(self):
index = timedelta_range("1 days", periods=2, freq="D")
exp = TimedeltaIndex(["1 days", "1 days", "2 days", "2 days"])
for r... | [
"pandas._testing.assert_index_equal",
"pandas.TimedeltaIndex",
"numpy.repeat",
"pandas.timedelta_range"
] | [((187, 233), 'pandas.timedelta_range', 'timedelta_range', (['"""1 days"""'], {'periods': '(2)', 'freq': '"""D"""'}), "('1 days', periods=2, freq='D')\n", (202, 233), False, 'from pandas import TimedeltaIndex, timedelta_range\n'), ((249, 305), 'pandas.TimedeltaIndex', 'TimedeltaIndex', (["['1 days', '1 days', '2 days',... |
"""
Copyright (c) 2021, salesforce.com, inc.
All rights reserved.
SPDX-License-Identifier: BSD-3-Clause
For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
"""
import argparse
import glob
import logging
import os
import random
import sys
import timeit
from ... | [
"logging.getLogger",
"components.utils.mkdir_p",
"components.config.set_seed",
"torch.cuda.device_count",
"torch.utils.data.distributed.DistributedSampler",
"torch.cuda.is_available",
"transformers.AutoTokenizer.from_pretrained",
"torch.distributed.get_rank",
"torch.distributed.barrier",
"os.path.... | [((1353, 1380), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1370, 1380), False, 'import logging\n'), ((1780, 1827), 'functools.partial', 'partial', (['disamb_collate_fn'], {'tokenizer': 'tokenizer'}), '(disamb_collate_fn, tokenizer=tokenizer)\n', (1787, 1827), False, 'from functools i... |
# Copyright 2016-2019 The <NAME> at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you ... | [
"numpy.tile",
"tensorflow.python.keras.backend.get_value",
"tensorflow.python.framework.test_util.run_in_graph_and_eager_modes",
"deepcell.layers.RegressBoxes",
"deepcell.layers.ClipBoxes",
"numpy.random.random",
"numpy.array",
"numpy.random.randint",
"tensorflow.python.keras.backend.floatx",
"dee... | [((1666, 1709), 'tensorflow.python.framework.test_util.run_in_graph_and_eager_modes', 'tf_test_util.run_in_graph_and_eager_modes', ([], {}), '()\n', (1707, 1709), True, 'from tensorflow.python.framework import test_util as tf_test_util\n'), ((2634, 2677), 'tensorflow.python.framework.test_util.run_in_graph_and_eager_mo... |
#-*- coding: utf8 -*-
from __future__ import division
import numpy as n, pylab as p, networkx as x, random as r, collections as c, string
__doc__="""Este arquivo possui a classe Sistem, base para todas as animações
G=x.read_gml("1-400cpp.gml") # digrafo com peso
S=Sistem(G)
S.draw("grafo1.png")
S.add_msgs([msg1,msg2.... | [
"dateutil.parser.parse",
"numpy.reshape",
"networkx.to_agraph",
"numpy.hstack",
"numpy.random.random",
"pylab.plot",
"networkx.DiGraph",
"numpy.array",
"numpy.linspace",
"numpy.zeros",
"numpy.vstack",
"networkx.copy.deepcopy",
"numpy.sin",
"networkx.read_gml",
"pylab.clf",
"pylab.show"... | [((16102, 16113), 'networkx.DiGraph', 'x.DiGraph', ([], {}), '()\n', (16111, 16113), True, 'import numpy as n, pylab as p, networkx as x, random as r, collections as c, string\n'), ((17122, 17140), 'networkx.copy.deepcopy', 'x.copy.deepcopy', (['g'], {}), '(g)\n', (17137, 17140), True, 'import numpy as n, pylab as p, n... |
import h5py
import numpy as np
from keras.datasets import mnist
from keras.utils import to_categorical
# input image dimensions
img_rows, img_cols = 28, 28
# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], img... | [
"keras.datasets.mnist.load_data",
"h5py.File",
"keras.utils.to_categorical",
"numpy.concatenate",
"h5py.special_dtype"
] | [((255, 272), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (270, 272), False, 'from keras.datasets import mnist\n'), ((569, 602), 'numpy.concatenate', 'np.concatenate', (['(y_train, y_test)'], {}), '((y_train, y_test))\n', (583, 602), True, 'import numpy as np\n'), ((826, 854), 'h5py.special_d... |
from __future__ import division
import cv2
import numpy as np
from opensfm import transformations
def rotation_from_angle_axis(angle_axis):
return cv2.Rodrigues(np.asarray(angle_axis))[0]
def rotation_from_ptr(pan, tilt, roll):
"""Camera rotation matrix from pan, tilt and roll."""
R1 = rotation_from_a... | [
"numpy.identity",
"numpy.cross",
"numpy.asarray",
"opensfm.transformations.euler_from_matrix",
"numpy.dot",
"numpy.arctan2",
"numpy.linalg.norm",
"opensfm.transformations.euler_matrix"
] | [((809, 845), 'numpy.dot', 'np.dot', (['rotation_matrix.T', '[0, 0, 1]'], {}), '(rotation_matrix.T, [0, 0, 1])\n', (815, 845), True, 'import numpy as np\n'), ((857, 887), 'numpy.arctan2', 'np.arctan2', (['Rt_ez[0]', 'Rt_ez[1]'], {}), '(Rt_ez[0], Rt_ez[1])\n', (867, 887), True, 'import numpy as np\n'), ((943, 979), 'num... |
import os
import nibabel as nib
import numpy.ma as ma
import settings_dist
import numpy as np
from tqdm import tqdm
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("root_dir")
parser.add_argument("sample")
parser.add_argument("train_test_split")
parser.add_argument("resize")
parser.add_argument(... | [
"numpy.dstack",
"numpy.mean",
"argparse.ArgumentParser",
"nibabel.load",
"numpy.asarray",
"os.path.join",
"numpy.ma.masked_not_equal",
"numpy.rot90",
"numpy.std",
"numpy.concatenate",
"os.walk"
] | [((142, 167), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (165, 167), False, 'import argparse\n'), ((1231, 1254), 'numpy.asarray', 'np.asarray', (['msks_parsed'], {}), '(msks_parsed)\n', (1241, 1254), True, 'import numpy as np\n'), ((1420, 1438), 'numpy.asarray', 'np.asarray', (['slices'], {... |
import numpy as np
import torch
import itertools
from torch.autograd import Variable
def getGridMask(frame, dimensions, num_person, neighborhood_size, grid_size, is_occupancy = False):
'''
This function computes the binary mask that represents the
occupancy of each ped in the other's grid
params:
... | [
"itertools.permutations",
"numpy.floor",
"numpy.zeros"
] | [((1441, 1480), 'itertools.permutations', 'itertools.permutations', (['list_indices', '(2)'], {}), '(list_indices, 2)\n', (1463, 1480), False, 'import itertools\n'), ((850, 881), 'numpy.zeros', 'np.zeros', (['(mnp, grid_size ** 2)'], {}), '((mnp, grid_size ** 2))\n', (858, 881), True, 'import numpy as np\n'), ((911, 94... |
import numpy as np
from envs.focal_point_task_us_env import FocalPointTaskUsEnv
from envs.plane_task_us_env import PlaneTaskUsEnv
from envs.phantom import (
ScatterersPhantom,
Ball,
Teddy
)
from envs.imaging import ImagingSystem, Probe
from envs.generator import (
ConstPhantomGenerator,
ConstProbeGe... | [
"envs.imaging.ImagingSystem",
"envs.generator.ConstProbeGenerator",
"envs.focal_point_task_us_env.FocalPointTaskUsEnv",
"envs.plane_task_us_env.PlaneTaskUsEnv",
"envs.generator.RandomProbeGenerator",
"numpy.array",
"envs.generator.ConstPhantomGenerator"
] | [((443, 624), 'envs.imaging.ImagingSystem', 'ImagingSystem', ([], {'c': '(1540)', 'fs': '(100000000.0)', 'image_width': '(40 / 1000)', 'image_height': '(90 / 1000)', 'image_resolution': '(40, 90)', 'median_filter_size': '(5)', 'dr_threshold': '(-200)', 'dec': '(1)', 'no_lines': '(64)'}), '(c=1540, fs=100000000.0, image... |
import numpy as np
from sklearn.neighbors import BallTree
from scipy.spatial.qhull import QhullError
from infomap import Infomap
from scipy.spatial import ConvexHull
from tqdm import tqdm
def pass_func(input, **kwargs):
return input
def query_neighbors(coords, r2, distance_metric='haversine', weighted=False):
... | [
"numpy.radians",
"infomap.Infomap",
"scipy.spatial.ConvexHull",
"numpy.array",
"numpy.zeros",
"sklearn.neighbors.BallTree"
] | [((1117, 1157), 'sklearn.neighbors.BallTree', 'BallTree', (['coords'], {'metric': 'distance_metric'}), '(coords, metric=distance_metric)\n', (1125, 1157), False, 'from sklearn.neighbors import BallTree\n'), ((1857, 1879), 'infomap.Infomap', 'Infomap', (['"""--two-level"""'], {}), "('--two-level')\n", (1864, 1879), Fals... |
"""
Derived module from filehandler.py to handle OpenFOAM files.
"""
import numpy as np
import pygem.filehandler as fh
class OpenFoamHandler(fh.FileHandler):
"""
OpenFOAM mesh file handler class.
:cvar string infile: name of the input file to be processed.
:cvar string outfile: name of the output fil... | [
"numpy.zeros"
] | [((1385, 1414), 'numpy.zeros', 'np.zeros', ([], {'shape': '(n_points, 3)'}), '(shape=(n_points, 3))\n', (1393, 1414), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2016-2018 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files ... | [
"numpy.trapz",
"silx.gui.qt.QApplication",
"silx.gui.plot.Plot1D",
"numpy.sum",
"silx.gui.plot.stats.stats.StatBase.__init__",
"numpy.arange"
] | [((3016, 3035), 'silx.gui.qt.QApplication', 'qt.QApplication', (['[]'], {}), '([])\n', (3031, 3035), False, 'from silx.gui import qt\n'), ((3048, 3056), 'silx.gui.plot.Plot1D', 'Plot1D', ([], {}), '()\n', (3054, 3056), False, 'from silx.gui.plot import Plot1D\n'), ((3066, 3082), 'numpy.arange', 'numpy.arange', (['(21)'... |
from PairedNeurons import PairedNeurons
from matplotlib import pyplot as plt
import os
import numpy as np
import cv2
from xlwt import Workbook
from skimage.segmentation import clear_border
SMOOTH = 1e-6
def iou_numpy(outputs: np.array, labels: np.array):
# outputs = outputs.squeeze(2)
intersection = (outpu... | [
"numpy.uint8",
"numpy.ones",
"cv2.threshold",
"os.path.join",
"skimage.segmentation.clear_border",
"matplotlib.pyplot.close",
"cv2.morphologyEx",
"PairedNeurons.PairedNeurons",
"cv2.distanceTransform",
"cv2.dilate",
"cv2.subtract",
"xlwt.Workbook",
"matplotlib.pyplot.subplots"
] | [((748, 800), 'PairedNeurons.PairedNeurons', 'PairedNeurons', (['img_dir', 'csv_dir', '(256)'], {'is_train': '(False)'}), '(img_dir, csv_dir, 256, is_train=False)\n', (761, 800), False, 'from PairedNeurons import PairedNeurons\n'), ((870, 880), 'xlwt.Workbook', 'Workbook', ([], {}), '()\n', (878, 880), False, 'from xlw... |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ###... | [
"mvpa2.base.dochelpers.borrowkwargs",
"mvpa2.measures.adhocsearchlightbase._STATS",
"numpy.unique",
"numpy.ones",
"mvpa2.misc.neighborhood.IndexQueryEngine",
"numpy.square",
"numpy.asanyarray",
"numpy.sum",
"numpy.zeros",
"numpy.empty",
"mvpa2.base.dochelpers._repr_attrs",
"numpy.argmin",
"m... | [((8166, 8243), 'mvpa2.base.dochelpers.borrowkwargs', 'borrowkwargs', (['M1NNSearchlight', '"""__init__"""'], {'exclude': "['roi_ids', 'queryengine']"}), "(M1NNSearchlight, '__init__', exclude=['roi_ids', 'queryengine'])\n", (8178, 8243), False, 'from mvpa2.base.dochelpers import borrowkwargs, _repr_attrs\n'), ((1046, ... |
import numpy as np
from sklearn.metrics import roc_curve, auc
from sklearn.metrics import confusion_matrix
from sklearn import preprocessing
from sklearn.preprocessing import LabelEncoder
# from IPython.display import Image,display
import matplotlib.pyplot as plt
data = []
labels = []
alldata = []
# XORdata=np.array([... | [
"matplotlib.pyplot.ylabel",
"sklearn.metrics.auc",
"numpy.array",
"sklearn.metrics.roc_curve",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.exp",
"matplotlib.pyplot.ylim",
"csv.reader",
"sklearn.metrics.confusion_matrix",
"sklearn.model_selection.train_test_split",
"numpy.argma... | [((9996, 10007), 'time.time', 'time.time', ([], {}), '()\n', (10005, 10007), False, 'import time\n'), ((10194, 10214), 'csv.reader', 'csv.reader', (['gpsTrack'], {}), '(gpsTrack)\n', (10204, 10214), False, 'import csv\n'), ((10513, 10542), 'sklearn.preprocessing.OneHotEncoder', 'preprocessing.OneHotEncoder', ([], {}), ... |
#!/usr/bin/env python3
"""Split PDFS by QR code and move images and PDFs to correct folder."""
import os
import traceback
import numpy
from . import write_to_log as logger
from . import submitty_ocr as scanner
# try importing required modules
try:
from PyPDF2 import PdfFileReader, PdfFileWriter
from pdf2imag... | [
"traceback.format_exc",
"cv2.threshold",
"os.path.join",
"os.chdir",
"numpy.array",
"pyzbar.pyzbar.decode",
"cv2.cvtColor",
"os.getpid",
"PyPDF2.PdfFileWriter",
"traceback.print_exc",
"PyPDF2.PdfFileReader"
] | [((463, 484), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (482, 484), False, 'import traceback\n'), ((854, 874), 'os.chdir', 'os.chdir', (['split_path'], {}), '(split_path)\n', (862, 874), False, 'import os\n'), ((894, 917), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['filename'], {}), '(filename)\n'... |
#coding=utf-8
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required... | [
"os.path.exists",
"paddle.fluid.dygraph.learning_rate_scheduler.ReduceLROnPlateau",
"os.listdir",
"paddle.fluid.dygraph.base.to_variable",
"paddle.fluid.layers.cross_entropy",
"paddle.fluid.layers.mean",
"opts.parse_opts",
"numpy.array",
"numpy.zeros",
"paddle.fluid.CUDAPlace",
"models.model.gen... | [((2083, 2095), 'opts.parse_opts', 'parse_opts', ([], {}), '()\n', (2093, 2095), False, 'from opts import parse_opts\n'), ((1326, 1362), 'os.path.exists', 'os.path.exists', (['opt.Flow_resume_path'], {}), '(opt.Flow_resume_path)\n', (1340, 1362), False, 'import os\n'), ((1432, 1464), 'os.listdir', 'os.listdir', (['opt.... |
from math import gamma
from typing import Dict, List, Tuple
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
from atcenv.MASAC.buffer import ReplayBuffer
from atcenv.MASAC.mactor_critic import Actor, CriticQ, CriticV
from torch.nn.utils.clip_gr... | [
"numpy.clip",
"torch.optim.Adam",
"torch.cuda.get_device_name",
"atcenv.MASAC.mactor_critic.Actor",
"numpy.prod",
"atcenv.MASAC.buffer.ReplayBuffer",
"torch.FloatTensor",
"numpy.array",
"atcenv.MASAC.mactor_critic.CriticV",
"torch.cuda.is_available",
"atcenv.MASAC.mactor_critic.CriticQ",
"torc... | [((700, 760), 'atcenv.MASAC.buffer.ReplayBuffer', 'ReplayBuffer', (['STATE_DIM', 'ACTION_DIM', 'BUFFER_SIZE', 'BATCH_SIZE'], {}), '(STATE_DIM, ACTION_DIM, BUFFER_SIZE, BATCH_SIZE)\n', (712, 760), False, 'from atcenv.MASAC.buffer import ReplayBuffer\n'), ((1207, 1261), 'torch.zeros', 'torch.zeros', (['(1)'], {'requires_... |
from matplotlib import pyplot as plt
import numpy as np
def generate_and_save_images(model, epoch, test_input):
# Notice `training` is set to False.
# This is so all layers run in inference mode (batchnorm).
predictions = model(test_input, training=False)
fig = plt.figure(figsize=(10,10))
for i in ra... | [
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.close",
"numpy.max",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.legend"
] | [((276, 304), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (286, 304), True, 'from matplotlib import pyplot as plt\n'), ((547, 558), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (556, 558), True, 'from matplotlib import pyplot as plt\n'), ((594, 621), 'ma... |
import numpy as np
import scipy.io as scio
import scipy.sparse as scsp
import h5py as hp
from util import read_mymat73, read_mymat, build_img_dataset, process_ad_dataset, mv_dataset, mv_tabular_collate, AverageMeter, save_roc_pr_curve_data, get_all_labels, \
load_print_results, filter_nan_grad, read_dataset, build_... | [
"models.encoder_decoder.mvae_ad",
"models.encoder_decoder.mvenc",
"torch.from_numpy",
"models.encoder_decoder.mvae_tf",
"torch.nn.MSELoss",
"models.encoder_decoder.mvae_ss",
"torch.cuda.is_available",
"torch.sum",
"util.process_ad_dataset",
"util.filter_nan_grad",
"models.DeepCCAModels.cca",
"... | [((1981, 2030), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['torch.DoubleTensor'], {}), '(torch.DoubleTensor)\n', (2010, 2030), False, 'import torch\n'), ((2818, 2827), 'torch.nn.MSELoss', 'MSELoss', ([], {}), '()\n', (2825, 2827), False, 'from torch.nn import MSELoss, CrossEntropyLoss\n'), ((81... |
# coding: utf-8
#########################################################################
# Name:
#
# Calculate equivalent potential temperature.
#
# Usage:
# example:
#
# Author: <NAME>
# Date: 2021/08/13
#########################################################################
import argparse
#from datetime import da... | [
"os.listdir",
"argparse.ArgumentParser",
"netCDF4.Dataset",
"os.path.join",
"math.log",
"numpy.array",
"os.path.abspath",
"re.search"
] | [((498, 523), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (521, 523), False, 'import argparse\n'), ((1132, 1147), 'netCDF4.Dataset', 'Dataset', (['ncfile'], {}), '(ncfile)\n', (1139, 1147), False, 'from netCDF4 import Dataset\n'), ((1419, 1462), 'numpy.array', 'np.array', (['[i for i in self... |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from numpy import asarray
from scipy.spatial import Voronoi
from scipy.spatial import Delaunay
__all__ = [
'delaunay_from_points_numpy',
'voronoi_from_points_numpy',
]
def delaunay_from_points_numpy... | [
"numpy.asarray",
"scipy.spatial.Voronoi",
"scipy.spatial.Delaunay"
] | [((955, 970), 'numpy.asarray', 'asarray', (['points'], {}), '(points)\n', (962, 970), False, 'from numpy import asarray\n'), ((979, 1000), 'scipy.spatial.Delaunay', 'Delaunay', (['xyz[:, 0:2]'], {}), '(xyz[:, 0:2])\n', (987, 1000), False, 'from scipy.spatial import Delaunay\n'), ((1315, 1330), 'numpy.asarray', 'asarray... |
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agr... | [
"collections.OrderedDict",
"oneflow.experimental.unittest.skip_unless_1n1d",
"numpy.array",
"test_util.GenArgList",
"unittest.main",
"oneflow.experimental.device"
] | [((2909, 2941), 'oneflow.experimental.unittest.skip_unless_1n1d', 'flow.unittest.skip_unless_1n1d', ([], {}), '()\n', (2939, 2941), True, 'import oneflow.experimental as flow\n'), ((786, 829), 'numpy.array', 'np.array', (['[[1, 2, 3], [4, 5, 6], [7, 8, 9]]'], {}), '([[1, 2, 3], [4, 5, 6], [7, 8, 9]])\n', (794, 829), Tr... |
from builtins import *
import argparse
import numpy as np
import os
from bnpy.ioutil.DataReader import loadDataFromSavedTask, loadLPKwargsFromDisk
from bnpy.ioutil.DataReader import loadKwargsFromDisk
from bnpy.ioutil.ModelReader import loadModelForLap
from bnpy.util import StateSeqUtil
from bnpy.birthmove.BCreateOneP... | [
"bnpy.ioutil.ModelReader.loadModelForLap",
"bnpy.ioutil.DataReader.loadDataFromSavedTask",
"numpy.unique",
"argparse.ArgumentParser",
"bnpy.ioutil.DataReader.loadKwargsFromDisk",
"bnpy.util.StateSeqUtil.alignEstimatedStateSeqToTruth",
"bnpy.birthmove.BLogger.configure",
"numpy.argmax",
"numpy.sum",
... | [((1424, 1461), 'bnpy.ioutil.ModelReader.loadModelForLap', 'loadModelForLap', (['taskoutpath', 'lapFrac'], {}), '(taskoutpath, lapFrac)\n', (1439, 1461), False, 'from bnpy.ioutil.ModelReader import loadModelForLap\n'), ((1473, 1524), 'bnpy.ioutil.DataReader.loadDataFromSavedTask', 'loadDataFromSavedTask', (['taskoutpat... |
import numpy as np
import random
from FuncionAptitud import fitness
lista = [0, 1, 2, 3, 4, 5, 6, 7] # these are the values the queen can take
poblacion = np.empty((50,8))
for i in range(50):
random.shuffle(lista)
for j in range(8):
poblacion[i, j] = lista[j]
def padres(conjunto):
r1 = ra... | [
"FuncionAptitud.fitness",
"random.random",
"numpy.empty",
"random.shuffle"
] | [((165, 182), 'numpy.empty', 'np.empty', (['(50, 8)'], {}), '((50, 8))\n', (173, 182), True, 'import numpy as np\n'), ((206, 227), 'random.shuffle', 'random.shuffle', (['lista'], {}), '(lista)\n', (220, 227), False, 'import random\n'), ((318, 333), 'random.random', 'random.random', ([], {}), '()\n', (331, 333), False, ... |
from __future__ import absolute_import
from __future__ import print_function
from pysnptools.util.mapreduce1.runner import *
import logging
import fastlmm.pyplink.plink as plink
import pysnptools.util as pstutil
import pysnptools.util.pheno as pstpheno
import numpy as np
from fastlmm.inference import LMM
import scipy.s... | [
"numpy.sqrt",
"pysnptools.util.create_directory_if_necessary",
"numpy.hstack",
"numpy.array",
"logging.info",
"pysnptools.util.pheno.loadOnePhen",
"numpy.arange",
"pysnptools.util.pheno.loadPhen",
"numpy.savez",
"numpy.exp",
"pysnptools.util.intersect_apply",
"doctest.testmod",
"numpy.ones",... | [((20159, 20170), 'time.time', 'time.time', ([], {}), '()\n', (20168, 20170), False, 'import time\n'), ((24987, 25004), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (25002, 25004), False, 'import doctest\n'), ((8978, 9073), 'pysnptools.util.intersect_apply', 'pstutil.intersect_apply', (['[self.test_snps, sel... |
from dataset import CovidImageDataset
from argparse import ArgumentParser
import torch
import torch.nn as nn
from model import VGG
import numpy as np
import os
from pytorch_lightning.utilities.seed import seed_everything
import random
def seed_worker(worker_id):
'''
https://pytorch.org/docs/stable/notes/rando... | [
"torch.nn.CrossEntropyLoss",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"torch.optim.lr_scheduler.CosineAnnealingLR",
"torch.initial_seed",
"os.makedirs",
"torch.max",
"os.path.join",
"random.seed",
"model.VGG",
"torch.cuda.is_available",
"numpy.random.seed",
"torch.use_determ... | [((609, 636), 'numpy.random.seed', 'np.random.seed', (['worker_seed'], {}), '(worker_seed)\n', (623, 636), True, 'import numpy as np\n'), ((641, 665), 'random.seed', 'random.seed', (['worker_seed'], {}), '(worker_seed)\n', (652, 665), False, 'import random\n'), ((2124, 2140), 'argparse.ArgumentParser', 'ArgumentParser'... |
import math
#######
import random
import cv2
import numpy as np
import matplotlib.pyplot as plt
from tensorpack.dataflow.imgaug.geometry import RotationAndCropValid
def crop_meta_image(image,annos,mask):
_target_height=368
_target_width =368
if len(np.shape(image))==2:
image = cv2.cvtColor(image, c... | [
"matplotlib.pyplot.imshow",
"random.uniform",
"cv2.warpAffine",
"cv2.resize",
"cv2.flip",
"random.randrange",
"cv2.copyMakeBorder",
"math.cos",
"numpy.random.randint",
"cv2.cvtColor",
"numpy.random.uniform",
"tensorpack.dataflow.imgaug.geometry.RotationAndCropValid.largest_rotated_rect",
"ma... | [((358, 373), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (366, 373), True, 'import numpy as np\n'), ((3920, 3935), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (3928, 3935), True, 'import numpy as np\n'), ((4659, 4744), 'cv2.resize', 'cv2.resize', (['image', '(_target_width, _target_height)'],... |
from __future__ import division
import random
import pprint
import sys
import time
import numpy as np
from optparse import OptionParser
import pickle
from keras import backend as K
from keras.optimizers import Adam, SGD, RMSprop
from keras.layers import Input
from keras.models import Model
from frcnn import config, dat... | [
"log.logger.exception",
"frcnn.resnet.base_net",
"frcnn.data_generators.get_anchor_gt",
"frcnn.roi_helpers.calc_iou",
"os.path.exists",
"numpy.mean",
"numpy.where",
"frcnn.resnet.rpn",
"keras.utils.plot_model",
"frcnn.losses.rpn_class_loss",
"frcnn.losses.rpn_regr_loss",
"keras.models.Model",
... | [((548, 562), 'optparse.OptionParser', 'OptionParser', ([], {}), '()\n', (560, 562), False, 'from optparse import OptionParser\n'), ((3065, 3080), 'frcnn.config.Config', 'config.Config', ([], {}), '()\n', (3078, 3080), False, 'from frcnn import config, data_generators\n'), ((4984, 5015), 'random.shuffle', 'random.shuff... |
"""
cobyladriver.py - Contains a driver that wraps the cobyla
optimizer as used in pyOpt:
Minimize a function using the Constrained Optimization BY Linear
Approximation (COBYLA) method.
COBYLA is gradient-free and can handle inequality constraints.
"""
from math import isnan
from numpy import zeros, array, hstack
... | [
"openmdao.main.datatypes.api.Float",
"numpy.hstack",
"openmdao.main.datatypes.api.Enum",
"openmdao.main.interfaces.implements",
"cobyla.cobyla.cobyla",
"openmdao.main.datatypes.api.Int",
"numpy.zeros",
"cobyla.cobyla.closeunit",
"openmdao.util.decorators.add_delegate",
"openmdao.main.datatypes.api... | [((837, 898), 'openmdao.util.decorators.add_delegate', 'add_delegate', (['HasParameters', 'HasIneqConstraints', 'HasObjective'], {}), '(HasParameters, HasIneqConstraints, HasObjective)\n', (849, 898), False, 'from openmdao.util.decorators import add_delegate\n'), ((1211, 1285), 'openmdao.main.interfaces.implements', 'i... |
import warnings
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import statsmodels.api as sm
import statsmodels.formula.api as smf
from statsmodels.stats.weightstats import DescrStatsW
from scipy.stats import norm
from zepid.causal.utils import (propensity_score, plot_kde, plot_love,
... | [
"numpy.sqrt",
"zepid.causal.utils.plot_love",
"zepid.causal.utils.plot_kde",
"numpy.nanmean",
"statsmodels.api.families.family.Binomial",
"zepid.causal.utils.positivity",
"statsmodels.api.families.family.Gaussian",
"statsmodels.formula.api.glm",
"zepid.causal.utils.propensity_score",
"numpy.where"... | [((8650, 8748), 'zepid.causal.utils.propensity_score', 'propensity_score', (['self.df', 'self._exp_model'], {'weights': 'self._weight_', 'print_results': 'print_results'}), '(self.df, self._exp_model, weights=self._weight_,\n print_results=print_results)\n', (8666, 8748), False, 'from zepid.causal.utils import prope... |
import argparse
import time
import cv2
import numpy as np
from estimator import TfPoseEstimator
from loguru import logger
from alfred.utils.log import init_logger
init_logger()
fps_time = 0
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='tf-pose-estimation Video')
parser.add_argum... | [
"argparse.ArgumentParser",
"loguru.logger.debug",
"loguru.logger.info",
"estimator.TfPoseEstimator.draw_humans",
"cv2.imshow",
"numpy.zeros",
"alfred.utils.log.init_logger",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.waitKey",
"time.time"
] | [((166, 179), 'alfred.utils.log.init_logger', 'init_logger', ([], {}), '()\n', (177, 179), False, 'from alfred.utils.log import init_logger\n'), ((1813, 1838), 'loguru.logger.debug', 'logger.debug', (['"""finished+"""'], {}), "('finished+')\n", (1825, 1838), False, 'from loguru import logger\n'), ((236, 299), 'argparse... |
import numpy as np
import nanonet.tb as tb
from test.test_hamiltonian_module import expected_bulk_silicon_band_structure
def test_simple_atomic_chain():
""" """
site_energy = -1.0
coupling = -1.0
l_const = 1.0
a = tb.Orbitals('A')
a.add_orbital(title='s', energy=-1, )
xyz_file = """1
... | [
"nanonet.tb.HamiltonianSp",
"numpy.sqrt",
"nanonet.tb.get_k_coords",
"nanonet.tb.set_tb_params",
"nanonet.tb.Orbitals",
"numpy.testing.assert_allclose",
"numpy.array",
"numpy.linspace",
"numpy.zeros",
"test.test_hamiltonian_module.expected_bulk_silicon_band_structure",
"numpy.cos"
] | [((237, 253), 'nanonet.tb.Orbitals', 'tb.Orbitals', (['"""A"""'], {}), "('A')\n", (248, 253), True, 'import nanonet.tb as tb\n'), ((397, 444), 'nanonet.tb.set_tb_params', 'tb.set_tb_params', ([], {'PARAMS_A_A': "{'ss_sigma': -1.0}"}), "(PARAMS_A_A={'ss_sigma': -1.0})\n", (413, 444), True, 'import nanonet.tb as tb\n'), ... |
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import numpy as np
from openvino.tools.mo.front.common.partial_infer.utils import shape_array, dynamic_dimension_value
from openvino.tools.mo.front.tf.common import tf_data_type_decode
from openvino.tools.mo.utils... | [
"openvino.tools.mo.front.common.partial_infer.utils.shape_array",
"numpy.full",
"numpy.array",
"openvino.tools.mo.utils.utils.refer_to_faq_msg",
"numpy.frombuffer",
"logging.error"
] | [((437, 530), 'openvino.tools.mo.front.common.partial_infer.utils.shape_array', 'shape_array', (['[(dim.size if dim.size >= 0 else dynamic_dimension_value) for dim in pb.dim]'], {}), '([(dim.size if dim.size >= 0 else dynamic_dimension_value) for\n dim in pb.dim])\n', (448, 530), False, 'from openvino.tools.mo.front... |
import os
import pandas as pd
import numpy as np
import datetime as dt
import sys
from datetime import datetime
import rasterio
import geopandas as gpd
pkg_dir = os.path.join(os.path.dirname(__file__),'..')
sys.path.insert(0, pkg_dir)
from ela.textproc import *
from ela.spatial import *
from ela.classification impor... | [
"rasterio.crs.CRS",
"sys.path.insert",
"numpy.reshape",
"ela.utils.flip",
"rasterio.transform.from_origin",
"ela.io.GeotiffExporter",
"os.path.join",
"os.path.dirname",
"numpy.zeros",
"numpy.array",
"numpy.isnan",
"numpy.empty",
"numpy.full",
"geopandas.GeoDataFrame",
"numpy.arange"
] | [((209, 236), 'sys.path.insert', 'sys.path.insert', (['(0)', 'pkg_dir'], {}), '(0, pkg_dir)\n', (224, 236), False, 'import sys\n'), ((176, 201), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (191, 201), False, 'import os\n'), ((1893, 1918), 'numpy.isnan', 'np.isnan', (['predicted[1, 1]'], {}... |
from hybrid_astar_planner.HybridAStar.hybrid_astar_wrapper \
import apply_hybrid_astar
import numpy as np
from pylot.planning.planner import Planner
class HybridAStarPlanner(Planner):
"""Wrapper around the Hybrid A* planner.
Note:
Details can be found at `Hybrid A* Planner`_.
Args:
... | [
"hybrid_astar_planner.HybridAStar.hybrid_astar_wrapper.apply_hybrid_astar",
"numpy.deg2rad"
] | [((2491, 2552), 'hybrid_astar_planner.HybridAStar.hybrid_astar_wrapper.apply_hybrid_astar', 'apply_hybrid_astar', (['initial_conditions', 'self._hyperparameters'], {}), '(initial_conditions, self._hyperparameters)\n', (2509, 2552), False, 'from hybrid_astar_planner.HybridAStar.hybrid_astar_wrapper import apply_hybrid_a... |
import sys, os
import time
import numpy as np
import torch
import torch.nn as nn
from torch.utils import data
from parsers import parse_a3m, read_templates
from RoseTTAFoldModel import RoseTTAFoldModule_e2e
import util
from collections import namedtuple
from ffindex import *
from kinematics import xyz_to_c6d, c6d_to_b... | [
"torch.from_numpy",
"numpy.array",
"torch.cuda.is_available",
"torch.sum",
"sys.exit",
"numpy.arange",
"torch.arange",
"os.path.exists",
"argparse.ArgumentParser",
"RoseTTAFoldModel.RoseTTAFoldModule_e2e",
"numpy.ix_",
"torch.cuda.amp.autocast",
"numpy.concatenate",
"parsers.parse_a3m",
... | [((1975, 2067), 'numpy.array', 'np.array', (['[[-0.676, -1.294, 0.0], [0.0, 0.0, 0.0], [1.5, -0.174, 0.0]]'], {'dtype': 'np.float32'}), '([[-0.676, -1.294, 0.0], [0.0, 0.0, 0.0], [1.5, -0.174, 0.0]],\n dtype=np.float32)\n', (1983, 2067), True, 'import numpy as np\n'), ((2214, 2230), 'numpy.deg2rad', 'np.deg2rad', ([... |
import numpy as np
import pytest
import pandas as pd
from pandas import (
DatetimeIndex,
Index,
)
import pandas._testing as tm
dtlike_dtypes = [
np.dtype("timedelta64[ns]"),
np.dtype("datetime64[ns]"),
pd.DatetimeTZDtype("ns", "Asia/Tokyo"),
pd.PeriodDtype("ns"),
]
@pytest.... | [
"pandas.DatetimeIndex",
"pandas.Index",
"pytest.mark.parametrize",
"numpy.array",
"pandas._testing.assert_numpy_array_equal",
"pandas.PeriodDtype",
"numpy.dtype",
"pandas.DatetimeTZDtype",
"numpy.arange"
] | [((313, 361), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ldtype"""', 'dtlike_dtypes'], {}), "('ldtype', dtlike_dtypes)\n", (336, 361), False, 'import pytest\n'), ((364, 412), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""rdtype"""', 'dtlike_dtypes'], {}), "('rdtype', dtlike_dtypes)\n", (3... |
import numpy as np
from src.dqn.replay.memory import Memory
from src.dqn.replay.sum_tree import SumTree
class PERMemory(Memory):
def __init__(self, size, state_size, alpha, beta, epsilon, beta_grow):
super().__init__(size, state_size)
self.alpha = alpha
self.beta = beta
self.epsil... | [
"numpy.abs",
"numpy.power",
"numpy.empty",
"numpy.random.uniform",
"src.dqn.replay.sum_tree.SumTree"
] | [((424, 442), 'src.dqn.replay.sum_tree.SumTree', 'SumTree', (['self.size'], {}), '(self.size)\n', (431, 442), False, 'from src.dqn.replay.sum_tree import SumTree\n'), ((840, 879), 'numpy.empty', 'np.empty', (['(batch_size, self.state_size)'], {}), '((batch_size, self.state_size))\n', (848, 879), True, 'import numpy as ... |