| code (string, lengths 31 to 1.05M) | apis (list) | extract_api (string, lengths 97 to 1.91M) |
|---|---|---|
from dataclasses import dataclass
from typing import List, NamedTuple
import numpy as np
from generic_search import bfsCave, nodeToPath
wall = "#"
emptySpace = "."
class GridLocation(NamedTuple):
column: int
row: int
def __lt__(self, other):
return self.row < other.row or \
self.row... | [
"numpy.full",
"generic_search.nodeToPath"
] | [((6315, 6367), 'numpy.full', 'np.full', (['(puzzleHeight, puzzleWidth)', '"""."""'], {'dtype': 'str'}), "((puzzleHeight, puzzleWidth), '.', dtype=str)\n", (6322, 6367), True, 'import numpy as np\n'), ((4063, 4083), 'generic_search.nodeToPath', 'nodeToPath', (['solution'], {}), '(solution)\n', (4073, 4083), False, 'fro... |
# -*- coding: utf-8 -*-
import cv2
import sys
import numpy as np
import argparse
imagePath = "img.png"
sx = sy = None
previewImage = None
if len(sys.argv) < 3:
print("""
Usage:
python mouseInteractive -i img.png
""")
sys.exit(-1)
if sys.argv[1]=="-i":
imagePath = sys.argv[2]
def cre... | [
"cv2.line",
"cv2.circle",
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.zeros",
"cv2.imread",
"cv2.setMouseCallback",
"sys.exit",
"cv2.moveWindow",
"cv2.imshow",
"cv2.namedWindow"
] | [((1089, 1110), 'cv2.imread', 'cv2.imread', (['imagePath'], {}), '(imagePath)\n', (1099, 1110), False, 'import cv2\n'), ((1112, 1135), 'cv2.namedWindow', 'cv2.namedWindow', (['"""demo"""'], {}), "('demo')\n", (1127, 1135), False, 'import cv2\n'), ((1136, 1162), 'cv2.namedWindow', 'cv2.namedWindow', (['"""preview"""'], ... |
import numpy
class channel_noise_simulator:
"""Class to hold usefull funktions to simulate noise in a channel"""
def __init__(self):
return
# _____________create bits___________________
def create_random_bits_list(self, len):
"""create a random len bits long bitstring """
bi... | [
"numpy.random.randint",
"numpy.random.random"
] | [((381, 407), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (401, 407), False, 'import numpy\n'), ((601, 627), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(2)'], {}), '(0, 2)\n', (621, 627), False, 'import numpy\n'), ((1019, 1040), 'numpy.random.random', 'numpy.random.r... |
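The functions in the row above are truncated; as a loose illustration of the same idea (not the dataset author's code), the sketch below builds a random bit list with `numpy.random.randint` and flips each bit with a probability checked against `numpy.random.random`. The helper names and the flip probability are assumptions.

```python
import numpy

def random_bits(length):
    # Random 0/1 bits, mirroring the numpy.random.randint(0, 2) calls in the row above.
    return [numpy.random.randint(0, 2) for _ in range(length)]

def flip_bits_randomly(bits, p):
    # Hypothetical helper: flip each bit independently with probability p.
    return [1 - b if numpy.random.random() < p else b for b in bits]

bits = random_bits(16)
noisy = flip_bits_randomly(bits, p=0.1)
print(bits)
print(noisy)
```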
import torch, add_path
import numpy as np
from baselines.all_baselines import Poly2SLS, Vanilla2SLS, DirectNN, \
GMM, DeepIV, AGMM
import os
import tensorflow
from MMR_IVs.util import ROOT_PATH, load_data
import random
random.seed(527)
def eval_model(model, test):
g_pred_test = model.predict(test.x)
mse = ... | [
"numpy.load",
"numpy.random.seed",
"numpy.mean",
"MMR_IVs.util.load_data",
"os.path.join",
"numpy.std",
"os.path.exists",
"baselines.all_baselines.DeepIV",
"tensorflow.set_random_seed",
"random.seed",
"baselines.all_baselines.DirectNN",
"numpy.save",
"torch.manual_seed",
"baselines.all_bas... | [((223, 239), 'random.seed', 'random.seed', (['(527)'], {}), '(527)\n', (234, 239), False, 'import random\n'), ((460, 528), 'numpy.savez', 'np.savez', (['save_path'], {'x': 'test.w', 'y': 'test.y', 'g_true': 'test.g', 'g_hat': 'g_pred'}), '(save_path, x=test.w, y=test.y, g_true=test.g, g_hat=g_pred)\n', (468, 528), Tru... |
# This code is written for a dynamic step size: the step size c0 gets smaller as it approaches 200.
#Author: <NAME>, Senior Research Fellow, University of Delhi
#Date: 5-07-2021
from math import *
import numpy as np
c0=50.0
for x in np.arange(c0,580,10):
t=10*(abs(200.1-c0)/200.1)*abs(np.log(0.3/abs(c0-200.1)... | [
"numpy.arange"
] | [((239, 261), 'numpy.arange', 'np.arange', (['c0', '(580)', '(10)'], {}), '(c0, 580, 10)\n', (248, 261), True, 'import numpy as np\n')] |
# Copyright (c) 2003-2019 by <NAME>
#
# TreeCorr is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions... | [
"numpy.sum",
"test_helper.assert_raises",
"treecorr.Catalog",
"numpy.mean",
"test_helper.get_from_wiki",
"os.path.join",
"numpy.unique",
"numpy.std",
"numpy.random.RandomState",
"numpy.max",
"numpy.testing.assert_allclose",
"numpy.average",
"numpy.testing.assert_array_equal",
"treecorr.set... | [((1012, 1040), 'test_helper.get_from_wiki', 'get_from_wiki', (['"""des_sv.fits"""'], {}), "('des_sv.fits')\n", (1025, 1040), False, 'from test_helper import get_from_wiki, CaptureLog, assert_raises, profile, timer\n'), ((1057, 1092), 'os.path.join', 'os.path.join', (['"""data"""', '"""des_sv.fits"""'], {}), "('data', ... |
from .. import Explanation
from ..utils import OpChain
from . import colors
import numpy as np
def convert_color(color):
try:
color = pl.get_cmap(color)
except:
pass
if color == "shap_red":
color = colors.red_rgb
elif color == "shap_blue":
color = colors.blue_rgb
... | [
"numpy.argsort",
"numpy.abs",
"numpy.array",
"numpy.delete"
] | [((1167, 1190), 'numpy.argsort', 'np.argsort', (['clust_order'], {}), '(clust_order)\n', (1177, 1190), True, 'import numpy as np\n'), ((3993, 4037), 'numpy.delete', 'np.delete', (['partition_tree_new', 'ptind'], {'axis': '(0)'}), '(partition_tree_new, ptind, axis=0)\n', (4002, 4037), True, 'import numpy as np\n'), ((46... |
import numpy as np
import tensorflow as tf
from read_data import get_X_y
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
import pickle
class NN():
def __init__(self, batch_size = 300, graph = tf.get_default_graph(),test_size = 0.1, steps_back=8, num_TCL=30):
self.... | [
"tensorflow.reshape",
"sklearn.preprocessing.MinMaxScaler",
"tensorflow.matmul",
"tensorflow.nn.conv2d",
"tensorflow.get_default_graph",
"tensorflow.nn.relu",
"tensorflow.concat",
"tensorflow.placeholder",
"numpy.reshape",
"tensorflow.nn.bias_add",
"tensorflow.losses.mean_squared_error",
"tens... | [((11564, 11609), 'read_data.get_X_y', 'get_X_y', ([], {'steps_back': '(7)', 'filename': '"""Q_data0.csv"""'}), "(steps_back=7, filename='Q_data0.csv')\n", (11571, 11609), False, 'from read_data import get_X_y\n'), ((239, 261), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (259, 261), True, ... |
'''
Compare the data where they overlap in the uv plane.
No offset correction is needed.
'''
from spectral_cube import SpectralCube
import numpy as np
import astropy.units as u
import matplotlib.pyplot as plt
import os
import scipy.ndimage as nd
from uvcombine.scale_factor import find_scale_factor
from cube_analys... | [
"numpy.exp",
"matplotlib.pyplot.tight_layout",
"os.path.join",
"scipy.ndimage.distance_transform_edt",
"numpy.zeros_like",
"uvcombine.scale_factor.find_scale_factor",
"cube_analysis.feather_cubes.feather_compare_cube",
"matplotlib.pyplot.close",
"plotting_styles.onecolumn_figure",
"numpy.isfinite"... | [((1775, 1805), 'os.path.join', 'os.path.join', (['data_path', '"""GBT"""'], {}), "(data_path, 'GBT')\n", (1787, 1805), False, 'import os\n'), ((2574, 2768), 'cube_analysis.feather_cubes.feather_compare_cube', 'feather_compare_cube', (['vla_cube', 'gbt_cube', 'las'], {'num_cores': '(1)', 'lowresfwhm': 'gbt_eff_beam', '... |
#!/usr/bin/env python3
# Set this to True to enable building extensions using Cython.
# Set it to False to build extensions from the C file (that
# was previously created using Cython).
# Set it to 'auto' to build with Cython if available, otherwise
# from the C file.
import sys
from setuptools import setup, find_pac... | [
"setuptools.Extension",
"Cython.Build.cythonize",
"distutils.command.sdist.sdist.run",
"numpy.get_include",
"setuptools.find_packages"
] | [((1910, 1946), 'setuptools.Extension', 'Extension', (['module.name', '[module.pyx]'], {}), '(module.name, [module.pyx])\n', (1919, 1946), False, 'from setuptools import setup, find_packages, Extension\n'), ((2070, 2104), 'setuptools.Extension', 'Extension', (['module.name', '[module.c]'], {}), '(module.name, [module.c... |
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.4'
# jupytext_version: 1.1.5
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# # s_checklist_scenariobased_step04 [<img src="https:/... | [
"matplotlib.pyplot.title",
"pandas.read_csv",
"numpy.ones",
"numpy.shape",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.figure",
"numpy.arange",
"matplotlib.pyplot.gca",
"numpy.sqrt",
"scipy.stats.t.cdf",
"arpym.statistics.quantile_sp",
"matplotlib.pyplot.yticks",
"arpym.tools.add_logo"... | [((763, 795), 'pandas.plotting.register_matplotlib_converters', 'register_matplotlib_converters', ([], {}), '()\n', (793, 795), False, 'from pandas.plotting import register_matplotlib_converters\n'), ((1126, 1153), 'numpy.datetime64', 'np.datetime64', (['"""2012-10-26"""'], {}), "('2012-10-26')\n", (1139, 1153), True, ... |
import os
import sys
sys.path.append(os.getcwd())
import tensorflow as tf
tf.get_logger().setLevel('ERROR')
from tensorflow import keras
from common.inputs.voc2010 import voc_parts
from common import layers, losses, utils, train, attacks
from common.ops.routing import activated_entropy, coupling_entropy
import numpy... | [
"tensorflow.keras.applications.VGG19",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.metrics.CategoricalAccuracy",
"common.attacks.evaluate_attacks_success_rate",
"tensorflow.keras.losses.CategoricalCrossentropy",
"common.train.Trainer",
"numpy.mean",
"config.parse_args",
"common.utils.get_shap... | [((384, 419), 'tensorflow.keras.regularizers.l2', 'keras.regularizers.l2', (['WEIGHT_DECAY'], {}), '(WEIGHT_DECAY)\n', (405, 419), False, 'from tensorflow import keras\n'), ((441, 471), 'tensorflow.keras.initializers.he_normal', 'keras.initializers.he_normal', ([], {}), '()\n', (469, 471), False, 'from tensorflow impor... |
from teca import *
import numpy as np
import sys
def get_request_callback(rank, var_names):
def request(port, md_in, req_in):
sys.stderr.write('descriptive_stats::request MPI %d\n'%(rank))
req = teca_metadata(req_in)
req['arrays'] = var_names
return [req]
return request
def get... | [
"numpy.average",
"numpy.std",
"numpy.percentile",
"numpy.max",
"numpy.min",
"sys.stderr.write"
] | [((139, 201), 'sys.stderr.write', 'sys.stderr.write', (["('descriptive_stats::request MPI %d\\n' % rank)"], {}), "('descriptive_stats::request MPI %d\\n' % rank)\n", (155, 201), False, 'import sys\n'), ((401, 463), 'sys.stderr.write', 'sys.stderr.write', (["('descriptive_stats::execute MPI %d\\n' % rank)"], {}), "('des... |
import numpy as np
def time_between_values(df, cols):
gap_df = df[cols].dropna(how='any')
return gap_df.index.to_series().diff(-1).dt.total_seconds().abs()
def distance_to_monitor(df):
dist = np.sqrt(
df.left_gaze_origin_in_user_coordinate_system_x ** 2
+ df.left_gaze_origin_in_user_coor... | [
"numpy.sqrt"
] | [((208, 389), 'numpy.sqrt', 'np.sqrt', (['(df.left_gaze_origin_in_user_coordinate_system_x ** 2 + df.\n left_gaze_origin_in_user_coordinate_system_y ** 2 + df.\n left_gaze_origin_in_user_coordinate_system_z ** 2)'], {}), '(df.left_gaze_origin_in_user_coordinate_system_x ** 2 + df.\n left_gaze_origin_in_user_co... |
import numpy as np
from datetime import datetime
from astropy.io import ascii
from astropy.time import Time
from argparse import ArgumentParser
from antares_client.search import search, download
def build_query(ra0, dec0, fov, date):
"""Generate a query (a Python dictionary) to submit to the ANTARES client.
... | [
"numpy.radians",
"antares_client.search.download",
"datetime.datetime.today",
"argparse.ArgumentParser",
"astropy.time.Time"
] | [((1882, 1898), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1896, 1898), False, 'from datetime import datetime\n'), ((1913, 1979), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Client API to query ANTARES alert DB"""'}), "(description='Client API to query ANTARES alert DB')\n... |
import copy
import numpy as np
PXL2CM = 0.035277778
def print_formatted_stats(stats):
"""
Print formatted results for result tables
"""
print("& {:.2f} & {:.2f} & {:.2f} & {:.2f} \\\\" .format(np.mean(stats['tracked_until_end_ratio']),
np... | [
"copy.deepcopy",
"numpy.abs",
"numpy.sum",
"numpy.median",
"numpy.asarray",
"numpy.mean"
] | [((1224, 1252), 'numpy.asarray', 'np.asarray', (['alignment_errors'], {}), '(alignment_errors)\n', (1234, 1252), True, 'import numpy as np\n'), ((1280, 1304), 'numpy.abs', 'np.abs', (['alignment_errors'], {}), '(alignment_errors)\n', (1286, 1304), True, 'import numpy as np\n'), ((1327, 1354), 'numpy.asarray', 'np.asarr... |
# Copyright 2021 Arm Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agree... | [
"tensorflow.keras.layers.Conv2D",
"tensorflow.pad",
"tensorflow.eye",
"numpy.zeros",
"models.quantize_utils.compute_ranges",
"tensorflow.transpose",
"tensorflow.constant",
"tensorflow.Variable",
"tensorflow.nn.conv2d",
"tensorflow.keras.Sequential",
"tensorflow.keras.layers.Layer.__init__",
"t... | [((2255, 2282), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (2274, 2282), True, 'import tensorflow as tf\n'), ((2789, 2825), 'tensorflow.keras.layers.Layer.__init__', 'tf.keras.layers.Layer.__init__', (['self'], {}), '(self)\n', (2819, 2825), True, 'import tensorflow as tf\n'),... |
from IPython import display
from torch.utils.data import DataLoader
from torchvision import transforms, datasets
import os
import tensorflow as tf
from tensorflow import nn, layers
from tensorflow.contrib import layers as clayers
import numpy as np
import errno
import torchvision.utils as vutils
from tensorboardX impo... | [
"tensorflow.trainable_variables",
"tensorflow.contrib.layers.flatten",
"tensorflow.reshape",
"tensorflow.zeros_like",
"torchvision.datasets.CIFAR10",
"matplotlib.pyplot.figure",
"numpy.random.normal",
"tensorflow.layers.conv2d_transpose",
"torchvision.transforms.Normalize",
"tensorflow.nn.leaky_re... | [((989, 1045), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(dataset, batch_size=batch_size, shuffle=True)\n', (999, 1045), False, 'from torch.utils.data import DataLoader\n'), ((3937, 3993), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], ... |
import csv
import logging
import numpy as np
import datajoint as dj
import pathlib
import scipy.io as scio
from tifffile import imread
from . import InsertBuffer
from .reference import ccf_ontology
from . import get_schema_name
schema = dj.schema(get_schema_name('ccf'))
log = logging.getLogger(__name__)
@schem... | [
"scipy.io.loadmat",
"pathlib.Path",
"numpy.where",
"tifffile.imread",
"datajoint.conn",
"logging.getLogger"
] | [((284, 311), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (301, 311), False, 'import logging\n'), ((1964, 1982), 'tifffile.imread', 'imread', (['stack_path'], {}), '(stack_path)\n', (1970, 1982), False, 'from tifffile import imread\n'), ((3995, 4057), 'scipy.io.loadmat', 'scio.loadmat'... |
import numpy as np
import pandas as pd
import sys
import os
from utils import DATA_DIR
class Dataset(object):
def __init__(self, DATA_NAME):
self.DATA_NAME = DATA_NAME
print("Initializing dataset:", DATA_NAME)
sys.stdout.flush()
data = pd.read_csv(os.path.join(DATA_DIR, "... | [
"numpy.sum",
"numpy.ones",
"numpy.array",
"sys.stdout.flush",
"numpy.random.choice",
"pandas.factorize",
"os.path.join"
] | [((249, 267), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (265, 267), False, 'import sys\n'), ((500, 536), 'pandas.factorize', 'pd.factorize', (["data['item_id'].values"], {}), "(data['item_id'].values)\n", (512, 536), True, 'import pandas as pd\n'), ((570, 606), 'pandas.factorize', 'pd.factorize', (["dat... |
import os
import random
import syft as sy
import pandas as pd
import numpy as np
from PIL import Image
from tqdm import tqdm
from torch import ( # pylint:disable=no-name-in-module
manual_seed,
stack,
cat,
std_mean,
save,
is_tensor,
from_numpy,
randperm,
default_generator,
)
from tor... | [
"numpy.isin",
"albumentations.Lambda",
"albumentations.RandomSunFlare",
"albumentations.GaussNoise",
"albumentations.Resize",
"pandas.read_csv",
"albumentations.RandomShadow",
"torch.cat",
"albumentations.RandomFog",
"numpy.mean",
"albumentations.Normalize",
"torch.std_mean",
"os.path.join",... | [((4313, 4474), 'torchvision.transforms.RandomAffine', 'transforms.RandomAffine', ([], {'degrees': 'args.rotation', 'translate': '(args.translate, args.translate)', 'scale': '(1.0 - args.scale, 1.0 + args.scale)', 'shear': 'args.shear'}), '(degrees=args.rotation, translate=(args.translate,\n args.translate), scale=(... |
import warnings
warnings.simplefilter("ignore", UserWarning)
warnings.simplefilter("ignore", FutureWarning)
import argparse
import os
import pandas as pd
import numpy as np
from torch import nn
from torch.utils.data import DataLoader
from tqdm import tqdm
from collections import defaultdict
from catalyst.utils impo... | [
"numpy.random.seed",
"pandas.DataFrame.from_dict",
"warnings.simplefilter",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"catalyst.utils.any2device",
"os.path.exists",
"collections.defaultdict",
"pytorch_toolbelt.utils.fs.change_extension",
"os.environ.get",
"pytorch_toolbelt.utils.... | [((18, 62), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'UserWarning'], {}), "('ignore', UserWarning)\n", (39, 62), False, 'import warnings\n'), ((63, 109), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'FutureWarning'], {}), "('ignore', FutureWarning)\n", (84, 109), False,... |
#! /usr/bin/env python3
import sys
import csv
import argparse
import numpy as np
import pandas as pd
from mll_calc.all_jobs import parent_jobs, kid_jobs
def row_calcs(ext_test):
if 'no' in ext_test:
#db_rows = 450240
#max_jobs = 9750
db_rows = 90048 * 4
max_jobs = 978 * 4
else:... | [
"numpy.arange",
"csv.writer",
"sys.exit"
] | [((993, 1059), 'sys.exit', 'sys.exit', (['"""total expected jobs does not equal one of db_row lists"""'], {}), "('total expected jobs does not equal one of db_row lists')\n", (1001, 1059), False, 'import sys\n'), ((414, 443), 'numpy.arange', 'np.arange', (['(0)', 'db_rows', 'n_rows'], {}), '(0, db_rows, n_rows)\n', (42... |
# import the needed packages
import pickle
from sklearn import preprocessing
import time
from os import listdir
from os.path import isfile, join
from random import randint, uniform
import numpy as np
from matplotlib import pyplot as plt
import cv2 as cv
from scipy import ndimage
from skimage import morphol... | [
"cv2.GaussianBlur",
"numpy.random.seed",
"cv2.bitwise_and",
"numpy.argmax",
"scipy.ndimage.binary_fill_holes",
"numpy.ones",
"numpy.exp",
"cv2.imshow",
"cv2.inRange",
"numpy.unique",
"pandas.DataFrame",
"cv2.contourArea",
"numpy.zeros_like",
"random.randint",
"cv2.cvtColor",
"scipy.ndi... | [((622, 640), 'numpy.random.seed', 'np.random.seed', (['(26)'], {}), '(26)\n', (636, 640), True, 'import numpy as np\n'), ((1152, 1169), 'numpy.unique', 'np.unique', (['target'], {}), '(target)\n', (1161, 1169), True, 'import numpy as np\n'), ((3827, 3865), 'numpy.exp', 'np.exp', (['(-((x - mean) ** 2 / (2 * var)))'], ... |
############################################################
# -*- coding: utf-8 -*-
#
# # # # # # ####
# ## ## # ## # #
# # # # # # # # # ###
# # ## # ## ## #
# # # # # # ####
#
# Python-based Tool for interaction with the 10micron mounts
# GUI with PyQT5 ... | [
"os.path.basename",
"os.getcwd",
"copy.copy",
"numpy.clip",
"astrometry.transform.Transform",
"numpy.interp",
"operator.itemgetter",
"logging.getLogger"
] | [((621, 648), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (638, 648), False, 'import logging\n'), ((727, 756), 'astrometry.transform.Transform', 'transform.Transform', (['self.app'], {}), '(self.app)\n', (746, 756), False, 'from astrometry import transform\n'), ((13679, 13807), 'numpy.... |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to... | [
"src.loss.Quadrupletloss",
"argparse.ArgumentParser",
"mindspore.train.callback.ModelCheckpoint",
"src.dataset.create_dataset1",
"moxing.file.copy_parallel",
"numpy.mean",
"mindspore.train.serialization.load_checkpoint",
"os.path.join",
"mindspore.train.serialization.load_param_into_net",
"mindspo... | [((1586, 1597), 'mindspore.common.set_seed', 'set_seed', (['(1)'], {}), '(1)\n', (1594, 1597), False, 'from mindspore.common import set_seed\n'), ((1608, 1667), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Image classification"""'}), "(description='Image classification')\n", (1631, 166... |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law... | [
"numpy.random.uniform",
"torch.ne",
"copy.deepcopy",
"common_utils.run_tests",
"util_test.create_common_tensor"
] | [((8284, 8295), 'common_utils.run_tests', 'run_tests', ([], {}), '()\n', (8293, 8295), False, 'from common_utils import TestCase, run_tests\n'), ((894, 918), 'torch.ne', 'torch.ne', (['input1', 'input2'], {}), '(input1, input2)\n', (902, 918), False, 'import torch\n'), ((1051, 1075), 'torch.ne', 'torch.ne', (['input1',... |
"""
Module containing the definitions and methods to compute
a variety of indices used to study ENSO
"""
from typing import List, Optional, Tuple
import numpy as np
import xarray as xr
from eofs.xarray import Eof
from .core import compute_anomaly, compute_climatology, xconvolve
class ECindex:
"""
Computes ... | [
"numpy.deg2rad",
"numpy.zeros",
"xarray.merge",
"numpy.array",
"numpy.sqrt"
] | [((2338, 2349), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (2346, 2349), True, 'import numpy as np\n'), ((3047, 3073), 'xarray.merge', 'xr.merge', (['[eindex, cindex]'], {}), '([eindex, cindex])\n', (3055, 3073), True, 'import xarray as xr\n'), ((3931, 3954), 'numpy.array', 'np.array', (['smooth_kernel'], {}), ... |
import unittest
from spn.algorithms.EM import EM_optimization
from spn.algorithms.Inference import log_likelihood
from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn
from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn
from spn.structure.Base import Context
fro... | [
"unittest.main",
"spn.algorithms.LearningWrappers.learn_parametric",
"numpy.random.seed",
"spn.algorithms.EM.EM_optimization",
"spn.algorithms.Inference.log_likelihood",
"numpy.array",
"numpy.random.normal",
"spn.structure.Base.Context"
] | [((1190, 1205), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1203, 1205), False, 'import unittest\n'), ((556, 574), 'numpy.random.seed', 'np.random.seed', (['(17)'], {}), '(17)\n', (570, 574), True, 'import numpy as np\n'), ((793, 894), 'spn.structure.Base.Context', 'Context', ([], {'meta_types': '([MetaType.RE... |
##-------------------------------------------
## 2 VARIABLE NORMAL DISTIBUTION
##-------------------------------------------
import matplotlib.pyplot as plt
import numpy as np
#USER INPUTS
FUNC=2
FS=18 #FONT SIZE
CMAP='hsv' #'RdYlBu'
#normal distribution param
ux=0.5; uy=0.0
sx=2.0; sy=1.0 #STD-DEV
rho=0.5... | [
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"numpy.array",
"numpy.exp",
"numpy.linspace",
"matplotlib.pyplot.subplots"
] | [((364, 386), 'numpy.array', 'np.array', (['[[ux], [uy]]'], {}), '([[ux], [uy]])\n', (372, 386), True, 'import numpy as np\n'), ((417, 483), 'numpy.array', 'np.array', (['[[sx ** 2.0, rho * sy * sx], [rho * sy * sx, sy ** 2.0]]'], {}), '([[sx ** 2.0, rho * sy * sx], [rho * sy * sx, sy ** 2.0]])\n', (425, 483), True, 'i... |
import torch
import torch.nn as nn
import torch.nn.functional as F
import pytorch_lightning as pl
from torchsummaryX import summary
from torch.nn.utils import weight_norm, remove_weight_norm
from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space
from typing im... | [
"numpy.random.seed",
"torch.sqrt",
"torch.cat",
"torch.randn",
"utils.get_padding",
"matplotlib.pyplot.figure",
"torch.rand_like",
"utils.load_config",
"torch.nn.functional.leaky_relu",
"torchsummaryX.summary",
"torch.no_grad",
"utils.walk_ratent_space",
"torch.nn.MSELoss",
"torch.nn.Conv1... | [((11251, 11266), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11264, 11266), False, 'import torch\n'), ((14800, 14834), 'utils.load_config', 'load_config', (['"""hparams/origin.json"""'], {}), "('hparams/origin.json')\n", (14811, 14834), False, 'from utils import load_config\n'), ((3380, 3395), 'torch.nn.Modul... |
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
Created on Tue Oct 6 16:23:04 2020
@author: Admin
"""
import numpy as np
import pandas as pd
import math
import os
from keras.layers import Dense
from keras.layers import LSTM
from keras.optimizers import Adam
from sklearn.preprocessing i... | [
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"pandas.read_csv",
"os.getcwd",
"keras.layers.LSTM",
"sklearn.preprocessing.MinMaxScaler",
"keras.optimizers.Adam",
"numpy.empty_like",
"keras.layers.Dense",
"numpy.array",
"numpy.reshape",
"keras.models.Sequential",
"sklearn.metrics.mean_s... | [((587, 638), 'pandas.read_csv', 'pd.read_csv', (['filepath'], {'usecols': '[1]', 'engine': '"""python"""'}), "(filepath, usecols=[1], engine='python')\n", (598, 638), True, 'import pandas as pd\n'), ((901, 942), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': 'feature_range'}), '(feature_r... |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.spatial.distance import pdist, squareform
from scipy.cluster.hierarchy import linkage, dendrogram
from sklearn.cluster import AgglomerativeClustering
# # Organizing clusters as a hierarchical tree
# ## Grouping clusters in bottom-up fas... | [
"pandas.DataFrame",
"numpy.random.seed",
"matplotlib.pyplot.show",
"numpy.random.random_sample",
"scipy.cluster.hierarchy.linkage",
"matplotlib.pyplot.figure",
"sklearn.cluster.AgglomerativeClustering",
"scipy.spatial.distance.pdist",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.tight_layout",
... | [((326, 345), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (340, 345), True, 'import numpy as np\n'), ((470, 518), 'pandas.DataFrame', 'pd.DataFrame', (['X'], {'columns': 'variables', 'index': 'labels'}), '(X, columns=variables, index=labels)\n', (482, 518), True, 'import pandas as pd\n'), ((1898,... |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 24 15:19:55 2020
@author: mi19356
"""
import numpy as np
import os
import pandas as pd
from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree
import random
import math
from scrape import vtk_scrap
from dataconversions import data_reco... | [
"dataconversions.data_reconstruct_dream",
"random.uniform",
"dataconversions.printtofiletext",
"xml.etree.ElementTree.Element",
"dataconversions.data_reconstruct",
"xml.etree.ElementTree.Comment",
"numpy.sin",
"numpy.cos",
"xml.etree.ElementTree.SubElement",
"numpy.dot",
"scrape.vtk_scrap",
"x... | [((547, 591), 'scrape.vtk_scrap', 'vtk_scrap', (['"""PF_00130000"""', '"""graindata"""', 'dream'], {}), "('PF_00130000', 'graindata', dream)\n", (556, 591), False, 'from scrape import vtk_scrap\n'), ((605, 655), 'dataconversions.data_reconstruct', 'data_reconstruct', (['vtkdata', 'vtkdataPoints', '(1)', 'orien'], {}), ... |
"""
Use this script to post-process the predicted softmax segmentation.
This script performs rigid register of the softmax prediction to the subject space.
@author: <NAME> (<EMAIL>)
"""
import os
from argparse import ArgumentParser
import numpy as np
import nibabel as nib
parser = ArgumentParser()
parser.add_argumen... | [
"nibabel.Nifti1Image",
"os.mkdir",
"numpy.sum",
"argparse.ArgumentParser",
"nibabel.load",
"os.path.exists",
"os.system",
"nibabel.save",
"os.path.split",
"os.path.join"
] | [((285, 301), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (299, 301), False, 'from argparse import ArgumentParser\n'), ((982, 1031), 'os.path.join', 'os.path.join', (['output_dir', "('%s_inv.txt' % aff_name)"], {}), "(output_dir, '%s_inv.txt' % aff_name)\n", (994, 1031), False, 'import os\n'), ((1131... |
""" Construct dataset """
import sys
import math
import pandas as pd
import numpy as np
import csv
def calc_gaps(station):
"""Calculate gaps in time series"""
df = pd.read_csv('../data/all_clean/{0}-clean.txt'.format(station), parse_dates=['Date'])
df = df.set_index(['Date'])
df.index = pd.to_datetim... | [
"numpy.timedelta64",
"pandas.to_datetime",
"math.floor"
] | [((307, 331), 'pandas.to_datetime', 'pd.to_datetime', (['df.index'], {}), '(df.index)\n', (321, 331), True, 'import pandas as pd\n'), ((1041, 1083), 'math.floor', 'math.floor', (['(total_gap / 3600 / 24 / 365.25)'], {}), '(total_gap / 3600 / 24 / 365.25)\n', (1051, 1083), False, 'import math\n'), ((1095, 1137), 'math.f... |
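A rough, self-contained sketch of the gap calculation outlined in the row above, assuming the frame has a DatetimeIndex; the example data and the conversion to whole years follow the calls visible in the extract, but the details are illustrative, not the original script.

```python
import math
import pandas as pd

def gap_years(df):
    # Assumes df has a DatetimeIndex; gaps are measured between consecutive rows.
    idx = pd.to_datetime(df.index).to_series()
    gap_seconds = idx.diff().dt.total_seconds().abs()
    total_gap = gap_seconds.sum()
    # Convert the accumulated gap to whole years, as in the row above.
    return math.floor(total_gap / 3600 / 24 / 365.25)

# Hypothetical two-point series roughly one year apart.
df = pd.DataFrame({"value": [1.0, 2.0]},
             index=pd.to_datetime(["2000-01-01", "2001-01-01"]))
print(gap_years(df))  # -> 1
```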
import numpy as np
import matplotlib.pyplot as plt
import keras
import keras.layers as klayers
import time_series as tsutils
import processing
import metrics
class ModelBase(object):
# Required 'context' information for a model
input_window = None
# How many point the model can predict for a single give... | [
"time_series.free_run_batch",
"processing.StandardScaler",
"keras.Sequential",
"keras.layers.Flatten",
"numpy.genfromtxt",
"keras.layers.Conv1D",
"metrics.evaluate",
"keras.layers.Dense",
"numpy.squeeze"
] | [((1529, 1548), 'numpy.genfromtxt', 'np.genfromtxt', (['path'], {}), '(path)\n', (1542, 1548), True, 'import numpy as np\n'), ((2123, 2199), 'time_series.free_run_batch', 'tsutils.free_run_batch', (['model.predict', 'ctx', 'predict_points', 'ts'], {'batch_size': '(1)'}), '(model.predict, ctx, predict_points, ts, batch_... |
import bisect
import operator
import numpy as np
import torch
from torch.utils import data
from multilayer_perceptron import *
from utils import *
def preprocess_weights(weights):
w_later = np.abs(weights[-1])
w_input = np.abs(weights[0])
for i in range(len(weights) - 2, 0, -1):
w_later = np.matm... | [
"numpy.minimum",
"numpy.abs",
"numpy.sum",
"numpy.array",
"torch.device",
"operator.itemgetter",
"bisect.insort"
] | [((196, 215), 'numpy.abs', 'np.abs', (['weights[-1]'], {}), '(weights[-1])\n', (202, 215), True, 'import numpy as np\n'), ((230, 248), 'numpy.abs', 'np.abs', (['weights[0]'], {}), '(weights[0])\n', (236, 248), True, 'import numpy as np\n'), ((3519, 3538), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n"... |
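The loop body in `preprocess_weights` above is cut off. The sketch below shows one common way such a computation is finished: chaining absolute weight matrices from the output layer back to the input to get a crude per-feature importance score for an MLP. The matmul order, the final reduction, and the example shapes are assumptions, not the dataset author's code.

```python
import numpy as np

def importance_from_weights(weights):
    # weights[i]: matrix of layer i, shaped (out_i, in_i); purely illustrative.
    w_later = np.abs(weights[-1])
    for w in reversed(weights[1:-1]):
        # Chain absolute weights from the output layer back towards the input.
        w_later = np.matmul(w_later, np.abs(w))
    w_input = np.abs(weights[0])
    scores = np.matmul(w_later, w_input)  # (n_outputs, n_inputs)
    return scores.sum(axis=0)              # one score per input feature

# Tiny example: 3 inputs -> 4 hidden units -> 2 outputs.
rng = np.random.default_rng(0)
weights = [rng.normal(size=(4, 3)), rng.normal(size=(2, 4))]
print(importance_from_weights(weights))
```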
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import OneHotEncoder
import keras
from keras import backend as K
from keras.models import Sequential
from keras.layers import Activation
from keras.layers.core import Dense
from keras.optimizers import Adam... | [
"keras.layers.core.Dense",
"pandas.read_csv",
"numpy.savetxt",
"sklearn.preprocessing.MinMaxScaler",
"sklearn.preprocessing.OneHotEncoder",
"keras.optimizers.Adam"
] | [((826, 860), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 1)'}), '(feature_range=(0, 1))\n', (838, 860), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((1008, 1040), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'categories': '"""auto"""'}), "(categori... |
r'''This dataloader is an attempt to make a master DL that provides 2 augmented versions
of a sparse clip (covering minimum 64 frames) and 2 augmented versions of 4 dense clips
(covering 16 frames temporal span minimum)'''
import os
import torch
import numpy as np
import matplotlib.pyplot as plt
from torch.utils.data imp... | [
"torchvision.transforms.functional.to_tensor",
"random.shuffle",
"torchvision.transforms.functional.adjust_saturation",
"numpy.random.randint",
"os.path.join",
"torch.utils.data.DataLoader",
"torchvision.transforms.functional.hflip",
"torchvision.transforms.ToPILImage",
"torchvision.transforms.funct... | [((16132, 16163), 'torch.stack', 'torch.stack', (['sparse_clip'], {'dim': '(0)'}), '(sparse_clip, dim=0)\n', (16143, 16163), False, 'import torch\n'), ((16182, 16213), 'torch.stack', 'torch.stack', (['dense_clip0'], {'dim': '(0)'}), '(dense_clip0, dim=0)\n', (16193, 16213), False, 'import torch\n'), ((16232, 16263), 't... |
# General
import numpy as np
import random
import argparse
import json
import commentjson
import joblib
import os
import pathlib
from collections import OrderedDict
# Pytorch
import torch
import pytorch_lightning as pl
from pytorch_lightning.loggers import TensorBoardLogger
from pytorch_lightning.callbacks import Mode... | [
"pytorch_lightning.Trainer",
"numpy.random.seed",
"argparse.ArgumentParser",
"joblib.dump",
"json.dumps",
"pathlib.Path",
"torch.autograd.set_detect_anomaly",
"optuna.integration.PyTorchLightningPruningCallback",
"os.path.join",
"optuna.samplers.TPESampler",
"random.randint",
"os.path.exists",... | [((602, 666), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Learn subgraph embeddings"""'}), "(description='Learn subgraph embeddings')\n", (625, 666), False, 'import argparse\n'), ((2974, 3016), 'torch.manual_seed', 'torch.manual_seed', (["hyperparameters['seed']"], {}), "(hyperparamet... |
import numpy as np
import est_dir
def test_1():
"""
Test for compute_forward() - check for flag=True.
"""
np.random.seed(90)
m = 10
f = est_dir.quad_f_noise
const_back = 0.5
const_forward = (1 / const_back)
minimizer = np.ones((m,))
centre_point = np.random.unif... | [
"numpy.random.seed",
"est_dir.combine_tracking",
"numpy.ones",
"est_dir.backward_tracking",
"numpy.round",
"est_dir.compute_direction_LS",
"numpy.copy",
"est_dir.compute_coeffs",
"numpy.identity",
"est_dir.forward_tracking",
"est_dir.compute_direction_XY",
"numpy.min",
"numpy.linalg.inv",
... | [((134, 152), 'numpy.random.seed', 'np.random.seed', (['(90)'], {}), '(90)\n', (148, 152), True, 'import numpy as np\n'), ((272, 285), 'numpy.ones', 'np.ones', (['(m,)'], {}), '((m,))\n', (279, 285), True, 'import numpy as np\n'), ((306, 336), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(20)', '(m,)'], {}), ... |
"""
Notes
-----
This test and docs/source/usage/iss/iss_cli.sh test the same code paths and should be updated
together
"""
import os
import unittest
import numpy as np
import pandas as pd
import pytest
from starfish.test.full_pipelines.cli._base_cli_test import CLITest
from starfish.types import Features
EXPERIMENT... | [
"os.path.join",
"pandas.Series",
"numpy.unique"
] | [((6573, 6639), 'numpy.unique', 'np.unique', (['intensities.coords[Features.TARGET]'], {'return_counts': '(True)'}), '(intensities.coords[Features.TARGET], return_counts=True)\n', (6582, 6639), True, 'import numpy as np\n'), ((6675, 6699), 'pandas.Series', 'pd.Series', (['counts', 'genes'], {}), '(counts, genes)\n', (6... |
from mayavi import mlab as mayalab
import numpy as np
import os
def plot_pc(pcs,color=None,scale_factor=.05,mode='point'):
if color == 'red':
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=(1,0,0))
print("color",color)
elif color == 'blue':
mayalab.points3d(pcs[:... | [
"numpy.load",
"mayavi.mlab.quiver3d",
"mayavi.mlab.show",
"mayavi.mlab.points3d",
"numpy.array",
"numpy.tile",
"numpy.eye",
"os.path.join"
] | [((1488, 1514), 'numpy.tile', 'np.tile', (['origin_pc', '(3, 1)'], {}), '(origin_pc, (3, 1))\n', (1495, 1514), True, 'import numpy as np\n'), ((2297, 2311), 'mayavi.mlab.show', 'mayalab.show', ([], {}), '()\n', (2309, 2311), True, 'from mayavi import mlab as mayalab\n'), ((2695, 2712), 'numpy.load', 'np.load', (['save_... |
import numpy as np
import torch
from agent.heuristics.util import get_agent_turn, wrapper, get_days, \
get_recent_byr_offers, get_last_norm
from agent.const import DELTA_SLR, NUM_COMMON_CONS
class HeuristicSlr:
def __init__(self, delta=None):
self.patient = np.isclose(delta, DELTA_SLR[-1])
def __... | [
"agent.heuristics.util.get_agent_turn",
"agent.heuristics.util.get_days",
"agent.heuristics.util.get_last_norm",
"numpy.isclose",
"torch.zeros",
"agent.heuristics.util.wrapper",
"agent.heuristics.util.get_recent_byr_offers"
] | [((276, 308), 'numpy.isclose', 'np.isclose', (['delta', 'DELTA_SLR[-1]'], {}), '(delta, DELTA_SLR[-1])\n', (286, 308), True, 'import numpy as np\n'), ((465, 495), 'agent.heuristics.util.get_agent_turn', 'get_agent_turn', ([], {'x': 'x', 'byr': '(False)'}), '(x=x, byr=False)\n', (479, 495), False, 'from agent.heuristics... |
from abc import abstractmethod
from numpy import random
from rec.base import ParametrizedObject
from rec.dataset.dataset import Dataset
class DatasetSplitter(ParametrizedObject):
@abstractmethod
def split(self, dataset):
assert isinstance(dataset, Dataset)
pass
def _prepare_target_datas... | [
"rec.dataset.dataset.Dataset",
"numpy.random.shuffle"
] | [((356, 377), 'rec.dataset.dataset.Dataset', 'Dataset', (['dataset.name'], {}), '(dataset.name)\n', (363, 377), False, 'from rec.dataset.dataset import Dataset\n'), ((393, 414), 'rec.dataset.dataset.Dataset', 'Dataset', (['dataset.name'], {}), '(dataset.name)\n', (400, 414), False, 'from rec.dataset.dataset import Data... |
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from sklearn.model_selection import train_test_split
def down_scale(x, scale=2):
# order 2 -> order 4
h = int(np.sqrt(x.shape[1]))
img = x.astype("float32").reshape(x.shape[0], h, h, 1)
scaled_img = t... | [
"tensorflow.sin",
"sklearn.model_selection.train_test_split",
"tensorflow.reshape",
"tensorflow.nn.avg_pool",
"tensorflow.examples.tutorials.mnist.input_data.read_data_sets",
"tensorflow.cos",
"numpy.concatenate",
"numpy.sqrt"
] | [((319, 418), 'tensorflow.nn.avg_pool', 'tf.nn.avg_pool', (['img'], {'ksize': '[1, scale, scale, 1]', 'strides': '[1, scale, scale, 1]', 'padding': '"""VALID"""'}), "(img, ksize=[1, scale, scale, 1], strides=[1, scale, scale, 1\n ], padding='VALID')\n", (333, 418), True, 'import tensorflow as tf\n'), ((506, 550), 't... |
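The row above average-pools flattened square images with `tf.nn.avg_pool`. The sketch below performs the same non-overlapping average pooling in plain NumPy (no TF1 session required), assuming the flattened width is a perfect square and divisible by `scale`.

```python
import numpy as np

def down_scale_np(x, scale=2):
    # x: (N, h*h) flattened square images; returns (N, (h // scale) ** 2).
    n = x.shape[0]
    h = int(np.sqrt(x.shape[1]))
    img = x.astype("float32").reshape(n, h, h)
    # Non-overlapping scale x scale average pooling; matches avg_pool with
    # ksize = strides = scale and 'VALID' padding when h % scale == 0.
    pooled = img.reshape(n, h // scale, scale, h // scale, scale).mean(axis=(2, 4))
    return pooled.reshape(n, -1)

x = np.arange(2 * 16, dtype=np.float32).reshape(2, 16)  # two 4x4 "images"
print(down_scale_np(x, scale=2).shape)  # (2, 4)
```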
"""
This is a pseudo-public API for downstream libraries. We ask that downstream
authors
1) Try to avoid using internals directly altogether, and failing that,
2) Use only functions exposed here (or in core.internals)
"""
from __future__ import annotations
from collections import defaultdict
from typing import Defa... | [
"pandas.core.internals.blocks.extract_pandas_array",
"pandas.core.dtypes.common.pandas_dtype",
"numpy.empty",
"pandas.core.internals.blocks.new_block",
"pandas.core.internals.managers.simple_blockify",
"pandas.core.dtypes.common.is_datetime64tz_dtype",
"pandas.core.internals.managers.BlockManager",
"c... | [((1554, 1595), 'pandas.core.internals.blocks.extract_pandas_array', 'extract_pandas_array', (['values', 'dtype', 'ndim'], {}), '(values, dtype, ndim)\n', (1574, 1595), False, 'from pandas.core.internals.blocks import Block, CategoricalBlock, DatetimeTZBlock, ExtensionBlock, check_ndim, ensure_block_shape, extract_pand... |
import numpy as np
from tensorflow import keras
import matplotlib.pyplot as plt
import os
import cv2
import random
import sklearn.model_selection as model_selection
import datetime
from model import createModel
from contextlib import redirect_stdout
categories = ["NonDemented", "MildDemented", "ModerateDemented", "Ver... | [
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"sklearn.model_selection.train_test_split",
"matplotlib.pyplot.legend",
"random.shuffle",
"tensorflow.keras.callbacks.ModelCheckpoint",
"model.createModel",
"tensorflow.keras.optimizers.Adam",
"numpy.array",
"matplotl... | [((1254, 1315), 'sklearn.model_selection.train_test_split', 'model_selection.train_test_split', (['data', 'labels'], {'test_size': '(0.2)'}), '(data, labels, test_size=0.2)\n', (1286, 1315), True, 'import sklearn.model_selection as model_selection\n'), ((1367, 1440), 'sklearn.model_selection.train_test_split', 'model_s... |
#!/usr/bin/env python
import sys
import os.path
from os.path import join as PJ
import re
import json
import numpy as np
from tqdm import tqdm
import igraph as ig
import jgf
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
def calcModularity(g):
if("Community" in g.vertex_attributes()):
Ci... | [
"numpy.sum",
"numpy.nan_to_num",
"numpy.mean",
"os.path.join",
"numpy.nanmean",
"numpy.std",
"numpy.power",
"numpy.isfinite",
"json.JSONEncoder.default",
"json.dump",
"tqdm.tqdm",
"numpy.average",
"jgf.igraph.save",
"numpy.percentile",
"matplotlib.use",
"jgf.igraph.load",
"sys.exit",... | [((200, 214), 'matplotlib.use', 'mpl.use', (['"""Agg"""'], {}), "('Agg')\n", (207, 214), True, 'import matplotlib as mpl\n'), ((8293, 8331), 'os.path.join', 'PJ', (['outputDirectory', '"""network.json.gz"""'], {}), "(outputDirectory, 'network.json.gz')\n", (8295, 8331), True, 'from os.path import join as PJ\n'), ((8695... |
####################################################################
# #
# MD_plotting_toolkit, #
# a python package to visualize the results obtained from MD #
# ... | [
"os.path.abspath",
"os.remove",
"MD_plotting_toolkit.data_processing.deduplicate_data",
"MD_plotting_toolkit.data_processing.scale_data",
"MD_plotting_toolkit.data_processing.read_2d_data",
"MD_plotting_toolkit.data_processing.analyze_data",
"os.path.isfile",
"numpy.diff",
"numpy.array",
"numpy.ar... | [((840, 883), 'os.path.join', 'os.path.join', (['current_path', '"""sample_inputs"""'], {}), "(current_path, 'sample_inputs')\n", (852, 883), False, 'import os\n'), ((898, 942), 'os.path.join', 'os.path.join', (['current_path', '"""sample_outputs"""'], {}), "(current_path, 'sample_outputs')\n", (910, 942), False, 'impo... |
from pytrigno import TrignoAccel
from pytrigno import TrignoEMG
from pytrigno import TrignoOrientation
import numpy as np
from scipy.spatial.transform import Rotation as R
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import time
#Reading one sensor accel data:
#t=TrignoAccel(channel_range=(0... | [
"matplotlib.pyplot.show",
"time.time",
"matplotlib.animation.FuncAnimation",
"numpy.shape",
"pytrigno.TrignoOrientation",
"scipy.spatial.transform.Rotation.from_quat",
"matplotlib.pyplot.subplots"
] | [((646, 734), 'pytrigno.TrignoOrientation', 'TrignoOrientation', ([], {'channel_range': '(0, orientation_channels - 1)', 'samples_per_read': '(100)'}), '(channel_range=(0, orientation_channels - 1),\n samples_per_read=100)\n', (663, 734), False, 'from pytrigno import TrignoOrientation\n'), ((943, 958), 'matplotlib.p... |
# ==============================================================================
# Copyright 2019 - <NAME>
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, ... | [
"pickle.dump",
"diplomacy_research.utils.tensorflow.tf.data.Iterator.from_structure",
"os.unlink",
"diplomacy_research.utils.tensorflow.tf.device",
"math.ceil",
"os.path.getsize",
"os.path.dirname",
"diplomacy_research.utils.tensorflow.tf.data.TFRecordDataset",
"os.path.exists",
"numpy.zeros",
"... | [((1125, 1152), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1142, 1152), False, 'import logging\n'), ((7732, 7829), 'os.path.join', 'os.path.join', (['self.checkpoint_dir', '"""status"""', "('status-%03d.pkl' % self.cluster_config.task_id)"], {}), "(self.checkpoint_dir, 'status', 'sta... |
import numpy as np
import uuid
import os
import pandas as pd
import psutil
import pickle
#import kde_info
#from lanfactory.config import
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import load_model
from tensorflow.python.client import device_lib
import warnings
from lanfactory.ut... | [
"pandas.DataFrame",
"numpy.random.choice",
"lanfactory.utils.try_gen_folder",
"numpy.log",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.callbacks.ReduceLROnPlateau",
"numpy.empty",
"tensorflow.keras.callbacks.ModelCheckpoint",
"tensorflow.keras.losses.Huber",
"uuid.uuid1",
"numpy.arange",
... | [((2206, 2327), 'numpy.arange', 'np.arange', (['(index % self.batches_per_file * self.batch_size)', '((index % self.batches_per_file + 1) * self.batch_size)', '(1)'], {}), '(index % self.batches_per_file * self.batch_size, (index % self.\n batches_per_file + 1) * self.batch_size, 1)\n', (2215, 2327), True, 'import n... |
"""
Sample data files with missing data create ancestors at many different time points,
often only one ancestor in each time point, which can cause difficulties parallelising
the inference. This script takes a sampledata file (usually containing missing data),
calculates the times-as-freq values, then bins them into fr... | [
"argparse.ArgumentParser",
"numpy.around",
"tsinfer.formats.allele_counts",
"tsinfer.load",
"numpy.unique"
] | [((446, 490), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (469, 490), False, 'import argparse\n'), ((1549, 1582), 'numpy.around', 'np.around', (['(times * sd.num_samples)'], {}), '(times * sd.num_samples)\n', (1558, 1582), True, 'import numpy as np\... |
#!/usr/bin/env python
#
# test_x5.py -
#
# Author: <NAME> <<EMAIL>>
#
import os.path as op
import numpy as np
import pytest
import h5py
import fsl.data.image as fslimage
import fsl.utils.tempdir as tempdir
import fsl.transform.affine as affine
import fsl.transform.fnirt as fnirt
import fsl.tr... | [
"fsl.transform.x5.readLinearX5",
"h5py.File",
"fsl.transform.nonlinear.convertDeformationSpace",
"os.path.dirname",
"fsl.data.image.Image",
"fsl.transform.x5.readNonLinearX5",
"numpy.isclose",
"numpy.random.randint",
"numpy.array",
"fsl.transform.x5.writeLinearX5",
"pytest.raises",
"numpy.rand... | [((646, 671), 'numpy.array', 'np.array', (["group['Matrix']"], {}), "(group['Matrix'])\n", (654, 671), True, 'import numpy as np\n'), ((1145, 1170), 'numpy.array', 'np.array', (["group['Matrix']"], {}), "(group['Matrix'])\n", (1153, 1170), True, 'import numpy as np\n'), ((2301, 2345), 'os.path.join', 'op.join', (['data... |
import numpy as np
import scipy.ndimage as nd
import torch
import torch.nn as nn
from torch.nn import functional as F
from .utils import dequeue_and_enqueue
def compute_rce_loss(predict, target):
from einops import rearrange
predict = F.softmax(predict, dim=1)
with torch.no_grad():
_, num_cls, ... | [
"numpy.partition",
"torch.log",
"torch.nn.CrossEntropyLoss",
"torch.cat",
"torch.nn.functional.cross_entropy",
"torch.nn.functional.softmax",
"torch.softmax",
"scipy.ndimage.zoom",
"torch.FloatTensor",
"torch.clamp",
"einops.rearrange",
"numpy.where",
"numpy.rollaxis",
"torch.zeros",
"to... | [((247, 272), 'torch.nn.functional.softmax', 'F.softmax', (['predict'], {'dim': '(1)'}), '(predict, dim=1)\n', (256, 272), True, 'from torch.nn import functional as F\n'), ((2693, 2720), 'torch.sort', 'torch.sort', (['prob_l', '(1)', '(True)'], {}), '(prob_l, 1, True)\n', (2703, 2720), False, 'import torch\n'), ((2834,... |
"""Tools used by the examples """
import numpy as np
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+"/../meep_tomo")
from meep_tomo import extract, common
import ex_bpg
def compute_metrices(tomo_path, approx, autofocus=False):
"""Compute RMS and TV metrices for a MEEP-simulat... | [
"os.path.abspath",
"numpy.load",
"numpy.abs",
"os.path.isdir",
"os.path.exists",
"meep_tomo.extract.get_tomo_ri_structure",
"meep_tomo.common.mkdir_p",
"ex_bpg.backpropagate_fdtd_data",
"numpy.arange",
"os.path.join",
"numpy.gradient",
"numpy.sqrt"
] | [((1119, 1145), 'os.path.abspath', 'os.path.abspath', (['tomo_path'], {}), '(tomo_path)\n', (1134, 1145), False, 'import os\n'), ((1154, 1178), 'os.path.isdir', 'os.path.isdir', (['tomo_path'], {}), '(tomo_path)\n', (1167, 1178), False, 'import os\n'), ((1989, 2014), 'os.path.exists', 'os.path.exists', (['metr_file'], ... |
import numpy as np
def apply_cross_fade(clips, cross_fade_ms, sr):
"""Concatenate audio clips with a cross fade."""
num_clips = len(clips)
cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000))
fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples
# if not is_even(cross_fade_sam... | [
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"numpy.floor",
"numpy.zeros",
"numpy.iinfo",
"numpy.arange"
] | [((579, 600), 'numpy.zeros', 'np.zeros', (['num_samples'], {}), '(num_samples)\n', (587, 600), True, 'import numpy as np\n'), ((1363, 1400), 'matplotlib.pyplot.plot', 'plt.plot', (['time_x', 'x'], {'label': '"""Original"""'}), "(time_x, x, label='Original')\n", (1371, 1400), True, 'import matplotlib.pyplot as plt\n'), ... |
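A minimal, runnable version of the linear cross fade described in the row above; the fade length and ramp follow the visible code, while the overlap handling and the example clips are illustrative assumptions rather than the original implementation.

```python
import numpy as np

def cross_fade_concat(clips, cross_fade_ms, sr):
    """Concatenate 1-D clips, overlapping neighbours by a linear cross fade."""
    n_fade = int(np.floor(cross_fade_ms * sr / 1000))
    ramp = np.arange(n_fade) / n_fade  # 0 -> 1 fade ramp, as in the row above
    out = clips[0].astype(np.float64)
    for clip in clips[1:]:
        clip = clip.astype(np.float64)
        # Mix the tail of `out` with the head of `clip`, then append the rest.
        mixed = out[-n_fade:] * (1 - ramp) + clip[:n_fade] * ramp
        out = np.concatenate([out[:-n_fade], mixed, clip[n_fade:]])
    return out

sr = 8000
a = np.ones(sr)   # 1 s of ones
b = np.zeros(sr)  # 1 s of zeros
y = cross_fade_concat([a, b], cross_fade_ms=100, sr=sr)
print(y.shape)  # (15200,): two 1 s clips minus the 0.1 s overlap
```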
import numpy as np
import cv2
import os
from conv import *
import multiprocessing
from multiprocessing import Pool
from itertools import product
from numba import njit
from functools import partial
import math
import sklearn
from sklearn import linear_model
def load_images_from_folder(folder):
image... | [
"functools.partial",
"numpy.asarray",
"numpy.zeros",
"sklearn.linear_model.LogisticRegression",
"numpy.max",
"numpy.array",
"multiprocessing.Pool",
"numpy.random.rand",
"multiprocessing.Process",
"os.path.join",
"os.listdir",
"cv2.resize",
"numpy.sqrt"
] | [((348, 366), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (358, 366), False, 'import os\n'), ((593, 611), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (603, 611), False, 'import os\n'), ((1960, 1967), 'multiprocessing.Pool', 'Pool', (['(4)'], {}), '(4)\n', (1964, 1967), False, 'from multi... |
import unittest
import numpy as np
from src.square_matrix_multiply import square_matrix_multiply
class TestStrassenMultiply(unittest.TestCase):
def test_square_1(self):
matrix_a = np.array([[1, 3],
[7, 5]])
matrix_b = np.array([[6, 8],
[4... | [
"numpy.array",
"src.square_matrix_multiply.square_matrix_multiply"
] | [((196, 222), 'numpy.array', 'np.array', (['[[1, 3], [7, 5]]'], {}), '([[1, 3], [7, 5]])\n', (204, 222), True, 'import numpy as np\n'), ((271, 297), 'numpy.array', 'np.array', (['[[6, 8], [4, 2]]'], {}), '([[6, 8], [4, 2]])\n', (279, 297), True, 'import numpy as np\n'), ((347, 377), 'numpy.array', 'np.array', (['[[18, ... |
import matplotlib.pyplot as plt
import numpy as np
from numpy.lib.function_base import angle
radius = 100 # curvature radius of the mirror in mm (must be positive)
angle_d = 30 # maximum angle of incidence of the incident beam in degrees
num_rays = 21 # number of rays
source_pos = 80 # source position in mm (mu... | [
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylim",
"numpy.isnan",
"matplotlib.pyplot.figure",
"numpy.sin",
"numpy.array",
"numpy.tan",
"numpy.linspace",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"n... | [((399, 433), 'numpy.linspace', 'np.linspace', (['(-radius)', 'radius', '(1000)'], {}), '(-radius, radius, 1000)\n', (410, 433), True, 'import numpy as np\n'), ((1428, 1455), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(13, 8)'}), '(figsize=(13, 8))\n', (1438, 1455), True, 'import matplotlib.pyplot as p... |
"""
Tests for the loading of surface maps for the GPROF-NN data processing.
"""
from datetime import datetime
import pytest
import numpy as np
from gprof_nn.data.surface import (read_land_mask,
read_autosnow,
read_emissivity_classes)
from gprof_nn.... | [
"gprof_nn.data.surface.read_land_mask",
"gprof_nn.data.surface.read_emissivity_classes",
"gprof_nn.data.surface.read_autosnow",
"numpy.isclose",
"pytest.mark.skipif",
"gprof_nn.data.preprocessor.has_preprocessor",
"numpy.all"
] | [((383, 401), 'gprof_nn.data.preprocessor.has_preprocessor', 'has_preprocessor', ([], {}), '()\n', (399, 401), False, 'from gprof_nn.data.preprocessor import has_preprocessor\n'), ((405, 477), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not HAS_PREPROCESSOR)'], {'reason': '"""Preprocessor missing."""'}), "(not HAS_... |
""" gyrodata.py
Run one motor with a sinusoidal speed input and an attached gyro.
This example shows how to use the gyro to measure angular position and velocity
by attaching it to the motor shaft.
Setup:
Connect one large motor to port 'A'
Connect the gyro sensor to port number 1.
Notes:
1. Remember ther... | [
"pyev3.brick.LegoEV3",
"time.perf_counter",
"pyev3.utils.plot_line",
"numpy.sin",
"pyev3.devices.Motor",
"pyev3.devices.Gyro",
"numpy.gradient"
] | [((1049, 1058), 'pyev3.brick.LegoEV3', 'LegoEV3', ([], {}), '()\n', (1056, 1058), False, 'from pyev3.brick import LegoEV3\n'), ((1067, 1087), 'pyev3.devices.Motor', 'Motor', (['ev3'], {'port': '"""A"""'}), "(ev3, port='A')\n", (1072, 1087), False, 'from pyev3.devices import Gyro, Motor\n'), ((1095, 1139), 'pyev3.device... |
import numpy as np
import pytest
from numpy.testing import assert_allclose
from pybraw import _pybraw, verify
class CapturingCallback(_pybraw.BlackmagicRawCallback):
def ReadComplete(self, job, result, frame):
self.frame = frame
def ProcessComplete(self, job, result, processed_image):
self.pr... | [
"numpy.testing.assert_allclose",
"numpy.transpose",
"pybraw._pybraw.VariantCreateU32",
"numpy.array",
"pytest.mark.parametrize"
] | [((701, 1005), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""format,max_val,is_planar,channels"""', '[(_pybraw.blackmagicRawResourceFormatBGRAU8, 2 ** 8, False, [2, 1, 0, 3]),\n (_pybraw.blackmagicRawResourceFormatRGBF32Planar, 1, True, [0, 1, 2]),\n (_pybraw.blackmagicRawResourceFormatRGBU16Planar,... |
import math
import time
import pickle
import sys
import os
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from datasets.data_utils import project_image_to_rect, compute_box_3d
def adjust_coord_for_view(points):
return points[:, [2, 0, 1]] * np.array([1, -1, -1])
def... | [
"datasets.data_utils.compute_box_3d",
"matplotlib.pyplot.show",
"matplotlib.pyplot.figure",
"numpy.arange",
"datasets.data_utils.project_image_to_rect",
"numpy.array"
] | [((872, 915), 'datasets.data_utils.compute_box_3d', 'compute_box_3d', (['center', 'dimension', 'angle', 'P'], {}), '(center, dimension, angle, P)\n', (886, 915), False, 'from datasets.data_utils import project_image_to_rect, compute_box_3d\n'), ((934, 955), 'numpy.arange', 'np.arange', (['(0)', '(70)', '(0.1)'], {}), '... |
import numpy as np
def integrate_displacement(displ_img_to_img):
"""Sum the image-to-image displacement value to
obtain image-to-reference displacement,
add zeros at the begining
Parameters
----------
displ_img_to_img : 3D array
3D array of shape `(nbr images - 1, nbr points, 2)`
... | [
"numpy.stack",
"numpy.full",
"numpy.zeros_like",
"numpy.ones_like",
"numpy.linalg.lstsq",
"numpy.transpose",
"numpy.ones",
"numpy.einsum",
"numpy.isnan",
"numpy.cumsum",
"numpy.vstack",
"numpy.linspace",
"numpy.matmul",
"numpy.eye",
"numpy.concatenate"
] | [((516, 565), 'numpy.concatenate', 'np.concatenate', (['[zeros, displ_img_to_img]'], {'axis': '(0)'}), '([zeros, displ_img_to_img], axis=0)\n', (530, 565), True, 'import numpy as np\n'), ((592, 621), 'numpy.cumsum', 'np.cumsum', (['displ_zero'], {'axis': '(0)'}), '(displ_zero, axis=0)\n', (601, 621), True, 'import nump... |
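The visible part of `integrate_displacement` (prepend a zero frame, then cumulative-sum over the image axis) can be reconstructed from the extract above; the sketch below follows those calls, with made-up array sizes for the example. The rest of the original file is truncated and not reproduced.

```python
import numpy as np

def integrate_displacement(displ_img_to_img):
    # displ_img_to_img: (nbr images - 1, nbr points, 2) image-to-image displacements.
    zeros = np.zeros_like(displ_img_to_img[:1])
    displ_zero = np.concatenate([zeros, displ_img_to_img], axis=0)
    # Cumulative sum over the image axis gives image-to-reference displacement.
    return np.cumsum(displ_zero, axis=0)

# Hypothetical data: 3 image-to-image steps for 2 tracked points.
steps = np.ones((3, 2, 2))
print(integrate_displacement(steps)[:, 0, 0])  # [0. 1. 2. 3.]
```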
import os
import sys
from datetime import datetime, timedelta
import numpy as np
data_path = "../../dat4figs_JAMES/Fig06"
os.makedirs( data_path, exist_ok=True )
USE_ARCH_DAT = True
#USE_ARCH_DAT = False
quick_hist = False
quick_bar = True
quick_bar = False
def d4_computation_time_nparray( top='' ):
dirs = [ ... | [
"numpy.load",
"numpy.argmax",
"matplotlib.pyplot.clf",
"numpy.isnan",
"os.path.isfile",
"numpy.mean",
"numpy.arange",
"os.path.join",
"numpy.nanmean",
"numpy.copy",
"numpy.std",
"matplotlib.pyplot.close",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show",
"numpy.savez",
"os.scandi... | [((124, 161), 'os.makedirs', 'os.makedirs', (['data_path'], {'exist_ok': '(True)'}), '(data_path, exist_ok=True)\n', (135, 161), False, 'import os\n'), ((1638, 1655), 'numpy.array', 'np.array', (['scale_l'], {}), '(scale_l)\n', (1646, 1655), True, 'import numpy as np\n'), ((2270, 2293), 'numpy.zeros', 'np.zeros', (['sc... |
#!/usr/bin/env python3
from __future__ import print_function
import sys
sys.path.append('./method')
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pints
import pints.io
import pints.plot
import model as m
import parametertransform
import priors
"""
Run fit.... | [
"numpy.random.seed",
"pints.GaussianLogLikelihood",
"sys.path.append",
"numpy.copy",
"numpy.std",
"matplotlib.pyplot.close",
"numpy.append",
"numpy.loadtxt",
"pints.LogPosterior",
"pints.io.save_samples",
"pints.plot.pairwise",
"importlib.import_module",
"pints.UniformLogPrior",
"os.path.b... | [((72, 99), 'sys.path.append', 'sys.path.append', (['"""./method"""'], {}), "('./method')\n", (87, 99), False, 'import sys\n'), ((147, 168), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (161, 168), False, 'import matplotlib\n'), ((672, 708), 'sys.path.append', 'sys.path.append', (['"""./mmt-mod... |
import tensorflow as tf
import numpy as np
input = tf.placeholder(dtype=tf.float32,shape=[5,5,3])
filter = tf.constant(value=1, shape=[3,3,3,5], dtype=tf.float32)
conv0 = tf.nn.atrous_conv2d(input,filters=filter,rate=2,padding='VALID')
with tf.Session() as sess:
img = np.array([3,5,5,3])
out = sess.run(conv0,... | [
"tensorflow.Session",
"tensorflow.constant",
"tensorflow.nn.atrous_conv2d",
"tensorflow.placeholder",
"numpy.array"
] | [((52, 101), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': 'tf.float32', 'shape': '[5, 5, 3]'}), '(dtype=tf.float32, shape=[5, 5, 3])\n', (66, 101), True, 'import tensorflow as tf\n'), ((108, 166), 'tensorflow.constant', 'tf.constant', ([], {'value': '(1)', 'shape': '[3, 3, 3, 5]', 'dtype': 'tf.float32'}),... |
#!/usr/bin/env python3
import numpy as np
import matplotlib
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
import sys, os, glob
import re
# Output data format
from configurations import *
design_pt_to_plot=2
#################################################################################
#### Try to figure... | [
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.yscale",
"matplotlib.pyplot.show",
"numpy.ceil",
"matplotlib.pyplot.ylim",
"numpy.dtype",
"matplotlib.pyplot.figure",
"glob.glob",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.errorb... | [((4351, 4373), 'glob.glob', 'glob.glob', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (4360, 4373), False, 'import sys, os, glob\n'), ((2720, 2772), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(2 * nb_of_cols, 2 * nb_of_rows)'}), '(figsize=(2 * nb_of_cols, 2 * nb_of_rows))\n', (2730, 2772), True, 'import ... |
from __future__ import division
import random
import pprint
import sys
import time
import numpy as np
from optparse import OptionParser
import pickle
import os
import re
import shutil
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.optimizers import Adam, SGD
from tensorflow.ker... | [
"pickle.dump",
"optparse.OptionParser",
"tensorflow.keras.metrics.CategoricalAccuracy",
"tensorflow.python.ops.numpy_ops.np_config.enable_numpy_behavior",
"tensorflow.keras.optimizers.SGD",
"numpy.random.randint",
"numpy.mean",
"pprint.pprint",
"keras_frcnn.losses.RpnClassificationLoss",
"keras_fr... | [((578, 606), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(40000)'], {}), '(40000)\n', (599, 606), False, 'import sys\n'), ((663, 696), 'tensorflow.python.ops.numpy_ops.np_config.enable_numpy_behavior', 'np_config.enable_numpy_behavior', ([], {}), '()\n', (694, 696), False, 'from tensorflow.python.ops.numpy_op... |
# Copyright (C) 2017-2019 <NAME>
# SPDX-License-Identifier: Apache-2.0
import dolfin
import numpy
from ocellaris import Simulation, setup_simulation
import pytest
from helpers import skip_in_parallel
ISO_INPUT = """
ocellaris:
type: input
version: 1.0
mesh:
type: Rectangle
Nx: 4
Ny: 4
probes:
- ... | [
"ocellaris.Simulation",
"numpy.arctan2",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"pytest.skip",
"dolfin.plot",
"matplotlib.pyplot.colorbar",
"numpy.diff",
"pytest.mark.parametrize",
"ocellaris.setup_simulation",
"dolfin.cells",
"matplotlib.pyplot.savefig"
] | [((772, 816), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""degree"""', '[0, 1, 2]'], {}), "('degree', [0, 1, 2])\n", (795, 816), False, 'import pytest\n'), ((1936, 1974), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""degree"""', '[1]'], {}), "('degree', [1])\n", (1959, 1974), False, 'import... |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import numpy as np
def load_octmi_dat(acquisitionName, basePath="."):
    # Check that the file exists
datFilePath = os.path.join(os.path.normpath(basePath), acquisitionName + "_MI.dat")
if not os.path.exists(datFilePath):
print("Co... | [
"numpy.zeros",
"os.path.exists",
"os.path.normpath"
] | [((206, 232), 'os.path.normpath', 'os.path.normpath', (['basePath'], {}), '(basePath)\n', (222, 232), False, 'import os\n'), ((274, 301), 'os.path.exists', 'os.path.exists', (['datFilePath'], {}), '(datFilePath)\n', (288, 301), False, 'import os\n'), ((951, 965), 'numpy.zeros', 'np.zeros', (['nval'], {}), '(nval)\n', (... |
#!\usr\bin\python
# coding=utf-8
# Author: youngfeng
# Update: 07/16/2018
"""
Flash, proposed by Nair et al. (arXiv '18), which aims to find the (near) optimal configuration in unevaluated set.
STEP 1: select 80%% of original data as dataset
STEP 2: split the dataset into training set (30 configs) and unevaluated set ... | [
"pandas.read_csv",
"random.shuffle",
"numpy.min",
"sklearn.tree.DecisionTreeRegressor"
] | [((1632, 1655), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {}), '()\n', (1653, 1655), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((2411, 2447), 'numpy.min', 'np.min', (['[sf[0] for sf in select_few]'], {}), '([sf[0] for sf in select_few])\n', (2417, 2447), True, 'import numpy a... |
"""
Author: <NAME>
GitHub: wafflescore
"""
from minisom import MiniSom, asymptotic_decay
import numpy as np
import matplotlib.pyplot as plt
import itertools
from skimage import measure
from skimage.segmentation import random_walker
from skimage import filters
from scipy.spatial import distance
from collections impor... | [
"random.sample",
"matplotlib.pyplot.cm.rainbow",
"acse_9_irp_wafflescore.MiscHelpers.ext_eval_entropy",
"numpy.shape",
"numpy.argpartition",
"matplotlib.pyplot.figure",
"skimage.measure.label",
"numpy.arange",
"numpy.unique",
"numpy.full",
"matplotlib.pyplot.colorbar",
"random.seed",
"numpy.... | [((468, 582), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s | %(levelname)s : %(message)s"""', 'level': 'logging.INFO', 'stream': 'sys.stdout'}), "(format='%(asctime)s | %(levelname)s : %(message)s',\n level=logging.INFO, stream=sys.stdout)\n", (487, 582), False, 'import logging\n'), ... |
import numpy as np
class DecisionTreeClassifierTranspiler(object):
def __init__(self, model):
self.model = model
self.build_classes()
self.build_feature_idx()
self.build_right_nodes()
self.build_thresholds()
def build_feature_idx(self):
self.features_idx = ','... | [
"numpy.argmax"
] | [((488, 516), 'numpy.argmax', 'np.argmax', (['class_aux'], {'axis': '(1)'}), '(class_aux, axis=1)\n', (497, 516), True, 'import numpy as np\n')] |
import numpy as np
import pickle as pkl
def function_generator(init_num):
seq = np.array([], dtype='int')
n = init_num
seq = np.append(seq, n)
while True:
if ((n%2)==0):
next_number = n/2
next_number = np.asarray(next_number, dtype='int')
seq = np.append(seq... | [
"numpy.append",
"numpy.asarray",
"numpy.array"
] | [((1092, 1119), 'numpy.asarray', 'np.asarray', (['output_seq_data'], {}), '(output_seq_data)\n', (1102, 1119), True, 'import numpy as np\n'), ((1130, 1149), 'numpy.asarray', 'np.asarray', (['x_train'], {}), '(x_train)\n', (1140, 1149), True, 'import numpy as np\n'), ((1160, 1179), 'numpy.asarray', 'np.asarray', (['y_tr... |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import divis... | [
"torch.distributed.is_initialized",
"numpy.argmax",
"torch.distributed.get_rank",
"os.path.basename",
"logging.StreamHandler",
"numpy.zeros",
"time.strftime",
"pathlib.Path",
"torch.optim.Adam",
"torch.distributed.get_world_size",
"numpy.bincount",
"logging.getLogger",
"torch.optim.SGD"
] | [((491, 518), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (508, 518), False, 'import logging\n'), ((1111, 1145), 'torch.distributed.get_world_size', 'torch.distributed.get_world_size', ([], {}), '()\n', (1143, 1145), False, 'import torch\n'), ((1239, 1267), 'torch.distributed.get_rank'... |
import numpy as np
from openmdao.api import CaseReader
from optigurator.utils import recording_filename
def get_case_reader(data_dir, problem_constants):
return CaseReader(recording_filename(data_dir, problem_constants.id))
def generate_valid_points(problem_constants, crm):
for (i, case_id) in enumerate(cr... | [
"numpy.matrix",
"numpy.size",
"numpy.argmax",
"numpy.zeros",
"numpy.array",
"optigurator.utils.recording_filename",
"numpy.delete"
] | [((3474, 3496), 'numpy.size', 'np.size', (['Pareto_points'], {}), '(Pareto_points)\n', (3481, 3496), True, 'import numpy as np\n'), ((3519, 3544), 'numpy.size', 'np.size', (['Pareto_points', '(0)'], {}), '(Pareto_points, 0)\n', (3526, 3544), True, 'import numpy as np\n'), ((3701, 3731), 'numpy.delete', 'np.delete', (['... |
import os
from typing import Dict
from abc import ABC
from easy_sdm.data import ShapefileRegion
import geopandas as gpd
import numpy as np
import pandas as pd
import requests
from easy_sdm.configs import configs
from easy_sdm.utils import logger
from typing import Dict, Optional
from pathlib import Path
class GBIFOc... | [
"pandas.DataFrame",
"pandas.notnull",
"easy_sdm.utils.logger.logging.info",
"geopandas.points_from_xy",
"numpy.array",
"requests.get"
] | [((1336, 1378), 'requests.get', 'requests.get', (['self.base_url'], {'params': 'params'}), '(self.base_url, params=params)\n', (1348, 1378), False, 'import requests\n'), ((1460, 1567), 'easy_sdm.utils.logger.logging.info', 'logger.logging.info', (['f"""API call failed at offset {offset} with a status code of {r.status_... |
from __future__ import division
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.fpn import Head
from chainercv.links.model.fpn import head_loss_post
from chainercv.links.model.fpn import head_loss_pre
def _random_array(xp, sh... | [
"numpy.random.uniform",
"chainercv.links.model.fpn.head_loss_pre",
"chainercv.links.model.fpn.Head",
"chainer.testing.parameterize",
"chainercv.links.model.fpn.head_loss_post",
"chainer.testing.run_module"
] | [((414, 499), 'chainer.testing.parameterize', 'testing.parameterize', (["{'n_class': 1 + 1}", "{'n_class': 5 + 1}", "{'n_class': 20 + 1}"], {}), "({'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 +\n 1})\n", (434, 499), False, 'from chainer import testing\n'), ((8017, 8055), 'chainer.testing.run_module', 'test... |
#!/usr/bin/env python
from itertools import izip
import numpy as np
import h5py
from progress.bar import Bar
import sys
import rospy
import rosbag
from sensor_msgs.msg import Imu, Image
def main():
if len(sys.argv) < 2:
print("Usage: {} dataset_name".format(sys.argv[0]))
exit(1)
file_name = sys.argv[1]
... | [
"sensor_msgs.msg.Image",
"numpy.transpose",
"sensor_msgs.msg.Imu",
"rospy.Time.from_sec",
"itertools.izip"
] | [((474, 546), 'itertools.izip', 'izip', (["log_file['times']", "log_file['fiber_accel']", "log_file['fiber_gyro']"], {}), "(log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro'])\n", (478, 546), False, 'from itertools import izip\n'), ((737, 744), 'sensor_msgs.msg.Image', 'Image', ([], {}), '()\n', (742, 7... |
"""
Obtain the single photoelectron response for an SiPM. Can be used as an input
to sim_telarray after normalisation with Konrads script
"""
import argparse
from argparse import ArgumentDefaultsHelpFormatter as Formatter
import numpy as np
from scipy.special import binom
from scipy.stats import norm
from IPython impor... | [
"scipy.special.binom",
"argparse.ArgumentParser",
"os.makedirs",
"numpy.power",
"os.path.exists",
"scipy.stats.norm.pdf",
"numpy.arange",
"numpy.linspace",
"numpy.column_stack",
"matplotlib.pyplot.semilogy",
"os.path.join",
"matplotlib.pyplot.savefig",
"numpy.sqrt"
] | [((1223, 1249), 'numpy.power', 'np.power', (['(1 - pap)', 'N[:, 0]'], {}), '(1 - pap, N[:, 0])\n', (1231, 1249), True, 'import numpy as np\n'), ((1321, 1348), 'numpy.sqrt', 'np.sqrt', (['(K * spe_sigma ** 2)'], {}), '(K * spe_sigma ** 2)\n', (1328, 1348), True, 'import numpy as np\n'), ((1364, 1385), 'numpy.sqrt', 'np.... |
import matplotlib.pyplot as plt
import numpy as np
nus_lpf,mu_lpf=np.load("clpf.npz",allow_pickle=True)["arr_0"]
nus_modit,mu_modit=np.load("cmodit4500.npz",allow_pickle=True)["arr_0"]
fig=plt.figure(figsize=(8,4))
plt.plot(nus_modit,mu_modit,label="MODIT",color="C1")
plt.plot(nus_lpf,mu_lpf,label="DIRECT",ls="dashe... | [
"numpy.load",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] | [((192, 218), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 4)'}), '(figsize=(8, 4))\n', (202, 218), True, 'import matplotlib.pyplot as plt\n'), ((218, 274), 'matplotlib.pyplot.plot', 'plt.plot', (['nus_modit', 'mu_modit'], {'label': '"""MODIT"""', 'color': '"""C1"""'}), "(nus_modit, mu_modit, label='... |
import numpy as np
from ligo.skymap import kde
import matplotlib
matplotlib.use('Agg')
from matplotlib.colors import to_rgb
from matplotlib import pyplot as plt
from mpl_toolkits.basemap import Basemap
#matplotlib.rc('text', usetex=True)
def greedy(density):
i,j = np.shape(density)
idx = np.argsort(density.fla... | [
"numpy.zeros",
"matplotlib.colors.to_rgb",
"numpy.shape",
"numpy.append",
"numpy.indices",
"matplotlib.use",
"numpy.sin",
"numpy.mean",
"numpy.cos",
"numpy.arange",
"mpl_toolkits.basemap.Basemap"
] | [((65, 86), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (79, 86), False, 'import matplotlib\n'), ((270, 287), 'numpy.shape', 'np.shape', (['density'], {}), '(density)\n', (278, 287), True, 'import numpy as np\n'), ((396, 413), 'numpy.append', 'np.append', (['c', '(1.0)'], {}), '(c, 1.0)\n', (4... |
import numpy as np
def align_depth_to_rgb(
depth,
bgr_cameramodel,
depth_cameramodel,
depth_to_rgb_transform):
"""Align depth image to color image.
Parameters
----------
depth : numpy.ndarray
depth image in meter order.
bgr_cameramodel : cameramodels.Pinhol... | [
"numpy.zeros",
"numpy.isnan",
"numpy.where",
"numpy.array",
"numpy.matmul"
] | [((780, 855), 'numpy.zeros', 'np.zeros', (['(bgr_cameramodel.height, bgr_cameramodel.width)'], {'dtype': 'np.float32'}), '((bgr_cameramodel.height, bgr_cameramodel.width), dtype=np.float32)\n', (788, 855), True, 'import numpy as np\n'), ((893, 908), 'numpy.isnan', 'np.isnan', (['depth'], {}), '(depth)\n', (901, 908), T... |
import numpy as np
import torch
def compute_lid(x, x_train, k, exclude_self=False):
"""
Calculate LID using the estimation from [1]
[1] Ma et al., "Characterizing Adversarial Subspaces Using
Local Intrinsic Dimensionality," ICLR 2018.
"""
with torch.no_grad():
x = x.view((x.size(... | [
"torch.no_grad",
"numpy.zeros",
"numpy.ceil",
"torch.log"
] | [((2835, 2858), 'numpy.zeros', 'np.zeros', (['(batch_size,)'], {}), '((batch_size,))\n', (2843, 2858), True, 'import numpy as np\n'), ((276, 291), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (289, 291), False, 'import torch\n'), ((1899, 1930), 'numpy.ceil', 'np.ceil', (['(num_total / batch_size)'], {}), '(num_t... |
#imports
import haversine as hs
import pandas as pd
import numpy as np
import random
import time
from concurrent import futures
import grpc
import databroker_pb2_grpc
import databroker_pb2
port = 8061
class Databroker(databroker_pb2_grpc.DatabrokerServicer):
def __init__(self):
self.current_row = 0
... | [
"pandas.DataFrame",
"pandas.read_csv",
"haversine.haversine",
"time.sleep",
"numpy.rad2deg",
"databroker_pb2.Features",
"concurrent.futures.ThreadPoolExecutor",
"numpy.ndarray.tobytes"
] | [((3015, 3057), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(10)'}), '(max_workers=10)\n', (3041, 3057), False, 'from concurrent import futures\n'), ((372, 409), 'pandas.read_csv', 'pd.read_csv', (['"""./data/no2_testset.csv"""'], {}), "('./data/no2_testset.csv')\n", (38... |
''' imports '''
# filesystem management
import os
# tensors and nn modules
import torch
# array handling
import numpy as np
# midi file import and parse
from mido import MidiFile
class MelodyDataset(torch.utils.data.Dataset):
''' dataset class for midi files '''
def __init__(self, dir_path: str, cache... | [
"numpy.pad",
"numpy.zeros",
"numpy.where",
"os.path.join",
"os.listdir",
"numpy.concatenate"
] | [((5349, 5369), 'numpy.zeros', 'np.zeros', (['M.shape[1]'], {}), '(M.shape[1])\n', (5357, 5369), True, 'import numpy as np\n'), ((5430, 5446), 'numpy.where', 'np.where', (['(M != 0)'], {}), '(M != 0)\n', (5438, 5446), True, 'import numpy as np\n'), ((3972, 4001), 'numpy.zeros', 'np.zeros', (['(128)'], {'dtype': 'np.int... |
import cv2
import math
import imutils
import numpy as np
import warnings
from sklearn.cluster import KMeans
from skimage.morphology import *
from skimage.util import *
class OD_CV:
def loadImage(self, filepath):
return cv2.imread(filepath)
def resizeImage(self, image, kar, width, height... | [
"cv2.GaussianBlur",
"numpy.argmax",
"cv2.getPerspectiveTransform",
"cv2.arcLength",
"cv2.approxPolyDP",
"numpy.ones",
"numpy.argmin",
"numpy.histogram",
"cv2.boxPoints",
"cv2.rectangle",
"imutils.resize",
"cv2.erode",
"cv2.minAreaRect",
"numpy.unique",
"cv2.warpPerspective",
"numpy.zer... | [((245, 265), 'cv2.imread', 'cv2.imread', (['filepath'], {}), '(filepath)\n', (255, 265), False, 'import cv2\n'), ((519, 554), 'numpy.zeros', 'np.zeros', (['image.shape[:2]', 'np.uint8'], {}), '(image.shape[:2], np.uint8)\n', (527, 554), True, 'import numpy as np\n'), ((571, 625), 'cv2.drawContours', 'cv2.drawContours'... |
import glob
import cv2
import numpy as np
def globimgs(path, globs:list):
"""returns a list of files with path with globing with more than one extensions"""
imgs = []
for i in globs:
imgs.extend(glob.glob(path + i))
paths = []
for path in imgs:
paths.append(path.replace("\\", "/"))
return paths
def scan... | [
"cv2.medianBlur",
"cv2.threshold",
"numpy.ones",
"glob.glob",
"cv2.normalize",
"cv2.absdiff"
] | [((408, 439), 'cv2.medianBlur', 'cv2.medianBlur', (['dilated_img', '(15)'], {}), '(dilated_img, 15)\n', (422, 439), False, 'import cv2\n'), ((522, 625), 'cv2.normalize', 'cv2.normalize', (['diff_img', 'norm_img'], {'alpha': '(0)', 'beta': '(255)', 'norm_type': 'cv2.NORM_MINMAX', 'dtype': 'cv2.CV_8UC1'}), '(diff_img, no... |
#!/usr/bin/env python
import getopt, sys, os
import numpy as np
import pyfits
from pylab import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid.inset_locator import mark_inset
#fname_ext = '/home/nbarbey/data/csh/output/ngc6946_c... | [
"pyfits.fitsopen",
"matplotlib.pyplot.show",
"mpl_toolkits.axes_grid.inset_locator.mark_inset",
"matplotlib.pyplot.yticks",
"numpy.isnan",
"matplotlib.pyplot.figure",
"mpl_toolkits.axes_grid.inset_locator.zoomed_inset_axes",
"matplotlib.pyplot.xticks"
] | [((544, 565), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)', '[5, 4]'], {}), '(1, [5, 4])\n', (554, 565), True, 'import matplotlib.pyplot as plt\n'), ((842, 873), 'mpl_toolkits.axes_grid.inset_locator.zoomed_inset_axes', 'zoomed_inset_axes', (['ax', '(2)'], {'loc': '(3)'}), '(ax, 2, loc=3)\n', (859, 873), False, 'f... |
# rough copy of https://github.com/geohot/tinygrad/blob/master/examples/mnist_gan.py
from simplegrad import Tensor, Device, Adam
import numpy as np
import itertools as it
from torchvision.utils import make_grid, save_image
import torch
from abc import abstractmethod
import os
def leakyrelu(x, neg_slope=0.2):
retu... | [
"numpy.random.uniform",
"os.stat",
"numpy.random.randn",
"os.rename",
"tempfile.gettempdir",
"numpy.zeros",
"gzip.decompress",
"numpy.prod",
"os.path.isfile",
"numpy.random.randint",
"requests.get",
"simplegrad.Tensor",
"os.path.join",
"simplegrad.Adam",
"torch.tensor"
] | [((1961, 1987), 'os.rename', 'os.rename', (["(fp + '.tmp')", 'fp'], {}), "(fp + '.tmp', fp)\n", (1970, 1987), False, 'import requests, tempfile, os\n'), ((2523, 2578), 'simplegrad.Adam', 'Adam', (['generator.params'], {'learning_rate': '(0.0002)', 'beta1': '(0.5)'}), '(generator.params, learning_rate=0.0002, beta1=0.5)... |
import numpy as np
import torch
import math
import ray
import copy
import networks
import global_config
def play_one_game(model, env_func, config, temperature, save=False, filename = ''):
game_history = GameHistory()
game = env_func(max_steps = config.max_moves, window_size = config.observation_shape[1])
... | [
"ray.remote",
"torch.tensor",
"math.sqrt",
"numpy.argmax",
"torch.exp",
"numpy.random.choice",
"math.log",
"torch.no_grad",
"networks.support_to_scalar"
] | [((7362, 7387), 'ray.remote', 'ray.remote', (['play_one_game'], {}), '(play_one_game)\n', (7372, 7387), False, 'import ray\n'), ((518, 533), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (531, 533), False, 'import torch\n'), ((1616, 1639), 'numpy.argmax', 'np.argmax', (['visit_counts'], {}), '(visit_counts)\n', (... |
import numpy
import sys
import scipy
from scipy.signal import find_peaks_cwt
import matplotlib.pyplot as plt
from headbang.params import DEFAULTS
from headbang.util import find_closest
openpose_install_path = "/home/sevagh/thirdparty-repos/openpose"
openpose_dir = openpose_install_path
sys.path.append(openpose_dir + ... | [
"matplotlib.pyplot.title",
"numpy.abs",
"numpy.nan_to_num",
"numpy.isnan",
"matplotlib.pyplot.figure",
"numpy.arange",
"sys.path.append",
"pyopenpose.WrapperPython",
"matplotlib.pyplot.show",
"numpy.median",
"numpy.asarray",
"pyopenpose.VectorDatum",
"matplotlib.pyplot.ylabel",
"matplotlib... | [((289, 345), 'sys.path.append', 'sys.path.append', (["(openpose_dir + '/build/python/openpose')"], {}), "(openpose_dir + '/build/python/openpose')\n", (304, 345), False, 'import sys\n'), ((1059, 1077), 'pyopenpose.WrapperPython', 'op.WrapperPython', ([], {}), '()\n', (1075, 1077), True, 'import pyopenpose as op\n'), (... |
import sys
cmd_folder = "../../../vis" # nopep8
if cmd_folder not in sys.path: # nopep8
sys.path.insert(0, cmd_folder)
from tile_mov import tile_movie
from make_mov import make_all, get_particle_trajectories
import pylab as plt
import numpy as np
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matp... | [
"pylab.close",
"scipy.signal.savgol_filter",
"numpy.meshgrid",
"pylab.rcParams.update",
"numpy.zeros",
"sys.path.insert",
"make_mov.make_all",
"numpy.rot90",
"pylab.figure",
"matplotlib.gridspec.GridSpec",
"numpy.concatenate",
"numpy.sqrt"
] | [((349, 458), 'pylab.rcParams.update', 'plt.rcParams.update', (["{'text.usetex': True, 'font.family': 'sans-serif', 'font.sans-serif': [\n 'Helvetica']}"], {}), "({'text.usetex': True, 'font.family': 'sans-serif',\n 'font.sans-serif': ['Helvetica']})\n", (368, 458), True, 'import pylab as plt\n'), ((94, 124), 'sy... |
# Copyright 2020 Yalfoosh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softw... | [
"numpy.argmax",
"numpy.square",
"numpy.argmin",
"numpy.around",
"numpy.mean",
"numpy.tile",
"numpy.reshape",
"numpy.eye",
"numpy.delete",
"numpy.vstack",
"numpy.sqrt"
] | [((7922, 7943), 'numpy.reshape', 'np.reshape', (['start', '(-1)'], {}), '(start, -1)\n', (7932, 7943), True, 'import numpy as np\n'), ((8803, 8843), 'numpy.tile', 'np.tile', (['start'], {'reps': '(start.shape[0], 1)'}), '(start, reps=(start.shape[0], 1))\n', (8810, 8843), True, 'import numpy as np\n'), ((8911, 8937), '... |
import matplotlib.pyplot as plt
from multiprocessing import Pool, Manager, cpu_count
from functools import partial
import numpy as np
from bs4 import BeautifulSoup
from colour import Color
import copy
import math
import re
import time
from consts import QWERTY, THUMBS, COORDS
CACHE = {}
def cleanhtml(raw_html):
... | [
"colour.Color",
"functools.partial",
"copy.deepcopy",
"math.sqrt",
"multiprocessing.Manager",
"time.time",
"numpy.arange",
"multiprocessing.Pool",
"bs4.BeautifulSoup",
"matplotlib.pyplot.subplots",
"re.sub",
"multiprocessing.cpu_count"
] | [((328, 359), 'bs4.BeautifulSoup', 'BeautifulSoup', (['raw_html', '"""lxml"""'], {}), "(raw_html, 'lxml')\n", (341, 359), False, 'from bs4 import BeautifulSoup\n'), ((526, 558), 're.sub', 're.sub', (['"""[^а-я]+"""', '""""""', 'lowercase'], {}), "('[^а-я]+', '', lowercase)\n", (532, 558), False, 'import re\n'), ((1829,... |
import numpy as np
import pandas as pd
def get_bootstrap_indices(data, cluster_by=None, seed=None, n_draws=1000):
"""Draw positional indices for the construction of bootstrap samples.
Storing the positional indices instead of the full bootstrap samples saves a lot
of memory for datasets with many variabl... | [
"numpy.random.randint",
"numpy.random.seed"
] | [((646, 666), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (660, 666), True, 'import numpy as np\n'), ((750, 800), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n_obs'], {'size': '(n_draws, n_obs)'}), '(0, n_obs, size=(n_draws, n_obs))\n', (767, 800), True, 'import numpy as np\n')] |