| code (string, 31–1.05M chars) | apis (list) | extract_api (string, 97–1.91M chars) |
|---|---|---|
import cv2, sys, os
import numpy as np
haar_file = 'haarcascade_frontalface_default.xml'
datasets = 'datasets'
print('Recognizing Face Please Be in sufficient Lights...')
(images, lables, names, id) = ([], [], {}, 0)
for (subdirs, dirs, files) in os.walk(datasets):
    for subdir in dirs:
        names[id] = subdir
        su... | [
"cv2.face.LBPHFaceRecognizer_create",
"os.path.join",
"cv2.putText",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.destroyAllWindows",
"os.walk",
"cv2.VideoCapture",
"cv2.rectangle",
"cv2.imread",
"numpy.array",
"cv2.CascadeClassifier",
"cv2.imshow",
"cv2.inRange",
"os.listdir",
"cv2.resize"
] | [((252, 269), 'os.walk', 'os.walk', (['datasets'], {}), '(datasets)\n', (259, 269), False, 'import cv2, sys, os\n'), ((645, 681), 'cv2.face.LBPHFaceRecognizer_create', 'cv2.face.LBPHFaceRecognizer_create', ([], {}), '()\n', (679, 681), False, 'import cv2, sys, os\n'), ((727, 759), 'cv2.CascadeClassifier', 'cv2.CascadeC... |
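Each `extract_api` cell is a Python-literal list of call records tied to the `code` cell by character offsets. Judging from the visible rows, each record appears to have the shape `((call_start, call_end), api_name, call_expr, (args, kwargs), arg_text, (arg_start, arg_end), uses_alias, import_stmt)`; these field names are inferred from the samples, not documented. A minimal sketch of decoding one record from the first row above, under that assumed schema:

```python
import ast

# First record of the first row's extract_api cell, quoted verbatim.
raw = ("[((252, 269), 'os.walk', 'os.walk', (['datasets'], {}), '(datasets)\\n', "
       "(259, 269), False, 'import cv2, sys, os\\n')]")

# literal_eval is safe here: the cell contains only Python literals.
records = ast.literal_eval(raw)
for call_span, api, call_expr, (args, kwargs), arg_text, arg_span, uses_alias, imp in records:
    print(api, call_span, imp.strip())  # -> os.walk (252, 269) import cv2, sys, os

# The apis column looks like the deduplicated names from these records:
apis = sorted({record[1] for record in records})  # -> ['os.walk']
```

The boolean field seems to flag calls made through an import alias (`True` for `np.random.seed` under `import numpy as np` in later rows, `False` for plain `os.walk`), but that reading is a guess from the samples shown.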
# -*- coding: utf-8 -*-
from functools import partial
import numpy as np
import pandas as pd
def summarize_results(results):
    values = []
    for df in results:
        values.append(df.pd_dataframe().values)
    df = df.pd_dataframe()
    columns = df.columns
    return (
        pd.DataFrame(np.mean(values, ... | [
"numpy.std",
"functools.partial",
"numpy.mean"
] | [((1004, 1146), 'functools.partial', 'partial', (['_run_backtest'], {'model': 'model', 'x_test': 'x', 'y_test': 'y', 'start': 'start', 'stride': 'stride', 'horizon': 'horizon', 'enable_mc_dropout': 'enable_mc_dropout'}), '(_run_backtest, model=model, x_test=x, y_test=y, start=start, stride\n =stride, horizon=horizon... |
from dataclasses import dataclass
import h5pickle as h5py
import json
import numpy as np
from numpy import ndarray
from pathlib import Path
from typing import List
import random
from robolfd.types import Transition
import robosuite
from robosuite.utils.mjcf_utils import postprocess_model_xml
import itertools
from tqd... | [
"robosuite.make",
"json.loads",
"numpy.clip",
"h5pickle.File",
"robosuite.utils.mjcf_utils.postprocess_model_xml",
"numpy.array",
"multiprocessing.Pool",
"numpy.concatenate"
] | [((802, 841), 'json.loads', 'json.loads', (["f['data'].attrs['env_info']"], {}), "(f['data'].attrs['env_info'])\n", (812, 841), False, 'import json\n'), ((853, 1015), 'robosuite.make', 'robosuite.make', ([], {'has_renderer': '(False)', 'has_offscreen_renderer': '(False)', 'ignore_done': '(True)', 'use_camera_obs': '(Fa... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 11 18:55:01 2019
@author: kenneth
"""
from __future__ import absolute_import
import numpy as np
from Utils.utils import EvalR
from Utils.Loss import loss
from Utils.kernels import Kernels
class kernelridge(EvalR, loss, Kernels):
    def __init__(s... | [
"sklearn.preprocessing.StandardScaler",
"sklearn.kernel_ridge.KernelRidge",
"sklearn.model_selection.train_test_split",
"Utils.kernels.Kernels.cosine",
"Utils.kernels.Kernels.polynomial",
"sklearn.datasets.load_boston",
"Utils.kernels.Kernels.sigmoid",
"Utils.kernels.Kernels.rbf",
"Utils.kernels.Ker... | [((2019, 2056), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.3)'}), '(X, y, test_size=0.3)\n', (2035, 2056), False, 'from sklearn.model_selection import train_test_split\n'), ((2237, 2276), 'sklearn.kernel_ridge.KernelRidge', 'KernelRidge', ([], {'alpha': '(1.0)', 'kern... |
"""
Main file
"""
import argparse
import logging
import random
import gym
from tqdm import trange
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from common_definitions import CHECKPOINTS_PATH, TOTAL_EPISODES, TF_LOG_DIR, UNBALANCE_P
from model import Brain
from utils import Tensorboard
... | [
"tensorflow.expand_dims",
"matplotlib.pyplot.show",
"gym.make",
"argparse.ArgumentParser",
"logging.basicConfig",
"tensorflow.keras.metrics.Mean",
"matplotlib.pyplot.plot",
"tqdm.trange",
"numpy.square",
"logging.getLogger",
"logging.info",
"random.random",
"numpy.mean",
"tensorflow.keras.... | [((351, 372), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (370, 372), False, 'import logging\n'), ((434, 584), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""Deep Deterministic Policy Gradient (DDPG)"""', 'description': '"""Deep Deterministic Policy Gradient (DDPG) in Tensor... |
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation
import numpy as np
import argparse
import random
import gym
import sys
from collections import deque
from keras import backend as K
from keras.layers import Input, Dense
from keras.models import Model
fro... | [
"argparse.ArgumentParser",
"numpy.argmax",
"random.sample",
"keras.models.Model",
"tensorflow.ConfigProto",
"keras.layers.Input",
"keras.backend.tensorflow_backend.set_session",
"tensorflow.GPUOptions",
"keras.backend.concatenate",
"random.randint",
"keras.utils.plot_model",
"numpy.reshape",
... | [((360, 386), 'gym.make', 'gym.make', (['"""MountainCar-v0"""'], {}), "('MountainCar-v0')\n", (368, 386), False, 'import gym\n'), ((7552, 7614), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Linear Q network parser"""'}), "(description='Linear Q network parser')\n", (7575, 7614), False,... |
from pykinect2 import PyKinectV2
from pykinect2.PyKinectV2 import *
from pykinect2 import PyKinectRuntime
import ctypes
import _ctypes
import pygame
import sys
import numpy as np
import cv2
#if sys.hexversion >= 0x03000000:
# import _thread as thread
#else:
# import thread
class DepthRuntime(object):
    def __i... | [
"numpy.dstack",
"pygame.quit",
"numpy.multiply",
"pygame.Surface",
"pygame.event.get",
"pygame.display.set_mode",
"cv2.createBackgroundSubtractorKNN",
"numpy.nditer",
"ctypes.memmove",
"pygame.init",
"pygame.display.flip",
"pykinect2.PyKinectRuntime.PyKinectRuntime",
"pygame.display.update",... | [((341, 354), 'pygame.init', 'pygame.init', ([], {}), '()\n', (352, 354), False, 'import pygame\n'), ((430, 449), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (447, 449), False, 'import pygame\n'), ((607, 626), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (624, 626), False, 'import pygame\... |
import numpy as np
from .strategy import Strategy
from sklearn.neighbors import NearestNeighbors
import pickle
from datetime import datetime
class CoreSet(Strategy):
    def __init__(self, X, Y, idxs_lb, net, handler, args, tor=1e-4):
        super(CoreSet, self).__init__(X, Y, idxs_lb, net, handler, args)
        self.tor = tor
        d... | [
"ipdb.set_trace",
"numpy.append",
"numpy.where",
"numpy.arange",
"datetime.datetime.now",
"numpy.delete",
"numpy.sqrt"
] | [((502, 516), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (514, 516), False, 'from datetime import datetime\n'), ((711, 728), 'numpy.sqrt', 'np.sqrt', (['dist_mat'], {}), '(dist_mat)\n', (718, 728), True, 'import numpy as np\n'), ((813, 827), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (82... |
#!/usr/bin/python
import os, math
import pandas as pd
import numpy as np
np.random.seed(42)
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import torch.optim as optim
torch.manual_seed(42)
from sklearn.metrics import roc_auc_score
from sklearn.model_selection i... | [
"pandas.DataFrame",
"torch.nn.Dropout",
"numpy.random.seed",
"pandas.read_csv",
"torch.manual_seed",
"torch.nn.init.xavier_uniform_",
"torch.nn.BatchNorm1d",
"math.floor",
"sklearn.metrics.roc_auc_score",
"sklearn.model_selection.ParameterSampler",
"torch.nn.Linear",
"numpy.random.permutation"... | [((74, 92), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (88, 92), True, 'import numpy as np\n'), ((225, 246), 'torch.manual_seed', 'torch.manual_seed', (['(42)'], {}), '(42)\n', (242, 246), False, 'import torch\n'), ((450, 506), 'pandas.DataFrame', 'pd.DataFrame', (['df_bin'], {'columns': 'df.colum... |
import explanes as el
import numpy as np
import pandas as pd
np.random.seed(0)
experiment = el.experiment.Experiment()
experiment.project.name = 'example'
experiment.path.output = '/tmp/'+experiment.project.name+'/'
experiment.factor.f1 = [1, 2]
experiment.factor.f2 = [1, 2, 3]
experiment.metric.m1 = ['mean', 'std']
... | [
"explanes.experiment.Experiment",
"numpy.random.seed",
"pandas.DataFrame",
"numpy.random.randn"
] | [((62, 79), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (76, 79), True, 'import numpy as np\n'), ((94, 120), 'explanes.experiment.Experiment', 'el.experiment.Experiment', ([], {}), '()\n', (118, 120), True, 'import explanes as el\n'), ((883, 937), 'pandas.DataFrame', 'pd.DataFrame', (['settingDescrip... |
"""
Generate a golden NPZ file from a dicom ZIP archive.
"""
import argparse
import numpy as np
from dicom_numpy.zip_archive import combined_series_from_zip
def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--output', help='Output golden NPZ file', required=False)
    parser.ad... | [
"dicom_numpy.zip_archive.combined_series_from_zip",
"numpy.savez_compressed",
"argparse.ArgumentParser"
] | [((192, 217), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (215, 217), False, 'import argparse\n'), ((576, 611), 'dicom_numpy.zip_archive.combined_series_from_zip', 'combined_series_from_zip', (['input_zip'], {}), '(input_zip)\n', (600, 611), False, 'from dicom_numpy.zip_archive import combin... |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 28 00:02:08 2017
@author: kht
"""
import tensorflow as tf
import translate as tl
import numpy as np
def weight_variable(shape):
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial)
def bias_variable(shape):
    initial = tf.constant(0.1, ... | [
"tensorflow.train.Saver",
"tensorflow.argmax",
"tensorflow.Session",
"numpy.zeros",
"tensorflow.constant",
"tensorflow.placeholder",
"tensorflow.cast",
"tensorflow.Variable",
"tensorflow.matmul",
"translate.self_decode",
"tensorflow.initialize_all_variables",
"tensorflow.log",
"tensorflow.In... | [((410, 426), 'translate.self_decode', 'tl.self_decode', ([], {}), '()\n', (424, 426), True, 'import translate as tl\n'), ((685, 708), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (706, 708), True, 'import tensorflow as tf\n'), ((775, 814), 'tensorflow.placeholder', 'tf.placeholder', (['t... |
import torch
from torch import nn
import os.path
import torchvision.transforms as transforms
from EnlightenGAN.data.base_dataset import BaseDataset, get_transform
from EnlightenGAN.data.image_folder import make_dataset
import random
from PIL import Image
import PIL
from pdb import set_trace as st
import numpy as np
fro... | [
"random.shuffle",
"EnlightenGAN.data.image_folder.make_dataset",
"numpy.round",
"numpy.unique",
"skimage.color.rgb2lab",
"torch.ones",
"random.randint",
"torch.nn.ReflectionPad2d",
"torch.zeros",
"random.random",
"torch.max",
"torch.unsqueeze",
"skimage.feature.canny",
"torch.min",
"Enli... | [((1959, 1987), 'random.shuffle', 'random.shuffle', (['self.A_paths'], {}), '(self.A_paths)\n', (1973, 1987), False, 'import random\n'), ((3292, 3316), 'EnlightenGAN.data.image_folder.make_dataset', 'make_dataset', (['self.dir_A'], {}), '(self.dir_A)\n', (3304, 3316), False, 'from EnlightenGAN.data.image_folder import ... |
from datetime import datetime, date
import math
import numpy as np
import time
import sys
import requests
import re
from ortools.linear_solver import pywraplp
# if len(sys.argv) == 1:
# symbols = ['UPRO', 'TMF']
# else:
# symbols = sys.argv[1].split(',')
# for i in range(len(symbols)):
# ... | [
"ortools.linear_solver.pywraplp.Solver.CreateSolver",
"numpy.std",
"numpy.floor",
"datetime.date.today",
"time.time",
"numpy.array",
"requests.get",
"math.log",
"numpy.sqrt",
"re.compile"
] | [((491, 502), 'time.time', 'time.time', ([], {}), '()\n', (500, 502), False, 'import time\n'), ((1902, 1919), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1914, 1919), False, 'import requests\n'), ((1982, 2043), 're.compile', 're.compile', (['""".*"CrumbStore":\\\\{"crumb":"(?P<crumb>[^"]+)"\\\\}"""'], {}... |
#!/usr/bin/env python
# =============================================================================
# MODULE DOCSTRING
# =============================================================================
"""
Test objects and function in the module reweighting.
"""
# ===================================================... | [
"tempfile.TemporaryDirectory",
"pint.UnitRegistry",
"os.path.dirname",
"numpy.random.RandomState",
"numpy.isnan",
"os.path.join",
"numpy.all"
] | [((825, 839), 'numpy.random.RandomState', 'RandomState', (['(0)'], {}), '(0)\n', (836, 839), False, 'from numpy.random import RandomState\n'), ((849, 868), 'pint.UnitRegistry', 'pint.UnitRegistry', ([], {}), '()\n', (866, 868), False, 'import pint\n'), ((2557, 2582), 'os.path.dirname', 'os.path.dirname', (['__file__'],... |
import re
import gzip
import numpy as np
from zipfile import ZipFile
def load_corpus(corpus_file, load_tags=False):
    if corpus_file.endswith('.gz'):
        corpus = []
        with gzip.open(corpus_file, 'r') as f:
            for line in f:
                corpus.append(line.decode("utf-8").split())
    elif cor... | [
"numpy.array",
"zipfile.ZipFile",
"gzip.open",
"re.match"
] | [((1602, 1621), 'gzip.open', 'gzip.open', (['filename'], {}), '(filename)\n', (1611, 1621), False, 'import gzip\n'), ((2824, 2838), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (2832, 2838), True, 'import numpy as np\n'), ((187, 214), 'gzip.open', 'gzip.open', (['corpus_file', '"""r"""'], {}), "(corpus_file, ... |
"""
Test that data encoded with earlier versions can still be decoded correctly.
"""
from __future__ import absolute_import, division, print_function
import pathlib
import unittest
import numpy as np
import h5py
TEST_DATA_DIR = pathlib.Path(__file__).parent / "data"
OUT_FILE_TEMPLATE = "regression_%s.h5"
VERSIO... | [
"unittest.main",
"pathlib.Path",
"h5py.File",
"numpy.all"
] | [((791, 806), 'unittest.main', 'unittest.main', ([], {}), '()\n', (804, 806), False, 'import unittest\n'), ((234, 256), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (246, 256), False, 'import pathlib\n'), ((528, 553), 'h5py.File', 'h5py.File', (['file_name', '"""r"""'], {}), "(file_name, 'r')\n",... |
import pytest
from io import StringIO
import numpy as np
import pandas as pd
import sandy
__author__ = "<NAME>"
#####################
# Test initialization
#####################
def test_from_file_1_column():
    vals = '1\n5\n9'
    file = StringIO(vals)
    with pytest.raises(Exception):
        ... | [
"sandy.Pert",
"io.StringIO",
"sandy.Pert.from_file",
"numpy.testing.assert_array_equal",
"pytest.fixture",
"pytest.raises"
] | [((526, 556), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (540, 556), False, 'import pytest\n'), ((260, 274), 'io.StringIO', 'StringIO', (['vals'], {}), '(vals)\n', (268, 274), False, 'from io import StringIO\n'), ((435, 449), 'io.StringIO', 'StringIO', (['vals'], {}), '(v... |
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# U... | [
"unittest.main",
"numpy.zeros_like",
"pyscf.x2c.sfx2c1e.SpinFreeX2C",
"numpy.asarray",
"numpy.zeros",
"numpy.einsum",
"pyscf.lib.light_speed",
"pyscf.gto.M",
"pyscf.x2c.sfx2c1e_grad._gen_first_order_quantities",
"functools.reduce",
"numpy.sqrt"
] | [((6199, 6321), 'pyscf.gto.M', 'gto.M', ([], {'verbose': '(0)', 'atom': "[['O', (0.0, 0.0, 0.0001)], [1, (0.0, -0.757, 0.587)], [1, (0.0, 0.757, 0.587)]\n ]", 'basis': '"""3-21g"""'}), "(verbose=0, atom=[['O', (0.0, 0.0, 0.0001)], [1, (0.0, -0.757, 0.587)],\n [1, (0.0, 0.757, 0.587)]], basis='3-21g')\n", (6204, 6... |
from datastack import DataTable, DataColumn, label, col, desc
import pytest
import numpy as np
def test_one():
    tbl = (DataTable(a=(1,2,1,2,3,1), b=(4,5,6,3,2,1),c=(6,7,8,1,2,3))
           .order_by(desc(label("b")))
           )
    exp = DataTable(a=(1,2,1,2,3,1), b=(6,5,4,3,2,1), c=(8,7,6,1,2,3))
    assert tbl... | [
"datastack.DataTable",
"datastack.DataTable.from_dict",
"numpy.array",
"datastack.label"
] | [((245, 320), 'datastack.DataTable', 'DataTable', ([], {'a': '(1, 2, 1, 2, 3, 1)', 'b': '(6, 5, 4, 3, 2, 1)', 'c': '(8, 7, 6, 1, 2, 3)'}), '(a=(1, 2, 1, 2, 3, 1), b=(6, 5, 4, 3, 2, 1), c=(8, 7, 6, 1, 2, 3))\n', (254, 320), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((732, 807), 'datastac... |
import unittest, random
from models.point import Point
from models.segment import Segment
import numpy as np
class TestSegmentMethods(unittest.TestCase):
    def test_new(self):
        with self.assertRaises(ValueError) as context:
            Segment([])
    def test_extremums(self):
        a = Point(random.ran... | [
"numpy.arctan",
"random.randint",
"models.segment.Segment",
"models.point.Point"
] | [((509, 527), 'models.segment.Segment', 'Segment', (['[a, b, c]'], {}), '([a, b, c])\n', (516, 527), False, 'from models.segment import Segment\n'), ((654, 667), 'models.point.Point', 'Point', (['(10)', '(20)'], {}), '(10, 20)\n', (659, 667), False, 'from models.point import Point\n'), ((680, 693), 'models.point.Point'... |
# -*- coding: utf-8 -*-
# Copyright 2018 <NAME>
# Distributed under the terms of the Apache License 2.0
"""
Test Aerial Objects
#####################
"""
import six
import json
import uuid
import numpy as np
from itertools import cycle
from dronedirector.aerial import AerialObject, Drone, SinusoidalDrone
class CaliRe... | [
"uuid.uuid4",
"json.loads",
"numpy.isclose",
"numpy.arange",
"itertools.cycle"
] | [((1186, 1220), 'numpy.isclose', 'np.isclose', (["msg['altitude']", '(100.0)'], {}), "(msg['altitude'], 100.0)\n", (1196, 1220), True, 'import numpy as np\n'), ((1288, 1300), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1298, 1300), False, 'import uuid\n'), ((930, 945), 'json.loads', 'json.loads', (['msg'], {}), '(ms... |
import os
import cv2
import time
import imutils
import pyrebase
import numpy as np
from utils import *
import sys
import dlib
from skimage import io
#################### Initialize ####################
print("Start initializing")
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
emotion_dict = {0: "Angry", 1: "Disgusted", ... | [
"os.path.basename",
"numpy.argmax",
"numpy.empty",
"cv2.imread",
"dlib.get_frontal_face_detector",
"dlib.shape_predictor",
"cv2.resize"
] | [((616, 648), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (646, 648), False, 'import dlib\n'), ((661, 697), 'dlib.shape_predictor', 'dlib.shape_predictor', (['predictor_path'], {}), '(predictor_path)\n', (681, 697), False, 'import dlib\n'), ((1460, 1508), 'cv2.imread', 'cv2.imr... |
# laser_path_utils.py
"""Utility functions for working with paths for laser cutting"""
import numpy as np
import svgpathtools.svgpathtools as SVGPT
# it's important to clone and install the repo manually. The pip/pypi version is outdated
from laser_svg_utils import tree_to_tempfile
from laser_clipper import point_o... | [
"svgpathtools.svgpathtools.svg2paths",
"numpy.angle",
"svgpathtools.svgpathtools.parse_path",
"laser_svg_utils.tree_to_tempfile",
"laser_clipper.point_on_loops",
"laser_clipper.point_inside_loop",
"svgpathtools.svgpathtools.Path"
] | [((476, 506), 'svgpathtools.svgpathtools.svg2paths', 'SVGPT.svg2paths', (['temp_svg.name'], {}), '(temp_svg.name)\n', (491, 506), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((645, 667), 'laser_svg_utils.tree_to_tempfile', 'tree_to_tempfile', (['tree'], {}), '(tree)\n', (661, 667), False, 'from laser_svg_util... |
from _pytest.mark import param
import pytest
import numpy as np
from bayesian_mmm.sampling.stan_model_generator import StanModelGenerator
from bayesian_mmm.sampling.sampler import Sampler
from bayesian_mmm.sampling.stan_model_wrapper import StanModelWrapper
MAX_LAG = 4
SPENDS = np.array([[10, 20], [0, 8], [1, 30], [5... | [
"bayesian_mmm.sampling.stan_model_generator.StanModelGenerator",
"pytest.mark.parametrize",
"numpy.array",
"bayesian_mmm.sampling.sampler.Sampler"
] | [((281, 327), 'numpy.array', 'np.array', (['[[10, 20], [0, 8], [1, 30], [5, 40]]'], {}), '([[10, 20], [0, 8], [1, 30], [5, 40]])\n', (289, 327), True, 'import numpy as np\n'), ((344, 490), 'numpy.array', 'np.array', (['[[[10, 0, 0, 0], [20, 0, 0, 0]], [[0, 10, 0, 0], [8, 20, 0, 0]], [[1, 0, 10,\n 0], [30, 8, 20, 0]]... |
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for Kurtosis
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import StatMoments, StatMomentsDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances... | [
"numpy.testing.assert_almost_equal",
"numpy.allclose"
] | [((584, 656), 'numpy.allclose', 'np.allclose', (['self.tester.kurtosis_hist[1]', "computed_data['kurtosis_val']"], {}), "(self.tester.kurtosis_hist[1], computed_data['kurtosis_val'])\n", (595, 656), True, 'import numpy as np\n'), ((699, 771), 'numpy.allclose', 'np.allclose', (['self.tester.skewness_hist[1]', "computed_... |
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
... | [
"json.loads",
"logging.basicConfig",
"slogdata.show_times",
"numpy.where",
"pandas.read_sql",
"logging.getLogger",
"slogdata.mysql_socket"
] | [((723, 860), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'stream': 'stderr', 'format': '"""%(asctime)s %(levelname)s: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(level=logging.INFO, stream=stderr, format=\n '%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %... |
#! /usr/bin/env python3
#
# Copyright 2019 Garmin Ltd. or its subsidiaries
#
# SPDX-License-Identifier: Apache-2.0
import os
import sys
import glob
import re
from scipy import stats
import numpy
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(THIS_DIR, 'poky', 'scripts', 'lib'))
f... | [
"numpy.average",
"os.path.basename",
"numpy.std",
"scipy.stats.ttest_rel",
"os.path.realpath",
"re.match",
"os.path.join",
"numpy.sqrt"
] | [((224, 250), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (240, 250), False, 'import os\n'), ((268, 316), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""poky"""', '"""scripts"""', '"""lib"""'], {}), "(THIS_DIR, 'poky', 'scripts', 'lib')\n", (280, 316), False, 'import os\n'), ((1754, 1... |
import time
import threading
import numpy as np
from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors
from yolo_utils import yolo_eval
from priority_queue import PriorityQueue
class YOLOv3Thread(threading.Thread):
    def __init__(self, runner: "Runner", deque_input, lock_input,
                 ... | [
"common.preprocess_one_image_fn",
"common.draw_outputs",
"yolo_utils.yolo_eval",
"priority_queue.PriorityQueue",
"numpy.empty",
"common.generate_colors",
"common.load_classes"
] | [((651, 696), 'common.load_classes', 'load_classes', (['"""./model_data/adas_classes.txt"""'], {}), "('./model_data/adas_classes.txt')\n", (663, 696), False, 'from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors\n'), ((719, 752), 'common.generate_colors', 'generate_colors', (['self.cl... |
import warnings
warnings.filterwarnings('ignore')
import tensorflow as tf
from tensorflow.examples.tutorials import mnist
import numpy as np
import os
import random
from scipy import misc
import time
import sys
#from draw import viz_data, x, A, B, read_n, T
#from drawCopy1 import viz_data, x, A, B, read_n, T
#from dra... | [
"tensorflow.train.Saver",
"warnings.filterwarnings",
"tensorflow.examples.tutorials.mnist.input_data.read_data_sets",
"tensorflow.ConfigProto",
"random.randrange",
"numpy.array",
"tensorflow.InteractiveSession"
] | [((16, 49), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (39, 49), False, 'import warnings\n'), ((486, 502), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (500, 502), True, 'import tensorflow as tf\n'), ((554, 595), 'tensorflow.InteractiveSession', 'tf.In... |
import numpy as np
import mpnum as mp
import tmps
from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state
import time
from scipy.special import factorial
import math
def get_spin_initial_state(theta, mpa_type='mps'):
"""
Returns the initial state for the... | [
"scipy.special.factorial",
"numpy.abs",
"numpy.log",
"tmps.utils.state_reduction_as_ndarray",
"mpnum.chain",
"tmps.utils.convert.to_mparray",
"time.perf_counter",
"time.clock",
"numpy.max",
"tmps.chain.thermal.from_hamiltonian",
"numpy.sin",
"numpy.arange",
"numpy.cos",
"numpy.exp",
"tmp... | [((551, 597), 'tmps.utils.convert.to_mparray', 'convert.to_mparray', (['(ground + excited)', 'mpa_type'], {}), '(ground + excited, mpa_type)\n', (569, 597), False, 'from tmps.utils import state_reduction_as_ndarray, convert, broadcast_number_ground_state, get_thermal_state\n'), ((905, 968), 'tmps.utils.broadcast_number... |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import os
from dataclasses import dataclass, field
from typing import List, Tuple
import numpy as np
import torch
import torch.nn... | [
"torch.nn.Dropout",
"fairseq.modules.SamePad",
"torch.nn.GLU",
"torch.nn.functional.dropout",
"torch.cat",
"fairseq.modules.Fp32GroupNorm",
"torch.nn.init.constant_",
"torch.arange",
"torch.nn.utils.weight_norm",
"torch.nn.functional.normalize",
"torch.no_grad",
"os.path.join",
"torch.flatte... | [((963, 1000), 'fairseq.dataclass.ChoiceEnum', 'ChoiceEnum', (["['default', 'layer_norm']"], {}), "(['default', 'layer_norm'])\n", (973, 1000), False, 'from fairseq.dataclass import ChoiceEnum, FairseqDataclass\n'), ((1032, 1086), 'fairseq.dataclass.ChoiceEnum', 'ChoiceEnum', (["['static', 'uniform', 'normal', 'poisson... |
from keras.models import Sequential
from keras.models import Model
from keras.layers import Cropping2D, Conv2D, MaxPool2D, Flatten, Dense, Dropout, ELU, BatchNormalization, Lambda
from keras.layers import concatenate
import numpy as np
import tensorflow as tf
def to_yuv(img, in_cspace='RGB'):
    img_float = tf.cast(... | [
"tensorflow.image.rgb_to_yuv",
"tensorflow.image.bgr_to_yuv",
"keras.layers.Cropping2D",
"keras.layers.Dropout",
"numpy.asarray",
"keras.layers.MaxPool2D",
"keras.layers.Flatten",
"keras.models.Model",
"keras.layers.ELU",
"tensorflow.cast",
"keras.layers.Dense",
"keras.layers.Lambda",
"keras... | [((2546, 2582), 'keras.models.Model', 'Model', ([], {'inputs': 'img', 'outputs': 'out_steer'}), '(inputs=img, outputs=out_steer)\n', (2551, 2582), False, 'from keras.models import Model\n'), ((312, 342), 'tensorflow.cast', 'tf.cast', (['img'], {'dtype': 'tf.float32'}), '(img, dtype=tf.float32)\n', (319, 342), True, 'im... |
import os
import pickle
import cv2
import numpy as np
import streamlit as st
import tensorflow as tf
import grpc
from tensorflow_serving.apis import (
prediction_service_pb2_grpc,
predict_pb2
)
from consts import (
TRAIN_FD,
TRAIN_PKL_FP,
TRAIN_LABEL_FP
)
@st.cache
def load_prec_embs():
    with... | [
"os.path.join",
"tensorflow_serving.apis.predict_pb2.PredictRequest",
"numpy.std",
"os.walk",
"numpy.expand_dims",
"tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub",
"pickle.load",
"numpy.mean",
"tensorflow.contrib.util.make_ndarray",
"tensorflow.contrib.util.make_tensor_... | [((679, 692), 'os.walk', 'os.walk', (['root'], {}), '(root)\n', (686, 692), False, 'import os\n'), ((373, 387), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (384, 387), False, 'import pickle\n'), ((454, 468), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (465, 468), False, 'import pickle\n'), ((1261, 1324)... |
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
# Open the image
img = np.array(Image.open('house.jpg')).astype(np.uint8)
# Apply gray scale
gray_img = np.round(0.299 * img[:, :, 0] +
0.587 * img[:, :, 1] +
0.114 * img[:, :, 2]).astype(np.uint... | [
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.imshow",
"numpy.zeros",
"PIL.Image.open",
"matplotlib.pyplot.figure",
"numpy.array",
"matplotlib.pyplot.imsave",
"numpy.round"
] | [((400, 446), 'numpy.array', 'np.array', (['[[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]]'], {}), '([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]])\n', (408, 446), True, 'import numpy as np\n'), ((465, 511), 'numpy.array', 'np.array', (['[[-1, -1, -1], [0, 0, 0], [1, 1, 1]]'], {}), '([[-1, -1, -1], [0, 0, 0], [1, 1, 1]])\n', (473, 511), ... |
"""
SAVGOL INTERP.
--------------
"""
import argparse
from pathlib import Path
import matplotlib
import numpy as np
from embers.rf_tools.align_data import savgol_interp
from embers.rf_tools.colormaps import spectral
from matplotlib import pyplot as plt
matplotlib.use("Agg")
_spec, _ = spectral()
parser = argparse.A... | [
"argparse.ArgumentParser",
"matplotlib.pyplot.plot",
"numpy.median",
"matplotlib.pyplot.scatter",
"embers.rf_tools.colormaps.spectral",
"matplotlib.pyplot.legend",
"embers.rf_tools.align_data.savgol_interp",
"pathlib.Path",
"matplotlib.use",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.rcPa... | [((256, 277), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (270, 277), False, 'import matplotlib\n'), ((289, 299), 'embers.rf_tools.colormaps.spectral', 'spectral', ([], {}), '()\n', (297, 299), False, 'from embers.rf_tools.colormaps import spectral\n'), ((310, 407), 'argparse.ArgumentParser', ... |
import numpy as np
from config import GOPARAMETERS
def stone_features(board_state):
    # 16 planes, where every other plane represents the stones of a particular color
    # which means we track the stones of the last 8 moves.
    features = np.zeros([16, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.uint8)
    num_del... | [
"numpy.zeros",
"numpy.ones",
"numpy.cumsum",
"numpy.tile",
"numpy.rollaxis",
"numpy.concatenate"
] | [((245, 307), 'numpy.zeros', 'np.zeros', (['[16, GOPARAMETERS.N, GOPARAMETERS.N]'], {'dtype': 'np.uint8'}), '([16, GOPARAMETERS.N, GOPARAMETERS.N], dtype=np.uint8)\n', (253, 307), True, 'import numpy as np\n'), ((390, 433), 'numpy.cumsum', 'np.cumsum', (['board_state.board_deltas'], {'axis': '(0)'}), '(board_state.boar... |
from numpy import array, copy, concatenate
from torch import Tensor
from botorch.acquisition.multi_objective.monte_carlo import (
qExpectedHypervolumeImprovement, qNoisyExpectedHypervolumeImprovement
)
from botorch.posteriors import GPyTorchPosterior, Posterior, DeterministicPosterior
from gpytorch.distributions im... | [
"numpy.copy",
"gpytorch.lazy.BlockDiagLazyTensor",
"gpytorch.distributions.MultitaskMultivariateNormal",
"torch.cat",
"numpy.concatenate"
] | [((2408, 2442), 'torch.cat', 'torch.cat', (['[baseline_X, X]'], {'dim': '(-2)'}), '([baseline_X, X], dim=-2)\n', (2417, 2442), False, 'import torch\n'), ((2482, 2510), 'numpy.copy', 'copy', (['self.X_baseline_string'], {}), '(self.X_baseline_string)\n', (2486, 2510), False, 'from numpy import array, copy, concatenate\n... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from scipy import linalg
from numpy.testing import assert_almost_equal
from megamix.online import GaussianMixture
from megamix.online.base import _log_normal_matrix
from megamix.online import dist_matrix
from megamix.utils_testing import checking
from sc... | [
"numpy.sum",
"megamix.online.dist_matrix",
"scipy.linalg.cholesky",
"numpy.argmin",
"numpy.exp",
"scipy.special.logsumexp",
"megamix.online.GaussianMixture",
"numpy.random.randn",
"numpy.testing.assert_almost_equal",
"numpy.empty_like",
"numpy.finfo",
"pytest.raises",
"megamix.utils_testing.... | [((595, 634), 'megamix.utils_testing.checking.remove', 'checking.remove', (["(self.file_name + '.h5')"], {}), "(self.file_name + '.h5')\n", (610, 634), False, 'from megamix.utils_testing import checking\n'), ((699, 739), 'numpy.random.randn', 'np.random.randn', (['self.n_points', 'self.dim'], {}), '(self.n_points, self... |
import numpy as np
import random
from time import time
random.seed(42)
def semi_greedy_construction(window, number_items, weight_max, values_items, weight_items):
    efficiency = np.divide(values_items, weight_items)
    items = {}
    for i in range(number_items):
        items[i] = efficiency[i], values_items[i], weight_items[i... | [
"numpy.zeros",
"numpy.divide",
"random.seed",
"random.randint"
] | [((56, 71), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (67, 71), False, 'import random\n'), ((179, 216), 'numpy.divide', 'np.divide', (['values_items', 'weight_items'], {}), '(values_items, weight_items)\n', (188, 216), True, 'import numpy as np\n'), ((816, 854), 'numpy.zeros', 'np.zeros', (['number_items'... |
import numpy as np
m,n = [int(i) for i in '2 7'.strip().split(' ')]
data1=[
'0.18 0.89 109.85',
'1.0 0.26 155.72',
'0.92 0.11 137.66',
'0.07 0.37 76.17',
'0.85 0.16 139.75',
'0.99 0.41 162.6',
'0.87 0.47 151.77'
]
X = []
Y = []
for item in data1:
    data = item.strip().split(' ')
    X.append(data[:m])
    Y.append(data... | [
"numpy.dot",
"numpy.mean",
"numpy.array"
] | [((467, 485), 'numpy.array', 'np.array', (['X', 'float'], {}), '(X, float)\n', (475, 485), True, 'import numpy as np\n'), ((489, 507), 'numpy.array', 'np.array', (['Y', 'float'], {}), '(Y, float)\n', (497, 507), True, 'import numpy as np\n'), ((515, 537), 'numpy.array', 'np.array', (['X_new', 'float'], {}), '(X_new, fl... |
import argparse
import yaml
import os
from glob import glob
import inspect
import sys
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import time
import numpy as np
import torch
from torch.ut... | [
"argparse.ArgumentParser",
"segmentation_dataset.RawChromosomeDataset",
"models.UNet.UNet",
"yaml.dump",
"numpy.mean",
"models.Segnet.SegNet",
"models.AttentionUnet.AttU_Net",
"loss.DiceLoss",
"os.path.join",
"torch.utils.data.DataLoader",
"os.path.dirname",
"torch.load",
"models.FCN.FCN_Res... | [((196, 224), 'os.path.dirname', 'os.path.dirname', (['current_dir'], {}), '(current_dir)\n', (211, 224), False, 'import os\n'), ((226, 256), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parent_dir'], {}), '(0, parent_dir)\n', (241, 256), False, 'import sys\n'), ((3355, 3398), 'torch.load', 'torch.load', (['model_na... |
import os
import struct
import numpy as np
import xarray as xr
import netCDF4 as ds
from pathlib import Path
import matplotlib.pyplot as plt
import struct
import itertools
import Homogenizer_GUI
from enum import Enum
from collections import OrderedDict
import pickle
class UserPrefs(Enum):
    ScanFolde... | [
"matplotlib.pyplot.title",
"Homogenizer_GUI.Homogenizer_GUI",
"numpy.polyfit",
"numpy.angle",
"numpy.fft.ifft2",
"matplotlib.pyplot.imshow",
"os.path.exists",
"matplotlib.pyplot.colorbar",
"numpy.reshape",
"numpy.conj",
"matplotlib.pyplot.show",
"struct.unpack",
"itertools.tee",
"matplotli... | [((13222, 13237), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (13233, 13237), False, 'from collections import OrderedDict\n'), ((13260, 13275), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (13271, 13275), False, 'from collections import OrderedDict\n'), ((13545, 13578), 'Homog... |
"""Optimization
* :function:`.single_nested_cvrs`
* :function:`.dual_nested_cvrs`
* :function:`.single_cv`
* :function:`.chi2_test`
"""
# data wrangling
import numpy as np
import pandas as pd
from itertools import product
from scipy import stats
# validation
from sklearn.metrics import balanced_accuracy... | [
"pandas.DataFrame",
"sklearn.metrics.accuracy_score",
"sklearn.preprocessing.MinMaxScaler",
"sklearn.metrics.balanced_accuracy_score",
"sklearn.model_selection.KFold",
"sklearn.metrics.roc_auc_score",
"sklearn.metrics.f1_score",
"numpy.mean"
] | [((3295, 3323), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'hp_set'}), '(columns=hp_set)\n', (3307, 3323), True, 'import pandas as pd\n'), ((3350, 3400), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['hp_hat', 't_bcr', 'v_bcr']"}), "(columns=['hp_hat', 't_bcr', 'v_bcr'])\n", (3362, 3400), True, 'impo... |
from netCDF4 import Dataset
from dataclasses import dataclass, field
import os
import pickle
import sys
import shutil
import numpy as np
from variables import modelvar
@dataclass
class VariableInfo():
    nickname: str = ""
    dimensions: tuple = field(default_factory=lambda: ())
    name: str = ""
    units: str = ... | [
"netCDF4.Dataset",
"pickle.dump",
"os.path.join",
"os.makedirs",
"os.path.isdir",
"dataclasses.field",
"numpy.arange",
"shutil.copyfile",
"os.path.expanduser",
"numpy.prod"
] | [((250, 284), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : ())'}), '(default_factory=lambda : ())\n', (255, 284), False, 'from dataclasses import dataclass, field\n'), ((3871, 3890), 'numpy.prod', 'np.prod', (['grid.procs'], {}), '(grid.procs)\n', (3878, 3890), True, 'import numpy as np\n'), ((5009... |
import logging as log
import os
import base64
import json
import numpy as np
from paprika.restraints import DAT_restraint
from parmed.amber import AmberParm
from parmed import Structure
# https://stackoverflow.com/questions/27909658/json-encoder-and-decoder-for-complex-numpy-arrays
# https://stackoverflow.com/a/24375... | [
"paprika.restraints.DAT_restraint",
"logging.debug",
"json.loads",
"logging.warning",
"numpy.frombuffer",
"numpy.ascontiguousarray",
"base64.b64decode",
"json.dumps",
"logging.info",
"base64.b64encode",
"os.path.join"
] | [((2779, 2829), 'logging.debug', 'log.debug', (['"""Saving restraint information as JSON."""'], {}), "('Saving restraint information as JSON.')\n", (2788, 2829), True, 'import logging as log\n'), ((3099, 3152), 'logging.debug', 'log.debug', (['"""Loading restraint information from JSON."""'], {}), "('Loading restraint ... |
import numpy
from scipy.interpolate import InterpolatedUnivariateSpline as interpolate
from scipy.interpolate import interp1d
from cosmo4d.lab import (UseComplexSpaceOptimizer,
NBodyModel, LPTModel, ZAModel,
LBFGS, ParticleMesh)
#from cosmo4d.lab import mapbias as map
f... | [
"sys.path.append",
"cosmo4d.lab.ParticleMesh",
"nbodykit.lab.BigFileCatalog",
"nbodykit.cosmology.Cosmology.from_dict",
"yaml.load",
"scipy.interpolate.InterpolatedUnivariateSpline",
"solve.solve",
"os.makedirs",
"getbiasparams.eval_bfit",
"nbodykit.lab.BigFileMesh",
"cosmo4d.lab.NBodyModel",
... | [((716, 738), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (731, 738), False, 'import sys, os, json, yaml\n'), ((739, 767), 'sys.path.append', 'sys.path.append', (['"""../utils/"""'], {}), "('../utils/')\n", (754, 767), False, 'import sys, os, json, yaml\n'), ((1003, 1022), 'HImodels.ModelA',... |
from re import L
import sys
from typing import List
from tensorflow.python.ops.gen_array_ops import gather
sys.path.append('.')
import json
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from random import randint, randrange
from environment.base.base import BaseEnvironment
from envi... | [
"tensorflow.maximum",
"tensorflow.reshape",
"numpy.ones",
"sys.path.append",
"numpy.full",
"tensorflow.nn.softmax",
"numpy.zeros_like",
"tensorflow.random.uniform",
"environment.custom.resource_v3.resource.Resource",
"tensorflow_probability.distributions.Categorical",
"environment.custom.resourc... | [((109, 129), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (124, 129), False, 'import sys\n'), ((1760, 1823), 'numpy.full', 'np.full', (['(1, self.num_features)', 'self.EOS_CODE'], {'dtype': '"""float32"""'}), "((1, self.num_features), self.EOS_CODE, dtype='float32')\n", (1767, 1823), True, 'impo... |
import os
import numpy as np
import matplotlib.pyplot as plt
try:
    import python_scripts.nalu.io as nalu
except ImportError:
    raise ImportError('Download https://github.com/lawsonro3/python_scripts/blob/master/python_scripts/nalu/nalu_functions.py')
if __name__ == '__main__':
    root_dir = '/Users/mlawson/Goog... | [
"matplotlib.pyplot.title",
"matplotlib.pyplot.loglog",
"matplotlib.pyplot.plot",
"os.path.isdir",
"python_scripts.nalu.io.read_log",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.text",
"numpy.append",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.array",
"numpy.arange",
"matplotlib.pypl... | [((662, 687), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_gC_13'], {}), '(file_gC_13)\n', (675, 687), True, 'import python_scripts.nalu.io as nalu\n'), ((706, 742), 'numpy.mean', 'np.mean', (['t_gC_13[375:425, :]'], {'axis': '(0)'}), '(t_gC_13[375:425, :], axis=0)\n', (713, 742), True, 'import numpy as ... |
# -*- coding: utf-8 -*-
# COPYRIGHT 2017 <NAME>
# Truth network model analysis
from __future__ import print_function
import numpy as np
import tellurium as te
import antimony
import generate
import util
import clustering
def classify(setup, s_arr, c_arr):
"""
Ground truth classification. Returns initial per... | [
"numpy.array_equal",
"numpy.abs",
"util.perturbRate",
"generate.generateAntimonyNew",
"numpy.array",
"antimony.clearPreviousLoads",
"tellurium.loada",
"util.getPersistantOrder",
"clustering.getListOfCombinations"
] | [((785, 814), 'antimony.clearPreviousLoads', 'antimony.clearPreviousLoads', ([], {}), '()\n', (812, 814), False, 'import antimony\n'), ((1014, 1079), 'generate.generateAntimonyNew', 'generate.generateAntimonyNew', (['setup.t_net', 't_s', 't_k', 's_arr', 'c_arr'], {}), '(setup.t_net, t_s, t_k, s_arr, c_arr)\n', (1042, 1... |
"""
test_const_ionization.py
Author: <NAME>
Affiliation: University of Colorado at Boulder
Created on: Thu Oct 16 14:46:48 MDT 2014
Description:
"""
import ares
import numpy as np
import matplotlib.pyplot as pl
from ares.physics.CrossSections import PhotoIonizationCrossSection as sigma
s_per_yr = ares.physics.Co... | [
"numpy.abs",
"matplotlib.pyplot.close",
"numpy.allclose",
"matplotlib.pyplot.draw",
"matplotlib.pyplot.figure",
"ares.physics.CrossSections.PhotoIonizationCrossSection",
"numpy.exp",
"ares.simulations.RaySegment"
] | [((976, 1011), 'ares.simulations.RaySegment', 'ares.simulations.RaySegment', ([], {}), '(**pars)\n', (1003, 1011), False, 'import ares\n'), ((1095, 1124), 'matplotlib.pyplot.figure', 'pl.figure', (['(1)'], {'figsize': '(8, 12)'}), '(1, figsize=(8, 12))\n', (1104, 1124), True, 'import matplotlib.pyplot as pl\n'), ((1392... |
import scipy.io
import numpy as np
import sys
import os.path
import matplotlib.pyplot as plt
trans = [139.62,119.43,36.48,14.5]
mdata = []
def avgWaveSpeed(data,ampStart,ampEnd,freq,transducers,index1,index2):
    total = 0
    count = 0
    print(data)
    zer = highestPoint(data,ampStart,0)[0]
    tz = np.arange(a... | [
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"random.randint",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.setp",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.figure",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] | [((4374, 4384), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4382, 4384), True, 'import matplotlib.pyplot as plt\n'), ((309, 346), 'numpy.arange', 'np.arange', (['ampStart', 'ampEnd', '(1 / freq)'], {}), '(ampStart, ampEnd, 1 / freq)\n', (318, 346), True, 'import numpy as np\n'), ((2359, 2372), 'matplotlib.... |
#!/usr/bin/env python
# coding: utf-8
# # Registration 101
#
# Image registration is a critical tool in longitudinal monitoring:
#
# - Estimation of local changes
# - Comparison to same animal (less variance)
# - [3R's](https://www.nc3rs.org.uk/the-3rs)
#
#
#
# ## Goal of tutorial:
# - Introduce the concept of aligni... | [
"numpy.sum",
"numpy.abs",
"matplotlib.pyplot.figure",
"numpy.mean",
"ipywidgets.fixed",
"numpy.fft.ifft2",
"matplotlib.get_backend",
"sys.path.append",
"image_viewing.overlay_RGB",
"numpy.fft.ifftshift",
"numpy.copy",
"numpy.identity",
"image_viewing.horizontal_pane",
"scipy.ndimage.interp... | [((688, 719), 'sys.path.append', 'sys.path.append', (['"""reg101_files"""'], {}), "('reg101_files')\n", (703, 719), False, 'import sys\n'), ((1790, 1813), 'image_viewing.horizontal_pane', 'horizontal_pane', (['images'], {}), '(images)\n', (1805, 1813), False, 'from image_viewing import horizontal_pane, overlay_RGB, ove... |
# Python-bioformats is distributed under the GNU General Public
# License, but this file is licensed under the more permissive BSD
# license. See the accompanying file LICENSE for details.
#
# Copyright (c) 2009-2014 Broad Institute
# All rights reserved.
'''formatwriter.py - mechanism to wrap a bioformats WriterWrap... | [
"os.remove",
"javabridge.static_call",
"javabridge.get_env",
"javabridge.get_static_field",
"numpy.random.rand",
"javabridge.make_new",
"javabridge.make_method",
"javabridge.make_instance",
"numpy.array",
"wx.PySimpleApp",
"os.path.split",
"numpy.ascontiguousarray",
"javabridge.detach",
"j... | [((4342, 4357), 'javabridge.get_env', 'jutil.get_env', ([], {}), '()\n', (4355, 4357), True, 'import javabridge as jutil\n'), ((8923, 8938), 'javabridge.get_env', 'jutil.get_env', ([], {}), '()\n', (8936, 8938), True, 'import javabridge as jutil\n'), ((9239, 9385), 'javabridge.make_instance', 'jutil.make_instance', (['... |
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, softw... | [
"numpy.pad",
"numpy.power",
"numpy.asarray"
] | [((1683, 1704), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (1693, 1704), True, 'import numpy as np\n'), ((3342, 3363), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (3352, 3363), True, 'import numpy as np\n'), ((3465, 3498), 'numpy.pad', 'np.pad', (['rel', '(0, pad)', '"... |
import nerdle_cfg
import re
import luigi
import d6tflow
import itertools
import pandas as pd
import numpy as np
#helper functions
def check_len_int(nerdle):
    nerdle_str = ''.join(nerdle)
    try:
        return all(len(x)==len(str(int(x))) for x in re.split('\+|\-|\*|\/|==',nerdle_str))
    except:
        return ... | [
"pandas.DataFrame",
"re.split",
"itertools.combinations_with_replacement",
"numpy.array",
"pandas.Series",
"luigi.IntParameter"
] | [((762, 782), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (780, 782), False, 'import luigi\n'), ((1223, 1241), 'pandas.Series', 'pd.Series', (['nerdles'], {}), '(nerdles)\n', (1232, 1241), True, 'import pandas as pd\n'), ((1262, 1286), 'pandas.DataFrame', 'pd.DataFrame', (['nerdle_ser'], {}), '(nerdle... |
from ..helpers import eos
from ..helpers import alfaFunctions
from ..helpers.eosHelpers import A_fun, B_fun, getCubicCoefficients, getMixFugacity,getMixFugacityCoef, dAdT_fun
from ..solvers.cubicSolver import cubic_solver
from ..helpers import temperatureCorrelations as tempCorr
from ..helpers import mixing_rules
from... | [
"numpy.absolute",
"numpy.sum",
"numpy.log",
"scipy.integrate.quad",
"numpy.array"
] | [((680, 689), 'numpy.array', 'array', (['tc'], {}), '(tc)\n', (685, 689), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((698, 707), 'numpy.array', 'array', (['pc'], {}), '(pc)\n', (703, 707), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((723, 738), 'numpy.array', 'array', ... |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import math
import numpy as np
import tokenization
import six
import tensorflow as tf
from tensorflow import logging
class EvalResults(object):
    def __init__(self, capacity):... | [
"math.exp",
"tokenization.printable_text",
"csv.reader",
"tensorflow.logging.info",
"json.dumps",
"collections.defaultdict",
"numpy.mean",
"tensorflow.gfile.GFile",
"collections.namedtuple",
"tokenization.BasicTokenizer",
"collections.OrderedDict",
"tokenization.convert_to_unicode",
"six.ite... | [((30082, 30149), 'tensorflow.logging.info', 'logging.info', (["('Writing predictions to: %s' % output_prediction_file)"], {}), "('Writing predictions to: %s' % output_prediction_file)\n", (30094, 30149), False, 'from tensorflow import logging\n'), ((30156, 30212), 'tensorflow.logging.info', 'logging.info', (["('Writin... |
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import all_call.train
import numpy as np
import json
import sys
import pandas as pd
import re
import os
from glob import glob
from arguments import yaml_reader
# default parameters for inference
DEFAULT_MODEL_PARAMS = (-0.0107736, 0.00244419, 0.0, 0.0044... | [
"pandas.DataFrame",
"os.path.abspath",
"numpy.zeros_like",
"numpy.load",
"argparse.ArgumentParser",
"json.load",
"pandas.read_csv",
"os.path.dirname",
"arguments.yaml_reader.save_arguments",
"os.path.exists",
"numpy.zeros",
"glob.glob",
"arguments.yaml_reader.load_arguments",
"re.search",
... | [((757, 816), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'formatter_class': 'RawDescriptionHelpFormatter'}), '(formatter_class=RawDescriptionHelpFormatter)\n', (771, 816), False, 'from argparse import ArgumentParser, RawDescriptionHelpFormatter\n'), ((3315, 3336), 'os.path.abspath', 'os.path.abspath', (['path']... |
# coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
import Transform as Transform
import DiffDriveRobot
class Wheel(object):
"""docstring for Wheel."""
def __init__(self):
super(Wheel, self).__init__()
self.speed = 0
def setSpeed(self, speed):
self.speed = speed
... | [
"numpy.arctan2",
"matplotlib.pyplot.plot",
"numpy.transpose",
"Transform.rotate",
"numpy.sin",
"numpy.array",
"numpy.cos",
"numpy.sqrt"
] | [((587, 680), 'numpy.array', 'np.array', (['[[-150, -150], [-150, 150], [150, 150], [150, -150], [-150, -150]]'], {'dtype': 'float'}), '([[-150, -150], [-150, 150], [150, 150], [150, -150], [-150, -150]],\n dtype=float)\n', (595, 680), True, 'import numpy as np\n'), ((3291, 3321), 'matplotlib.pyplot.plot', 'plt.plot... |
"""
Created on 7/17/16 10:08 AM
@author: <NAME>, <NAME>
"""
from __future__ import division, print_function, absolute_import
import numpy as np
import psutil
import joblib
import time as tm
import h5py
import itertools
from numbers import Number
from multiprocessing import cpu_count
try:
from mpi4py import MPI
... | [
"numpy.floor",
"numpy.random.randint",
"numpy.mean",
"pyUSID.io.io_utils.recommend_cpu_cores",
"mpi4py.MPI.COMM_WORLD.barrier",
"numpy.unique",
"psutil.cpu_count",
"multiprocessing.cpu_count",
"mpi4py.MPI.Get_processor_name",
"mpi4py.MPI.COMM_WORLD.Get_size",
"pyUSID.io.io_utils.get_available_me... | [((4798, 4822), 'mpi4py.MPI.Get_processor_name', 'MPI.Get_processor_name', ([], {}), '()\n', (4820, 4822), False, 'from mpi4py import MPI\n'), ((5083, 5100), 'numpy.array', 'np.array', (['recvbuf'], {}), '(recvbuf)\n', (5091, 5100), True, 'import numpy as np\n'), ((5122, 5140), 'numpy.unique', 'np.unique', (['recvbuf']... |
import numpy as np
class TicTacToeGame:
    def __init__(self, size):
        self.m_SizeSize = size;
        self.m_Grid = np.zeros((size, size), np.int8)
        self.m_Grid.fill(-1)
        self.m_CurentPlayer = 0
    def Move(self, player, row, col):
        if self.IsMoveAllowed(player, row, col) =... | [
"numpy.zeros"
] | [((134, 165), 'numpy.zeros', 'np.zeros', (['(size, size)', 'np.int8'], {}), '((size, size), np.int8)\n', (142, 165), True, 'import numpy as np\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import unittest
from unittest import TestCase
import pkgutil
import io
import numpy as np
import pandas as pd
from kats.consts import... | [
"unittest.main",
"pkgutil.get_data",
"io.BytesIO",
"pandas.DataFrame",
"numpy.sum",
"kats.models.harmonic_regression.HarmonicRegressionModel",
"os.getcwd",
"kats.models.harmonic_regression.HarmonicRegressionParams",
"pandas.Series",
"kats.models.harmonic_regression.HarmonicRegressionModel.fourier_... | [((606, 646), 'pkgutil.get_data', 'pkgutil.get_data', (['ROOT', '(path + file_name)'], {}), '(ROOT, path + file_name)\n', (622, 646), False, 'import pkgutil\n'), ((1861, 1876), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1874, 1876), False, 'import unittest\n'), ((670, 693), 'io.BytesIO', 'io.BytesIO', (['data... |
import argparse
import torch
import numpy as np
import os
import data
from networks import domain_generator, domain_classifier
from utils import util
def optimize(opt):
    dataset_name = 'cifar10'
    generator_name = 'stylegan2-cc' # class conditional stylegan
    transform = data.get_transform(dataset_name, 'imv... | [
"numpy.save",
"argparse.ArgumentParser",
"data.get_dataset",
"os.makedirs",
"networks.domain_classifier.define_classifier",
"data.get_transform",
"os.path.isfile",
"networks.domain_generator.define_generator",
"torch.no_grad",
"os.path.join",
"utils.util.set_requires_grad"
] | [((283, 324), 'data.get_transform', 'data.get_transform', (['dataset_name', '"""imval"""'], {}), "(dataset_name, 'imval')\n", (301, 324), False, 'import data\n'), ((337, 422), 'data.get_dataset', 'data.get_dataset', (['dataset_name', 'opt.partition'], {'load_w': '(False)', 'transform': 'transform'}), '(dataset_name, op... |
'''Provide fundamental geometry calculations used by the scheduling.
'''
import math
import numpy as np
import brahe.data_models as bdm
from brahe.utils import fcross
from brahe.constants import RAD2DEG
from brahe.coordinates import sECEFtoENZ, sENZtoAZEL, sECEFtoGEOD, sGEODtoECEF
from brahe.relative_coordinates impo... | [
"brahe.coordinates.sENZtoAZEL",
"brahe.relative_coordinates.rCARTtoRTN",
"numpy.asarray",
"brahe.coordinates.sECEFtoGEOD",
"brahe.coordinates.sGEODtoECEF",
"brahe.coordinates.sECEFtoENZ",
"brahe.utils.fcross",
"numpy.array",
"numpy.linalg.norm",
"numpy.sign",
"numpy.dot"
] | [((923, 943), 'numpy.asarray', 'np.asarray', (['sat_ecef'], {}), '(sat_ecef)\n', (933, 943), True, 'import numpy as np\n'), ((959, 979), 'numpy.asarray', 'np.asarray', (['loc_ecef'], {}), '(loc_ecef)\n', (969, 979), True, 'import numpy as np\n'), ((1038, 1101), 'brahe.coordinates.sECEFtoENZ', 'sECEFtoENZ', (['loc_ecef[... |
import numpy as np
class LidarTools(object):
    '''
    Collection of helpers for processing LiDAR point cloud.
    '''
    def get_bev(self, points, resolution=0.1, pixel_values=None, generate_img=None):
        '''
        Returns bird's eye view of a LiDAR point cloud for a given resolution.
        Optional pixe... | [
"numpy.full_like",
"numpy.arctan2",
"numpy.logical_and",
"numpy.floor",
"numpy.zeros",
"numpy.argwhere",
"numpy.sqrt"
] | [((2055, 2076), 'numpy.full_like', 'np.full_like', (['x', '(True)'], {}), '(x, True)\n', (2067, 2076), True, 'import numpy as np\n'), ((1428, 1477), 'numpy.zeros', 'np.zeros', (['[img_height, img_width]'], {'dtype': 'np.uint8'}), '([img_height, img_width], dtype=np.uint8)\n', (1436, 1477), True, 'import numpy as np\n')... |
import argparse
import os
import os.path as osp
import pickle
import shutil
import tempfile
import mmcv
import torch
import torch.distributed as dist
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, load_checkpoint
from mmdet.apis import init_dist... | [
"mmcv.runner.get_dist_info",
"argparse.ArgumentParser",
"mmcv.mkdir_or_exist",
"torch.full",
"torch.distributed.all_gather",
"mmcv.Config.fromfile",
"shutil.rmtree",
"torch.no_grad",
"os.path.join",
"mmcv.imread",
"numpy.full",
"mmdet.models.build_detector",
"cv2.imwrite",
"os.path.exists"... | [((781, 803), 'mmcv.image.imread', 'mmcv.image.imread', (['img'], {}), '(img)\n', (798, 803), False, 'import mmcv\n'), ((4896, 4911), 'mmcv.runner.get_dist_info', 'get_dist_info', ([], {}), '()\n', (4909, 4911), False, 'from mmcv.runner import get_dist_info, load_checkpoint\n'), ((5631, 5646), 'mmcv.runner.get_dist_inf... |
import numpy as np
import gym
import torch
import random
from argparse import ArgumentParser
import os
import pandas as pd
import matplotlib.pyplot as plt
plt.style.use('ggplot')
from scipy.ndimage.filters import gaussian_filter1d
class Stats():
    def __init__(self, num_episodes=20000, num_states = 6, log_dir... | [
"scipy.ndimage.filters.gaussian_filter1d",
"argparse.ArgumentParser",
"random.sample",
"matplotlib.pyplot.style.use",
"numpy.mean",
"numpy.exp",
"os.path.join",
"collections.deque",
"pandas.DataFrame",
"numpy.std",
"matplotlib.pyplot.cla",
"numpy.linspace",
"pandas.Series",
"matplotlib.pyp... | [((163, 186), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (176, 186), True, 'import matplotlib.pyplot as plt\n'), ((1243, 1284), 'numpy.mean', 'np.mean', (['overall_stats_q_learning'], {'axis': '(0)'}), '(overall_stats_q_learning, axis=0)\n', (1250, 1284), True, 'import numpy... |
import os
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import math
import argparse
import math
import h5py
import numpy as np
import tensorflow as tf
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# tf.logging.set_verbosity(tf.logging.ERROR)
import socket
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
RO... | [
"os.mkdir",
"numpy.sum",
"argparse.ArgumentParser",
"numpy.argmax",
"tensorflow.maximum",
"tensorflow.ConfigProto",
"tensorflow.Variable",
"sys.stdout.flush",
"os.path.join",
"provider.loadDataFile",
"sys.path.append",
"os.path.abspath",
"os.path.dirname",
"tensorflow.to_int64",
"os.path... | [((329, 354), 'os.path.dirname', 'os.path.dirname', (['BASE_DIR'], {}), '(BASE_DIR)\n', (344, 354), False, 'import os\n'), ((355, 380), 'sys.path.append', 'sys.path.append', (['BASE_DIR'], {}), '(BASE_DIR)\n', (370, 380), False, 'import sys\n'), ((381, 406), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(R... |
# -*- coding: utf-8 -*-
"""
201901, Dr. <NAME>, Beijing & Xinglong, NAOC
202101-? Dr. <NAME> & Dr./Prof. <NAME>
Light_Curve_Pipeline
v3 (2021A) Upgrade from former version, remove unused code
"""
import numpy as np
import matplotlib
#matplotlib.use('Agg')
from matplotlib import pyplot as plt
from .JZ_... | [
"numpy.isscalar",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"numpy.where"
] | [((655, 697), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(nx / 50.0, ny / 50.0)'}), '(figsize=(nx / 50.0, ny / 50.0))\n', (665, 697), True, 'from matplotlib import pyplot as plt\n'), ((959, 978), 'numpy.where', 'np.where', (['(err < 0.1)'], {}), '(err < 0.1)\n', (967, 978), True, 'import numpy as np\n'... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 15 15:16:06 2018
@author: Arpit
"""
import numpy as np
import matplotlib.pyplot as plt
import threading
from settings import charts_folder
class GraphPlot:
lock = threading.Lock()
def __init__(self, name, xCnt=1, yCnt=1, labels=None):
... | [
"matplotlib.pyplot.plot",
"numpy.empty",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"threading.Lock",
"matplotlib.pyplot.figure"
] | [((239, 255), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (253, 255), False, 'import threading\n'), ((471, 502), 'numpy.empty', 'np.empty', (['(yCnt,)'], {'dtype': 'object'}), '((yCnt,), dtype=object)\n', (479, 502), True, 'import numpy as np\n'), ((781, 793), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}... |
import os
import random
import numpy as np
from scipy.spatial.distance import cdist
import cv2
import time
import torch
import torch.distributed as dist
import torch.nn as nn
import torch.nn.functional as F
# import torch.multiprocessing as mp
from torch.utils.data import DataLoader
from torch.optim import Adam, SGD
... | [
"package.loss.regularization._Regularization",
"numpy.stack",
"numpy.multiply",
"numpy.copy",
"torch.utils.data.DataLoader",
"torch.load",
"time.time",
"numpy.mean",
"package.loss.cmt_loss._CMT_loss",
"package.args.cmt_args.parse_config",
"torch.cuda.empty_cache",
"torch.nn.kneighbors",
"num... | [((1129, 1145), 'numpy.mean', 'np.mean', (['matches'], {}), '(matches)\n', (1136, 1145), True, 'import numpy as np\n'), ((1185, 1202), 'numpy.copy', 'np.copy', (['inputArr'], {}), '(inputArr)\n', (1192, 1202), True, 'import numpy as np\n'), ((1365, 1391), 'numpy.multiply', 'np.multiply', (['dup', 'inputArr'], {}), '(du... |
#!/usr/bin/env python
# coding: utf-8
# This software component is licensed by ST under BSD 3-Clause license,
# the "License"; You may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
# https://opensource.org/licenses/BSD-3-Clause
... | [
"numpy.load",
"tensorflow.lite.TFLiteConverter.from_keras_model_file"
] | [((723, 773), 'numpy.load', 'np.load', (['"""Asc_quant_representative_data_dummy.npz"""'], {}), "('Asc_quant_representative_data_dummy.npz')\n", (730, 773), True, 'import numpy as np\n'), ((1075, 1153), 'tensorflow.lite.TFLiteConverter.from_keras_model_file', 'tf.lite.TFLiteConverter.from_keras_model_file', (['"""Sessi... |
import numpy as np
from .utils import Timer
def run(size='large', repeats=3 ):
sizes = {'huge': 28000, 'large': 15000, 'small': 6000, 'tiny': 2000, 'test': 2}
n = sizes[size]
A = np.array(np.random.rand(n,n))
A = A@A.T
num_runs = repeats
print('num_runs =', num_runs)
results = []
... | [
"numpy.random.rand",
"numpy.linalg.cholesky"
] | [((208, 228), 'numpy.random.rand', 'np.random.rand', (['n', 'n'], {}), '(n, n)\n', (222, 228), True, 'import numpy as np\n'), ((417, 438), 'numpy.linalg.cholesky', 'np.linalg.cholesky', (['A'], {}), '(A)\n', (435, 438), True, 'import numpy as np\n')] |
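The benchmark row times np.linalg.cholesky on A = A @ A.T; a runnable sketch of that pattern (the n * eye jitter is an addition to guarantee positive definiteness, not part of the original):

import time
import numpy as np

n = 2000
A = np.random.rand(n, n)
A = A @ A.T + n * np.eye(n)  # symmetric positive definite
t0 = time.perf_counter()
L = np.linalg.cholesky(A)
print(f"cholesky on {n}x{n} took {time.perf_counter() - t0:.3f} s")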
'''Module to load and use GloVe Models.
Code Inspiration from:
https://www.kaggle.com/jhoward/improved-lstm-baseline-glove-dropout
'''
import os
import numpy as np
import pandas as pd
import urllib.request
from zipfile import ZipFile
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.cluster import... | [
"numpy.pad",
"pandas.DataFrame",
"sklearn.cluster.KMeans",
"numpy.asarray",
"os.path.realpath",
"numpy.array",
"pandas.Series",
"numpy.random.normal"
] | [((354, 380), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (370, 380), False, 'import os\n'), ((4413, 4485), 'numpy.random.normal', 'np.random.normal', (['self.emb_mean', 'self.emb_std', '(nb_words, self.emb_size)'], {}), '(self.emb_mean, self.emb_std, (nb_words, self.emb_size))\n', (4429... |
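The extract records np.random.normal(emb_mean, emb_std, (nb_words, emb_size)), i.e. the standard trick of initializing the embedding matrix from GloVe's global mean and std so out-of-vocabulary rows look statistically plausible; a sketch with made-up dimensions:

import numpy as np

emb_mean, emb_std, emb_size, nb_words = 0.0, 0.3, 50, 1000
embedding_matrix = np.random.normal(emb_mean, emb_std, (nb_words, emb_size))
# Rows for in-vocabulary words would then be overwritten with their GloVe vectors.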
"""
This file contains a function to generate a single synthetic tree, prepared for
multiprocessing.
"""
import pandas as pd
import numpy as np
# import dill as pickle
# import gzip
from syn_net.data_generation.make_dataset import synthetic_tree_generator
from syn_net.utils.data_utils import ReactionSet
path_reactio... | [
"syn_net.utils.data_utils.ReactionSet",
"pandas.read_csv",
"numpy.random.seed",
"syn_net.data_generation.make_dataset.synthetic_tree_generator"
] | [((584, 597), 'syn_net.utils.data_utils.ReactionSet', 'ReactionSet', ([], {}), '()\n', (595, 597), False, 'from syn_net.utils.data_utils import ReactionSet\n'), ((807, 824), 'numpy.random.seed', 'np.random.seed', (['_'], {}), '(_)\n', (821, 824), True, 'import numpy as np\n'), ((844, 904), 'syn_net.data_generation.make... |
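The extract shows np.random.seed(_) called per task before synthetic_tree_generator, the usual way to give each multiprocessing worker a distinct but reproducible stream; a minimal sketch of that pattern (run_task is a hypothetical name):

import numpy as np

def run_task(i):
    np.random.seed(i)  # seed with the task index for reproducibility
    return np.random.rand()

print([run_task(i) for i in range(3)])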
import logging
import numpy as np
import tensorflow as tf
from collections import OrderedDict
import utils
from clf_model_multitask import predict
def get_latest_checkpoint_and_log(logdir, filename):
init_checkpoint_path = utils.get_latest_model_checkpoint_path(logdir, filename)
logging.info('Checkpoint pat... | [
"tensorflow.random_uniform",
"tensorflow.train.Saver",
"tensorflow.gather",
"tensorflow.global_variables_initializer",
"numpy.asarray",
"logging.info",
"utils.get_latest_model_checkpoint_path",
"tensorflow.placeholder",
"numpy.array",
"clf_model_multitask.predict",
"tensorflow.Graph",
"collect... | [((231, 287), 'utils.get_latest_model_checkpoint_path', 'utils.get_latest_model_checkpoint_path', (['logdir', 'filename'], {}), '(logdir, filename)\n', (269, 287), False, 'import utils\n'), ((292, 350), 'logging.info', 'logging.info', (["('Checkpoint path: %s' % init_checkpoint_path)"], {}), "('Checkpoint path: %s' % i... |
from simulations import simulation, simulation2
from pandas import DataFrame
from pandas import Series
from pandas import concat
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, Bidirectional
from keras.laye... | [
"pandas.DataFrame",
"matplotlib.pyplot.show",
"math.sqrt",
"matplotlib.pyplot.plot",
"pandas.concat",
"keras.models.Sequential",
"matplotlib.pyplot.legend",
"sklearn.preprocessing.MinMaxScaler",
"keras.layers.LSTM",
"simulations.simulation.Simulation",
"keras.layers.Dense",
"numpy.array",
"p... | [((2515, 2540), 'simulations.simulation2.Simulator', 'simulation2.Simulator', (['(50)'], {}), '(50)\n', (2536, 2540), False, 'from simulations import simulation, simulation2\n'), ((2561, 2690), 'simulations.simulation.Simulation', 'simulation.Simulation', (['[[1, 1]]', '[[0.1, [0.2, 0.1], [15, 2], [30, 2]]]', '[[70.0, ... |
# MIT License
#
# Copyright (c) 2017 <NAME> and (c) 2020 Google LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to u... | [
"logging.Formatter",
"torch.set_num_threads",
"numpy.mean",
"third_party.a2c_ppo_acktr.algo.PPO",
"torch.device",
"third_party.a2c_ppo_acktr.algo.A2C_ACKTR",
"third_party.a2c_ppo_acktr.storage.RolloutStorage",
"torch.no_grad",
"os.path.join",
"third_party.a2c_ppo_acktr.utils.get_vec_normalize",
... | [((1657, 1687), 'sys.path.append', 'sys.path.append', (['"""third_party"""'], {}), "('third_party')\n", (1672, 1687), False, 'import sys\n'), ((1725, 1735), 'third_party.a2c_ppo_acktr.arguments.get_args', 'get_args', ([], {}), '()\n', (1733, 1735), False, 'from third_party.a2c_ppo_acktr.arguments import get_args\n'), (... |
import os
import logging
logging.basicConfig(level=logging.INFO)
import numpy as np
import matplotlib.pyplot as plt
from stompy.grid import paver
from stompy.spatial.linestring_utils import upsample_linearring,resample_linearring
from stompy.grid import paver
from stompy.spatial import field,constrained_delaunay,wkb2... | [
"stompy.spatial.field.PyApolloniusField",
"stompy.spatial.linestring_utils.upsample_linearring",
"logging.basicConfig",
"stompy.grid.paver.Paving",
"os.path.dirname",
"stompy.spatial.field.ConstantField",
"matplotlib.pyplot.axis",
"stompy.spatial.field.XYZField",
"matplotlib.pyplot.figure",
"numpy... | [((25, 64), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (44, 64), False, 'import logging\n'), ((541, 595), 'numpy.array', 'np.array', (['[[0, 0], [1000, 0], [1000, 1000], [0, 1000]]'], {}), '([[0, 0], [1000, 0], [1000, 1000], [0, 1000]])\n', (549, 595), True,... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>
from collections import defaultdict
import os
import re
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import numpy as np
from pandas import DataFrame
import scipy.stats
import seaborn as sns
import lda_metrics
N_PROPS_LIST... | [
"pandas.DataFrame",
"seaborn.heatmap",
"seaborn.factorplot",
"re.match",
"collections.defaultdict",
"matplotlib.pyplot.figure",
"matplotlib.use",
"numpy.mean",
"seaborn.set",
"seaborn.FacetGrid",
"matplotlib.pyplot.savefig"
] | [((138, 159), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (152, 159), False, 'import matplotlib\n'), ((426, 470), 'seaborn.set', 'sns.set', ([], {'style': '"""whitegrid"""', 'context': '"""poster"""'}), "(style='whitegrid', context='poster')\n", (433, 470), True, 'import seaborn as sns\n'), ((... |
from flask import Flask
from flask import request, jsonify
import numpy as np
import torch
from flask_cors import CORS, cross_origin
import socket
import argparse
import random
import json
import re
from tokenize_code import tokenize_code
from serverHelpers import notebook_to_frontend
from gensim.models.doc2vec impo... | [
"argparse.ArgumentParser",
"json.loads",
"flask_cors.CORS",
"flask.Flask",
"RetrievalDB_doc2vec.inferenceRNN_doc2vec",
"socket.gethostbyname",
"RetrievalDB_CodeBERT.RetrievalDB_CodeBERT",
"socket.gethostname",
"numpy.random.randint",
"flask.jsonify",
"RetrievalDB_CodeBERT.inferenceRNN_CodeBERT",... | [((835, 880), 'RetrievalDB_CodeBERT.RetrievalDB_CodeBERT', 'RetrievalDB_CodeBERT', (['PATH_TO_CODEBERT_MODELS'], {}), '(PATH_TO_CODEBERT_MODELS)\n', (855, 880), False, 'from RetrievalDB_CodeBERT import RetrievalDB_CodeBERT, inferenceRNN_CodeBERT\n'), ((887, 902), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\... |
import pysmurf
#S = pysmurf.SmurfControl(make_logfile=False,setup=False,epics_root='test_epics',cfg_file='/usr/local/controls/Applications/smurf/pysmurf/pysmurf/cfg_files/experiment_fp28_smurfsrv04.cfg')
import numpy as np
import time
Vrange=np.linspace(0,0.195/6.,100)+S.get_tes_bias_bipolar(3)
Vrange=[Vrange,Vrang... | [
"numpy.array",
"numpy.linspace",
"time.sleep"
] | [((246, 278), 'numpy.linspace', 'np.linspace', (['(0)', '(0.195 / 6.0)', '(100)'], {}), '(0, 0.195 / 6.0, 100)\n', (257, 278), True, 'import numpy as np\n'), ((336, 352), 'numpy.array', 'np.array', (['Vrange'], {}), '(Vrange)\n', (344, 352), True, 'import numpy as np\n'), ((447, 464), 'time.sleep', 'time.sleep', (['(0.... |
# Reference Book: Python Data Science Handbook (page:(70-77))
# Date(13 April, 2019) Day-3, Time = 3:25 PM
# This section covers the use of Boolean masks to examine and manipulate values
# within NumPy arrays.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn; seaborn.set() #set... | [
"numpy.count_nonzero",
"numpy.sum",
"numpy.median",
"pandas.read_csv",
"numpy.random.RandomState",
"numpy.any",
"numpy.max",
"numpy.array",
"numpy.arange",
"seaborn.set",
"numpy.all"
] | [((302, 315), 'seaborn.set', 'seaborn.set', ([], {}), '()\n', (313, 315), False, 'import seaborn\n'), ((1941, 1966), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5]'], {}), '([1, 2, 3, 4, 5])\n', (1949, 1966), True, 'import numpy as np\n'), ((2867, 2891), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {})... |
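The Boolean-masking row maps directly onto a few core NumPy idioms (np.count_nonzero, np.sum, np.any, np.all applied to comparison arrays); a compact sketch of what those calls do:

import numpy as np

rng = np.random.RandomState(0)
x = rng.randint(10, size=(3, 4))
mask = x < 6
print(np.count_nonzero(mask))   # how many entries are < 6
print(np.sum(mask, axis=1))      # per-row counts (True counts as 1)
print(np.any(x > 8), np.all(x < 10))
print(x[mask])                 # masked selection returns a 1-D array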
from numpy.testing._private.utils import assert_allclose
from sysidentpy.polynomial_basis import PolynomialNarmax
from sysidentpy.utils.generate_data import get_miso_data, get_siso_data
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_equal
from numpy.testing import assert_raises
from sysi... | [
"numpy.testing.assert_raises",
"numpy.testing.assert_almost_equal",
"sysidentpy.polynomial_basis.SimulatePolynomialNarmax",
"numpy.array",
"sysidentpy.utils.generate_data.get_siso_data"
] | [((428, 454), 'sysidentpy.polynomial_basis.SimulatePolynomialNarmax', 'SimulatePolynomialNarmax', ([], {}), '()\n', (452, 454), False, 'from sysidentpy.polynomial_basis import SimulatePolynomialNarmax\n'), ((467, 513), 'numpy.array', 'np.array', (['[[1001, 0], [2001, 1001], [2002, 0]]'], {}), '([[1001, 0], [2001, 1001]... |
"""
Unit and regression test for the tau_screened_coulomb method.
"""
from ThermoElectric import tau_screened_coulomb
import numpy as np
from pytest import approx
def test_tau_screened_coulomb():
energy = np.array([[0.1]])
e_eff_mass = np.array([[0.23 * 9.109e-31]])
dielectric = 11.7
imp = np.array(... | [
"pytest.approx",
"numpy.array",
"ThermoElectric.tau_screened_coulomb"
] | [((213, 230), 'numpy.array', 'np.array', (['[[0.1]]'], {}), '([[0.1]])\n', (221, 230), True, 'import numpy as np\n'), ((248, 278), 'numpy.array', 'np.array', (['[[0.23 * 9.109e-31]]'], {}), '([[0.23 * 9.109e-31]])\n', (256, 278), True, 'import numpy as np\n'), ((311, 330), 'numpy.array', 'np.array', (['[[1e+23]]'], {})... |
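The test row pairs np.array inputs with pytest.approx; a self-contained illustration of that pattern (relaxation_time and all numbers are hypothetical, not ThermoElectric's actual model):

import numpy as np
from pytest import approx

def relaxation_time(energy):
    # hypothetical stand-in for tau_screened_coulomb
    return 1e-14 * np.sqrt(energy)

assert relaxation_time(np.array([[0.1]])).item() == approx(3.16e-15, rel=1e-2)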
# - * - coding: utf-8 - * -
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches
def ecg_fixpeaks(rpeaks, sampling_rate=1000, iterative=True, show=False):
"""Correct R-peaks location based on their interval (RRi).
Identify erroneous inter-beat-intervals. Lipponen &... | [
"numpy.pad",
"pandas.DataFrame",
"numpy.abs",
"numpy.concatenate",
"numpy.logical_and",
"numpy.ravel",
"numpy.zeros",
"numpy.insert",
"numpy.any",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.arange",
"numpy.array",
"numpy.max",
"numpy.min",
"numpy.delete",
"numpy.all",
"numpy.... | [((3895, 3911), 'numpy.ravel', 'np.ravel', (['rpeaks'], {}), '(rpeaks)\n', (3903, 3911), True, 'import numpy as np\n'), ((4246, 4261), 'numpy.mean', 'np.mean', (['rr[1:]'], {}), '(rr[1:])\n', (4253, 4261), True, 'import numpy as np\n'), ((4513, 4539), 'numpy.ediff1d', 'np.ediff1d', (['rr'], {'to_begin': '(0)'}), '(rr, ... |
import numpy as np
import matplotlib.pyplot as plt
import sys
import math
import random
import operator
def euclidean(x, x_p):
return ((x[0] - x_p[0]) ** 2 + (x[1] - x_p[1]) ** 2) ** 0.5
def greatest_euclidean(data, centers):
maxi = {}
for x in centers:
for x_p in data:
euc = euclidean... | [
"numpy.random.uniform",
"matplotlib.pyplot.show",
"random.randint",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"operator.itemgetter"
] | [((862, 886), 'random.randint', 'random.randint', (['(0)', '(N - 1)'], {}), '(0, N - 1)\n', (876, 886), False, 'import random\n'), ((2213, 2246), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x1"""'], {'color': '"""#1C2833"""'}), "('x1', color='#1C2833')\n", (2223, 2246), True, 'import matplotlib.pyplot as plt\n'), (... |
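The row's greatest_euclidean is cut off mid-definition; a self-contained sketch of the greedy farthest-point center selection it appears to implement (k and the data literal are made up):

import random

def euclidean(x, x_p):
    return ((x[0] - x_p[0]) ** 2 + (x[1] - x_p[1]) ** 2) ** 0.5

def farthest_point_centers(data, k):
    centers = [data[random.randint(0, len(data) - 1)]]
    while len(centers) < k:
        # pick the point whose nearest existing center is farthest away
        nxt = max(data, key=lambda p: min(euclidean(c, p) for c in centers))
        centers.append(nxt)
    return centers

pts = [(0, 0), (1, 0), (10, 10), (0, 9)]
print(farthest_point_centers(pts, 2))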
# func.py
import numpy as np
from numba import njit, jit, prange
#------------------------ Distance Functions -----------------------#
def corr_dist(A):
return 1 - np.corrcoef(A)
def abs_diff(A):
target_matrix = np.zeros((len(A), len(A)))
mat_dim = target_matrix.shape[0]
for r in range(mat_dim):
for c in ra... | [
"numpy.subtract",
"numpy.log",
"numpy.corrcoef",
"numba.njit",
"numpy.zeros",
"numpy.sqrt"
] | [((2052, 2071), 'numba.njit', 'njit', ([], {'parallel': '(True)'}), '(parallel=True)\n', (2056, 2071), False, 'from numba import njit, jit, prange\n'), ((1644, 1666), 'numpy.sqrt', 'np.sqrt', (['weighted_dist'], {}), '(weighted_dist)\n', (1651, 1666), True, 'import numpy as np\n'), ((1776, 1793), 'numpy.subtract', 'np.... |
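corr_dist above is complete (1 - np.corrcoef), while abs_diff's double loop is truncated; a broadcast equivalent of the pairwise absolute-difference matrix, assuming scalar entries (a sketch, not the row's Numba version):

import numpy as np

def corr_dist(A):
    # correlation distance between the rows of A
    return 1 - np.corrcoef(A)

def abs_diff_vec(v):
    # pairwise |v_i - v_j| via broadcasting instead of the nested loops
    v = np.asarray(v, dtype=float)
    return np.abs(np.subtract.outer(v, v))

print(abs_diff_vec([1.0, 3.0, 6.0]))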
import warnings
import biorbd_casadi as biorbd
import numpy as np
from scipy import interpolate
from bioptim import (
OdeSolver,
Node,
OptimalControlProgram,
ConstraintFcn,
DynamicsFcn,
ObjectiveFcn,
QAndQDotBounds,
QAndQDotAndQDDotBounds,
ConstraintList,
ObjectiveList,
Dyna... | [
"bioptim.BoundsList",
"bioptim.OdeSolver.COLLOCATION",
"bioptim.ObjectiveList",
"bioptim.PhaseTransitionList",
"bioptim.InitialGuessList",
"biorbd_casadi.Model",
"numpy.zeros",
"bioptim.QAndQDotBounds",
"numpy.array",
"bioptim.OptimalControlProgram",
"numpy.linspace",
"bioptim.ConstraintList",... | [((864, 887), 'bioptim.OdeSolver.COLLOCATION', 'OdeSolver.COLLOCATION', ([], {}), '()\n', (885, 887), False, 'from bioptim import OdeSolver, Node, OptimalControlProgram, ConstraintFcn, DynamicsFcn, ObjectiveFcn, QAndQDotBounds, QAndQDotAndQDDotBounds, ConstraintList, ObjectiveList, DynamicsList, Bounds, BoundsList, Ini... |
import os
from pathlib import Path
import numpy as np
import pandas as pd
import spacy
from spacy.compat import pickle
import lz4.frame
from tqdm import tqdm
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard, EarlyStopping
from ehr_classification.tokenizer import get_features, get_custom_tokenizer
fro... | [
"ehr_classification.classifier_model.compile_lstm",
"tensorflow.keras.callbacks.TensorBoard",
"plac.call",
"tensorflow.keras.callbacks.ModelCheckpoint",
"ehr_classification.tokenizer.get_custom_tokenizer",
"pathlib.Path",
"tensorflow.keras.callbacks.EarlyStopping",
"pandas.read_parquet",
"numpy.arra... | [((1076, 1110), 'ehr_classification.tokenizer.get_custom_tokenizer', 'get_custom_tokenizer', (['word_vectors'], {}), '(word_vectors)\n', (1096, 1110), False, 'from ehr_classification.tokenizer import get_features, get_custom_tokenizer\n'), ((1163, 1288), 'ehr_classification.classifier_model.compile_lstm', 'compile_lstm... |
import math
import warnings
from collections import OrderedDict
from enum import Enum
import efel
import matplotlib.pyplot as plt
import numpy as np
from lib.Model import Model
from lib.NrnModel import NrnModel
class Level(Enum):
HIGH = 0.5
MID = 5.0
LOW = 10.0
VLOW = 50.0
EFEL_NAME_MAP = {
... | [
"matplotlib.pyplot.show",
"warnings.filterwarnings",
"efel.setDoubleSetting",
"efel.setIntSetting",
"efel.getFeatureValues",
"numpy.mean",
"math.isclose",
"numpy.random.rand",
"matplotlib.pyplot.subplots"
] | [((5889, 5903), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5901, 5903), True, 'import matplotlib.pyplot as plt\n'), ((6657, 6667), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6665, 6667), True, 'import matplotlib.pyplot as plt\n'), ((2056, 2121), 'efel.setDoubleSetting', 'efel.setDoub... |
#!/usr/bin/env python3.7
import unittest
import numpy
import os
import librosa
import soundfile
import sys
from tempfile import TemporaryDirectory
def main():
dest = "tests/test_1_note_Csharp3.wav"
tone = librosa.tone(138.59, sr=22050, length=44100)
soundfile.write(dest, tone, 22050)
print("Created {... | [
"librosa.tone",
"numpy.zeros",
"soundfile.write"
] | [((216, 260), 'librosa.tone', 'librosa.tone', (['(138.59)'], {'sr': '(22050)', 'length': '(44100)'}), '(138.59, sr=22050, length=44100)\n', (228, 260), False, 'import librosa\n'), ((265, 299), 'soundfile.write', 'soundfile.write', (['dest', 'tone', '(22050)'], {}), '(dest, tone, 22050)\n', (280, 299), False, 'import so... |
import numpy as np
import itertools
from scintillations.stream import modulate as apply_turbulence
from scintillations.stream import transverse_speed
from streaming.stream import Stream, BlockStream
from streaming.signal import *
import streaming.signal
import logging
from acoustics.signal import impulse_response_real... | [
"numpy.log2",
"acoustics.signal.impulse_response_real_even"
] | [((2442, 2478), 'acoustics.signal.impulse_response_real_even', 'impulse_response_real_even', (['s', 'ntaps'], {}), '(s, ntaps)\n', (2468, 2478), False, 'from acoustics.signal import impulse_response_real_even\n'), ((4702, 4712), 'numpy.log2', 'np.log2', (['x'], {}), '(x)\n', (4709, 4712), True, 'import numpy as np\n')] |
#!/usr/bin/env python
#Copyright (c) 2014, <NAME> <<EMAIL>>
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#* Redistributions of source code must retain the above copyright notice, this
# list of... | [
"os.mkdir",
"subprocess.Popen",
"os.remove",
"optparse.OptionParser",
"converter.Img_conv",
"numpy.ones",
"os.kill",
"threading.Event",
"glob.glob",
"cola.ComponentLabeling",
"threading.Semaphore",
"re.compile"
] | [((21936, 21950), 'optparse.OptionParser', 'OptionParser', ([], {}), '()\n', (21948, 21950), False, 'from optparse import OptionParser\n'), ((2045, 2080), 'glob.glob', 'glob.glob', (["(ccl_c_dir + '/img/*.pbm')"], {}), "(ccl_c_dir + '/img/*.pbm')\n", (2054, 2080), False, 'import glob\n'), ((2130, 2152), 're.compile', '... |
"""
Simple audio clustering
1. Get the embeddings - at an interval of 0.5s each
2. Get the VAD - variable interval
3. Get embeddings for a VAD interval -> Take average of the embeddings
4. Get the ground truth for embedding for each speaker - marked 0.5s interval
5. L2 Normalize the embeddings before taking a distance ... | [
"pandas.DataFrame",
"yaml.load",
"json.load",
"argparse.ArgumentParser",
"os.makedirs",
"yaml.dump",
"os.path.exists",
"isat_diarization.gen_embeddings",
"utils.print_list",
"pickle.load",
"numpy.linalg.norm",
"numpy.dot",
"os.path.join"
] | [((1913, 1946), 'numpy.linalg.norm', 'np.linalg.norm', (['embeddings'], {'ord': '(2)'}), '(embeddings, ord=2)\n', (1927, 1946), True, 'import numpy as np\n'), ((6918, 6932), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (6930, 6932), True, 'import pandas as pd\n'), ((7031, 7078), 'os.path.join', 'os.path.join',... |
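Step 5 of the row's recipe (L2-normalize embeddings before taking a distance) matches the np.linalg.norm and np.dot calls in its extract; a minimal cosine-similarity sketch under that reading:

import numpy as np

def cosine_similarity(a, b):
    # after L2 normalization, the dot product equals cosine similarity
    a = a / np.linalg.norm(a, ord=2)
    b = b / np.linalg.norm(b, ord=2)
    return np.dot(a, b)

print(cosine_similarity(np.array([1.0, 0.0]), np.array([1.0, 1.0])))  # ~0.707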
import numpy as np
from distributions.distribution import Distribution
class NonParametric(Distribution):
"""
Provides functions for a non-parametric forecast distribution.
"""
@staticmethod
def pdf(x, pdf_x, x_eval):
pass
@staticmethod
def cdf(x, cdf_x, x_eval):
"""
... | [
"numpy.trapz",
"numpy.maximum",
"numpy.searchsorted",
"numpy.array",
"numpy.arange"
] | [((522, 548), 'numpy.searchsorted', 'np.searchsorted', (['x_eval', 'x'], {}), '(x_eval, x)\n', (537, 548), True, 'import numpy as np\n'), ((619, 654), 'numpy.maximum', 'np.maximum', (['(0)', '(insertion_points - 1)'], {}), '(0, insertion_points - 1)\n', (629, 654), True, 'import numpy as np\n'), ((1061, 1135), 'numpy.t... |
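The extract above pins down the classic step-function CDF lookup: np.searchsorted for insertion points, then np.maximum(0, idx - 1) to clamp to the left grid point. A self-contained sketch (grid values are made up):

import numpy as np

x_eval = np.array([0.0, 1.0, 2.0, 3.0])  # grid on which the CDF is known
cdf_x = np.array([0.1, 0.4, 0.8, 1.0])
x = np.array([0.5, 2.5, -1.0])        # query points

idx = np.searchsorted(x_eval, x)      # insertion points
idx = np.maximum(0, idx - 1)           # clamp to the left grid point
print(cdf_x[idx])                      # piecewise-constant CDF values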
import numpy as np
from math import log, sqrt, ceil
import random
import string
from copy import copy
import pyximport
from tabulate import tabulate
pyximport.install()
from ..util import math_functions
import matplotlib.pyplot as plt
import textwrap
from textwrap import dedent
from multiprocessing import Pool
from ... | [
"matplotlib.pyplot.show",
"numpy.sum",
"numpy.vectorize",
"numpy.copy",
"numpy.multiply",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.plot",
"math.sqrt",
"copy.copy",
"random.choice",
"numpy.logical_and",
"textwrap.TextWrapper",
"numpy.array",
"tabulate.tabulate",
"numpy.arange",
"... | [((149, 168), 'pyximport.install', 'pyximport.install', ([], {}), '()\n', (166, 168), False, 'import pyximport\n'), ((10474, 10486), 'copy.copy', 'copy', (['ranges'], {}), '(ranges)\n', (10478, 10486), False, 'from copy import copy\n'), ((10860, 10872), 'copy.copy', 'copy', (['ranges'], {}), '(ranges)\n', (10864, 10872... |
import numpy as np
a = np.array([
[1, 2, 3],
[4, 5, 6]
])
print("print(a)")
print(a)
print()
print("print(a.T)")
print(a.T)
print()
print("print(a.dot(2))")
print(a.dot(2))
print()
print("print(a.dot(np.array([2, 2, 2])))")
print(a.dot(np.array([2, 2, 2])))
print()
| [
"numpy.array"
] | [((24, 56), 'numpy.array', 'np.array', (['[[1, 2, 3], [4, 5, 6]]'], {}), '([[1, 2, 3], [4, 5, 6]])\n', (32, 56), True, 'import numpy as np\n'), ((250, 269), 'numpy.array', 'np.array', (['[2, 2, 2]'], {}), '([2, 2, 2])\n', (258, 269), True, 'import numpy as np\n')] |
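A brief note on the two dot calls in the demo above: with a scalar argument np.dot reduces to elementwise scaling, and with a vector it is an ordinary matrix-vector product. The same demo in modern operator notation (a sketch):

import numpy as np

a = np.array([[1, 2, 3], [4, 5, 6]])
print(a * 2)                    # a.dot(2): a scalar argument just scales elementwise
print(a @ np.array([2, 2, 2]))  # a.dot(vector): matrix-vector product -> [12 30]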
import random
import numpy
import torch
from backobs.integration import extend as backobs_extend
from backobs.integration import (
extend_with_access_unreduced_loss as backobs_extend_with_access_unreduced_loss,
)
def set_deepobs_seed(seed=0):
"""Set all seeds used by DeepOBS."""
random.seed(seed)
nu... | [
"numpy.random.seed",
"torch.manual_seed",
"numpy.logical_not",
"numpy.isclose",
"random.seed",
"backobs.integration.extend_with_access_unreduced_loss",
"torch.allclose",
"backobs.integration.extend"
] | [((296, 313), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (307, 313), False, 'import random\n'), ((318, 341), 'numpy.random.seed', 'numpy.random.seed', (['seed'], {}), '(seed)\n', (335, 341), False, 'import numpy\n'), ((346, 369), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (363,... |