column        type     length range
code          string   31 to 1.05M characters
apis          list
extract_api   string   97 to 1.91M characters
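Each row that follows pairs a code snippet (code) with the qualified APIs it calls (apis) and a string of per-call extraction records (extract_api). The sketch below shows one way to consume a row; the toy row contents are hypothetical, and the record-field meanings are inferred from the examples in this dump rather than taken from any published spec. The extract_api string parses with ast.literal_eval, and its character spans index back into the code column.

```python
import ast

# Hypothetical row mirroring the three columns above; contents invented for illustration.
row = {
    "code": "import numpy as np\ncdfs = [0.1, 0.9]\nout = np.asarray(cdfs)\n",
    "apis": ["numpy.asarray"],
    "extract_api": "[((43, 59), 'numpy.asarray', 'np.asarray', (['cdfs'], {}),"
                   " '(cdfs)\\n', (53, 59), True, 'import numpy as np\\n')]",
}

# extract_api holds a Python literal; literal_eval restores the list of records.
# Inferred field layout: (call span, qualified name, local callee, (args, kwargs),
# argument source, argument span, aliased-import flag, originating import line).
for rec in ast.literal_eval(row["extract_api"]):
    (start, end), qualname, callee, sig, arg_src, arg_span, aliased, imp = rec
    # The span slices the original call expression back out of the code column.
    assert row["code"][start:end] == callee + arg_src.rstrip("\n")  # holds for this toy row
    print(qualname, "->", row["code"][start:end])
```

The apis column then looks like the deduplicated set of qualified names across a row's extract_api records.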
#Author: <NAME> #Contact: <EMAIL> #Date: Aug 02, 2020 import numpy as np def cal_emp_cdf(insamples): ''' This is a function to calculate the empirical CDF of Dirichlet-distributed facies proportion samples. Variables: insamples - input samples of facies proportions, 3D array, [n_seis_feature...
[ "numpy.count_nonzero", "numpy.asarray" ]
[((598, 614), 'numpy.asarray', 'np.asarray', (['cdfs'], {}), '(cdfs)\n', (608, 614), True, 'import numpy as np\n'), ((469, 516), 'numpy.count_nonzero', 'np.count_nonzero', (['(samples[j, 0] > samples[:, 0])'], {}), '(samples[j, 0] > samples[:, 0])\n', (485, 516), True, 'import numpy as np\n')]
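The records themselves look like the output of a small AST pass over the code column. Purely as an illustrative sketch, and not necessarily the pipeline that produced this dataset, the following resolves import aliases and reports each call made through them together with its source text:

```python
import ast

def extract_calls(source):
    """Collect (qualified_name, call_source) for calls made through imports.

    A simplified stand-in for the extraction behind the extract_api column:
    only direct alias calls (np.arange(...)) and from-imports (glob(...))
    are resolved; chained attributes such as np.linalg.norm are ignored.
    """
    tree = ast.parse(source)
    aliases = {}  # local name -> qualified path, e.g. {'np': 'numpy'}
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for a in node.names:
                aliases[a.asname or a.name] = a.name
        elif isinstance(node, ast.ImportFrom) and node.module:
            for a in node.names:
                aliases[a.asname or a.name] = f"{node.module}.{a.name}"
    records = []
    for node in ast.walk(tree):
        if not isinstance(node, ast.Call):
            continue
        func = node.func
        qualname = None
        if isinstance(func, ast.Name) and func.id in aliases:
            qualname = aliases[func.id]  # e.g. from glob import glob; glob(...)
        elif (isinstance(func, ast.Attribute) and isinstance(func.value, ast.Name)
                and func.value.id in aliases):
            qualname = f"{aliases[func.value.id]}.{func.attr}"  # e.g. np.arange(...)
        if qualname:
            records.append((qualname, ast.get_source_segment(source, node)))
    return records

print(extract_calls("import numpy as np\nx = np.arange(10)\n"))
# -> [('numpy.arange', 'np.arange(10)')]
```

Character offsets like the (598, 614) spans in the real records would additionally need line/column-to-offset bookkeeping, which this sketch skips.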
import cv2 import numpy as np import matplotlib.pyplot as plt from skimage.filters import gabor import mahotas as mt import pandas as pd from glob import glob from skimage.feature import local_binary_pattern def fun1(img_mask,Label): count = 0 gaborenergy1 = [] gaborentropy1 = [] w1=[] ...
[ "numpy.uint8", "numpy.sqrt", "numpy.array", "mahotas.features.haralick", "numpy.mean", "numpy.histogram", "cv2.threshold", "cv2.arcLength", "cv2.contourArea", "cv2.minAreaRect", "pandas.DataFrame", "glob.glob", "cv2.drawContours", "numpy.ones", "cv2.boxPoints", "cv2.boundingRect", "n...
[((879, 893), 'glob.glob', 'glob', (['img_mask'], {}), '(img_mask)\n', (883, 893), False, 'from glob import glob\n'), ((8435, 8454), 'pandas.DataFrame', 'pd.DataFrame', (['dict1'], {}), '(dict1)\n', (8447, 8454), True, 'import pandas as pd\n'), ((1041, 1055), 'cv2.imread', 'cv2.imread', (['fn'], {}), '(fn)\n', (1051, 1...
from __future__ import annotations from typing import Any import numpy as np AR_i8: np.ndarray[Any, np.dtype[np.int_]] = np.arange(10) ar_iter = np.lib.Arrayterator(AR_i8) ar_iter.var ar_iter.buf_size ar_iter.start ar_iter.stop ar_iter.step ar_iter.shape ar_iter.flat ar_iter.__array__() for i in ar_iter: pass...
[ "numpy.lib.Arrayterator", "numpy.arange" ]
[((124, 137), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (133, 137), True, 'import numpy as np\n'), ((148, 174), 'numpy.lib.Arrayterator', 'np.lib.Arrayterator', (['AR_i8'], {}), '(AR_i8)\n', (167, 174), True, 'import numpy as np\n')]
import numpy as np import eqsig from liquepy.element.models import ShearTest from liquepy.element import assess def test_with_one_cycle_no_dissipation(): strs = np.array([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0]) tau = np.array([0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0]) ...
[ "liquepy.element.assess.calc_diss_energy_fd", "numpy.isclose", "liquepy.element.assess.get_energy_peaks_for_cyclic_loading", "liquepy.element.models.ShearTest", "numpy.array", "numpy.linspace", "liquepy.element.assess.average_of_absolute_via_trapz", "numpy.cos", "numpy.sin", "liquepy.element.asses...
[((169, 237), 'numpy.array', 'np.array', (['[0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0]'], {}), '([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0])\n', (177, 237), True, 'import numpy as np\n'), ((248, 316), 'numpy.array', 'np.array', (['[0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0...
import numpy as np import cv2 def nms(bboxs, thresh): # get all parameters x1, y1, x2, y2, scores = [bboxs[:, i] for i in range(len(bboxs[0]))] # calculate the areas of all boxes areas = (x2 - x1 + 1) * (y2 - y1 + 1) # sort boxes according to their class score sorted_index = scores.argsort()[::...
[ "cv2.rectangle", "numpy.minimum", "cv2.imshow", "numpy.array", "numpy.zeros", "numpy.maximum", "cv2.waitKey" ]
[((1286, 1316), 'numpy.zeros', 'np.zeros', (['(850, 850)', 'np.uint8'], {}), '((850, 850), np.uint8)\n', (1294, 1316), True, 'import numpy as np\n'), ((1458, 1483), 'cv2.imshow', 'cv2.imshow', (['pic_name', 'pic'], {}), '(pic_name, pic)\n', (1468, 1483), False, 'import cv2\n'), ((1487, 1501), 'cv2.waitKey', 'cv2.waitKe...
# Copyright (c) Facebook, Inc. and its affiliates. import os ''' This forces the environment to use only 1 CPU when running. This can be helpful when launching multiple environments simultaneously. ''' os.environ['OPENBLAS_NUM_THREADS'] = '1' os.environ['MKL_NUM_THREADS'] = '1' # os.environ['CUDA_VISIBLE_DEVICES'] ...
[ "pybullet_data.getDataPath", "fairmotion.ops.math.projectionOnVector", "numpy.array", "copy.deepcopy", "numpy.linalg.norm", "sim_agent.get_root_state", "render_module.gl.glPushAttrib", "render_module.bullet_render.render_contacts", "render_module.initialize", "render_module.gl.glBlendFunc", "fai...
[((14150, 14165), 'render_module.initialize', 'rm.initialize', ([], {}), '()\n', (14163, 14165), True, 'import render_module as rm\n'), ((2242, 2317), 'bullet.bullet_client.BulletClient', 'bullet_client.BulletClient', ([], {'connection_mode': 'pb.DIRECT', 'options': '""" --opengl2"""'}), "(connection_mode=pb.DIRECT, op...
import logging import os import sys sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "../../../"))) sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "../../../../FedML"))) try: from fedml_core.distributed.client.client_manager import ClientManager from fedml_core.distributed.communicat...
[ "numpy.zeros", "os.getcwd", "logging.info", "time.time" ]
[((1855, 1887), 'numpy.zeros', 'np.zeros', (['(self.params_count, 1)'], {}), '((self.params_count, 1))\n', (1863, 1887), True, 'import numpy as np\n'), ((5430, 5503), 'logging.info', 'logging.info', (["('#######training########### round_id = %d' % self.round_idx)"], {}), "('#######training########### round_id = %d' % s...
# -*- coding: utf-8 -*- import pandas as pd import re import pickle from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer import numpy as np from dl_architecture import make_charvec, build_model from keras.callbacks import ModelCheckpoint from keras import backend as K from sklearn.preprocessi...
[ "bm25.BM25Transformer", "sklearn.feature_extraction.text.TfidfTransformer", "sklearn.metrics.f1_score", "pickle.dump", "keras.callbacks.ModelCheckpoint", "sklearn.feature_extraction.text.CountVectorizer", "numpy.array", "collections.defaultdict", "keras.backend.clear_session", "dl_architecture.bui...
[((663, 715), 're.sub', 're.sub', (['"""[\\\\w\\\\.-]+@[\\\\w\\\\.-]+"""', 'replace_token', 'text'], {}), "('[\\\\w\\\\.-]+@[\\\\w\\\\.-]+', replace_token, text)\n", (669, 715), False, 'import re\n'), ((856, 890), 're.sub', 're.sub', (['regex', 'replace_token', 'text'], {}), '(regex, replace_token, text)\n', (862, 890)...
from RNNs import QIFExpAddNoiseSyns import numpy as np import pickle import matplotlib.pyplot as plt from scipy.ndimage import gaussian_filter1d # STEP 0: Define simulation condition ##################################### # parse worker indices from script arguments idx_cond = 570 # STEP 1: Load pre-generated RNN par...
[ "numpy.mean", "RNNs.QIFExpAddNoiseSyns", "scipy.ndimage.gaussian_filter1d", "numpy.sqrt", "numpy.round", "matplotlib.pyplot.colorbar", "numpy.max", "numpy.sum", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.title", "matplotlib.pyplot.subplots", "matplotlib.pyplot.legend", "matplotlib....
[((1264, 1354), 'RNNs.QIFExpAddNoiseSyns', 'QIFExpAddNoiseSyns', (['C', 'eta', 'J'], {'Delta': 'Delta', 'alpha': 'alpha', 'D': 'D', 'tau_s': 'tau_s', 'tau_a': 'tau_a'}), '(C, eta, J, Delta=Delta, alpha=alpha, D=D, tau_s=tau_s,\n tau_a=tau_a)\n', (1282, 1354), False, 'from RNNs import QIFExpAddNoiseSyns\n'), ((1484, ...
import pandas as pd import numpy as np from sklearn.feature_selection import RFE from sklearn.linear_model import LogisticRegression import os import json def load_data(data_path): with open(data_path, "r") as fp: data = json.load(fp) #convert list -> np.array() inputs = np.array(data["features"])...
[ "pandas.crosstab", "sklearn.linear_model.LogisticRegression", "json.load", "numpy.array", "sklearn.feature_selection.RFE", "os.path.abspath" ]
[((294, 320), 'numpy.array', 'np.array', (["data['features']"], {}), "(data['features'])\n", (302, 320), True, 'import numpy as np\n'), ((335, 356), 'numpy.array', 'np.array', (["data['mms']"], {}), "(data['mms'])\n", (343, 356), True, 'import numpy as np\n'), ((470, 487), 'pandas.crosstab', 'pd.crosstab', (['x', 'y'],...
from . import TorchModel, NUM_GESTURES import torch from torch import nn import numpy as np class ConvNet(TorchModel): def define_model(self, dim_in): self.conv = nn.Conv1d(dim_in[0], self.conv_filters, kernel_size=self.conv_kernel_size, stride=self.conv_stride, padding=self...
[ "torch.nn.MaxPool1d", "numpy.product", "torch.nn.ReLU", "torch.nn.Sigmoid", "torch.nn.LeakyReLU", "torch.nn.Softmax", "numpy.floor", "torch.nn.BatchNorm1d", "torch.nn.Linear", "torch.nn.functional.cross_entropy", "torch.nn.Conv1d" ]
[((178, 308), 'torch.nn.Conv1d', 'nn.Conv1d', (['dim_in[0]', 'self.conv_filters'], {'kernel_size': 'self.conv_kernel_size', 'stride': 'self.conv_stride', 'padding': 'self.conv_padding'}), '(dim_in[0], self.conv_filters, kernel_size=self.conv_kernel_size,\n stride=self.conv_stride, padding=self.conv_padding)\n', (187...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created: March 2020 Python without class! @author: <NAME> (RRCC) """ import numpy as np import matplotlib.pyplot as plt import argparse def readFile(fName): """ Returns ------- nDumps : TYPE DESCRIPTION. nPars : T...
[ "argparse.ArgumentParser", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.legend", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "numpy.zeros", "matplotlib.pyplot.title", "matplotlib.pyplot.xlim", "matplotlib.pyplot.ylim", "numpy.arange", "matplotlib.pyplot.show" ]
[((1063, 1088), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1071, 1088), True, 'import numpy as np\n'), ((1098, 1127), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars - 1)'], {}), '((nDumps, nPars - 1))\n', (1106, 1127), True, 'import numpy as np\n'), ((1135, 1164), 'numpy.zeros', 'np.zer...
from __future__ import print_function import json import logging import sys import time from utils.chronograph import Chronograph import grpc import numpy as np from grpc._channel import _Rendezvous import taranis_pb2 import taranis_pb2_grpc DB_NAME = 'db3' INDEX_NAME = 'basic_index' DIMENSION = 128 # dimension N...
[ "logging.getLogger", "logging.basicConfig", "taranis_pb2_grpc.TaranisStub", "logging.StreamHandler", "utils.chronograph.Chronograph", "grpc.insecure_channel", "numpy.random.randint", "taranis_pb2.SearchRequestModel", "taranis_pb2.VectorsQueryModel" ]
[((541, 560), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (558, 560), False, 'import logging\n'), ((661, 701), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (682, 701), False, 'import logging\n'), ((5803, 5836), 'logging.basicConfig', 'logging...
# read delta G values from equilibrator_results.tsv infile = open('equilibrator_results.tsv', 'r') import numpy as np def read_dg(infile): dg_list = [] for line in infile: if not line.startswith("'"): # skip the header line pass else: line = line.strip("\n") line_list = line.split("\t") dg = lin...
[ "numpy.array" ]
[((408, 425), 'numpy.array', 'np.array', (['dg_list'], {}), '(dg_list)\n', (416, 425), True, 'import numpy as np\n')]
import torch import numpy as np import torch.nn as nn import torch.distributed as dist import torch.nn.functional as F from torch import Tensor from typing import Any from typing import Dict from typing import Tuple from typing import Optional from cftool.misc import update_dict from cftool.misc import shallow_copy_d...
[ "torch.nn.GELU", "torch.nn.init.constant_", "torch.nn.Sequential", "numpy.array", "torch.nn.BatchNorm1d", "torch.sum", "torch.nn.init.trunc_normal_", "torch.nn.functional.softmax", "numpy.arange", "numpy.linspace", "torch.cuda.amp.autocast", "numpy.concatenate", "torch.nn.Identity", "torch...
[((1525, 1537), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1533, 1537), True, 'import numpy as np\n'), ((1714, 1767), 'numpy.arange', 'np.arange', (['(epochs * num_step_per_epoch - warmup_iters)'], {}), '(epochs * num_step_per_epoch - warmup_iters)\n', (1723, 1767), True, 'import numpy as np\n'), ((1904, 1947)...
from __future__ import absolute_import # external modules from past.builtins import basestring import numpy as num # ANUGA modules import anuga.utilities.log as log from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \ netcdf_float from .asc2dem import asc2dem ...
[ "numpy.where", "numpy.fliplr", "anuga.file.netcdf.NetCDFFile", "os.path.splitext", "numpy.linspace", "anuga.utilities.log.critical" ]
[((1287, 1322), 'anuga.file.netcdf.NetCDFFile', 'NetCDFFile', (['filename', 'netcdf_mode_r'], {}), '(filename, netcdf_mode_r)\n', (1297, 1322), False, 'from anuga.file.netcdf import NetCDFFile\n'), ((2004, 2044), 'numpy.where', 'num.where', (['(Z == NODATA_value)', 'num.nan', 'Z'], {}), '(Z == NODATA_value, num.nan, Z)...
from htm_rl.modules.htm.pattern_memory import PatternMemory from htm.bindings.sdr import SDR import numpy as np from tqdm import tqdm EPS = 1e-12 def get_labels(pm: PatternMemory, data, input_size): labels = dict() input_pattern = SDR(input_size) for i, item in enumerate(data): input_pattern.spa...
[ "numpy.intersect1d", "numpy.union1d", "numpy.random.choice", "htm.bindings.sdr.SDR", "htm_rl.modules.htm.pattern_memory.PatternMemory", "numpy.setdiff1d", "numpy.arange", "numpy.random.shuffle" ]
[((243, 258), 'htm.bindings.sdr.SDR', 'SDR', (['input_size'], {}), '(input_size)\n', (246, 258), False, 'from htm.bindings.sdr import SDR\n'), ((491, 506), 'htm.bindings.sdr.SDR', 'SDR', (['input_size'], {}), '(input_size)\n', (494, 506), False, 'from htm.bindings.sdr import SDR\n'), ((2326, 2349), 'htm_rl.modules.htm....
#!/usr/bin/env python # -*- coding: utf-8 -*- # # ade: # Asynchronous Differential Evolution. # # Copyright (C) 2018-19 by <NAME>, # http://edsuom.com/ade # # See edsuom.com for API documentation as well as information about # Ed's background and other projects, software and otherwise. # # Licensed under the Apache Li...
[ "numpy.abs", "asynqueue.process.ProcessQueue", "twisted.internet.reactor.stop", "numpy.square", "numpy.exp", "yampex.plot.Plotter", "ade.de.DifferentialEvolution", "asynqueue.process.ProcessQueue.cores", "twisted.internet.reactor.run", "ade.population.Population", "twisted.internet.reactor.callW...
[((9904, 9934), 'twisted.internet.reactor.callWhenRunning', 'reactor.callWhenRunning', (['r.run'], {}), '(r.run)\n', (9927, 9934), False, 'from twisted.internet import reactor, defer\n'), ((9939, 9952), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (9950, 9952), False, 'from twisted.internet import r...
"""Calculations involving a pair of Cu atoms """ from typing import Union, Callable import numpy as np from ase import Atoms from ase.units import Ang try: from Morse import MorsePotential from util import map_func except ModuleNotFoundError: from .Morse import MorsePotential from .util import map_fun...
[ "ase.Atoms", "Morse.MorsePotential", "numpy.linspace", "util.map_func" ]
[((574, 590), 'Morse.MorsePotential', 'MorsePotential', ([], {}), '()\n', (588, 590), False, 'from Morse import MorsePotential\n'), ((599, 662), 'ase.Atoms', 'Atoms', (['"""2Cu"""'], {'positions': '[(0.0, 0.0, 0.0), (0.0, 0.0, d0 * Ang)]'}), "('2Cu', positions=[(0.0, 0.0, 0.0), (0.0, 0.0, d0 * Ang)])\n", (604, 662), Fa...
# -*- coding: utf-8 -*- """ Created on Fri May 5 16:20:14 2017 @author: <NAME> Program for WOS Cited References Analysis """ import pandas as pd import matplotlib.pyplot as plt import numpy as np from collections import Counter df = pd.read_pickle('concatenated.pkl') df = df.dropna(subset = ['PY','CR']) # Get rid...
[ "pandas.read_pickle", "matplotlib.pyplot.hist", "numpy.unique", "matplotlib.pyplot.xticks", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "collections.Counter", "numpy.array", "numpy.zeros", "matplotlib.pyplot.figure", "pandas.DataFrame", "numpy.arange" ]
[((238, 272), 'pandas.read_pickle', 'pd.read_pickle', (['"""concatenated.pkl"""'], {}), "('concatenated.pkl')\n", (252, 272), True, 'import pandas as pd\n'), ((413, 424), 'numpy.zeros', 'np.zeros', (['a'], {}), '(a)\n', (421, 424), True, 'import numpy as np\n'), ((1901, 1917), 'collections.Counter', 'Counter', (['journ...
import torch import torch.nn as nn import torch.nn.functional as F from getter import getter from CURE.CURE import CURELearner import numpy as np from matplotlib import pyplot as plt from pathlib import Path def lossplot(config: dict, save_path: str = None) -> None: """Plots the negative of the loss surface. One...
[ "matplotlib.pyplot.savefig", "torch.nn.CrossEntropyLoss", "CURE.CURE.CURELearner", "getter.getter", "pathlib.Path", "numpy.ones", "torch.nn.functional.normalize", "numpy.linspace", "matplotlib.pyplot.figure", "torch.autograd.grad", "matplotlib.pyplot.axes", "matplotlib.pyplot.pause", "torch....
[((500, 547), 'getter.getter', 'getter', (["config['dataset']", "config['model_name']"], {}), "(config['dataset'], config['model_name'])\n", (506, 547), False, 'from getter import getter\n'), ((1978, 1999), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1997, 1999), True, 'import torch.nn as nn\...
#!/usr/bin/python # encoding: utf-8 import torch import albumentations as A import torch.nn as nn from torch.autograd import Variable from torch.utils.data import Dataset import collections from PIL import Image, ImageFilter import matplotlib.pyplot as plt import math import random import numpy as np import cv2 import...
[ "numpy.uint8", "albumentations.MedianBlur", "albumentations.Blur", "torchvision.transforms.Grayscale", "torch.from_numpy", "numpy.array", "torchvision.transforms.ColorJitter", "tensorflow.load_op_library", "tensorflow.placeholder", "tensorflow.Session", "numpy.asarray", "data_gen.generate_rbox...
[((1965, 1998), 'numpy.asarray', 'np.asarray', (['img'], {'dtype': 'np.float32'}), '(img, dtype=np.float32)\n', (1975, 1998), True, 'import numpy as np\n'), ((2094, 2107), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (2102, 2107), True, 'import numpy as np\n'), ((2222, 2242), 'PIL.Image.fromarray', 'Image.froma...
import abc import numpy as np import xarray as xr from .registry import register class Regridder(object): """Generic regridder interface.""" __metaclass__ = abc.ABCMeta def __init__(self, input_grid, output_grid, method=None, **kwargs): self.input_grid = input_grid self.output_gri...
[ "xesmf.Regridder", "pyresample.geometry.SwathDefinition", "pyresample.kd_tree.get_neighbour_info", "numpy.zeros" ]
[((1591, 1667), 'xesmf.Regridder', 'xe.Regridder', (['self.input_grid', 'self.output_grid', 'self.method'], {}), '(self.input_grid, self.output_grid, self.method, **self._params)\n', (1603, 1667), True, 'import xesmf as xe\n'), ((2392, 2487), 'pyresample.geometry.SwathDefinition', 'geometry.SwathDefinition', ([], {'lon...
import os import numpy as np import tensorflow as tf from config import TRAINING_CONFIG from core import GameConfig as Game from core import Board class PolicyValueNetwork: def __init__(self, model_name=None): with tf.variable_scope("Dataset"): input_shape = Board().encoded_states().shape #...
[ "tensorflow.layers.flatten", "tensorflow.transpose", "tensorflow.contrib.layers.l2_regularizer", "numpy.log", "tensorflow.nn.softmax", "tensorflow.log", "os.path.exists", "tensorflow.Session", "tensorflow.placeholder", "tensorflow.layers.conv2d", "tensorflow.trainable_variables", "tensorflow.t...
[((2581, 2593), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2591, 2593), True, 'import tensorflow as tf\n'), ((2615, 2631), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (2629, 2631), True, 'import tensorflow as tf\n'), ((231, 259), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Data...
""" Atmosphere Spectral Response ============================ This class calculates the output flux of an astronomical object as a funtction of the atmosphere spectral response. """ import os import numpy as np import pandas as pd from scipy.interpolate import splev, splrep class Atmosphere_Spectral_Response: "...
[ "numpy.multiply", "numpy.asarray", "os.path.join", "scipy.interpolate.splrep", "scipy.interpolate.splev", "pandas.read_excel" ]
[((392, 477), 'os.path.join', 'os.path.join', (['"""Atmosphere_Spectral_Response"""', '"""atmosphere_spectral_response.xlsx"""'], {}), "('Atmosphere_Spectral_Response',\n 'atmosphere_spectral_response.xlsx')\n", (404, 477), False, 'import os\n'), ((837, 872), 'numpy.asarray', 'np.asarray', (['atm_wavelength_interval...
######################################################################## # # Date:Sept 2009 Authors: <NAME> # # <EMAIL> # # The Scripps Research Institute (TSRI) # Molecular Graphics Lab # La Jolla, CA 92037, USA # # Copyright: <NAME> and TSRI # #####################################################...
[ "numpy.array" ]
[((1112, 1129), 'numpy.array', 'numpy.array', (['mats'], {}), '(mats)\n', (1123, 1129), False, 'import numpy\n')]
from .road_list import * import numpy as np from litdrive.selfdriving.enums import ManeuverState def fitPolysToPoly(list_p1d_u, list_p1d_v, list_hdg, list_x, list_y): pspace=np.linspace(0.0, 1.0, 100) pnts_x=list() pnts_y=list() if(len(list_p1d_v)!=len(list_p1d_u) or len(list_p1d_v)!=len(list_hdg) or ...
[ "numpy.polyfit", "numpy.sin", "numpy.linspace", "numpy.cos", "numpy.concatenate", "numpy.poly1d" ]
[((180, 206), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', '(100)'], {}), '(0.0, 1.0, 100)\n', (191, 206), True, 'import numpy as np\n'), ((848, 870), 'numpy.concatenate', 'np.concatenate', (['pnts_x'], {}), '(pnts_x)\n', (862, 870), True, 'import numpy as np\n'), ((882, 904), 'numpy.concatenate', 'np.concatena...
import numpy as np from wholeslidedata.annotation.structures import Point from wholeslidedata.annotation.wholeslideannotation import WholeSlideAnnotation from wholeslidedata.image.wholeslideimage import WholeSlideImage from wholeslidedata.labels import Label def non_max_suppression_fast(boxes, overlapThresh): """...
[ "wholeslidedata.annotation.wholeslideannotation.WholeSlideAnnotation", "numpy.minimum", "numpy.where", "numpy.argsort", "numpy.array", "wholeslidedata.labels.Label", "numpy.maximum", "wholeslidedata.image.wholeslideimage.WholeSlideImage" ]
[((1044, 1058), 'numpy.argsort', 'np.argsort', (['y2'], {}), '(y2)\n', (1054, 1058), True, 'import numpy as np\n'), ((2459, 2509), 'numpy.array', 'np.array', (['[x - size, y - size, x + size, y + size]'], {}), '([x - size, y - size, x + size, y + size])\n', (2467, 2509), True, 'import numpy as np\n'), ((2712, 2755), 'w...
""" Functions to estimate observed ACA magnitudes """ import sys import traceback import logging import collections import scipy.stats import scipy.special import numpy as np import numba from astropy.table import Table, vstack from Chandra.Time import DateTime from cheta import fetch from Quaternion import Quat imp...
[ "logging.getLogger", "numpy.char.rstrip", "numpy.sqrt", "astropy.table.Table", "cxotime.CxoTime", "numpy.array", "numpy.nanmean", "sys.exc_info", "numpy.arctan2", "numpy.isfinite", "astropy.table.vstack", "cheta.fetch.Msidset", "numpy.arange", "Quaternion.Quat", "numpy.mean", "numpy.wh...
[((624, 661), 'logging.getLogger', 'logging.getLogger', (['"""agasc.supplement"""'], {}), "('agasc.supplement')\n", (641, 661), False, 'import logging\n'), ((1619, 1655), 'collections.defaultdict', 'collections.defaultdict', (['(lambda : -1)'], {}), '(lambda : -1)\n', (1642, 1655), False, 'import collections\n'), ((563...
"""Conversion tool from EDF, EDF+, BDF to FIF.""" # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # # License: BSD (3-clause) import calendar import datetime import os import re import numpy as np from ...utils import verbose, logger, warn from ..utils import _blk_read_lims from ..base import BaseRaw, _chec...
[ "numpy.uint8", "numpy.fromfile", "scipy.interpolate.interp1d", "numpy.argsort", "numpy.array", "numpy.arange", "datetime.datetime", "numpy.atleast_2d", "numpy.where", "numpy.max", "numpy.linspace", "numpy.concatenate", "numpy.min", "os.path.splitext", "re.findall", "numpy.intersect1d",...
[((22757, 22779), 're.findall', 're.findall', (['pat', 'annot'], {}), '(pat, annot)\n', (22767, 22779), False, 'import re\n'), ((22944, 22969), 're.findall', 're.findall', (['pat', 'annotmap'], {}), '(pat, annotmap)\n', (22954, 22969), False, 'import re\n'), ((23125, 23146), 'numpy.zeros', 'np.zeros', (['data_length'],...
from fastapi import FastAPI from typing import List from fastapi import FastAPI, UploadFile, File import numpy as np from starlette.requests import Request import io from PIL import Image import base64 import cv2 app = FastAPI() @app.post("/predict") async def analyse(image_file_read: bytes = File(...)): file = ba...
[ "fastapi.FastAPI", "base64.b64encode", "base64.b64decode", "cv2.imdecode", "numpy.frombuffer", "fastapi.File" ]
[((219, 228), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (226, 228), False, 'from fastapi import FastAPI, UploadFile, File\n'), ((295, 304), 'fastapi.File', 'File', (['...'], {}), '(...)\n', (299, 304), False, 'from fastapi import FastAPI, UploadFile, File\n'), ((318, 351), 'base64.b64encode', 'base64.b64encode', ...
# modules we'll need import numpy as np import os import glob import pandas as pd from subprocess import call from librosa import load, stft # config INPUT_DIR = "/mnt/d/datasets/Looking-to-Listen_small/all_wavs/" INPUT_DIR_VISUAL = "/mnt/d/datasets/Looking-to-Listen_small/all_vector/" OUTPUT_DIR = "/mnt/d/datasets/Lo...
[ "os.path.exists", "numpy.savez", "os.makedirs", "os.path.join", "numpy.max", "numpy.random.randint", "subprocess.call", "numpy.concatenate", "os.path.basename", "librosa.stft", "librosa.load" ]
[((774, 806), 'os.path.join', 'os.path.join', (['directory', '"""*.wav"""'], {}), "(directory, '*.wav')\n", (786, 806), False, 'import os\n'), ((886, 912), 'os.path.exists', 'os.path.exists', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (900, 912), False, 'import os\n'), ((922, 945), 'os.makedirs', 'os.makedirs', (['OUTPUT_...
from math import ceil import warnings from collections import namedtuple import numpy as np from .basis import conv_basis, delta_stim, boxcar_stim, make_nonlinear_raised_cosine __all__ = ['Design', 'Covariate'] class Design: covariates = {} bias = False def __init__(self, experiment): self.exp...
[ "numpy.histogram", "math.ceil", "numpy.ones", "numpy.isnan", "numpy.concatenate", "warnings.warn", "numpy.isinf", "numpy.arange" ]
[((6575, 6600), 'math.ceil', 'ceil', (['(duration / binwidth)'], {}), '(duration / binwidth)\n', (6579, 6600), False, 'from math import ceil\n'), ((3665, 3715), 'numpy.concatenate', 'np.concatenate', (['[trial[label] for trial in trials]'], {}), '([trial[label] for trial in trials])\n', (3679, 3715), True, 'import nump...
#!/usr/bin/env python3 # std from pathlib import Path import unittest # 3rd import numpy as np # ours from clusterking.util.testing import MyTestCase from clusterking.data.dwe import DataWithErrors class TestDataWithErrors(MyTestCase): def setUp(self): dpath = Path(__file__).parent / "data" / "test.sql...
[ "numpy.identity", "numpy.eye", "numpy.sqrt", "numpy.ones", "pathlib.Path", "numpy.count_nonzero", "numpy.zeros", "clusterking.data.dwe.DataWithErrors", "unittest.main" ]
[((4666, 4681), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4679, 4681), False, 'import unittest\n'), ((394, 415), 'clusterking.data.dwe.DataWithErrors', 'DataWithErrors', (['dpath'], {}), '(dpath)\n', (408, 415), False, 'from clusterking.data.dwe import DataWithErrors\n'), ((928, 944), 'numpy.zeros', 'np.zero...
import os import numpy as np import cv2 import matplotlib.pyplot as plt import matplotlib.animation as animation try: from urllib.request import urlopen from urllib.error import HTTPError except ImportError: from urllib2 import urlopen, HTTPError from load_inningdata import get_inning, get_p...
[ "os.path.exists", "cv2.vconcat", "xmlio.writer.writer", "urllib2.urlopen", "xmlio.parser.parse", "os.makedirs", "visualize.Pitch.batter_info", "visualize.Pitch.course", "xmlio.parser.parse_path", "os.path.join", "cv2.imshow", "load_games.get_games", "numpy.zeros", "visualize.Pitch.pitcher_...
[((2403, 2415), 'urllib2.urlopen', 'urlopen', (['url'], {}), '(url)\n', (2410, 2415), False, 'from urllib2 import urlopen, HTTPError\n'), ((2475, 2490), 'xmlio.parser.parse', 'parse', (['data_str'], {}), '(data_str)\n', (2480, 2490), False, 'from xmlio.parser import parse, parse_path\n'), ((2655, 2675), 'os.path.exists...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import pandas as pd import numpy as np import scipy import collections import itertools import json from bokeh.transform import linear_cmap, transform from bokeh.palettes import Set3, Viridis256 from bokeh.models import ( LinearColorMapper, ColumnDataSource, Ho...
[ "bokeh.transform.transform", "scipy.cluster.hierarchy.leaves_list", "bokeh.models.BasicTicker", "data.natural_earth", "bokeh.models.BoxSelectTool", "bokeh.plotting.figure", "numpy.reshape", "bokeh.models.LinearColorMapper", "numpy.linspace", "scipy.stats.zscore", "bokeh.models.ColumnDataSource",...
[((698, 724), 'numpy.linspace', 'np.linspace', (['start', 'end', 'n'], {}), '(start, end, n)\n', (709, 724), True, 'import matplotlib, numpy as np\n'), ((736, 807), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'matplotlib.colors.LinearSegmentedColormap.from_list', (['"""customcmap"""', 'cmap'], {}), "('custom...
import asyncio import cv2 import numpy as np from numpysocket import NumpySocket THREADS = 3 frames = [None] * THREADS async def send(sen, ack, i): global frames await sen.send_numpy(frames[i]) ack.receive_ack() async def main(): global frames # host_ip = '172.27.3.3' # host_ip = '172.27.3...
[ "numpy.array_split", "numpysocket.NumpySocket", "cv2.VideoCapture", "cv2.cvtColor" ]
[((362, 381), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (378, 381), False, 'import cv2\n'), ((443, 456), 'numpysocket.NumpySocket', 'NumpySocket', ([], {}), '()\n', (454, 456), False, 'from numpysocket import NumpySocket\n'), ((598, 611), 'numpysocket.NumpySocket', 'NumpySocket', ([], {}), '()\n',...
# pip install pycocotools opencv-python opencv-contrib-python # wget https://github.com/opencv/opencv_extra/raw/master/testdata/cv/ximgproc/model.yml.gz import os import copy import time import argparse import contextlib import multiprocessing import numpy as np import cv2 import cv2.ximgproc import matplotlib.patc...
[ "pycocotools.cocoeval.COCOeval", "numpy.array", "copy.deepcopy", "matplotlib.pyplot.imshow", "os.path.exists", "cv2.ximgproc.createStructuredEdgeDetection", "argparse.ArgumentParser", "numpy.asarray", "pycocotools.coco.COCO", "matplotlib.pyplot.close", "matplotlib.pyplot.axis", "cv2.ximgproc.s...
[((547, 559), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (557, 559), True, 'import matplotlib.pyplot as plt\n'), ((564, 579), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (574, 579), True, 'import matplotlib.pyplot as plt\n'), ((584, 599), 'matplotlib.pyplot.axis', 'plt.axis', ([...
# --------------------------------------------------------------------# # --------------------------------------------------------------------# # ---------- Made by <NAME> @ircam on 11/2015 # ---------- Analyse audio and return sound features # ---------- to use this don't forget to include these lines before your scrip...
[ "eaSDIF.Entity", "fileio.sdif.FSdifLoadFile.FSdifLoadFile", "six.moves.range", "conversions.lin2db", "eaSDIF.Frame", "numpy.array", "numpy.zeros", "pandas.DataFrame", "numpy.loadtxt", "eaSDIF.Vector" ]
[((4218, 4233), 'eaSDIF.Entity', 'eaSDIF.Entity', ([], {}), '()\n', (4231, 4233), False, 'import eaSDIF\n'), ((4414, 4429), 'eaSDIF.Vector', 'eaSDIF.Vector', ([], {}), '()\n', (4427, 4429), False, 'import eaSDIF\n'), ((4442, 4456), 'eaSDIF.Frame', 'eaSDIF.Frame', ([], {}), '()\n', (4454, 4456), False, 'import eaSDIF\n'...
#!/usr/bin/env python # encoding: utf-8 """ response """ import numpy as np import scipy as sp from scipy import signal, interpolate import pandas as pd import warnings from .utils import (get_proper_interval, double_gamma_with_d, get_time_to_peak_from_timecourse, ...
[ "scipy.signal.convolve", "numpy.sqrt", "scipy.interpolate.interp1d", "numpy.array", "numpy.sin", "numpy.arange", "pandas.MultiIndex.from_product", "numpy.diff", "numpy.max", "numpy.linspace", "numpy.vstack", "pandas.DataFrame", "warnings.warn", "numpy.eye", "numpy.ones", "numpy.cos", ...
[((921, 941), 'numpy.eye', 'np.eye', (['n_regressors'], {}), '(n_regressors)\n', (927, 941), True, 'import numpy as np\n'), ((954, 983), 'numpy.vstack', 'np.vstack', (['(basis, basis[-1])'], {}), '((basis, basis[-1]))\n', (963, 983), True, 'import numpy as np\n'), ((1007, 1077), 'numpy.linspace', 'np.linspace', (['inte...
import numpy as np import torch import torch.utils.data as data import torch.nn.functional as F import os import cv2 import math import random import json import csv import pickle import os.path as osp from glob import glob import raft3d.projective_ops as pops from . import frame_utils from .augmentation import RGB...
[ "cv2.imwrite", "torch.manual_seed", "numpy.ones", "torch.utils.data.get_worker_info", "os.path.join", "torch.from_numpy", "random.seed", "numpy.array", "numpy.random.seed", "csv.reader", "numpy.random.uniform", "numpy.concatenate", "numpy.pad", "cv2.imread", "torch.cat" ]
[((1925, 1982), 'numpy.pad', 'np.pad', (['disp1', '((KITTIEval.crop, 0), (0, 0))'], {'mode': '"""edge"""'}), "(disp1, ((KITTIEval.crop, 0), (0, 0)), mode='edge')\n", (1931, 1982), True, 'import numpy as np\n'), ((1996, 2053), 'numpy.pad', 'np.pad', (['disp2', '((KITTIEval.crop, 0), (0, 0))'], {'mode': '"""edge"""'}), "...
""" Test the maximum a posteriori estimates """ import time import numpy as np from .test_model import prepare_dla_model def test_DLA_MAP(): # test 1 dla_gp = prepare_dla_model(plate=5309, mjd=55929, fiber_id=362, z_qso=3.166) tic = time.time() max_dlas = 4 log_likelihoods_dla = dla_gp.log_mode...
[ "numpy.nanargmax", "numpy.abs", "numpy.array", "numpy.isnan", "time.time" ]
[((249, 260), 'time.time', 'time.time', ([], {}), '()\n', (258, 260), False, 'import time\n'), ((353, 364), 'time.time', 'time.time', ([], {}), '()\n', (362, 364), False, 'import time\n'), ((556, 761), 'numpy.array', 'np.array', (['[[22.28420156, np.nan, np.nan, np.nan], [20.63417494, 22.28420156, np.nan,\n np.nan],...
""" """ import datetime import os # import sys import logging import numpy as np import scipy as sp import scipy.optimize # noqa import tqdm import h5py import zcode.inout as zio import zcode.math as zmath from . import spectra, radiation # , utils from . import PATH_DATA, MASS_EXTR, FEDD_EXTR, RADS_EXTR from . co...
[ "numpy.product", "zcode.plot.colormap", "numpy.log10", "numpy.sqrt", "numpy.log", "zcode.math.stats_str", "numpy.argsort", "numpy.array", "logging.log", "numpy.count_nonzero", "numpy.isfinite", "zcode.inout.get_file_size", "os.path.exists", "numpy.mean", "numpy.isscalar", "scipy.interp...
[((384, 442), 'numpy.seterr', 'np.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""', 'over': '"""raise"""'}), "(divide='ignore', invalid='ignore', over='raise')\n", (393, 442), True, 'import numpy as np\n'), ((1018, 1047), 'numpy.sqrt', 'np.sqrt', (['(C3 / C1 / ALPHA_VISC)'], {}), '(C3 / C1 / ALPHA_VIS...
import numpy as np def zShift(seq, pos): """Return components of Z curve shift. zCurve[0] = (A+G)-(C+T) # purine/pyrimidine zCurve[1] = (A+C)-(G+T) # amino/keto zCurve[2] = (A+T)-(G+C) # weak/strong """ if seq[pos] == "A": return np.array([1, 1, 1]) if seq[pos] == "G": ret...
[ "numpy.array" ]
[((265, 284), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (273, 284), True, 'import numpy as np\n'), ((324, 345), 'numpy.array', 'np.array', (['[1, -1, -1]'], {}), '([1, -1, -1])\n', (332, 345), True, 'import numpy as np\n'), ((385, 406), 'numpy.array', 'np.array', (['[-1, 1, -1]'], {}), '([-1, 1, ...
# coding: utf-8 -*- ''' GFS.py contains utility functions for GFS ''' __all__ = ['get_akbk', 'get_pcoord', 'read_atcf'] import numpy as _np import pandas as _pd def get_akbk(): ''' Returns ak,bk for 64 level GFS model vcoord is obtained from global_fcst.fd/gfsio_module.f ak,bk ...
[ "numpy.array", "pandas.to_datetime", "numpy.float", "pandas.read_csv" ]
[((417, 1206), 'numpy.array', '_np.array', (['[1.0, 0.99467099, 0.98863202, 0.98180002, 0.97408301, 0.96538502, 0.955603,\n 0.94463098, 0.93235999, 0.91867799, 0.90347999, 0.88666302, 0.86813903,\n 0.84783, 0.82568502, 0.80167699, 0.77581102, 0.748133, 0.71872902, \n 0.68773103, 0.655316, 0.621705, 0.58715999,...
from itertools import combinations from sklearn.metrics.pairwise import cosine_similarity import numpy as np embeddings = {} with open("scripts/etm_w2v_embedding.txt", "r") as file: for line in file.readlines(): splitted = line.split() word = splitted[0] embeddings[word] = np.array([float(n...
[ "sklearn.metrics.pairwise.cosine_similarity", "numpy.average", "numpy.argmax", "numpy.append", "numpy.array" ]
[((914, 926), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (922, 926), True, 'import numpy as np\n'), ((1429, 1452), 'numpy.argmax', 'np.argmax', (['similarities'], {}), '(similarities)\n', (1438, 1452), True, 'import numpy as np\n'), ((1311, 1346), 'numpy.append', 'np.append', (['similarities', 'similarity'], {}...
from __future__ import print_function, absolute_import import os.path as osp import numpy as np from ..utils.data import Dataset from ..utils.osutils import mkdir_if_missing from ..utils.serialization import write_json, read_json from ..utils.data.dataset import _pluck class SynergyReID(Dataset): md5 = '05050b5d...
[ "zipfile.ZipFile", "os.path.join", "numpy.asarray", "os.path.splitext", "os.path.isfile", "os.path.isdir", "os.path.basename" ]
[((1017, 1043), 'os.path.join', 'osp.join', (['self.root', '"""raw"""'], {}), "(self.root, 'raw')\n", (1025, 1043), True, 'import os.path as osp\n'), ((1127, 1168), 'os.path.join', 'osp.join', (['raw_dir', '"""synergyreid_data.zip"""'], {}), "(raw_dir, 'synergyreid_data.zip')\n", (1135, 1168), True, 'import os.path as ...
from PIL import Image import numpy as np import os def main(): img = Image.open(os.path.join('..', 'img', 'paras_prf_pic.jpeg')) aray = np.array(img) r, g, b = np.split(aray, 3, axis = 2) r = r.reshape(-1) g = g.reshape(-1) b = b.reshape(-1) bitmap = list(map(lambda x: 0.299*x[0]+0.587*x[1]+0.11...
[ "numpy.array", "numpy.split", "os.path.join" ]
[((140, 153), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (148, 153), True, 'import numpy as np\n'), ((168, 193), 'numpy.split', 'np.split', (['aray', '(3)'], {'axis': '(2)'}), '(aray, 3, axis=2)\n', (176, 193), True, 'import numpy as np\n'), ((80, 127), 'os.path.join', 'os.path.join', (['""".."""', '"""img"""...
import subprocess import PIL from PIL import Image import numpy as np import os import shutil import re script_path = os.path.dirname(os.path.realpath(__file__)) temp_img_dir_path = os.path.join(script_path, 'temp_imgs') def arr_to_mp4(arr, output_path, framerate=30, resolution_str=None, temp_dir=temp_img_dir_path): ...
[ "PIL.Image.fromarray", "os.path.join", "re.match", "os.path.realpath", "numpy.random.randint", "os.mkdir", "shutil.rmtree" ]
[((182, 220), 'os.path.join', 'os.path.join', (['script_path', '"""temp_imgs"""'], {}), "(script_path, 'temp_imgs')\n", (194, 220), False, 'import os\n'), ((134, 160), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (150, 160), False, 'import os\n'), ((1134, 1157), 'shutil.rmtree', 'shutil.r...
import sys sys.path.append('../') import caffe2_paths import numpy as np import glob from itertools import product import pinn.preproc as preproc import pinn.data_reader as data_reader import matplotlib.pyplot as plt import pickle import os # ----------------- Preprocessing -------------------- vds = np.concatenate((n...
[ "pinn.data_reader.write_db", "numpy.abs", "itertools.product", "numpy.column_stack", "os.path.isfile", "numpy.array", "numpy.linspace", "numpy.sum", "numpy.concatenate", "pinn.preproc.dc_iv_preproc", "numpy.expand_dims", "numpy.load", "sys.path.append", "pinn.preproc.compute_dc_meta", "g...
[((11, 33), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (26, 33), False, 'import sys\n'), ((396, 422), 'numpy.linspace', 'np.linspace', (['(-0.1)', '(0.3)', '(41)'], {}), '(-0.1, 0.3, 41)\n', (407, 422), True, 'import numpy as np\n'), ((442, 468), 'numpy.linspace', 'np.linspace', (['(-0.1)',...
# Copyright 2022 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to...
[ "numpy.where", "numpy.exp", "numpy.array", "numpy.zeros", "numpy.spacing" ]
[((2253, 2292), 'numpy.array', 'np.array', (["[k['score'] for k in kpts_db]"], {}), "([k['score'] for k in kpts_db])\n", (2261, 2292), True, 'import numpy as np\n'), ((2370, 2408), 'numpy.array', 'np.array', (["[k['area'] for k in kpts_db]"], {}), "([k['area'] for k in kpts_db])\n", (2378, 2408), True, 'import numpy as...
# import the necessary packages import numpy as np import cv2 cap=cv2.VideoCapture(1) def order_points(pts): # initialize a list of coordinates that will be ordered # such that the first entry in the list is the top-left, # the second entry is the top-right, the third is the # bottom-right, and the fourth is the b...
[ "cv2.setMouseCallback", "numpy.sqrt", "cv2.getPerspectiveTransform", "numpy.diff", "numpy.argmax", "cv2.imshow", "numpy.array", "numpy.zeros", "cv2.warpPerspective", "cv2.destroyAllWindows", "cv2.VideoCapture", "cv2.circle", "numpy.argmin", "cv2.waitKey", "cv2.namedWindow" ]
[((66, 85), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(1)'], {}), '(1)\n', (82, 85), False, 'import cv2\n'), ((2964, 3006), 'numpy.array', 'np.array', (['[(0, 0), (0, 1), (1, 1), (1, 0)]'], {}), '([(0, 0), (0, 1), (1, 1), (1, 0)])\n', (2972, 3006), True, 'import numpy as np\n'), ((3787, 3810), 'cv2.destroyAllWindows',...
import numpy as np from deap import benchmarks from BayesOpt import BO from BayesOpt.Surrogate import RandomForest from BayesOpt.SearchSpace import ContinuousSpace, OrdinalSpace, NominalSpace from BayesOpt.base import Solution np.random.seed(42) def obj_func(x): x_r, x_i, x_d = np.array(x[:2]), x[2], x[3] i...
[ "BayesOpt.SearchSpace.ContinuousSpace", "BayesOpt.Surrogate.RandomForest", "BayesOpt.BO", "BayesOpt.SearchSpace.OrdinalSpace", "numpy.array", "numpy.sum", "BayesOpt.base.Solution", "numpy.random.seed", "BayesOpt.SearchSpace.NominalSpace" ]
[((230, 248), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (244, 248), True, 'import numpy as np\n'), ((745, 872), 'BayesOpt.base.Solution', 'Solution', (["[4.6827082694127835, 9.87885354178838, 5, 'A']"], {'var_name': "['r_0', 'r_1', 'i', 'd']", 'n_eval': '(1)', 'fitness': '(236.76575128)'}), "([4....
import numpy as num import scipy.sparse.linalg as alg import scipy.linalg as algnorm import scipy.sparse as smat import random # Graph operations - maybe split these out? def to_adiacency_row(neighbours, n): row = num.zeros(n) row[neighbours] = 1 return row def graph_to_matrix(graph): # Creates a sparse matrix...
[ "numpy.identity", "scipy.sparse.lil_matrix", "scipy.sparse.linalg.inv", "scipy.sparse.linalg.eigsh", "numpy.sum", "numpy.zeros", "scipy.linalg.norm", "scipy.sparse.linalg.norm", "random.random", "scipy.sparse.diags", "scipy.sparse.csr_matrix" ]
[((214, 226), 'numpy.zeros', 'num.zeros', (['n'], {}), '(n)\n', (223, 226), True, 'import numpy as num\n'), ((519, 569), 'scipy.sparse.csr_matrix', 'smat.csr_matrix', (['(data, (rows, cols))', '(n, n)', '"""d"""'], {}), "((data, (rows, cols)), (n, n), 'd')\n", (534, 569), True, 'import scipy.sparse as smat\n'), ((2878,...
import keras import pandas as pd import numpy as np from keras.models import Sequential from keras.layers import Dense from keras.utils import to_categorical import matplotlib.pyplot as plt import time start_time=time.time() location="dataforDl.csv" data=pd.read_csv(location) data_columns=data.columns xtrain = data[dat...
[ "pandas.read_csv", "numpy.argmax", "keras.models.Sequential", "keras.utils.to_categorical", "keras.layers.Dense", "time.time" ]
[((213, 224), 'time.time', 'time.time', ([], {}), '()\n', (222, 224), False, 'import time\n'), ((255, 276), 'pandas.read_csv', 'pd.read_csv', (['location'], {}), '(location)\n', (266, 276), True, 'import pandas as pd\n'), ((425, 447), 'pandas.read_csv', 'pd.read_csv', (['location1'], {}), '(location1)\n', (436, 447), T...
import subprocess import json import os import csv import numpy as np import pandas as pd import pysam from Bio import SeqIO from Bio.Seq import Seq from Bio.SeqRecord import SeqRecord def get_orf(input_genome, output_genome, orf): orf = int(orf) record = SeqIO.read(input_genome, 'fasta') record.seq = re...
[ "csv.DictWriter", "pandas.read_csv", "Bio.Seq.Seq", "pysam.AlignmentFile", "numpy.array", "numpy.arange", "os.remove", "os.path.exists", "subprocess.run", "Bio.SeqIO.read", "Bio.SeqIO.write", "os.mkdir", "numpy.random.seed", "pandas.DataFrame", "pysam.index", "numpy.ceil", "numpy.ran...
[((267, 300), 'Bio.SeqIO.read', 'SeqIO.read', (['input_genome', '"""fasta"""'], {}), "(input_genome, 'fasta')\n", (277, 300), False, 'from Bio import SeqIO\n'), ((339, 382), 'Bio.SeqIO.write', 'SeqIO.write', (['record', 'output_genome', '"""fasta"""'], {}), "(record, output_genome, 'fasta')\n", (350, 382), False, 'from...
import unittest import numpy as np import tensorflow as tf import tensorflow.keras as K from tensorflow.keras.layers import Dense, BatchNormalization, Dropout, Softmax from sklearn.metrics import accuracy_score from nncv.data_loader import * from nncv.loss import * class TestTFFunction(unittest.TestCase): _xyz ...
[ "sklearn.metrics.accuracy_score", "numpy.ones", "tensorflow.data.Dataset.from_tensor_slices", "tensorflow.keras.backend.get_session", "numpy.average", "tensorflow.Session", "tensorflow.keras.layers.Dropout", "numpy.argmax", "tensorflow.global_variables_initializer", "tensorflow.gradients", "tens...
[((322, 339), 'numpy.ones', 'np.ones', (['[100, 3]'], {}), '([100, 3])\n', (329, 339), True, 'import numpy as np\n'), ((684, 722), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '(self.nfeature,)'}), '(shape=(self.nfeature,))\n', (698, 722), True, 'import tensorflow as tf\n'), ((1319, 1357), 'tensorflow.ker...
#!/usr/bin/env python # -*- coding: utf-8 -*- """Definition of the NSDE algorithm""" import numpy as np try: from openmdao.utils.concurrent import concurrent_eval except ModuleNotFoundError: import warnings warnings.warn("OpenMDAO is not installed. Concurrent evaluation is not available.") from . import ...
[ "numpy.copy", "numpy.abs", "numpy.greater", "numpy.ceil", "numpy.random.default_rng", "numpy.ones", "numpy.asarray", "numpy.min", "numpy.any", "numpy.max", "numpy.empty_like", "numpy.empty", "numpy.concatenate", "numpy.linalg.norm", "warnings.warn", "numpy.isnan", "numpy.all", "ope...
[((221, 309), 'warnings.warn', 'warnings.warn', (['"""OpenMDAO is not installed. Concurrent evaluation is not available."""'], {}), "(\n 'OpenMDAO is not installed. Concurrent evaluation is not available.')\n", (234, 309), False, 'import warnings\n'), ((3826, 3853), 'numpy.random.default_rng', 'np.random.default_rng...
import sys import os import numpy as np import cv2 from PIL import Image from skimage.morphology import binary_dilation import time def result_fusion(data_list,label_list=None,save_path=None): len_ = len(os.listdir(data_list[0])) count = 0 for item in os.scandir(data_list[0]): img_list = [item.pat...
[ "skimage.morphology.binary_dilation", "PIL.Image.fromarray", "os.listdir", "PIL.Image.open", "os.scandir", "numpy.zeros_like", "os.path.join", "numpy.argmax", "time.time", "sys.stdout.write" ]
[((266, 290), 'os.scandir', 'os.scandir', (['data_list[0]'], {}), '(data_list[0])\n', (276, 290), False, 'import os\n'), ((1172, 1194), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1188, 1194), False, 'import sys\n'), ((1360, 1384), 'os.scandir', 'os.scandir', (['data_list[0]'], {}), '(data...
import numpy as np import matplotlib.pyplot as plt from .single_unit import PSTH def shiftappend(arr, shift, end=None, direction='left'): if isinstance(end, type(None)): end = arr[-1] if direction == 'left': return np.hstack((arr[arr > shift]-shift, arr[arr < shift]+end-shift)) elif d...
[ "numpy.hstack", "numpy.size", "numpy.zeros_like", "numpy.max", "matplotlib.pyplot.figure", "matplotlib.pyplot.bar", "matplotlib.pyplot.title", "matplotlib.pyplot.xlim", "matplotlib.pyplot.subplot", "matplotlib.pyplot.show" ]
[((246, 315), 'numpy.hstack', 'np.hstack', (['(arr[arr > shift] - shift, arr[arr < shift] + end - shift)'], {}), '((arr[arr > shift] - shift, arr[arr < shift] + end - shift))\n', (255, 315), True, 'import numpy as np\n'), ((1975, 2002), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 4)'}), '(figsize=(...
import os import subprocess import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torch.nn.functional import interpolate from loguru import logger from tqdm import tqdm import numpy as np import wandb from draw_concat import draw_concat from generate_noise import generate...
[ "torch.optim.lr_scheduler.MultiStepLR", "models.save_networks", "numpy.mean", "generate_noise.generate_spatial_noise", "minecraft.level_utils.clear_empty_world", "torch.nn.ReplicationPad3d", "minecraft.level_renderer.render_minecraft", "subprocess.call", "models.calc_gradient_penalty", "draw_conca...
[((1431, 1484), 'minecraft.level_utils.clear_empty_world', 'clear_empty_world', (['opt.output_dir', '"""Curr_Empty_World"""'], {}), "(opt.output_dir, 'Curr_Empty_World')\n", (1448, 1484), False, 'from minecraft.level_utils import one_hot_to_blockdata_level, save_level_to_world, clear_empty_world\n'), ((2866, 2970), 'to...
from PIL import Image import numpy as np from skimage import transform IMG_HEIGHT = 100 IMG_WIDTH = 100 def load(filename): np_image = Image.open(filename) np_image = np.array(np_image).astype('float32')/255 # (IMG_HEIGHT, IMG_WIDTH, 3)) np_image = transform.resize(np_image, (IMG_HEIGHT, IMG_WIDTH, 3...
[ "numpy.array", "numpy.expand_dims", "PIL.Image.open", "skimage.transform.resize" ]
[((142, 162), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (152, 162), False, 'from PIL import Image\n'), ((268, 322), 'skimage.transform.resize', 'transform.resize', (['np_image', '(IMG_HEIGHT, IMG_WIDTH, 3)'], {}), '(np_image, (IMG_HEIGHT, IMG_WIDTH, 3))\n', (284, 322), False, 'from skimage imp...
import copy import numpy as np from PIL import Image from torch.utils.data.dataset import Dataset class CleanLabelDataset(Dataset): """Clean-label dataset. Args: dataset (Dataset): The dataset to be wrapped. adv_dataset_path (str): The adversarially perturbed dataset path. transform ...
[ "PIL.Image.fromarray", "numpy.where", "numpy.nonzero", "copy.deepcopy", "numpy.load" ]
[((688, 710), 'copy.deepcopy', 'copy.deepcopy', (['dataset'], {}), '(dataset)\n', (701, 710), False, 'import copy\n'), ((2366, 2386), 'PIL.Image.fromarray', 'Image.fromarray', (['img'], {}), '(img)\n', (2381, 2386), False, 'from PIL import Image\n'), ((735, 760), 'numpy.load', 'np.load', (['adv_dataset_path'], {}), '(a...
# This file is mainly derived from https://github.com/openai/baselines. from collections import deque import os.path as osp import time import csv import json from gym.core import Wrapper import numpy as np from .vec_env import VecEnvWrapper class Monitor(Wrapper): EXT = "monitor.csv" f = None def __in...
[ "collections.deque", "json.dumps", "os.path.join", "gym.core.Wrapper.__init__", "numpy.zeros", "os.path.isdir", "time.time" ]
[((435, 466), 'gym.core.Wrapper.__init__', 'Wrapper.__init__', (['self'], {'env': 'env'}), '(self, env=env)\n', (451, 466), False, 'from gym.core import Wrapper\n'), ((490, 501), 'time.time', 'time.time', ([], {}), '()\n', (499, 501), False, 'import time\n'), ((4528, 4539), 'time.time', 'time.time', ([], {}), '()\n', (...
#!/usr/bin/env python import numpy as np p = np.pi def DH_to_T(DH): """! Computes the transformation matrices given the DH table of the serial link. @param DH: Denavit-Hartenberg parameters. @return T: transformation matrices of a joint with respect to the previous joint. """ # Get the number of rows, to kn...
[ "numpy.sin", "numpy.dot", "numpy.cos" ]
[((1758, 1783), 'numpy.dot', 'np.dot', (['T_rel_ini[i]', 'Tel'], {}), '(T_rel_ini[i], Tel)\n', (1764, 1783), True, 'import numpy as np\n'), ((2206, 2232), 'numpy.dot', 'np.dot', (['T[i - 1]', 'T_rel[i]'], {}), '(T[i - 1], T_rel[i])\n', (2212, 2232), True, 'import numpy as np\n'), ((431, 447), 'numpy.cos', 'np.cos', (['...
# -*- coding: utf-8 -*- # # This code is part of Qiskit. # # (C) Copyright IBM 2019, 2020. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # An...
[ "numpy.abs", "qiskit.ignis.verification.tomography.basis.default_gateset_basis", "qiskit.ignis.verification.tomography.GatesetTomographyFitter", "numpy.sqrt", "qiskit.ignis.verification.tomography.gateset_tomography_circuits", "numpy.nditer", "qiskit.compiler.assemble", "qiskit.extensions.SGate", "n...
[((6227, 6242), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6240, 6242), False, 'import unittest\n'), ((1256, 1289), 'qiskit.Aer.get_backend', 'Aer.get_backend', (['"""qasm_simulator"""'], {}), "('qasm_simulator')\n", (1271, 1289), False, 'from qiskit import Aer\n'), ((1309, 1365), 'qiskit.ignis.verification.t...
from scipy.optimize import fsolve import numpy as np from household_dist import HOUSEHOLD_DIST def compute_household_infection_prob(prevalence, household_dist, SAR=0.3741): """ computes the probability that a household is infected given population level prevalence, household size distribution and househol...
[ "scipy.optimize.fsolve", "numpy.sum" ]
[((2099, 2177), 'scipy.optimize.fsolve', 'fsolve', (['match_prevalence', '(0.005)'], {'args': '(target_prevalence, household_dist, SAR)'}), '(match_prevalence, 0.005, args=(target_prevalence, household_dist, SAR))\n', (2105, 2177), False, 'from scipy.optimize import fsolve\n'), ((564, 586), 'numpy.sum', 'np.sum', (['ho...
import torch import torch.nn as nn # from torch.nn import init import functools # from torch.autograd import Variable import numpy as np import pdb ############################################################################### # Functions ##############################################################################...
[ "torch.nn.Sigmoid", "torch.nn.ReLU", "torch.nn.parallel.data_parallel", "numpy.ceil", "torch.nn.Tanh", "torch.nn.LeakyReLU", "torch.nn.Dropout", "torch.nn.Sequential", "torch.nn.ReflectionPad2d", "torch.nn.Conv2d", "torch.cuda.is_available", "functools.partial", "torch.nn.ReplicationPad2d", ...
[((739, 785), 'functools.partial', 'functools.partial', (['nn.BatchNorm2d'], {'affine': '(True)'}), '(nn.BatchNorm2d, affine=True)\n', (756, 785), False, 'import functools\n'), ((1527, 1552), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1550, 1552), False, 'import torch\n'), ((4480, 4505), '...
import cv2 import numpy as np import torch from matplotlib import pyplot as plt from dataset import mydataset_PCA from torch.utils.data import DataLoader from tqdm import tqdm import pickle import sklearn.decomposition as dc import argparse parser = argparse.ArgumentParser(description='Process some integers.') parser....
[ "numpy.mean", "argparse.ArgumentParser", "sklearn.decomposition.PCA", "tqdm.tqdm", "numpy.argsort", "numpy.array", "numpy.sum", "numpy.concatenate", "torch.utils.data.DataLoader", "dataset.mydataset_PCA" ]
[((251, 312), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (274, 312), False, 'import argparse\n'), ((688, 788), 'dataset.mydataset_PCA', 'mydataset_PCA', ([], {'image_dir': 'image_dir', 'text_path': 'text_path',...
import ipywidgets as widgets import ipywidgets from traitlets import Unicode import traitlets from traittypes import Array import logging import numpy as np from .serialize import array_cube_png_serialization, array_serialization from .transferfunction import * import warnings logger = logging.getLogger("ipyvolume") ...
[ "logging.getLogger", "traitlets.Instance", "traitlets.List", "ipywidgets.FloatSlider", "numpy.nanmin", "ipywidgets.HBox", "traittypes.Array", "ipywidgets.register", "numpy.nanmax", "warnings.warn", "ipyvolume.pylab.figure", "traitlets.Unicode", "traitlets.CInt", "ipywidgets.jslink", "tra...
[((289, 319), 'logging.getLogger', 'logging.getLogger', (['"""ipyvolume"""'], {}), "('ipyvolume')\n", (306, 319), False, 'import logging\n'), ((352, 389), 'ipywidgets.register', 'widgets.register', (['"""ipyvolume.Scatter"""'], {}), "('ipyvolume.Scatter')\n", (368, 389), True, 'import ipywidgets as widgets\n'), ((1748,...
from abc import ABC, abstractmethod import logging from typing import Any from ROAR.utilities_module.module import Module from ROAR.utilities_module.vehicle_models import Vehicle, VehicleControl from collections import deque import numpy as np class ROARManiaPlanner(Module): def __init__(self, agent, **kwargs): ...
[ "logging.getLogger", "numpy.sign" ]
[((406, 433), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (423, 433), False, 'import logging\n'), ((3298, 3318), 'numpy.sign', 'np.sign', (['norm_offset'], {}), '(norm_offset)\n', (3305, 3318), True, 'import numpy as np\n')]
from collections import namedtuple import numpy as np import pandas as pd import rdkit.Chem as Chem from tqdm import tqdm from neural_fingerprint import NFPRegressor from neural_fingerprint.chemutils import rf_evaluation from neural_fingerprint.models.ecfp import ECFP max_val = 1000 train_idx = 800 def read_data(m...
[ "sklearn.gaussian_process.GaussianProcessRegressor", "neural_fingerprint.chemutils.rf_evaluation", "numpy.ones", "matplotlib.pyplot.pcolor", "matplotlib.pyplot.colorbar", "rdkit.Chem.MolFromSmiles", "tqdm.tqdm", "neural_fingerprint.NFPRegressor", "sklearn.manifold.TSNE", "numpy.linspace", "numpy...
[((354, 400), 'pandas.read_table', 'pd.read_table', (['"""./data/train.txt"""'], {'header': 'None'}), "('./data/train.txt', header=None)\n", (367, 400), True, 'import pandas as pd\n'), ((414, 464), 'pandas.read_table', 'pd.read_table', (['"""./data/train.logP-SA"""'], {'header': 'None'}), "('./data/train.logP-SA', head...
#!/usr/bin/env python # manual """ This script allows you to manually control the simulator or Duckiebot using the keyboard arrows. """ import os os.sys.path.append("../../gym-duckietown") import cv2 import sys import argparse import pyglet from pyglet.window import key import numpy as np import gym import gym_duckie...
[ "PIL.Image.fromarray", "pyglet.window.key.KeyStateHandler", "pyglet.clock.schedule_interval", "pyglet.app.run", "NaiveLaneDetection.NaiveLaneDetection", "os.sys.path.append", "cv2.imshow", "numpy.array", "cv2.addWeighted", "cv2.cvtColor", "sys.exit", "cv2.waitKey", "cv2.namedWindow" ]
[((147, 189), 'os.sys.path.append', 'os.sys.path.append', (['"""../../gym-duckietown"""'], {}), "('../../gym-duckietown')\n", (165, 189), False, 'import os\n'), ((1290, 1346), 'numpy.array', 'np.array', (['[[(0, 200), (640, 200), (640, 480), (0, 480)]]'], {}), '([[(0, 200), (640, 200), (640, 480), (0, 480)]])\n', (1298...
import math import os import time import numpy as np from torch.utils.tensorboard import SummaryWriter import utils.loss as loss import utils.tensorboard as utb def get_probs(length, exp): probs = (np.arange(1, length + 1) / 100) ** exp last_x = int(0.9 * length) probs[last_x:] = probs[last_x] retur...
[ "numpy.random.normal", "torch.utils.tensorboard.SummaryWriter", "numpy.tile", "math.ceil", "utils.loss.PSNR", "numpy.log", "numpy.max", "numpy.argsort", "numpy.sum", "numpy.linspace", "utils.tensorboard.load_event_accumulator", "numpy.zeros", "numpy.exp", "numpy.transpose", "time.time", ...
[((1385, 1412), 'numpy.linspace', 'np.linspace', (['(0.01)', '(0.3)', '(200)'], {}), '(0.01, 0.3, 200)\n', (1396, 1412), True, 'import numpy as np\n'), ((733, 766), 'utils.tensorboard.load_loglikelihood', 'utb.load_loglikelihood', (['event_acc'], {}), '(event_acc)\n', (755, 766), True, 'import utils.tensorboard as utb\...
#%% Setup from matplotlib import rc rc("font", **{"family": "serif", "serif": ["Computer Modern Roman"], "size": 20}) rc("text", usetex=True) import os import numpy as np import mpmath as mp import matplotlib.pyplot as plt from scipy.special import lambertw from scipy import optimize # path = os.path.expanduser( # ...
[ "matplotlib.pyplot.xticks", "matplotlib.pyplot.ylabel", "scipy.special.lambertw", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "numpy.log", "matplotlib.pyplot.fill_between", "numpy.linspace", "matplotlib.pyplot.figure", "matplotlib.pyplot.yticks", "matplotlib.rc", "scipy.optimize.root...
[((37, 122), 'matplotlib.rc', 'rc', (['"""font"""'], {}), "('font', **{'family': 'serif', 'serif': ['Computer Modern Roman'], 'size':\n 20})\n", (39, 122), False, 'from matplotlib import rc\n'), ((119, 142), 'matplotlib.rc', 'rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (121, 142), False, ...
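The plotting script relies on scipy.special.lambertw. The defining property worth keeping in mind is that W(z) solves w * exp(w) = z, which a two-line check confirms (principal branch shown; lambertw returns complex values, hence the .real).

import numpy as np
from scipy.special import lambertw

z = 1.0
w = lambertw(z).real                 # ~0.5671, the omega constant
print(np.isclose(w * np.exp(w), z))  # True: W(z) inverts w*e^w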
#!/usr/bin/env python3 import cudasift import cv2 import numpy as np from profiling import TaggedTimer def main(): sift = cudasift.PyCudaSift(dev_num=0) timr = TaggedTimer() filename = "../data/CY_279b46b9_1575825158217_1575825184058.jpg" # filename = "/home/jfinken/projects/here/sp/jfinken/faiss_gp...
[ "numpy.asarray", "cudasift.i_align_up", "cudasift.PyCudaSift", "profiling.TaggedTimer", "cv2.imread" ]
[((129, 159), 'cudasift.PyCudaSift', 'cudasift.PyCudaSift', ([], {'dev_num': '(0)'}), '(dev_num=0)\n', (148, 159), False, 'import cudasift\n'), ((171, 184), 'profiling.TaggedTimer', 'TaggedTimer', ([], {}), '()\n', (182, 184), False, 'from profiling import TaggedTimer\n'), ((349, 391), 'cv2.imread', 'cv2.imread', (['fi...
#!/usr/bin/env python """ Perform the Mann-Whitney U test, the Kolmogorov-Smirnov test, and the Student's t-test for the following ensembles: - GPU double precision (reference & control) - CPU double precision - GPU single precision - GPU double precision with additional explicit diffusion Make sure to compile the cp...
[ "numpy.mean", "pickle.dump", "numpy.sqrt", "kolmogorov_smirnov.ks", "numpy.max", "mannwhitneyu.mwu", "numpy.zeros", "numpy.quantile", "numpy.min", "xarray.open_dataset", "numpy.arange" ]
[((2243, 2267), 'xarray.open_dataset', 'xr.open_dataset', (['fname_r'], {}), '(fname_r)\n', (2258, 2267), True, 'import xarray as xr\n'), ((2292, 2316), 'xarray.open_dataset', 'xr.open_dataset', (['fname_c'], {}), '(fname_c)\n', (2307, 2316), True, 'import xarray as xr\n'), ((2343, 2369), 'xarray.open_dataset', 'xr.ope...
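The ensemble-test script names its statistics through project-local modules (mannwhitneyu.mwu, kolmogorov_smirnov.ks). A sketch with the scipy.stats equivalents; these are standard-library analogues, not the repository's own implementations, and the normal samples are placeholders for ensemble output.

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
ref = rng.normal(0.0, 1.0, 100)   # stand-in reference ensemble values
per = rng.normal(0.1, 1.0, 100)   # stand-in perturbed ensemble values

print(stats.mannwhitneyu(ref, per, alternative='two-sided'))
print(stats.ks_2samp(ref, per))
print(stats.ttest_ind(ref, per))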
from matplotlib.gridspec import GridSpec import matplotlib.pyplot as plt import numpy as np import matplotlib import math from ._default_matplotlib_figure_dimensions import _default_matplotlib_figure_dimensions def _calculate_nrows(nplots, ncols): return math.ceil(nplots / ncols) def _initialize_plot_with_dimens...
[ "math.ceil", "numpy.ones", "numpy.any", "numpy.array", "matplotlib.pyplot.figure", "matplotlib.gridspec.GridSpec" ]
[((261, 286), 'math.ceil', 'math.ceil', (['(nplots / ncols)'], {}), '(nplots / ncols)\n', (270, 286), False, 'import math\n'), ((980, 1014), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'fig_dimensions'}), '(figsize=fig_dimensions)\n', (990, 1014), True, 'import matplotlib.pyplot as plt\n'), ((1783, 1906)...
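_calculate_nrows is plain ceiling division for sizing a plot grid. A short sketch of how that number feeds matplotlib's GridSpec; the figsize arithmetic here is an assumption in place of the module's private dimension helper.

import math
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec

nplots, ncols = 7, 3
nrows = math.ceil(nplots / ncols)   # 3 rows to hold 7 plots

fig = plt.figure(figsize=(3 * ncols, 3 * nrows))
gs = GridSpec(nrows, ncols, figure=fig)
axes = [fig.add_subplot(gs[i]) for i in range(nplots)]  # row-major fill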
#!/usr/bin/env python3 """ corrections.py: Script to apply corrections to the images. """ import os from argparse import ArgumentParser from datetime import date, datetime from typing import Optional, Sequence import numpy as np from astropy.io import fits from dresscode.utils import load_config def main(argv: Op...
[ "numpy.abs", "numpy.full_like", "astropy.io.fits.PrimaryHDU", "argparse.ArgumentParser", "dresscode.utils.load_config", "numpy.where", "numpy.std", "numpy.nanmedian", "os.path.isfile", "numpy.sum", "datetime.date", "os.path.basename", "astropy.io.fits.open", "numpy.log1p" ]
[((371, 387), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (385, 387), False, 'from argparse import ArgumentParser\n'), ((542, 566), 'dresscode.utils.load_config', 'load_config', (['args.config'], {}), '(args.config)\n', (553, 566), False, 'from dresscode.utils import load_config\n'), ((2419, 2438), '...
import numpy as np import os from scipy.optimize import least_squares, minimize from scipy.special import fresnel def autophase(S): '''Optimize phase of complex data by maximizing the sum of imaginary over sum of real .. math:: \phi = \\arctan \left( \\frac{\sum_i^N \Im(s_i) }{ \sum_i^N \Re(s_i) } \\...
[ "numpy.sqrt", "numpy.linalg.pinv", "numpy.polyfit", "numpy.log", "numpy.roots", "numpy.array", "numpy.linalg.norm", "numpy.sin", "numpy.imag", "scipy.optimize.least_squares", "numpy.max", "numpy.exp", "numpy.real", "numpy.dot", "scipy.special.fresnel", "numpy.abs", "numpy.eye", "nu...
[((2633, 2666), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (2645, 2666), False, 'import os\n'), ((2687, 2723), 'numpy.loadtxt', 'np.loadtxt', (['full_path'], {'delimiter': '""","""'}), "(full_path, delimiter=',')\n", (2697, 2723), True, 'import numpy as np\n'), ((2995, 3...
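The autophase docstring states the phase directly: phi = arctan(sum Im / sum Re). A sketch of that correction using arctan2 for quadrant safety; whether the rotation is applied as exp(-i*phi) or exp(+i*phi) is an assumption here, since the truncated source does not show it.

import numpy as np

def autophase(S):
    phi = np.arctan2(np.imag(S).sum(), np.real(S).sum())
    return S * np.exp(-1j * phi)   # rotate the bulk phase onto the real axis

S = np.exp(1j * 0.7) * (np.random.rand(64) + 0.1)   # signal with a 0.7 rad offset
print(np.angle(autophase(S).sum()))            # ~0 after correction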
import os import cv2 import numpy as np from PIL import Image from IPython.display import Video from IPython.display import display as ds DESTINATION_FOLDER = "results" def check_folder(folder): if not os.path.exists(folder): os.makedirs(folder) def display(images_array, save=False): for im in ...
[ "cv2.rectangle", "os.path.exists", "PIL.Image.fromarray", "cv2.imwrite", "os.makedirs", "IPython.display.Video", "cv2.putText", "cv2.imdecode", "cv2.cvtColor", "numpy.fromstring" ]
[((888, 918), 'numpy.fromstring', 'np.fromstring', (['image', 'np.uint8'], {}), '(image, np.uint8)\n', (901, 918), True, 'import numpy as np\n'), ((934, 971), 'cv2.imdecode', 'cv2.imdecode', (['nparr', 'cv2.IMREAD_COLOR'], {}), '(nparr, cv2.IMREAD_COLOR)\n', (946, 971), False, 'import cv2\n'), ((1482, 1523), 'cv2.cvtCo...
import os import numpy as np import random import numbers import skimage from skimage import io, color import torch # read uint8 image from path def imread_uint8(imgpath, mode='RGB'): ''' mode: 'RGB', 'gray', 'Y', 'L'. 'Y' and 'L' mean the Y channel of YCbCr. ''' if mode == 'RGB': img = io....
[ "os.path.exists", "os.makedirs", "skimage.color.rgb2ycbcr", "numpy.fliplr", "skimage.img_as_float32", "skimage.io.imread", "numpy.rot90", "skimage.img_as_ubyte", "random.randint" ]
[((1733, 1758), 'random.randint', 'random.randint', (['(0)', '(h - ph)'], {}), '(0, h - ph)\n', (1747, 1758), False, 'import random\n'), ((1771, 1796), 'random.randint', 'random.randint', (['(0)', '(w - pw)'], {}), '(0, w - pw)\n', (1785, 1796), False, 'import random\n'), ((1988, 2015), 'skimage.img_as_float32', 'skima...
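imread_uint8's 'Y' mode takes the luma channel of YCbCr. A sketch of that step with skimage; note rgb2ycbcr returns floats with Y on the 16-235 video range, so a round-and-clip is needed before casting back to uint8. The file name is a placeholder.

import numpy as np
from skimage import io, color

img = io.imread('example.png')    # assumed RGB uint8 input
ycbcr = color.rgb2ycbcr(img)       # float64, Y in [16, 235]
y = np.uint8(np.clip(np.round(ycbcr[..., 0]), 0, 255))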
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Routines related to the canonical Chandra ACA dark current model. The model is based on smoothed twice-broken power-law fits of dark current histograms from Jan-2007 through Aug-2017. This analysis was done entirely with dark current maps scaled to -1...
[ "Chandra.Time.DateTime", "numpy.convolve", "numpy.random.poisson", "numpy.searchsorted", "numpy.flatnonzero", "numpy.log", "numpy.exp", "numpy.sum", "numpy.array", "warnings.warn", "mica.archive.aca_dark.get_dark_cal_image", "numpy.arange", "numpy.random.shuffle" ]
[((1034, 1087), 'numpy.arange', 'np.arange', (['(-2.5 * sigma)', '(2.5 * sigma)', 'dx'], {'dtype': 'float'}), '(-2.5 * sigma, 2.5 * sigma, dx, dtype=float)\n', (1043, 1087), True, 'import numpy as np\n'), ((1093, 1125), 'numpy.exp', 'np.exp', (['(-0.5 * (xg / sigma) ** 2)'], {}), '(-0.5 * (xg / sigma) ** 2)\n', (1099, ...
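The extract shows this model building a Gaussian kernel (xg over ±2.5 sigma, yg = exp(-0.5*(x/sigma)**2)), presumably for the np.convolve smoothing also listed among the APIs. A sketch of that pattern; the kernel normalization, mode='same', and the Poisson stand-in data are assumptions.

import numpy as np

sigma, dx = 0.25, 0.01
xg = np.arange(-2.5 * sigma, 2.5 * sigma, dx, dtype=float)
yg = np.exp(-0.5 * (xg / sigma) ** 2)
yg /= yg.sum()    # normalize so smoothing preserves the histogram area

hist = np.random.poisson(5.0, 500).astype(float)  # stand-in dark-current histogram
smoothed = np.convolve(hist, yg, mode='same')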
# <NAME> 2014-2020 # mlxtend Machine Learning Library Extensions # Author: <NAME> <<EMAIL>> # # License: BSD 3 clause import random import numpy as np import pytest from sklearn import exceptions from sklearn.base import clone from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import Logist...
[ "mlxtend.data.iris_data", "sklearn.model_selection.GridSearchCV", "numpy.abs", "numpy.testing.assert_equal", "sklearn.base.clone", "sklearn.neighbors.KNeighborsClassifier", "sklearn.ensemble.RandomForestClassifier", "random.seed", "sklearn.linear_model.LogisticRegression", "mlxtend.classifier.Ense...
[((631, 642), 'mlxtend.data.iris_data', 'iris_data', ([], {}), '()\n', (640, 642), False, 'from mlxtend.data import iris_data\n'), ((699, 718), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (713, 718), True, 'import numpy as np\n'), ((730, 787), 'sklearn.linear_model.LogisticRegression', 'LogisticR...
from altair.vegalite.v4 import schema from altair.vegalite.v4.schema.channels import Tooltip import pandas as pd import altair as alt import numpy as np from queries import Pomodoro THEME = 'magma' # TO DO: Add docstings where needed def get_current_date(): """ Gets the current date to perform default ...
[ "altair.selection_single", "queries.Pomodoro", "numpy.select", "altair.Chart", "altair.Axis", "pandas.merge", "altair.Scale", "altair.themes.register", "altair.Y", "altair.X", "altair.themes.enable", "altair.Tooltip", "altair.Column", "altair.hconcat", "altair.Size", "altair.Color", ...
[((9347, 9388), 'altair.themes.register', 'alt.themes.register', (['"""my_theme"""', 'my_theme'], {}), "('my_theme', my_theme)\n", (9366, 9388), True, 'import altair as alt\n'), ((9389, 9418), 'altair.themes.enable', 'alt.themes.enable', (['"""my_theme"""'], {}), "('my_theme')\n", (9406, 9418), True, 'import altair as ...
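alt.themes.register followed by alt.themes.enable is Altair's documented hook for a global theme, matching the two calls in the extract. A minimal sketch; this my_theme body is invented for illustration and is not the dashboard's actual configuration (which apparently selects the 'magma' scheme).

import altair as alt

def my_theme():
    # An Altair theme is just a function returning a Vega-Lite config dict.
    return {'config': {'view': {'continuousWidth': 400, 'continuousHeight': 300},
                       'range': {'heatmap': {'scheme': 'magma'}}}}

alt.themes.register('my_theme', my_theme)
alt.themes.enable('my_theme')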
import numpy as np import os import sklearn.metrics from scipy.optimize import curve_fit def slice_lat(ds): return ds.sel(lat=slice(-25, 25)) def ensure_dir(file_path): """Check if a directory exists and create it if needed""" if not os.path.exists(file_path): os.makedirs(file_path) def days_p...
[ "scipy.optimize.curve_fit", "os.path.exists", "os.makedirs", "numpy.exp", "numpy.array", "numpy.full" ]
[((250, 275), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (264, 275), False, 'import os\n'), ((285, 307), 'os.makedirs', 'os.makedirs', (['file_path'], {}), '(file_path)\n', (296, 307), False, 'import os\n'), ((1020, 1061), 'numpy.exp', 'np.exp', (['(-(x - x0) ** 2 / (2 * sigma ** 2))'], {...
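The module fits the Gaussian np.exp(-(x - x0)**2 / (2*sigma**2)) with scipy's curve_fit. A self-contained sketch of that fit; the amplitude parameter, noise level, and initial guess are assumptions.

import numpy as np
from scipy.optimize import curve_fit

def gaussian(x, a, x0, sigma):
    return a * np.exp(-(x - x0) ** 2 / (2 * sigma ** 2))

x = np.linspace(-5, 5, 200)
y = gaussian(x, 2.0, 0.5, 1.2) + 0.05 * np.random.randn(x.size)
popt, pcov = curve_fit(gaussian, x, y, p0=[1.0, 0.0, 1.0])  # fitted [a, x0, sigma]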
""" Library Features: Name: lib_dryes_downloader_geo Author(s): <NAME> (<EMAIL>), <NAME> (<EMAIL>) Date: '20210929' Version: '1.0.0' """ ################################################################################# # Library import os import logging from osgeo import gdal, gdalconst imp...
[ "osgeo.gdal.Open", "os.path.exists", "logging.getLogger", "rasterio.crs.CRS", "osgeo.gdal.GetDriverByName", "lib_dryes_downloader_hsaf_generic.create_darray_2d", "osgeo.gdal.ReprojectImage", "numpy.abs", "numpy.flipud", "rasterio.open", "numpy.max", "numpy.isnan", "numpy.min", "numpy.meshg...
[((1101, 1147), 'osgeo.gdal.Open', 'gdal.Open', (['file_name_in', 'gdalconst.GA_ReadOnly'], {}), '(file_name_in, gdalconst.GA_ReadOnly)\n', (1110, 1147), False, 'from osgeo import gdal, gdalconst\n'), ((1422, 1535), 'osgeo.gdal.ReprojectImage', 'gdal.ReprojectImage', (['dset_tiff_in', 'dset_tiff_out', 'dset_proj_in', '...
import numpy as np import cv2 def ransac_align_points( pA, pB, threshold, diagonal_constraint=0.75, default=np.eye(4)[:3], ): """ """ # sensible requirement of 51 or more spots to compute ransac affine if len(pA) <= 50 or len(pB) <= 50: if default is not None: print("Insuffici...
[ "numpy.eye", "cv2.estimateAffine3D", "numpy.diag" ]
[((554, 627), 'cv2.estimateAffine3D', 'cv2.estimateAffine3D', (['pA', 'pB'], {'ransacThreshold': 'threshold', 'confidence': '(0.999)'}), '(pA, pB, ransacThreshold=threshold, confidence=0.999)\n', (574, 627), False, 'import cv2\n'), ((114, 123), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (120, 123), True, 'import nu...
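cv2.estimateAffine3D carries the RANSAC work in ransac_align_points; the 51-point minimum and the np.eye(4)[:3] fallback are the wrapper's own policy around it. A sketch of the bare OpenCV call on invented point sets.

import numpy as np
import cv2

pA = np.random.rand(60, 3)                # assumed source spots, shape (N, 3)
pB = pA + np.array([0.1, -0.2, 0.05])  # matching targets, translated

retval, affine, inliers = cv2.estimateAffine3D(
    pA, pB, ransacThreshold=2.5, confidence=0.999)
# affine is the 3x4 [R|t] matrix; inliers flags the points RANSAC kept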
# Credit - https://github.com/balajisrinivas/Detect-Face-and-Blur-OpenCV # For blurface from asyncio import TimeoutError, sleep from calendar import timegm from datetime import datetime, timedelta from json import JSONDecodeError, dumps from platform import system from random import choice from statistics import mean,...
[ "urllib.parse.urlencode", "src.utils.funcs.dateToZodiac", "cv2.dnn.readNetFromCaffe", "src.utils.funcs.errorEmbed", "platform.system", "asyncio.sleep", "src.utils.funcs.monthNameToNumber", "src.utils.funcs.leapYear", "src.utils.funcs.printError", "cv2.imread", "time.gmtime", "src.utils.funcs.v...
[((2643, 2666), 'discord.ext.tasks.loop', 'tasks.loop', ([], {'seconds': '(2.0)'}), '(seconds=2.0)\n', (2653, 2666), False, 'from discord.ext import commands, tasks\n'), ((3899, 3948), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(5)', 'commands.BucketType.user'], {}), '(1, 5, commands.BucketType.use...
import argparse import multiprocessing import os import pickle import subprocess import sys from random import randint from time import sleep import georasters as gr import numpy as np from osgeo import gdal, osr def save_img(data, geotransform, proj, outPath, noDataValue=np.nan, split=False): # Start the gdal d...
[ "os.path.exists", "pickle.dump", "argparse.ArgumentParser", "osgeo.gdal.Warp", "osgeo.osr.SpatialReference", "os.path.join", "pickle.load", "multiprocessing.cpu_count", "numpy.array", "os.path.dirname", "osgeo.gdal.Info", "multiprocessing.Pool", "os.path.basename", "sys.exit", "osgeo.gda...
[((2477, 2516), 'osgeo.gdal.Info', 'gdal.Info', (['raster_path'], {'options': '"""-json"""'}), "(raster_path, options='-json')\n", (2486, 2516), False, 'from osgeo import gdal, osr\n'), ((3684, 3718), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {'wkt': 'dest_prj'}), '(wkt=dest_prj)\n', (3704, 3718), Fals...
# copytrue (c) 2020 PaddlePaddle Authors. All Rights Reserve. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicab...
[ "cv2.rectangle", "numpy.dstack", "PIL.Image.open", "xml.etree.ElementTree.parse", "pycocotools.mask.decode", "numpy.asarray", "cv2.LUT", "numpy.array", "pycocotools.mask.merge", "numpy.nonzero", "cv2.getTextSize", "numpy.load", "cv2.imread" ]
[((1435, 1452), 'cv2.imread', 'cv2.imread', (['image'], {}), '(image)\n', (1445, 1452), False, 'import cv2\n'), ((1503, 1517), 'xml.etree.ElementTree.parse', 'ET.parse', (['anno'], {}), '(anno)\n', (1511, 1517), True, 'import xml.etree.ElementTree as ET\n'), ((2771, 2788), 'cv2.imread', 'cv2.imread', (['image'], {}), '...
""" @author <NAME> """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(PROJECT_ROOT) import numpy as np import tensorflow as tf # TF 1.x c...
[ "tensorflow.shape", "tensorflow.self_adjoint_eig", "tensorflow.transpose", "tensorflow.reduce_sum", "tensorflow.nn.moments", "numpy.log", "tensorflow.multiply", "tensorflow.reduce_mean", "tensorflow.cast", "sys.path.append", "tensorflow.log", "tensorflow.square", "tensorflow.device", "os.p...
[((233, 262), 'sys.path.append', 'sys.path.append', (['PROJECT_ROOT'], {}), '(PROJECT_ROOT)\n', (248, 262), False, 'import sys\n'), ((199, 224), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (214, 224), False, 'import os\n'), ((2690, 2701), 'tensorflow.shape', 'tf.shape', (['x'], {}), '(x)\n...
import os import numpy as np from pwtools.common import is_seq, file_write from .testenv import testdir def test_is_seq(): fn = os.path.join(testdir, 'is_seq_test_file') file_write(fn, 'lala') fd = open(fn , 'r') for xx in ([1,2,3], (1,2,3), np.array([1,2,3])): print(type(xx)) assert is...
[ "pwtools.common.is_seq", "numpy.array", "os.path.join", "pwtools.common.file_write" ]
[((133, 174), 'os.path.join', 'os.path.join', (['testdir', '"""is_seq_test_file"""'], {}), "(testdir, 'is_seq_test_file')\n", (145, 174), False, 'import os\n'), ((179, 201), 'pwtools.common.file_write', 'file_write', (['fn', '"""lala"""'], {}), "(fn, 'lala')\n", (189, 201), False, 'from pwtools.common import is_seq, fi...
# Save to HDF because cPickle fails with very large arrays # https://github.com/numpy/numpy/issues/2396 import h5py import numpy as np import tempfile import unittest def dict_to_hdf(fname, d): """ Save a dict-of-dict datastructure where values are numpy arrays to a .hdf5 file """ with h5py.File(fn...
[ "numpy.random.rand", "h5py.File", "tempfile.NamedTemporaryFile", "unittest.main", "numpy.random.randn" ]
[((2172, 2187), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2185, 2187), False, 'import unittest\n'), ((308, 329), 'h5py.File', 'h5py.File', (['fname', '"""w"""'], {}), "(fname, 'w')\n", (317, 329), False, 'import h5py\n'), ((753, 774), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (7...
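The test exists because HDF5 handles very large arrays where cPickle fails (numpy issue #2396, per the header comment). A sketch of the dict-of-dict round trip being exercised: one HDF5 group per outer key, one dataset per inner key. Names and shapes are illustrative.

import h5py
import numpy as np

def dict_to_hdf(fname, d):
    with h5py.File(fname, 'w') as f:
        for group, arrays in d.items():
            g = f.create_group(group)
            for name, arr in arrays.items():
                g.create_dataset(name, data=arr)

def hdf_to_dict(fname):
    with h5py.File(fname, 'r') as f:
        return {g: {k: f[g][k][()] for k in f[g]} for g in f}

d = {'a': {'x': np.random.rand(4)}, 'b': {'y': np.random.randn(3)}}
dict_to_hdf('test.hdf5', d)
print(hdf_to_dict('test.hdf5')['a']['x'])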
import argparse from datetime import datetime as dt from lightgbm import LGBMRegressor import numpy as np import pandas as pd from sklearn.metrics import r2_score from sklearn.model_selection import KFold import yaml # from models import lgbm as my_lgbm from cv import r2_cv from preprocessing import load_x, load_y fr...
[ "pandas.read_feather", "preprocessing.load_x", "argparse.ArgumentParser", "yaml.dump", "lightgbm.LGBMRegressor", "preprocessing.load_y", "numpy.exp", "yaml.safe_load", "datetime.datetime.now", "sklearn.metrics.r2_score" ]
[((543, 568), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (566, 568), False, 'import argparse\n'), ((972, 1001), 'preprocessing.load_x', 'load_x', (['features', 'dropped_ids'], {}), '(features, dropped_ids)\n', (978, 1001), False, 'from preprocessing import load_x, load_y\n'), ((1062, 1111),...
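The training script scores an LGBMRegressor with r2 across KFold splits. A compact sketch of that cross-validation loop; the synthetic data and hyperparameters are placeholders for the repo's YAML-driven config.

import numpy as np
from lightgbm import LGBMRegressor
from sklearn.metrics import r2_score
from sklearn.model_selection import KFold

X = np.random.rand(500, 10)
y = 3 * X[:, 0] + 0.1 * np.random.randn(500)

scores = []
for tr, va in KFold(n_splits=5, shuffle=True, random_state=0).split(X):
    model = LGBMRegressor(n_estimators=100)
    model.fit(X[tr], y[tr])
    scores.append(r2_score(y[va], model.predict(X[va])))
print(np.mean(scores))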
# <NAME> 2014-2020 # mlxtend Machine Learning Library Extensions # Author: <NAME> <<EMAIL>> # # License: BSD 3 clause from mlxtend.utils import assert_raises from mlxtend.utils import check_Xy, format_kwarg_dictionaries import numpy as np import sys import os y = np.array([1, 2, 3, 4]) X = np.array([[1., 2.], [3., 4....
[ "numpy.array", "mlxtend.utils.format_kwarg_dictionaries", "mlxtend.utils.assert_raises", "mlxtend.utils.check_Xy" ]
[((266, 288), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (274, 288), True, 'import numpy as np\n'), ((293, 351), 'numpy.array', 'np.array', (['[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]]'], {}), '([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]])\n', (301, 351), True, 'import numpy as n...
from __future__ import absolute_import from __future__ import print_function import numpy as np import re from scipy import linalg import scipy.ndimage as ndi from six.moves import range import os import sys import threading import copy import inspect import types from keras import backend as K from keras.utils.gener...
[ "six.moves.range", "keras.backend.image_dim_ordering", "threading.Lock", "numpy.random.randint", "numpy.random.seed" ]
[((3432, 3454), 'keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (3452, 3454), True, 'from keras import backend as K\n'), ((4362, 4378), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (4376, 4378), False, 'import threading\n'), ((7642, 7674), 'numpy.random.seed', 'np.random.seed', ([...
import pytest import numpy as np from sklearn.ensemble import RandomForestClassifier from ..sequential import sequential import pkg_resources PATH = pkg_resources.resource_filename(__name__, 'test_data/') def test_sequential(): "Test sequential feature selection" # load data X = np.load(PATH+'featur...
[ "numpy.load", "sklearn.ensemble.RandomForestClassifier", "pkg_resources.resource_filename" ]
[((151, 206), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""test_data/"""'], {}), "(__name__, 'test_data/')\n", (182, 206), False, 'import pkg_resources\n'), ((300, 337), 'numpy.load', 'np.load', (["(PATH + 'features_largeN.npy')"], {}), "(PATH + 'features_largeN.npy')\n", (307...
import numpy as np from keras.models import Model from keras.layers import Input, Conv2D, DepthwiseConv2D # model inputs = Input(shape=(4, 4, 3)) x = DepthwiseConv2D((3, 3), strides=( 1, 1), depth_multiplier=1, padding='same')(inputs) model = Model(inputs, x) model.load_weights('model.h5') model.summary() ...
[ "numpy.allclose", "keras.layers.DepthwiseConv2D", "keras.layers.Input", "keras.models.Model", "numpy.load" ]
[((124, 146), 'keras.layers.Input', 'Input', ([], {'shape': '(4, 4, 3)'}), '(shape=(4, 4, 3))\n', (129, 146), False, 'from keras.layers import Input, Conv2D, DepthwiseConv2D\n'), ((248, 264), 'keras.models.Model', 'Model', (['inputs', 'x'], {}), '(inputs, x)\n', (253, 264), False, 'from keras.models import Model\n'), (...
from pymesh.TestCase import TestCase from pymesh import distance_to_mesh, BVH from pymesh.meshutils import generate_box_mesh import numpy as np class DistanceToMeshTest(TestCase): def test_boundary_pts_cgal(self): mesh = generate_box_mesh( np.array([0, 0, 0]), np.array([1, 1, 1]))...
[ "numpy.array", "numpy.zeros", "pymesh.distance_to_mesh" ]
[((336, 380), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]]'], {}), '([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]])\n', (344, 380), True, 'import numpy as np\n'), ((453, 488), 'pymesh.distance_to_mesh', 'distance_to_mesh', (['mesh', 'pts', '"""cgal"""'], {}), "(mesh, pts, 'cgal')\n", (469, 488), False, 'from p...