Columns and value-length ranges reported by the preview:

  code          string   lengths 22 to 1.05M
  apis          list     lengths 1 to 3.31k
  extract_api   string   lengths 75 to 3.25M
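Each row below pairs a Python source file (code, flattened onto one line and usually truncated with "..."), the list of fully-qualified APIs it calls (apis), and one record per extracted call (extract_api). The records print as Python literals, so a row decodes with ast.literal_eval. A minimal sketch of walking one row, assuming it is held as a dict keyed by the three column names (an assumption about this preview, not a documented loader):

import ast

def iter_api_calls(row):
    # Yield (api_name, span, import_stmt) for each call extracted from one row.
    for record in ast.literal_eval(row["extract_api"]):
        span = record[0]          # (start, end) character offsets of the call
        api_name = record[1]      # fully-qualified name, e.g. 'bs4.BeautifulSoup'
        import_stmt = record[-1]  # the import statement that binds the callee
        yield api_name, span, import_stmt

On the first row below, this yields 'common._common_test' and 'bs4.BeautifulSoup' together with their spans and import statements.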
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' from typing import List from bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' ...
[ "bs4.BeautifulSoup", "common._common_test" ]
[((2240, 2269), 'common._common_test', '_common_test', (['get_game_genres'], {}), '(get_game_genres)\n', (2252, 2269), False, 'from common import _common_test\n'), ((368, 408), 'bs4.BeautifulSoup', 'BeautifulSoup', (['rs.content', '"""html.parser"""'], {}), "(rs.content, 'html.parser')\n", (381, 408), False, 'from bs4 ...
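Field by field, the first record above appears to follow a fixed layout. The annotations below are inferred from this and later rows in the preview, not from a published spec:

((2240, 2269),                         # span of the whole call in the source text
 'common._common_test',                # fully-qualified API being called
 '_common_test',                       # callee exactly as written in the code (2240 + its length = 2252)
 (['get_game_genres'], {}),            # reprs of the positional and keyword arguments
 '(get_game_genres)\n',                # argument text as it appears in the source
 (2252, 2269),                         # span of the parenthesized argument list
 False,                                # seemingly True when the binding import is aliased with `as`
 'from common import _common_test\n')  # the import statement that introduces the callee

Consistent with the last two fields, records for np.argmax, pd.read_csv and the like later in the preview carry True alongside aliased imports such as 'import numpy as np'.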
#!/usr/bin/env python """ Unit tests for Regional Intersection Graph -- NetworkX - test_nxgraph_create - test_nxgraph_sweepctor - test_nxgraph_mdsweepctor - test_nxgraph_sweepctor_graph - test_nxgraph_sweepctor_random """ from io import StringIO from typing import List, Tuple from unittest import TestCase from pprin...
[ "slig.datastructs.rigraph.RIGraph", "slig.datastructs.region.Region" ]
[((888, 908), 'slig.datastructs.rigraph.RIGraph', 'RIGraph', ([], {'dimension': '(1)'}), '(dimension=1)\n', (895, 908), False, 'from slig.datastructs.rigraph import RIGraph\n'), ((1103, 1131), 'slig.datastructs.rigraph.RIGraph', 'RIGraph', ([], {'dimension': 'dimension'}), '(dimension=dimension)\n', (1110, 1131), False...
# -*- coding: utf-8 -*- # Written by <NAME> (<EMAIL>) import os import pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default impor...
[ "wbia_orientation.train._make_loss", "pprint.pformat", "wbia_orientation.dataset.custom_transforms.ToTensor", "wbia_orientation.config.default.update_config", "torch.load", "wbia_orientation.train.parse_args", "wbia_orientation.train._make_model", "wbia_orientation.dataset.custom_transforms.Resize", ...
[((1435, 1491), 'wbia_orientation.dataset.animal.AnimalDataset', 'AnimalDataset', (['cfg', 'cfg.DATASET.TEST_SET', 'test_transform'], {}), '(cfg, cfg.DATASET.TEST_SET, test_transform)\n', (1448, 1491), False, 'from wbia_orientation.dataset.animal import AnimalDataset\n'), ((1770, 1782), 'wbia_orientation.train.parse_ar...
# Generated by Django 3.0.9 on 2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ("resources", "0083_auto_20200804_1634"), ("users", "0059_auto_20200706_1659"), ] operations = [ migr...
[ "django.db.models.Q" ]
[((591, 618), 'django.db.models.Q', 'models.Q', ([], {'status__in': '[0, 2]'}), '(status__in=[0, 2])\n', (599, 618), False, 'from django.db import migrations, models\n'), ((958, 994), 'django.db.models.Q', 'models.Q', ([], {'partner__status__in': '[0, 2]'}), '(partner__status__in=[0, 2])\n', (966, 994), False, 'from dj...
import discord from discord.ext import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool from functools import reduce import random import json import utils.embed as embed from utils.colors impor...
[ "pymongo.MongoClient", "os.path.abspath", "utils.utils.str2bool", "discord.ext.commands.command", "json.load", "discord.ext.commands.RoleConverter", "random.randint", "discord.ext.commands.has_permissions", "discord.ext.commands.Cog.listener", "discord.PermissionOverwrite", "logging.getLogger" ]
[((2683, 2706), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2704, 2706), False, 'from discord.ext import commands, tasks\n'), ((4513, 4536), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (4534, 4536), False, 'from discord.ext import commands, tasks\...
from sqlalchemy import Column, String, Boolean, ForeignKey, Integer, Float from ..db import Base class Attribute(Base): __tablename__ = "attribute" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference...
[ "sqlalchemy.String", "sqlalchemy.ForeignKey", "sqlalchemy.Column" ]
[((163, 250), 'sqlalchemy.Column', 'Column', (['Integer'], {'autoincrement': '(True)', 'primary_key': '(True)', 'unique': '(True)', 'nullable': '(False)'}), '(Integer, autoincrement=True, primary_key=True, unique=True, nullable\n =False)\n', (169, 250), False, 'from sqlalchemy import Column, String, Boolean, Foreign...
# -*- coding: utf-8 -*- """ SciHub client """ import logging import os import random import urllib import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: """ Client for accessing SciHub """ DEFAULT_HEADERS = { ...
[ "urllib.parse.urljoin", "requests.Session", "logging.NullHandler", "bs4.BeautifulSoup", "os.path.join", "logging.getLogger" ]
[((157, 184), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (174, 184), False, 'import logging\n'), ((200, 221), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (219, 221), False, 'import logging\n'), ((604, 622), 'requests.Session', 'requests.Session', ([], {}), '()\n', ...
from BusArrivalItem import BusArrivalItem from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id, ...
[ "BusArrivalItem.BusArrivalItem", "api.call" ]
[((235, 308), 'api.call', 'call', (['"""busarrivalservice"""', "{'stationId': station_id, 'routeId': route_id}"], {}), "('busarrivalservice', {'stationId': station_id, 'routeId': route_id})\n", (239, 308), False, 'from api import call\n'), ((482, 510), 'BusArrivalItem.BusArrivalItem', 'BusArrivalItem', (['list_element'...
""" Author: <NAME> GitHub: wafflescore """ from minisom import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as plt import itertools from skimage import measure from skimage.segmentation import random_walker from skimage import filters from scipy.spatial import distance from collections impor...
[ "random.sample", "matplotlib.pyplot.cm.rainbow", "acse_9_irp_wafflescore.MiscHelpers.ext_eval_entropy", "numpy.shape", "numpy.argpartition", "matplotlib.pyplot.figure", "skimage.measure.label", "numpy.arange", "numpy.unique", "numpy.full", "matplotlib.pyplot.colorbar", "random.seed", "numpy....
[((468, 582), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s | %(levelname)s : %(message)s"""', 'level': 'logging.INFO', 'stream': 'sys.stdout'}), "(format='%(asctime)s | %(levelname)s : %(message)s',\n level=logging.INFO, stream=sys.stdout)\n", (487, 582), False, 'import logging\n'), ...
import json from urllib.request import urlopen from urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None in case of failure def fetch_index(repo_url): try: with urlopen(urljo...
[ "urllib.parse.urljoin", "urllib.request.urlopen" ]
[((920, 942), 'urllib.request.urlopen', 'urlopen', (['EXTERNAL_LIST'], {}), '(EXTERNAL_LIST)\n', (927, 942), False, 'from urllib.request import urlopen\n'), ((315, 346), 'urllib.parse.urljoin', 'urljoin', (['repo_url', '"""index.json"""'], {}), "(repo_url, 'index.json')\n", (322, 346), False, 'from urllib.parse import ...
from flask import Blueprint,request from app import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint("ipscan", __name__, url_prefix='/ipscan') # Given a first-level domain, scan all IPs under that domain @ipscan_blueprint.route('/scan') def scan_ip(): ...
[ "celery_app.utils.utils.get_current_time", "flask.Blueprint", "app.pa_domain.find_one", "flask.request.args.get", "app.pa_ip.find" ]
[((189, 240), 'flask.Blueprint', 'Blueprint', (['"""ipscan"""', '__name__'], {'url_prefix': '"""/ipscan"""'}), "('ipscan', __name__, url_prefix='/ipscan')\n", (198, 240), False, 'from flask import Blueprint, request\n'), ((333, 359), 'flask.request.args.get', 'request.args.get', (['"""domain"""'], {}), "('domain')\n", ...
from notebook.utils import url_path_join as ujoin from notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests from git import Repo, GitCommandError from subprocess import check_output import subprocess repo = None htable = [] config = { "GIT_USER": "alpaca", "GIT_PARENT_DIR": os.path....
[ "os.path.expanduser" ]
[((312, 362), 'os.path.expanduser', 'os.path.expanduser', (['"""~/Desktop/jupyter_versioning"""'], {}), "('~/Desktop/jupyter_versioning')\n", (330, 362), False, 'import os, json, git, urllib, requests\n')]
from pymongo import MongoClient
from settings import MONGO_URL

client = MongoClient(MONGO_URL)
db = client.rolz_database
[ "pymongo.MongoClient" ]
[((76, 98), 'pymongo.MongoClient', 'MongoClient', (['MONGO_URL'], {}), '(MONGO_URL)\n', (87, 98), False, 'from pymongo import MongoClient\n')]
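A caveat the complete row above makes visible: the offsets index the original file text, newlines included, not the flattened one-line preview. A quick check under that assumption (the path is hypothetical):

# Span (76, 98) comes from the record above; the path is made up for illustration.
code = open("row_code.py").read()
assert code[76:98] == "MongoClient(MONGO_URL)"

Slicing the flattened preview string gives shifted results, so restore the real newlines before using the spans.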
""" Samples of the various charts. Run this script to generate the reference samples. """ import os from svg.charts.plot import Plot from svg.charts import bar from svg.charts import time_series from svg.charts import pie from svg.charts import schedule from svg.charts import line def sample_Plot(): g = Plot( ...
[ "svg.charts.plot.Plot", "svg.charts.pie.Pie", "svg.charts.bar.VerticalBar", "svg.charts.bar.HorizontalBar", "os.path.dirname", "collections.namedtuple", "svg.charts.line.Line", "svg.charts.time_series.Plot", "os.path.join" ]
[((313, 461), 'svg.charts.plot.Plot', 'Plot', (["{'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels':\n True, 'stagger_y_labels': True, 'show_x_guidelines': True}"], {}), "({'min_x_value': 0, 'min_y_value': 0, 'area_fill': True,\n 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_...
import data_reader
import time
import tensorflow as tf

def worker(num):
    time.sleep(0.5)
    print(num)
    return num

if __name__=='__main__':
    data = list(range(100))
    bsize = 10
    reader = data_reader.data_reader(data, worker, bsize)
    for i in range(10):
        a = reader.get_next_batch()
        print(a)
[ "data_reader.data_reader", "time.sleep" ]
[((75, 90), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (85, 90), False, 'import time\n'), ((190, 234), 'data_reader.data_reader', 'data_reader.data_reader', (['data', 'worker', 'bsize'], {}), '(data, worker, bsize)\n', (213, 234), False, 'import data_reader\n')]
import mimetypes from django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from ...
[ "azure.storage.blob.blockblobservice.BlockBlobService", "azure.storage.blob.models.ContentSettings", "django.core.files.base.ContentFile", "azure.storage.blob.baseblobservice.BaseBlobService", "mimetypes.guess_type" ]
[((1728, 1753), 'django.core.files.base.ContentFile', 'ContentFile', (['blob.content'], {}), '(blob.content)\n', (1739, 1753), False, 'from django.core.files.base import ContentFile\n'), ((1076, 1128), 'azure.storage.blob.baseblobservice.BaseBlobService', 'BaseBlobService', (['self.account_name', 'self.account_key'], {...
#-------------------------------------------------------------- # By <NAME> # Painted Harmony Group, Inc # June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): ...
[ "SentimentAnalyzer.SentimentAnalyzer" ]
[((371, 399), 'SentimentAnalyzer.SentimentAnalyzer', 'analyzer.SentimentAnalyzer', ([], {}), '()\n', (397, 399), True, 'import SentimentAnalyzer as analyzer\n')]
import numpy as np class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','...
[ "numpy.argmax" ]
[((488, 516), 'numpy.argmax', 'np.argmax', (['class_aux'], {'axis': '(1)'}), '(class_aux, axis=1)\n', (497, 516), True, 'import numpy as np\n')]
import unittest import requests from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): se...
[ "helpers.fake_http_server.FakeServer", "requests.get" ]
[((2488, 2505), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2500, 2505), False, 'import requests\n'), ((206, 218), 'helpers.fake_http_server.FakeServer', 'FakeServer', ([], {}), '()\n', (216, 218), False, 'from helpers.fake_http_server import FakeServer\n')]
#!/usr/bin/env python import textgrid import sys if len(sys.argv) != 2: print("textgrid-to-audacity.py [filename]") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark != '': if not started: start = i.minTime ...
[ "textgrid.TextGrid.fromFile" ]
[((138, 177), 'textgrid.TextGrid.fromFile', 'textgrid.TextGrid.fromFile', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (164, 177), False, 'import textgrid\n')]
import contextlib import os import logging from django.test.runner import DiscoverRunner from django.conf import settings from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): """ See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#...
[ "os.path.join", "logging.getLogger" ]
[((167, 194), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'import logging\n'), ((724, 779), 'os.path.join', 'os.path.join', (['settings.MANAGE_ROOT', '"""legacy-schema.sql"""'], {}), "(settings.MANAGE_ROOT, 'legacy-schema.sql')\n", (736, 779), False, 'import os\n')]
# coding: utf8 import datetime import logging import random import re import ssl import subprocess import threading import time from multiprocessing import Process as Thread import telebot from aiohttp import web from telebot import types import api import cherrypy import config import secret_config import text impo...
[ "utils.check_text", "ssl.SSLContext", "api.get_chats_count", "api.get_user_param", "utils.have_args", "utils.send_err_report", "utils.new_user_in_chat", "utils.ban_stickerpack", "utils.to_bool", "utils.get_my_ip", "utils.unban_stickerpack", "utils.get_greeting", "utils.parse_chat_id", "uti...
[((358, 375), 'utils.get_my_ip', 'utils.get_my_ip', ([], {}), '()\n', (373, 375), False, 'import utils\n'), ((826, 868), 'telebot.TeleBot', 'telebot.TeleBot', ([], {'token': 'secret_config.token'}), '(token=secret_config.token)\n', (841, 868), False, 'import telebot\n'), ((911, 939), 'logging.getLogger', 'logging.getLo...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed Mar 20 22:08:31 2019 @author: iaricanli """ import copy T = True F = False D = "_" """ Generate the inputs for the algorithm. A list of dictionarys. Each element of the list represents a different boolean expression input -- say if we are trying to r...
[ "copy.deepcopy" ]
[((6780, 6807), 'copy.deepcopy', 'copy.deepcopy', (['list_of_maps'], {}), '(list_of_maps)\n', (6793, 6807), False, 'import copy\n'), ((7059, 7086), 'copy.deepcopy', 'copy.deepcopy', (['minterms2bln'], {}), '(minterms2bln)\n', (7072, 7086), False, 'import copy\n')]
import yaml from pathlib import Path from os import environ class Config: def __init__(self, file_path="config.yml"): try: with open(file_path, encoding="UTF-8") as file: self.config = yaml.full_load(file.read()) except Exception: self.config = {} ...
[ "pathlib.Path" ]
[((2076, 2090), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2080, 2090), False, 'from pathlib import Path\n')]
#! /usr/bin/env python import pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method("/", self.home) ...
[ "pyslet.xml.structures.EscapeCharData" ]
[((715, 775), 'pyslet.xml.structures.EscapeCharData', 'xml.EscapeCharData', (["context.session.entity['UserName'].value"], {}), "(context.session.entity['UserName'].value)\n", (733, 775), True, 'import pyslet.xml.structures as xml\n'), ((1147, 1188), 'pyslet.xml.structures.EscapeCharData', 'xml.EscapeCharData', (['self...
import pathlib
from typing import (
    Iterable
)

CONTRACTS_ROOT = "./scripts/benchmark/contract_data/"
CONTRACTS = [
    "erc20.sol"
]

def get_contracts() -> Iterable[pathlib.Path]:
    for val in CONTRACTS:
        yield pathlib.Path(CONTRACTS_ROOT) / pathlib.Path(val)
[ "pathlib.Path" ]
[((227, 255), 'pathlib.Path', 'pathlib.Path', (['CONTRACTS_ROOT'], {}), '(CONTRACTS_ROOT)\n', (239, 255), False, 'import pathlib\n'), ((258, 275), 'pathlib.Path', 'pathlib.Path', (['val'], {}), '(val)\n', (270, 275), False, 'import pathlib\n')]
########################################################################## # # pgAdmin 4 - PostgreSQL Tools # # Copyright (C) 2013 - 2020, The pgAdmin Development Team # This software is released under the PostgreSQL Licence # ########################################################################## import json impor...
[ "random.randint", "regression.python_test_utils.test_utils.get_driver_version", "datetime.date.today", "regression.python_test_utils.test_utils.create_table_with_query", "pgadmin.browser.server_groups.servers.databases.tests.utils.connect_database", "pgadmin.browser.server_groups.servers.databases.tests.u...
[((5722, 5790), 'pgadmin.browser.server_groups.servers.databases.tests.utils.disconnect_database', 'database_utils.disconnect_database', (['self', 'self.server_id', 'self.db_id'], {}), '(self, self.server_id, self.db_id)\n', (5756, 5790), True, 'from pgadmin.browser.server_groups.servers.databases.tests import utils as...
import numpy as np import pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq = np.append(seq, n) while True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq...
[ "numpy.append", "numpy.asarray", "numpy.array" ]
[((1092, 1119), 'numpy.asarray', 'np.asarray', (['output_seq_data'], {}), '(output_seq_data)\n', (1102, 1119), True, 'import numpy as np\n'), ((1130, 1149), 'numpy.asarray', 'np.asarray', (['x_train'], {}), '(x_train)\n', (1140, 1149), True, 'import numpy as np\n'), ((1160, 1179), 'numpy.asarray', 'np.asarray', (['y_tr...
# ------------------------------------------------------------------------------ # Copyright (c) Microsoft # Licensed under the MIT License. # Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import divis...
[ "torch.distributed.is_initialized", "numpy.argmax", "torch.distributed.get_rank", "os.path.basename", "logging.StreamHandler", "numpy.zeros", "time.strftime", "pathlib.Path", "torch.optim.Adam", "torch.distributed.get_world_size", "numpy.bincount", "logging.getLogger", "torch.optim.SGD" ]
[((491, 518), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (508, 518), False, 'import logging\n'), ((1111, 1145), 'torch.distributed.get_world_size', 'torch.distributed.get_world_size', ([], {}), '()\n', (1143, 1145), False, 'import torch\n'), ((1239, 1267), 'torch.distributed.get_rank'...
import sys from startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml("/opt/netbox/initializers/virtualization_interfaces.yml") if interfaces is None: sys.exit() required_assocs = {"virtual_machine"...
[ "virtualization.models.VMInterface.objects.get_or_create", "startup_script_utils.pop_custom_fields", "startup_script_utils.set_custom_fields_values", "startup_script_utils.load_yaml", "sys.exit" ]
[((176, 243), 'startup_script_utils.load_yaml', 'load_yaml', (['"""/opt/netbox/initializers/virtualization_interfaces.yml"""'], {}), "('/opt/netbox/initializers/virtualization_interfaces.yml')\n", (185, 243), False, 'from startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values\n'), ((272, 282...
from __future__ import ( annotations, ) from typing import ( Generator, NoReturn ) class StdReader: def __init__( self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator def async_readlines( self, )...
[ "collections.deque" ]
[((2602, 2609), 'collections.deque', 'deque', ([], {}), '()\n', (2607, 2609), False, 'from collections import deque\n')]
from django.db import models from django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters import slugify import uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) ...
[ "django.db.models.TextField", "django.db.models.OneToOneField", "django.db.models.ManyToManyField", "uuid.uuid4", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.PositiveIntegerField", "django.db.models.Manager", "django.db.models.SlugField", "django.db.models.ImageF...
[((331, 382), 'django.db.models.CharField', 'models.CharField', (['"""标题"""'], {'max_length': '(200)', 'unique': '(True)'}), "('标题', max_length=200, unique=True)\n", (347, 382), False, 'from django.db import models\n'), ((394, 433), 'django.db.models.SlugField', 'models.SlugField', (['"""slug"""'], {'max_length': '(60)...
# Copyright 2017 Quantum Information Science, University of Parma, Italy. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2...
[ "sys.path.append", "os.path.dirname", "time.sleep", "myLogger.MyHandler", "qiskit.QuantumProgram", "operator.itemgetter", "logging.getLogger" ]
[((1015, 1050), 'sys.path.append', 'sys.path.append', (['"""../qiskit-sdk-py"""'], {}), "('../qiskit-sdk-py')\n", (1030, 1050), False, 'import sys\n'), ((1212, 1240), 'logging.getLogger', 'logging.getLogger', (['"""utility"""'], {}), "('utility')\n", (1229, 1240), False, 'import logging\n'), ((1259, 1279), 'myLogger.My...
import os import time import run_services from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv("user") groupname = username def copy(src, file_list, dest, user_pass, *args): log.info("\nCopying\n") try: ...
[ "os.makedirs", "os.path.isdir", "os.path.exists", "run_services.run_basic_services", "time.time", "bigdata_logs.logger.getLoggingInstance", "os.getenv", "Basic_linux_commands.chown_chmod.chown" ]
[((152, 172), 'bigdata_logs.logger.getLoggingInstance', 'getLoggingInstance', ([], {}), '()\n', (170, 172), False, 'from bigdata_logs.logger import getLoggingInstance\n'), ((185, 202), 'os.getenv', 'os.getenv', (['"""user"""'], {}), "('user')\n", (194, 202), False, 'import os\n'), ((476, 496), 'os.path.exists', 'os.pat...
import os import pytest from dj_database_url import parse from django.conf import settings from testing.postgresql import Postgresql postgres = os.environ.get("POSTGRESQL_PATH") initdb = os.environ.get("INITDB_PATH") _POSTGRESQL = Postgresql(postgres=postgres, initdb=initdb) @pytest.hookimpl(tryfirst=True) def pyte...
[ "os.environ.get", "pytest.hookimpl", "testing.postgresql.Postgresql" ]
[((146, 179), 'os.environ.get', 'os.environ.get', (['"""POSTGRESQL_PATH"""'], {}), "('POSTGRESQL_PATH')\n", (160, 179), False, 'import os\n'), ((189, 218), 'os.environ.get', 'os.environ.get', (['"""INITDB_PATH"""'], {}), "('INITDB_PATH')\n", (203, 218), False, 'import os\n'), ((233, 277), 'testing.postgresql.Postgresql...
""" Script that scrapes Jaimini's box website to retrieve when was the last One Piece chapter released to then ask you if you want to read the chapter in your browser or download it """ from bs4 import BeautifulSoup import requests import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/o...
[ "bs4.BeautifulSoup", "webbrowser.open", "os.getcwd", "requests.get" ]
[((390, 414), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r', '"""lxml"""'], {}), "(r, 'lxml')\n", (403, 414), False, 'from bs4 import BeautifulSoup\n'), ((360, 377), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (372, 377), False, 'import requests\n'), ((1566, 1585), 'requests.get', 'requests.get', (['d_url']...
import json import xmltodict with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\nXML output(json_to_xml.xml):') print(xmlString) with open('...
[ "json.loads" ]
[((207, 229), 'json.loads', 'json.loads', (['jsonString'], {}), '(jsonString)\n', (217, 229), False, 'import json\n')]
import numpy as np from openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(cr...
[ "numpy.matrix", "numpy.size", "numpy.argmax", "numpy.zeros", "numpy.array", "optigurator.utils.recording_filename", "numpy.delete" ]
[((3474, 3496), 'numpy.size', 'np.size', (['Pareto_points'], {}), '(Pareto_points)\n', (3481, 3496), True, 'import numpy as np\n'), ((3519, 3544), 'numpy.size', 'np.size', (['Pareto_points', '(0)'], {}), '(Pareto_points, 0)\n', (3526, 3544), True, 'import numpy as np\n'), ((3701, 3731), 'numpy.delete', 'np.delete', (['...
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not u...
[ "data_pipeline.sql.utils.build_alter_sql" ]
[((2063, 2094), 'data_pipeline.sql.utils.build_alter_sql', 'sql_utils.build_alter_sql', (['self'], {}), '(self)\n', (2088, 2094), True, 'import data_pipeline.sql.utils as sql_utils\n')]
""" Read in the data from csv files """ import pandas as pd import os import glob from save_data import save_object, load_object def load_csv(filename): # load data from pickle file if it exists obj = load_object(filename) if obj != None: return obj # otherwise load from csv else: ...
[ "os.path.join", "os.path.basename", "pandas.read_csv", "save_data.load_object", "save_data.save_object" ]
[((212, 233), 'save_data.load_object', 'load_object', (['filename'], {}), '(filename)\n', (223, 233), False, 'from save_data import save_object, load_object\n'), ((493, 515), 'save_data.load_object', 'load_object', (['pickle_fn'], {}), '(pickle_fn)\n', (504, 515), False, 'from save_data import save_object, load_object\...
from urllib.parse import ParseResult from os.path import realpath, dirname, join as _path_join import requests from json import load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir, "mimes.json")) as f: mime_ty...
[ "json.load", "urllib.parse.ParseResult", "os.path.realpath", "os.path.dirname", "requests.get", "os.path.join" ]
[((160, 178), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (168, 178), False, 'from os.path import realpath, dirname, join as _path_join\n'), ((192, 211), 'os.path.dirname', 'dirname', (['script_loc'], {}), '(script_loc)\n', (199, 211), False, 'from os.path import realpath, dirname, join as _path...
from disnake.ext import commands from utils.clash import client, pingToChannel, getClan import disnake usafam = client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog, name="Board Setup"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=...
[ "disnake.ui.ActionRow", "disnake.Color.red", "disnake.ext.commands.Param", "disnake.SelectOption", "utils.clash.getClan", "utils.clash.pingToChannel", "disnake.ui.Select", "disnake.ext.commands.slash_command", "disnake.Color.green" ]
[((292, 332), 'disnake.ext.commands.slash_command', 'commands.slash_command', ([], {'name': '"""autoboard"""'}), "(name='autoboard')\n", (314, 332), False, 'from disnake.ext import commands\n'), ((606, 672), 'disnake.ext.commands.Param', 'commands.Param', ([], {'choices': "['Player Leaderboard', 'Clan Leaderboard']"}),...
# coding: utf-8 # Third Party Libraries from sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a single method, make it a list. if isinstance(kwargs.get("paths"), st...
[ "transmute_core.function.TransmuteAttributes", "sanic_transmute.add_route" ]
[((491, 520), 'transmute_core.function.TransmuteAttributes', 'TransmuteAttributes', ([], {}), '(**kwargs)\n', (510, 520), False, 'from transmute_core.function import TransmuteAttributes\n'), ((690, 715), 'sanic_transmute.add_route', 'add_route', (['blueprint', 'fnc'], {}), '(blueprint, fnc)\n', (699, 715), False, 'from...
import os from typing import Dict from abc import ABC from easy_sdm.data import ShapefileRegion import geopandas as gpd import numpy as np import pandas as pd import requests from easy_sdm.configs import configs from easy_sdm.utils import logger from typing import Dict, Optional from pathlib import Path class GBIFOc...
[ "pandas.DataFrame", "pandas.notnull", "easy_sdm.utils.logger.logging.info", "geopandas.points_from_xy", "numpy.array", "requests.get" ]
[((1336, 1378), 'requests.get', 'requests.get', (['self.base_url'], {'params': 'params'}), '(self.base_url, params=params)\n', (1348, 1378), False, 'import requests\n'), ((1460, 1567), 'easy_sdm.utils.logger.logging.info', 'logger.logging.info', (['f"""API call failed at offset {offset} with a status code of {r.status_...
from __future__ import division import numpy as np import unittest import chainer from chainer import testing from chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, sh...
[ "numpy.random.uniform", "chainercv.links.model.fpn.head_loss_pre", "chainercv.links.model.fpn.Head", "chainer.testing.parameterize", "chainercv.links.model.fpn.head_loss_post", "chainer.testing.run_module" ]
[((414, 499), 'chainer.testing.parameterize', 'testing.parameterize', (["{'n_class': 1 + 1}", "{'n_class': 5 + 1}", "{'n_class': 20 + 1}"], {}), "({'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 +\n 1})\n", (434, 499), False, 'from chainer import testing\n'), ((8017, 8055), 'chainer.testing.run_module', 'test...
#!/usr/bin/env python """ Module for reading STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in convert_text """ from asrtoolkit.data_structures.segment import segment def format_segment(seg): ""...
[ "asrtoolkit.data_structures.segment.segment" ]
[((824, 961), 'asrtoolkit.data_structures.segment.segment', 'segment', (["{'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start':\n start, 'stop': stop, 'label': label, 'text': text}"], {}), "({'audiofile': audiofile, 'channel': channel, 'speaker': speaker,\n 'start': start, 'stop': stop, 'label...
# -*- coding: utf-8 -*- """ Created on Mon Oct 5 14:29:04 2020 @author: ptrda """ import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.j...
[ "unittest.main", "Project02.createIndividualsDataFrame", "os.path.abspath", "us23.us23", "os.getcwd", "os.path.join" ]
[((290, 301), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (299, 301), False, 'import os\n'), ((1683, 1739), 'unittest.main', 'unittest.main', ([], {'argv': "['first-arg-is-ignored']", 'exit': '(False)'}), "(argv=['first-arg-is-ignored'], exit=False)\n", (1696, 1739), False, 'import unittest\n'), ((177, 226), 'os.path.a...
from json import loads, dumps from random import randint import stanza import praw import re import os from urllib.parse import quote from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run this co...
[ "json.dumps", "os.path.isfile", "stanza.Pipeline", "praw.Reddit", "re.search", "stanza.download" ]
[((200, 219), 'praw.Reddit', 'praw.Reddit', (['"""bot1"""'], {}), "('bot1')\n", (211, 219), False, 'import praw\n'), ((3354, 3375), 'stanza.download', 'stanza.download', (['"""en"""'], {}), "('en')\n", (3369, 3375), False, 'import stanza\n'), ((3391, 3439), 'stanza.Pipeline', 'stanza.Pipeline', (['"""en"""'], {'process...
import pytest from sys import version_info import fstr def test_basic(): template = fstr("{x} + {y} = {x + y}", x=1) assert template.format(y=2) == "1 + 2 = 3" assert template.format(y=3) == "1 + 3 = 4" def test_basic_format_language(): template = fstr("{x!r} + {y!r} = {x + y!r}", x="a") assert...
[ "pytest.mark.parametrize", "pytest.raises", "fstr" ]
[((2072, 2111), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""brace"""', '"""])}"""'], {}), "('brace', '])}')\n", (2095, 2111), False, 'import pytest\n'), ((3433, 3518), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""template"""', '_format_specifier_width_precision_templates'], {}), "('templa...
#!/usr/bin/python # -*- coding: utf-8 -*- import os import sys from builders import deb, rpm, amazon from builders.util import shell_call __author__ = "<NAME>" __copyright__ = "Copyright (C) Nginx, Inc. All rights reserved." __license__ = "" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" if __name__ == '__main__':...
[ "builders.amazon.build", "builders.deb.build", "builders.util.shell_call", "os.path.isfile", "builders.rpm.build" ]
[((404, 441), 'os.path.isfile', 'os.path.isfile', (['"""/etc/debian_version"""'], {}), "('/etc/debian_version')\n", (418, 441), False, 'import os\n'), ((451, 477), 'builders.deb.build', 'deb.build', ([], {'package': 'package'}), '(package=package)\n', (460, 477), False, 'from builders import deb, rpm, amazon\n'), ((487...
# Generated by Django 3.0.8 on 2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( ...
[ "django.db.models.ForeignKey", "django.db.models.BooleanField", "django.db.models.ImageField" ]
[((397, 512), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""category.Category"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='category.Category')\n", (414, 512), False, '...
#!/usr/bin/env python # -*- coding: utf-8 -*- import codecs import os from setuptools import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding="utf-8").read() setup( name="pardal", version="0.1.0", author="<N...
[ "os.path.dirname", "codecs.open", "setuptools.find_packages" ]
[((164, 189), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (179, 189), False, 'import os\n'), ((560, 600), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'docs']"}), "(exclude=['tests', 'docs'])\n", (573, 600), False, 'from setuptools import find_packages, setup\n'...
"""Create dataframe and check the quality This script downloads a dataset from Seattle Open Data Portal and imports as a Pandas Dataframe. This tool checks if the dataframe: 1. Has at least 10 rows of data 2. Contains only the columns that specified as the second argument 3. Values in each column have the same python...
[ "pandas.read_csv" ]
[((824, 929), 'pandas.read_csv', 'pd.read_csv', (['"""https://data.seattle.gov/api/views/tw7j-dfaw/rows.csv?accessType=DOWNLOAD"""'], {}), "(\n 'https://data.seattle.gov/api/views/tw7j-dfaw/rows.csv?accessType=DOWNLOAD'\n )\n", (835, 929), True, 'import pandas as pd\n'), ((3476, 3581), 'pandas.read_...
import unittest import zserio from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, "constraints.zs").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter()...
[ "zserio.serialize", "zserio.deserialize", "zserio.BitStreamReader", "testutils.getZserioApi", "zserio.BitStreamWriter" ]
[((296, 320), 'zserio.BitStreamWriter', 'zserio.BitStreamWriter', ([], {}), '()\n', (318, 320), False, 'import zserio\n'), ((491, 552), 'zserio.BitStreamReader', 'zserio.BitStreamReader', (['writer.byte_array', 'writer.bitposition'], {}), '(writer.byte_array, writer.bitposition)\n', (513, 552), False, 'import zserio\n'...
import yaml import pandas as pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/com...
[ "pandas.read_feather", "os.chdir" ]
[((78, 98), 'os.chdir', 'os.chdir', (['"""notebook"""'], {}), "('notebook')\n", (86, 98), False, 'import os\n'), ((120, 225), 'pandas.read_feather', 'pd.read_feather', (['"""../references/gene_annotation_dmel_r6-26.feather"""'], {'columns': "['FBgn', 'gene_symbol']"}), "('../references/gene_annotation_dmel_r6-26.feathe...
""" <NAME> json from yr. Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --heade...
[ "json.dump", "requests.request" ]
[((987, 1032), 'requests.request', 'requests.request', (['"""GET"""', 'url'], {'headers': 'headers'}), "('GET', url, headers=headers)\n", (1003, 1032), False, 'import requests\n'), ((1257, 1275), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (1266, 1275), False, 'import json\n')]
import re from glom import glom import json from jsonschema import validate from jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har = har def rmatch(self, val, str_rxp): if val is None: return False if ...
[ "jsonschema.validate", "glom.glom", "json.loads", "jsonpath_ng.parse", "re.search" ]
[((395, 439), 're.search', 're.search', (['str_rxp', 'val'], {'flags': 're.IGNORECASE'}), '(str_rxp, val, flags=re.IGNORECASE)\n', (404, 439), False, 'import re\n'), ((1853, 1869), 'jsonpath_ng.parse', 'parse', (['json_path'], {}), '(json_path)\n', (1858, 1869), False, 'from jsonpath_ng import parse\n'), ((1369, 1407),...
from django.http import JsonResponse, HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): """ Provide response in json or html format accordin...
[ "django.http.JsonResponse", "django.template.loader.get_template", "django.template.Template", "logging.getLogger", "django.template.RequestContext" ]
[((167, 194), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'import logging\n'), ((668, 743), 'django.http.JsonResponse', 'JsonResponse', ([], {'status': 'code', 'data': "{'results': {'code': code, 'msg': message}}"}), "(status=code, data={'results': {'code': code, 'ms...
from ..parser import Atomic, Variable, Compound, List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): """'='(?term, ?term) If X and Y are NSTO (Not Subject To Occur-check)...
[ "copy.deepcopy", "math.sqrt", "math.modf", "math.copysign", "math.log" ]
[((14667, 14681), 'math.copysign', 'copysign', (['(1)', 'x'], {}), '(1, x)\n', (14675, 14681), False, 'from math import copysign\n'), ((15045, 15052), 'math.modf', 'modf', (['x'], {}), '(x)\n', (15049, 15052), False, 'from math import modf\n'), ((15388, 15395), 'math.modf', 'modf', (['x'], {}), '(x)\n', (15392, 15395),...
#!/usr/bin/env python from itertools import izip import numpy as np import h5py from progress.bar import Bar import sys import rospy import rosbag from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) < 2: print("Usage: {} dataset_name".format(sys.argv[0])) exit(1) file_name = sys.argv[1] ...
[ "sensor_msgs.msg.Image", "numpy.transpose", "sensor_msgs.msg.Imu", "rospy.Time.from_sec", "itertools.izip" ]
[((474, 546), 'itertools.izip', 'izip', (["log_file['times']", "log_file['fiber_accel']", "log_file['fiber_gyro']"], {}), "(log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro'])\n", (478, 546), False, 'from itertools import izip\n'), ((737, 744), 'sensor_msgs.msg.Image', 'Image', ([], {}), '()\n', (742, 7...
from django.db import models class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name="Русское название") title_en = models.CharField(max_length=200, verbose_name="Английское название", blank=True) title_jp = models.CharField(max_length=200, verbose_name="Японское название", blan...
[ "django.db.models.TextField", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.FloatField", "django.db.models.ImageField", "django.db.models.IntegerField", "django.db.models.DateTimeField" ]
[((72, 137), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""Русское название"""'}), "(max_length=200, verbose_name='Русское название')\n", (88, 137), False, 'from django.db import models\n'), ((153, 238), 'django.db.models.CharField', 'models.CharField', ([], {'max_le...
import random import asyncio from discord.ext import commands import discord import typing from base import BaseCog # https://github.com/Rapptz/discord.py/blob/v1.7.2/examples/guessing_game.py class GuessingGame(BaseCog, name="Free guessing game -- with nothing at stake."): def __init__(self, bot): self...
[ "discord.ext.commands.command", "random.randint" ]
[((578, 656), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""free_guess_now"""', 'help': '"""Guess a random number from 1-9"""'}), "(name='free_guess_now', help='Guess a random number from 1-9')\n", (594, 656), False, 'from discord.ext import commands\n'), ((951, 1055), 'discord.ext.commands.comm...
# -*- coding: utf-8 -*- """ Tests for py33_exceptions. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import unittest from trollius import py33_exceptions class TestWrapErrors(unittest.TestCase): def test_ebadf_wrapped_to_OSError(self): ...
[ "unittest.main", "trollius.py33_exceptions.wrap_error", "socket.socket" ]
[((824, 839), 'unittest.main', 'unittest.main', ([], {}), '()\n', (837, 839), False, 'import unittest\n'), ((442, 457), 'socket.socket', 'socket.socket', ([], {}), '()\n', (455, 457), False, 'import socket\n'), ((689, 731), 'trollius.py33_exceptions.wrap_error', 'py33_exceptions.wrap_error', (['s.send', "b'abc'"], {}),...
import tensorflow as tf from tensorflow.keras.applications.resnet50 import preprocess_input from tensorflow.keras.preprocessing.image import img_to_array from tensorflow.keras.preprocessing.image import load_img import tensorflow_datasets as tfds import numpy as np import pandas as pd from sklearn.model_selection imp...
[ "tensorflow.reduce_sum", "pandas.read_csv", "tensorflow.keras.metrics.Mean", "tensorflow.maximum", "tensorflow.numpy_function", "tensorflow.keras.losses.SparseCategoricalCrossentropy", "tensorflow.train.Checkpoint", "tensorflow.math.equal", "tensorflow.cast", "tensorflow.keras.optimizers.Adam", ...
[((705, 737), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (735, 737), True, 'import tensorflow as tf\n'), ((809, 847), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (840, 847), True, 'import tensorflow as...
#!/usr/bin/env python3 """ main.py - The main module for processing data and creating visual summaries for this study. """ # =========================================================================== # # METADATA # =========================================================================== # __author__ = 'Robert (Bob...
[ "matplotlib.pyplot.subplot", "seaborn.lineplot", "os.getcwd", "pandas.read_csv", "pandas.merge", "matplotlib.pyplot.figure", "seaborn.boxplot", "pandas.to_datetime", "seaborn.countplot", "matplotlib.dates.DateFormatter", "seaborn.set" ]
[((1681, 1692), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1690, 1692), False, 'import os\n'), ((2073, 2085), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2083, 2085), True, 'import matplotlib.pyplot as plt\n'), ((2096, 2112), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (21...
"""Govee API client package.""" import asyncio import logging import time import math from contextlib import asynccontextmanager from dataclasses import dataclass from datetime import datetime from events import Events from typing import Any, List, Optional, Tuple, Union import aiohttp from govee_api_laggat.__versio...
[ "govee_api_laggat.learning_storage.GoveeLearnedInfo", "govee_api_laggat.learning_storage.GoveeAbstractLearningStorage", "math.ceil", "asyncio.sleep", "math.floor", "time.time", "aiohttp.ClientSession", "events.Events", "datetime.datetime.now", "logging.getLogger" ]
[((456, 483), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (473, 483), False, 'import logging\n'), ((2352, 2375), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (2373, 2375), False, 'import aiohttp\n'), ((2913, 2921), 'events.Events', 'Events', ([], {}), '()\n', (29...
""" Obtain the single photoelectron response for an SiPM. Can be used as an input to sim_telarray after normalisation with Konrads script """ import argparse from argparse import ArgumentDefaultsHelpFormatter as Formatter import numpy as np from scipy.special import binom from scipy.stats import norm from IPython impor...
[ "scipy.special.binom", "argparse.ArgumentParser", "os.makedirs", "numpy.power", "os.path.exists", "scipy.stats.norm.pdf", "numpy.arange", "numpy.linspace", "numpy.column_stack", "matplotlib.pyplot.semilogy", "os.path.join", "matplotlib.pyplot.savefig", "numpy.sqrt" ]
[((1223, 1249), 'numpy.power', 'np.power', (['(1 - pap)', 'N[:, 0]'], {}), '(1 - pap, N[:, 0])\n', (1231, 1249), True, 'import numpy as np\n'), ((1321, 1348), 'numpy.sqrt', 'np.sqrt', (['(K * spe_sigma ** 2)'], {}), '(K * spe_sigma ** 2)\n', (1328, 1348), True, 'import numpy as np\n'), ((1364, 1385), 'numpy.sqrt', 'np....
# %% import pandas as pd from collections import defaultdict import pickle from typing import DefaultDict cmap_data = pickle.load(open("./cmap_transformer.pkl", "rb")) mm_data = pickle.load(open("./mm_report_transformer.pkl", "rb")) # %% def convert_to_metric_first(data): rows = defaultdict(dict) for model, ...
[ "collections.defaultdict", "pandas.DataFrame" ]
[((286, 303), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (297, 303), False, 'from collections import defaultdict\n'), ((555, 573), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (567, 573), True, 'import pandas as pd\n')]
# -*- coding: utf-8 -*- """ New Discussions -- Provides a list of new discussions within a WikiProject's scope Copyright (C) 2015 <NAME>, 2016 <NAME> Licensed under MIT License: http://mitlicense.org """ from collections import namedtuple from datetime import datetime import re from reportsbot.task import Task from ...
[ "reportsbot.util.join_full_title", "mwparserfromhell.parse", "datetime.datetime.utcnow", "collections.namedtuple", "re.search" ]
[((465, 510), 'collections.namedtuple', 'namedtuple', (['"""_Section"""', "['name', 'timestamp']"], {}), "('_Section', ['name', 'timestamp'])\n", (475, 510), False, 'from collections import namedtuple\n'), ((525, 582), 'collections.namedtuple', 'namedtuple', (['"""_Discussion"""', "['title', 'name', 'timestamp']"], {})...
########################## # Test script to check if "is_triggered_only = yes" events are triggered from somewhere # If they not - they'll never be triggered # By Pelmen, https://github.com/Pelmen323 ########################## import re from ..test_classes.generic_test_class import DataCleaner, ResultsReporter from ..t...
[ "re.findall" ]
[((954, 982), 're.findall', 're.findall', (['"""id = .*"""', 'event'], {}), "('id = .*', event)\n", (964, 982), False, 'import re\n')]
from pathlib import Path from typing import Dict, Type, Iterator, List, Tuple import pyarrow as pa from pyarrow import csv as pcsv from pyarrow import parquet as pq from sqlalchemy import MetaData as AlchemyMetadata, Table as AlchemyTable from sqlalchemy import Integer, SmallInteger, Float, String, CHAR, Text, Boolean...
[ "pyarrow.csv.ParseOptions", "pyarrow.Table.from_batches", "pyarrow.csv.ConvertOptions", "pyarrow.csv.ReadOptions", "pyarrow.csv.open_csv", "pyarrow.parquet.ParquetWriter", "src.TRANSFORM_PATH_PREFIX.joinpath" ]
[((499, 540), 'src.TRANSFORM_PATH_PREFIX.joinpath', 'TRANSFORM_PATH_PREFIX.joinpath', (['"""parquet"""'], {}), "('parquet')\n", (529, 540), False, 'from src import EXTRACT_PATH_PREFIX, TRANSFORM_PATH_PREFIX\n'), ((554, 591), 'src.TRANSFORM_PATH_PREFIX.joinpath', 'TRANSFORM_PATH_PREFIX.joinpath', (['"""csv"""'], {}), "(...
# -*- coding: utf-8 -*- import tools import time import db import threading from .threads import ThreadPool class DetectorBase(object): """the base class for detecting""" def __init__(self): self.T = tools.Tools() self.now = int(time.time() * 1000) def getItems(self): pass de...
[ "tools.Tools", "db.DataBase", "time.time" ]
[((218, 231), 'tools.Tools', 'tools.Tools', ([], {}), '()\n', (229, 231), False, 'import tools\n'), ((1085, 1098), 'db.DataBase', 'db.DataBase', ([], {}), '()\n', (1096, 1098), False, 'import db\n'), ((255, 266), 'time.time', 'time.time', ([], {}), '()\n', (264, 266), False, 'import time\n')]
# -*- coding: utf-8 -*- from django.db import models from model_utils.models import TimeStampedModel from apps.post.models import Post class Comment(TimeStampedModel): """Comment for Post """ class Meta: db_table = "comment" ordering = ["-created"] post = models.ForeignKey(Post, on_...
[ "django.db.models.ForeignKey", "django.db.models.CharField" ]
[((293, 367), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Post'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "(Post, on_delete=models.CASCADE, related_name='comments')\n", (310, 367), False, 'from django.db import models\n'), ((379, 411), 'django.db.models.CharField', 'models.CharFie...
from django.core.validators import MaxValueValidator from django.core.validators import MaxValueValidator from django.db import models from django.utils import timezone class Faculty(models.Model): name = models.CharField(max_length=80, blank=True, null=True) faculty_describtion = models.TextField(blank=True,...
[ "django.db.models.TextField", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.IntegerField", "django.db.models.DateTimeField" ]
[((211, 265), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)', 'blank': '(True)', 'null': '(True)'}), '(max_length=80, blank=True, null=True)\n', (227, 265), False, 'from django.db import models\n'), ((292, 331), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'nu...
from dbnd._core.tracking.schemas.base import ApiStrictSchema from dbnd._vendor.marshmallow import fields, pre_load class MLAlert(ApiStrictSchema): sensitivity = fields.Float() look_back = fields.Integer() class AlertDefsSchema(ApiStrictSchema): severity = fields.Str(required=True) type = fields.Str(...
[ "dbnd._vendor.marshmallow.fields.Function", "dbnd._vendor.marshmallow.fields.DateTime", "dbnd._vendor.marshmallow.fields.Integer", "dbnd._vendor.marshmallow.fields.Method", "dbnd._vendor.marshmallow.fields.Int", "dbnd._vendor.marshmallow.fields.UUID", "dbnd._vendor.marshmallow.fields.Dict", "dbnd._ven...
[((167, 181), 'dbnd._vendor.marshmallow.fields.Float', 'fields.Float', ([], {}), '()\n', (179, 181), False, 'from dbnd._vendor.marshmallow import fields, pre_load\n'), ((198, 214), 'dbnd._vendor.marshmallow.fields.Integer', 'fields.Integer', ([], {}), '()\n', (212, 214), False, 'from dbnd._vendor.marshmallow import fie...
import datetime as dt import numpy as np import pandas as pd # START USER INPUT lgs_filepath = 'U:/CIO/#Data/output/investment/checker/lgs_table.csv' jpm_filepath = 'U:/CIO/#Data/input/jpm/report/investment/LGSS Preliminary Performance 202005.xlsx' lgs_dictionary_filepath = 'U:/CIO/#Data/input/lgs/dictionary/2020/06/N...
[ "pandas.DataFrame", "pandas.read_csv", "pandas.merge", "pandas.ExcelFile", "datetime.datetime", "pandas.read_excel", "pandas.isna", "pandas.concat" ]
[((368, 392), 'datetime.datetime', 'dt.datetime', (['(2020)', '(5)', '(31)'], {}), '(2020, 5, 31)\n', (379, 392), True, 'import datetime as dt\n'), ((438, 463), 'pandas.read_csv', 'pd.read_csv', (['lgs_filepath'], {}), '(lgs_filepath)\n', (449, 463), True, 'import pandas as pd\n'), ((641, 655), 'pandas.DataFrame', 'pd....
import os import json import calendar import time from boto.s3.connection import S3Connection from boto.s3.key import Key from .cachingHelper import getCache from .constants import LINKTAG_PUBTIME, FEEDTAG_DO_NOT_CLUSTER from .dbhelper import parseConnectionString, getS3Connection from .doc import Doc def _getEpochS...
[ "time.strptime", "json.loads", "json.dumps" ]
[((355, 397), 'time.strptime', 'time.strptime', (['t[:19]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(t[:19], '%Y-%m-%dT%H:%M:%S')\n", (368, 397), False, 'import time\n'), ((1663, 1679), 'json.dumps', 'json.dumps', (['tags'], {}), '(tags)\n', (1673, 1679), False, 'import json\n'), ((2102, 2125), 'json.loads', 'json.loads', ([...
from django.urls import path
from . import views

app_name = 'blog'

urlpatterns = [
    path('', views.post_list, name='post_list'),
    path('<slug:post>/', views.post_detail, name='post_detail'),
    path('comment/reply/', views.reply_page, name='reply'),
    path('about', views.about_page, name='about'),
]
[ "django.urls.path" ]
[((88, 131), 'django.urls.path', 'path', (['""""""', 'views.post_list'], {'name': '"""post_list"""'}), "('', views.post_list, name='post_list')\n", (92, 131), False, 'from django.urls import path\n'), ((137, 196), 'django.urls.path', 'path', (['"""<slug:post>/"""', 'views.post_detail'], {'name': '"""post_detail"""'}), ...
from mycroft import MycroftSkill, intent_file_handler class Midicontrol(MycroftSkill): def __init__(self): MycroftSkill.__init__(self) @intent_file_handler('midicontrol.intent') def handle_midicontrol(self, message): self.speak_dialog('midicontrol') def create_skill(): return Midico...
[ "mycroft.MycroftSkill.__init__", "mycroft.intent_file_handler" ]
[((155, 196), 'mycroft.intent_file_handler', 'intent_file_handler', (['"""midicontrol.intent"""'], {}), "('midicontrol.intent')\n", (174, 196), False, 'from mycroft import MycroftSkill, intent_file_handler\n'), ((121, 148), 'mycroft.MycroftSkill.__init__', 'MycroftSkill.__init__', (['self'], {}), '(self)\n', (142, 148)...
from os.path import expanduser
from os import sep
from re import split
from functools import reduce
from xmtrace import xmtrace

@xmtrace
def xm_path_translate(lua, ph):
    return expanduser(reduce(lambda a, b: a + sep + b, split(r"\\|/", ph)))
[ "re.split" ]
[((225, 244), 're.split', 'split', (['"""\\\\\\\\|/"""', 'ph'], {}), "('\\\\\\\\|/', ph)\n", (230, 244), False, 'from re import split\n')]
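Since the record above is one of the few complete snippets in this batch, a standalone check of what it does may help: the expression splits a path on either separator style, rejoins the pieces with the local os.sep, and expands a leading "~". The sample path below is made up, and the @xmtrace decorator is dropped because xmtrace is not a public package.

from functools import reduce
from os import sep
from os.path import expanduser
from re import split

# Same expression as xm_path_translate's body, applied to a hypothetical
# mixed-separator path; on Linux this prints e.g. /home/<user>/foo/bar/baz.
print(expanduser(reduce(lambda a, b: a + sep + b, split(r"\\|/", "~/foo\\bar/baz"))))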
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import logging
import logging.handlers

CRITICAL = 1
ERROR = 2
WARNING = 3
INFO = 4
DEBUG = 5


class Logger:
    def __init__(self, fileName, level=DEBUG):
        dictLevel = {
            CRITICAL: logging.CRITICAL,
            ERROR: logging.ER...
[ "os.path.abspath", "os.makedirs", "os.path.dirname", "logging.StreamHandler", "os.path.exists", "logging.Formatter", "logging.handlers.RotatingFileHandler", "logging.getLogger" ]
[((588, 613), 'os.path.abspath', 'os.path.abspath', (['fileName'], {}), '(fileName)\n', (603, 613), False, 'import os\n'), ((628, 652), 'os.path.dirname', 'os.path.dirname', (['abspath'], {}), '(abspath)\n', (643, 652), False, 'import os\n'), ((740, 762), 'logging.getLogger', 'logging.getLogger', (['dir'], {}), '(dir)\...
import matplotlib.pyplot as plt
import numpy as np

nus_lpf,mu_lpf=np.load("clpf.npz",allow_pickle=True)["arr_0"]
nus_modit,mu_modit=np.load("cmodit4500.npz",allow_pickle=True)["arr_0"]

fig=plt.figure(figsize=(8,4))
plt.plot(nus_modit,mu_modit,label="MODIT",color="C1")
plt.plot(nus_lpf,mu_lpf,label="DIRECT",ls="dashe...
[ "numpy.load", "matplotlib.pyplot.show", "matplotlib.pyplot.plot", "matplotlib.pyplot.legend", "matplotlib.pyplot.figure", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.savefig" ]
[((192, 218), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 4)'}), '(figsize=(8, 4))\n', (202, 218), True, 'import matplotlib.pyplot as plt\n'), ((218, 274), 'matplotlib.pyplot.plot', 'plt.plot', (['nus_modit', 'mu_modit'], {'label': '"""MODIT"""', 'color': '"""C1"""'}), "(nus_modit, mu_modit, label='...
import icon_get
import unittest
import mainutils
import iconmanager

test_ini = """
[Rainmeter]
Author=<EMAIL>.<EMAIL>.<EMAIL>
Name=Mid Dock
------------------------------------------------------------------------
;Metadata added by RainBrowser
;http://rainmeter.net/RainCMS/?q=Rainmeter101_AnatomyOfASkin

[Metadata]...
[ "iconmanager.IconManager", "mainutils.sort_by_ini" ]
[((2355, 2401), 'mainutils.sort_by_ini', 'mainutils.sort_by_ini', (['icons'], {'ini_str': 'test_ini'}), '(icons, ini_str=test_ini)\n', (2376, 2401), False, 'import mainutils\n'), ((2120, 2194), 'iconmanager.IconManager', 'iconmanager.IconManager', ([], {'name': 'icon_name', 'image_save_path': '"""."""', 'app_path': '""...
from transformers import pipeline
import wikipedia
import warnings
import streamlit as st

warnings.filterwarnings("ignore")


def get_context_from_wiki(query: str) -> str:
    "Given a query, return the summary about the query from wikipedia"
    results = wikipedia.search(query)
    # There could be more than 1 due to Dis...
[ "wikipedia.search", "streamlit.text_input", "transformers.pipeline", "warnings.filterwarnings", "streamlit.title", "streamlit.write", "streamlit.text_area", "streamlit.sidebar.selectbox", "wikipedia.summary", "streamlit.empty" ]
[((92, 125), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (115, 125), False, 'import warnings\n'), ((255, 278), 'wikipedia.search', 'wikipedia.search', (['query'], {}), '(query)\n', (271, 278), False, 'import wikipedia\n'), ((675, 705), 'transformers.pipeline', 'pipeline...
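A minimal sketch of how the APIs listed above plausibly combine into a Wikipedia-backed question answerer; the default model, the question text, and the auto_suggest flag are illustrative assumptions, not taken from the truncated record.

from transformers import pipeline
import wikipedia

# Assumption: the default extractive QA model; the original app may pin one.
qa = pipeline("question-answering")

question = "Who developed the theory of general relativity?"
# First search hit stands in for whatever disambiguation handling the record does.
context = wikipedia.summary(wikipedia.search(question)[0], auto_suggest=False)

result = qa(question=question, context=context)
print(result["answer"], result["score"])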
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or a...
[ "_features.ImageFeatureColumn", "_features.TextFeatureColumn", "_features.CategoricalFeatureColumn", "_features.KeyFeatureColumn", "_features.NumericFeatureColumn", "_features.TargetFeatureColumn" ]
[((1671, 1693), '_features.KeyFeatureColumn', 'KeyFeatureColumn', (['name'], {}), '(name)\n', (1687, 1693), False, 'from _features import KeyFeatureColumn\n'), ((1912, 1937), '_features.TargetFeatureColumn', 'TargetFeatureColumn', (['name'], {}), '(name)\n', (1931, 1937), False, 'from _features import TargetFeatureColu...
from aedes_server.core.clusters import compute_clusters

from django.core.management import BaseCommand


class Command(BaseCommand):
    help = 'Calculate clusters for AedeSpot app.'

    def handle(self, *args, **options):
        '''
        Computing clusters.
        '''
        compute_clusters()
[ "aedes_server.core.clusters.compute_clusters" ]
[((284, 302), 'aedes_server.core.clusters.compute_clusters', 'compute_clusters', ([], {}), '()\n', (300, 302), False, 'from aedes_server.core.clusters import compute_clusters\n')]
import py_cui

from pymusicterm.music import SongFile
from pymusicterm.util.file import File, FileMetadata


class SongInfoBlockLabel:

    _row:int=0
    _column:int=2
    _row_span:int=2
    _column_span:int=3
    _center:bool=False

    window:py_cui.widget_set.WidgetSet

    def __init__(self,window:py_cui.widget_se...
[ "pymusicterm.util.file.File" ]
[((615, 621), 'pymusicterm.util.file.File', 'File', ([], {}), '()\n', (619, 621), False, 'from pymusicterm.util.file import File, FileMetadata\n')]
# Generated by Django 3.2.7 on 2021-09-18 10:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("podcasts", "0042_podcast_hub_exception"),
    ]

    operations = [
        migrations.AlterField(
            model_name="podcast",
            name="hub_to...
[ "django.db.models.UUIDField" ]
[((344, 412), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'blank': '(True)', 'editable': '(False)', 'null': '(True)', 'unique': '(True)'}), '(blank=True, editable=False, null=True, unique=True)\n', (360, 412), False, 'from django.db import migrations, models\n')]
""" Admin access page settings """ from django.contrib import admin from blog.models import get_model_factory from .posts_admin import PostAdmin # Register your models here. admin.site.register(get_model_factory('PostsFactory').create(), PostAdmin)
[ "blog.models.get_model_factory" ]
[((195, 228), 'blog.models.get_model_factory', 'get_model_factory', (['"""PostsFactory"""'], {}), "('PostsFactory')\n", (212, 228), False, 'from blog.models import get_model_factory\n')]
import setuptools

with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

# # The following code can be used if you have private dependencies. Basically it requires the user to set an
# # environment variable `GH_PAT` to a Github Personal Access Token (with access to the private reposi...
[ "setuptools.find_packages" ]
[((1812, 1838), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (1836, 1838), False, 'import setuptools\n')]
import numpy as np
from ligo.skymap import kde
import matplotlib
matplotlib.use('Agg')
from matplotlib.colors import to_rgb
from matplotlib import pyplot as plt
from mpl_toolkits.basemap import Basemap

#matplotlib.rc('text', usetex=True)


def greedy(density):
    i,j = np.shape(density)
    idx = np.argsort(density.fla...
[ "numpy.zeros", "matplotlib.colors.to_rgb", "numpy.shape", "numpy.append", "numpy.indices", "matplotlib.use", "numpy.sin", "numpy.mean", "numpy.cos", "numpy.arange", "mpl_toolkits.basemap.Basemap" ]
[((65, 86), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (79, 86), False, 'import matplotlib\n'), ((270, 287), 'numpy.shape', 'np.shape', (['density'], {}), '(density)\n', (278, 287), True, 'import numpy as np\n'), ((396, 413), 'numpy.append', 'np.append', (['c', '(1.0)'], {}), '(c, 1.0)\n', (4...
# Logic modules shared across DL training jobs
import tensorflow as tf
from tensorflow.python.data.ops.readers import TFRecordDatasetV2
from tensorflow.python.keras.callbacks import History
from google.cloud import storage
from typing import Callable, List
import os


def get_tfrecord_dataset(
    dataset_path: str,
    preprocessing: Call...
[ "tensorflow.distribute.cluster_resolver.TPUClusterResolver", "tensorflow.data.TFRecordDataset", "tensorflow.distribute.TPUStrategy", "tensorflow.keras.callbacks.TerminateOnNaN", "tensorflow.data.Options", "google.cloud.storage.Client", "tensorflow.config.experimental_connect_to_cluster", "tensorflow.k...
[((942, 993), 'tensorflow.io.gfile.glob', 'tf.io.gfile.glob', (['f"""{dataset_path}/{split}-*.tfrec"""'], {}), "(f'{dataset_path}/{split}-*.tfrec')\n", (958, 993), True, 'import tensorflow as tf\n'), ((1022, 1094), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['file_names'], {'num_parallel_reads': 'tf...
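The extracted calls above outline the usual TF 2.x TPU bootstrap; a hedged sketch of that sequence follows. The empty tpu="" argument (local TPU VM / Colab-style resolution) and the toy Keras model are assumptions, not part of the record.

import tensorflow as tf

# Resolve and initialize the TPU system, then build a TPUStrategy over it.
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu="")
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)

# Model variables must be created inside the strategy scope.
with strategy.scope():
    model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
    model.compile(optimizer="adam", loss="mse")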
import numpy as np


def align_depth_to_rgb(
        depth,
        bgr_cameramodel,
        depth_cameramodel,
        depth_to_rgb_transform):
    """Align depth image to color image.

    Parameters
    ----------
    depth : numpy.ndarray
        depth image in meter order.
    bgr_cameramodel : cameramodels.Pinhol...
[ "numpy.zeros", "numpy.isnan", "numpy.where", "numpy.array", "numpy.matmul" ]
[((780, 855), 'numpy.zeros', 'np.zeros', (['(bgr_cameramodel.height, bgr_cameramodel.width)'], {'dtype': 'np.float32'}), '((bgr_cameramodel.height, bgr_cameramodel.width), dtype=np.float32)\n', (788, 855), True, 'import numpy as np\n'), ((893, 908), 'numpy.isnan', 'np.isnan', (['depth'], {}), '(depth)\n', (901, 908), T...
#!/usr/bin/python

import logging


class NullHandler(logging.Handler):
    def emit(self, record):
        pass


log = logging.getLogger('AppDataPublisher')
log.setLevel(logging.ERROR)
log.addHandler(NullHandler())

import copy
import threading

from DustLinkData import DustLinkData
from EventBus import EventBusClient

...
[ "DustLinkData.DustLinkData.DustLinkData", "logging.getLogger" ]
[((117, 154), 'logging.getLogger', 'logging.getLogger', (['"""AppDataPublisher"""'], {}), "('AppDataPublisher')\n", (134, 154), False, 'import logging\n'), ((1252, 1279), 'DustLinkData.DustLinkData.DustLinkData', 'DustLinkData.DustLinkData', ([], {}), '()\n', (1277, 1279), False, 'from DustLinkData import DustLinkData\...
import unittest
from Sastrawi.Morphology.Disambiguator.DisambiguatorPrefixRule1 import DisambiguatorPrefixRule1a, DisambiguatorPrefixRule1b


class Test_DisambiguatorPrefixRule1Test(unittest.TestCase):
    def setUp(self):
        self.subject1a = DisambiguatorPrefixRule1a()
        self.subject1b = DisambiguatorPrefixR...
[ "unittest.main", "Sastrawi.Morphology.Disambiguator.DisambiguatorPrefixRule1.DisambiguatorPrefixRule1a", "Sastrawi.Morphology.Disambiguator.DisambiguatorPrefixRule1.DisambiguatorPrefixRule1b" ]
[((786, 801), 'unittest.main', 'unittest.main', ([], {}), '()\n', (799, 801), False, 'import unittest\n'), ((247, 274), 'Sastrawi.Morphology.Disambiguator.DisambiguatorPrefixRule1.DisambiguatorPrefixRule1a', 'DisambiguatorPrefixRule1a', ([], {}), '()\n', (272, 274), False, 'from Sastrawi.Morphology.Disambiguator.Disamb...
import itertools

# This snippet has been turned into a full repo:
# github.com/patrickleweryharris/anagram_solver


def anagram_solver(lst):
    """
    Return all possible combinations of letters in lst
    @type lst: [str]
    @rtype: None
    """
    for i in range(0, len(lst) + 1):
        for subset in itertools....
[ "itertools.permutations" ]
[((310, 340), 'itertools.permutations', 'itertools.permutations', (['lst', 'i'], {}), '(lst, i)\n', (332, 340), False, 'import itertools\n')]
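The loop body is cut off above; under the assumption that it simply joins and prints each permutation (the join step is an assumption, not recovered from the record), a self-contained version would look like this.

import itertools

def anagram_solver(lst):
    """Print every string formed from 0..len(lst) of the given letters."""
    for i in range(0, len(lst) + 1):
        for subset in itertools.permutations(lst, i):
            # Assumed body: collapse the tuple of letters into one candidate word.
            print(''.join(subset))

anagram_solver(['c', 'a', 't'])  # prints '', 'c', 'a', 't', 'ca', ..., 'cat', 'cta', 'act', ...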
from BranchFilters.BranchFilterer import BranchFilterer
from Interoperability.ShellCommandExecuter import ShellCommandExecuter
from RepositoryWalkers.BranchToCommitWalker import BranchToCommitWalker
from Logger import Logger


class HeadToMasterBranchFilterer(BranchFilterer):
    def __init__(self, repository):
        ...
[ "RepositoryWalkers.BranchToCommitWalker.BranchToCommitWalker", "Logger.Logger", "Interoperability.ShellCommandExecuter.ShellCommandExecuter" ]
[((334, 346), 'Logger.Logger', 'Logger', (['self'], {}), '(self)\n', (340, 346), False, 'from Logger import Logger\n'), ((1139, 1200), 'RepositoryWalkers.BranchToCommitWalker.BranchToCommitWalker', 'BranchToCommitWalker', (['self.repository', 'head_master_merge_base'], {}), '(self.repository, head_master_merge_base)\n'...
'''
Colormapping

The final glyph customization we'll practice is using the CategoricalColorMapper
to color each glyph by a categorical property.

Here, you're going to use the automobile dataset to plot miles-per-gallon vs
weight and color each circle glyph by the region where the automobile was
manufactured.

The ori...
[ "bokeh.models.CategoricalColorMapper" ]
[((1368, 1462), 'bokeh.models.CategoricalColorMapper', 'CategoricalColorMapper', ([], {'factors': "['Europe', 'Asia', 'US']", 'palette': "['red', 'green', 'blue']"}), "(factors=['Europe', 'Asia', 'US'], palette=['red',\n 'green', 'blue'])\n", (1390, 1462), False, 'from bokeh.models import CategoricalColorMapper\n')]
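The docstring above fully describes the exercise and the extracted call shows the exact mapper; a runnable sketch only needs stand-in data for the automobile dataset (the three rows below are invented) and a standard bokeh figure around it.

from bokeh.models import CategoricalColorMapper, ColumnDataSource
from bokeh.plotting import figure, show

# Hypothetical stand-in for the automobile dataset.
source = ColumnDataSource(data={
    'weight': [2100, 2650, 3300],
    'mpg': [32, 27, 19],
    'origin': ['Europe', 'Asia', 'US'],
})

# Identical to the extracted call: one color per manufacturing region.
color_mapper = CategoricalColorMapper(factors=['Europe', 'Asia', 'US'],
                                       palette=['red', 'green', 'blue'])

p = figure(x_axis_label='weight', y_axis_label='mpg')
p.circle('weight', 'mpg', source=source,
         color={'field': 'origin', 'transform': color_mapper},
         legend_field='origin')
show(p)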
import asyncio
import aiohttp
import requests
import json

from .op import EsiOp
from .auth import EsiAuth
from .cache import EsiCache, DictCache
from .esisession import EsiSession

import logging
logger = logging.getLogger("EsiPysi")


class EsiPysi(object):
    """
    The EsiPysi class creates "EsiOp" operations based...
[ "requests.get", "logging.getLogger" ]
[((205, 233), 'logging.getLogger', 'logging.getLogger', (['"""EsiPysi"""'], {}), "('EsiPysi')\n", (222, 233), False, 'import logging\n'), ((1557, 1582), 'requests.get', 'requests.get', (['swagger_url'], {}), '(swagger_url)\n', (1569, 1582), False, 'import requests\n')]
import numpy as np
import torch


def compute_lid(x, x_train, k, exclude_self=False):
    """
    Calculate LID using the estimation from [1]

    [1] Ma et al., "Characterizing Adversarial Subspaces Using Local
        Intrinsic Dimensionality," ICLR 2018.
    """
    with torch.no_grad():
        x = x.view((x.size(...
[ "torch.no_grad", "numpy.zeros", "numpy.ceil", "torch.log" ]
[((2835, 2858), 'numpy.zeros', 'np.zeros', (['(batch_size,)'], {}), '((batch_size,))\n', (2843, 2858), True, 'import numpy as np\n'), ((276, 291), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (289, 291), False, 'import torch\n'), ((1899, 1930), 'numpy.ceil', 'np.ceil', (['(num_total / batch_size)'], {}), '(num_t...
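The docstring points at the maximum-likelihood LID estimator of Ma et al. (2018), LID(x) = -( (1/k) * sum_i log(r_i / r_k) )^{-1} over the k nearest-neighbor distances r_i. Since the batched torch code is truncated, here is a single-point numpy sketch of just that formula; the uniform-square check at the end is an invented sanity test, not from the record.

import numpy as np

def lid_mle(x, x_train, k=20):
    """MLE LID estimate for one query point x against reference set x_train."""
    dists = np.linalg.norm(x_train - x, axis=1)
    # k smallest distances, ascending; assumes x itself is not in x_train
    # (the torch version above exposes exclude_self for that case).
    r = np.sort(dists)[:k]
    # LID = -1 / mean(log(r_i / r_k)); the i = k term contributes log(1) = 0.
    return -1.0 / np.mean(np.log(r / r[-1]))

# Sanity check: points uniform in a 2-D square should give an estimate near 2.
rng = np.random.default_rng(0)
pts = rng.uniform(size=(5000, 2))
print(lid_mle(pts[0], pts[1:], k=50))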