edited_code stringlengths 17 978k | original_code stringlengths 17 978k |
|---|---|
import datetime
from typing import Union
from asyncpraw import models
from discord import Embed, Color, Member, User, Status, Message, TextChannel
from bot import constants
from bot.utils.misc import (
get_badges, get_join_pos, has_verified_role, format_activity, get_device_status, format_date
)
def simple_emb... | import datetime
from typing import Union
from asyncpraw import models
from discord import Embed, Color, Member, User, Status, Message, TextChannel
from bot import constants
from bot.utils.misc import (
get_badges, get_join_pos, has_verified_role, format_activity, get_device_status, format_date
)
def simple_emb... |
import burin.types
class GCodeGen:
def __init__(self):
self.speeds = {'v' : 4000, 'z' : 100, 'travel' : 10000, 'plot' : 2000}
self.heights = {'z_clearance' : 15, 'v_clearance' : 1.0, # Z and V axis heights for moving around during setup
'z_travel' : 10, 'v_trave... |
import burin.types
class GCodeGen:
def __init__(self):
self.speeds = {'v' : 4000, 'z' : 100, 'travel' : 10000, 'plot' : 2000}
self.heights = {'z_clearance' : 15, 'v_clearance' : 1.0, # Z and V axis heights for moving around during setup
'z_travel' : 10, 'v_trave... |
import os
import copy
from flask import (
Blueprint,
render_template,
request,
flash,
abort,
redirect,
url_for,
current_app,
)
from lightbluetent.models import db, User, Society
from lightbluetent.home import auth_decorator
from lightbluetent.utils import gen_unique_strin... | import os
import copy
from flask import (
Blueprint,
render_template,
request,
flash,
abort,
redirect,
url_for,
current_app,
)
from lightbluetent.models import db, User, Society
from lightbluetent.home import auth_decorator
from lightbluetent.utils import gen_unique_strin... |
'''
:copyright:
Copyright (C) 2021 Laura Keyson, IRIS Data Management Center
:license:
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF li... | '''
:copyright:
Copyright (C) 2021 Laura Keyson, IRIS Data Management Center
:license:
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF li... |
import os
import platform
import sys
from datetime import datetime
from typing import TYPE_CHECKING, Optional, Tuple
import boto3
import click
import requests
from botocore.config import Config
from getmac import get_mac_address
from git.config import GitConfigParser
from google.api_core.exceptions import NotFound
fro... | import os
import platform
import sys
from datetime import datetime
from typing import TYPE_CHECKING, Optional, Tuple
import boto3
import click
import requests
from botocore.config import Config
from getmac import get_mac_address
from git.config import GitConfigParser
from google.api_core.exceptions import NotFound
fro... |
import asyncio
import json
import logging
from collections import OrderedDict
from datetime import timedelta
from functools import partial
from typing import Optional
import async_timeout
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from aiohttp import ClientSession
from homeassistant.... | import asyncio
import json
import logging
from collections import OrderedDict
from datetime import timedelta
from functools import partial
from typing import Optional
import async_timeout
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from aiohttp import ClientSession
from homeassistant.... |
import ast
import inspect
import os
import platform
import re
import sys
import traceback
import warnings
from functools import update_wrapper
from operator import attrgetter
from threading import Lock
from threading import Thread
import click
from werkzeug.utils import import_string
from .globals import current_app
... | import ast
import inspect
import os
import platform
import re
import sys
import traceback
import warnings
from functools import update_wrapper
from operator import attrgetter
from threading import Lock
from threading import Thread
import click
from werkzeug.utils import import_string
from .globals import current_app
... |
from pathlib import Path
from typing import Dict, Any
from enum import Enum
import requests
import pandas as pd
import os
from skt.vault_utils import get_secrets
MLS_MODEL_DIR = os.path.join(Path.home(), "mls_temp_dir")
MODEL_BINARY_NAME = "model.joblib"
MODEL_TAR_NAME = "model.tar.gz"
MODEL_META_NAME = "model.json... | from pathlib import Path
from typing import Dict, Any
from enum import Enum
import requests
import pandas as pd
import os
from skt.vault_utils import get_secrets
MLS_MODEL_DIR = os.path.join(Path.home(), "mls_temp_dir")
MODEL_BINARY_NAME = "model.joblib"
MODEL_TAR_NAME = "model.tar.gz"
MODEL_META_NAME = "model.json... |
import discord
import requests
from discord.ext import commands
from inspect import cleandoc
from datetime import datetime
from pytz import timezone
from urllib import parse
from utils import check_arg, Literal
class General(commands.Cog):
"""
General commands
"""
def __init__(self, bot):
s... | import discord
import requests
from discord.ext import commands
from inspect import cleandoc
from datetime import datetime
from pytz import timezone
from urllib import parse
from utils import check_arg, Literal
class General(commands.Cog):
"""
General commands
"""
def __init__(self, bot):
s... |
import streamlit as st
from src.utils.chart_funcs import *
from src.utils.helper_funcs import *
files = [
{
"title": "Autoeficacia",
"file": "post_avanzado_autoeficacia.xlsx"
},
{
"title": "Conocimientos",
"file": "post_avanzado_conocimientos.xlsx",
"respuestas": ... | import streamlit as st
from src.utils.chart_funcs import *
from src.utils.helper_funcs import *
files = [
{
"title": "Autoeficacia",
"file": "post_avanzado_autoeficacia.xlsx"
},
{
"title": "Conocimientos",
"file": "post_avanzado_conocimientos.xlsx",
"respuestas": ... |
import asyncio
import time
from datetime import datetime
import aiohttp
from collections import OrderedDict
import multiprocessing as mp
from .reporter import Reporter
class Attacker(mp.Process):
def __init__(self, config, start_attack_event, kill_event):
super().__init__()
self._threads = config... | import asyncio
import time
from datetime import datetime
import aiohttp
from collections import OrderedDict
import multiprocessing as mp
from .reporter import Reporter
class Attacker(mp.Process):
def __init__(self, config, start_attack_event, kill_event):
super().__init__()
self._threads = config... |
import shutil
import time
from functools import partial
from pathlib import Path
import librosa
import librosa.display
import matplotlib.pyplot as plt
import numpy as np
import toml
import torch
from joblib import Parallel, delayed
from rich import print
from rich.console import Console
from torch.cuda.amp import Grad... | import shutil
import time
from functools import partial
from pathlib import Path
import librosa
import librosa.display
import matplotlib.pyplot as plt
import numpy as np
import toml
import torch
from joblib import Parallel, delayed
from rich import print
from rich.console import Console
from torch.cuda.amp import Grad... |
# pylint: disable=g-bad-file-header
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/... | # pylint: disable=g-bad-file-header
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/... |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import _ast
import abc
import ast
import logging
from typing import Callable, Iterable, List, Optional, Set, Union
from ...api... | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import _ast
import abc
import ast
import logging
from typing import Callable, Iterable, List, Optional, Set, Union
from ...api... |
import inspect
import logging
import os
import importlib
import signal
import socket
import sys
import time
import atexit
import gevent
import locust
from . import log
from .argument_parser import parse_locustfile_option, parse_options
from .env import Environment
from .log import setup_logging, greenlet_exception_l... | import inspect
import logging
import os
import importlib
import signal
import socket
import sys
import time
import atexit
import gevent
import locust
from . import log
from .argument_parser import parse_locustfile_option, parse_options
from .env import Environment
from .log import setup_logging, greenlet_exception_l... |
import random
import string
import stripe
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import redirect
... | import random
import string
import stripe
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import redirect
... |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
# See the LICENSE file in the project root for more information.
from __future__ import annotations # Allow Field[Any]
from dataclasses import fields, Field, is_dataclass
from pathlib... | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
# See the LICENSE file in the project root for more information.
from __future__ import annotations # Allow Field[Any]
from dataclasses import fields, Field, is_dataclass
from pathlib... |
"""Email handler."""
import getpass
import logging
import smtplib
import subprocess
from email import policy
from email.message import EmailMessage
from pathlib import Path
from typing import Optional, Union
try:
import keyring
except ImportError:
keyring = None # type: ignore[assignment]
logger = logging.g... | """Email handler."""
import getpass
import logging
import smtplib
import subprocess
from email import policy
from email.message import EmailMessage
from pathlib import Path
from typing import Optional, Union
try:
import keyring
except ImportError:
keyring = None # type: ignore[assignment]
logger = logging.g... |
import datetime
import logging
import discord
from discord.ext import commands
class CommandErrorHandler(commands.Cog):
def __init__(self, bot):
self.bot = bot
@staticmethod
async def bot_check(ctx: commands.Context, **kwargs):
"""
This runs at the start of every command
... | import datetime
import logging
import discord
from discord.ext import commands
class CommandErrorHandler(commands.Cog):
def __init__(self, bot):
self.bot = bot
@staticmethod
async def bot_check(ctx: commands.Context, **kwargs):
"""
This runs at the start of every command
... |
"""
model_pwcnet.py
PWC-Net model class.
Written by Phil Ferriere
Licensed under the MIT License (see LICENSE for details)
"""
from __future__ import absolute_import, division, print_function
import time
import datetime
import warnings
import numpy as np
import pandas as pd
import tensorflow as tf
from tqdm import ... | """
model_pwcnet.py
PWC-Net model class.
Written by Phil Ferriere
Licensed under the MIT License (see LICENSE for details)
"""
from __future__ import absolute_import, division, print_function
import time
import datetime
import warnings
import numpy as np
import pandas as pd
import tensorflow as tf
from tqdm import ... |
############################################################################
## Tool name: Transit Network Analysis Tools
## Created by: Melinda Morang, Esri
## Last updated: 13 September 2021
############################################################################
"""Count the number of destinations reachable... | ############################################################################
## Tool name: Transit Network Analysis Tools
## Created by: Melinda Morang, Esri
## Last updated: 13 September 2021
############################################################################
"""Count the number of destinations reachable... |
import vlc
import datetime
class Player:
def __init__(self,vlc_log=False):
params = "--quiet"
if vlc_log:
params = f'--verbose=2 --file-logging --logfile=vlc-log_{datetime.datetime.now().strftime('%m%d%Y')}.txt'
self.instance = vlc.Instance(params) # --verbose 2 --quiet
... | import vlc
import datetime
class Player:
def __init__(self,vlc_log=False):
params = "--quiet"
if vlc_log:
params = f'--verbose=2 --file-logging --logfile=vlc-log_{datetime.datetime.now().strftime("%m%d%Y")}.txt'
self.instance = vlc.Instance(params) # --verbose 2 --quiet
... |
from recon.core.module import BaseModule
import codecs
import os
import re
import time
import webbrowser
import math
#import pdb
class Module(BaseModule):
meta = {
'name': 'Common Pushpin Usernames',
'author': '4ngryR4v3n - forked from the PushPin Report Generator module created by Tim Tomes (@la... | from recon.core.module import BaseModule
import codecs
import os
import re
import time
import webbrowser
import math
#import pdb
class Module(BaseModule):
meta = {
'name': 'Common Pushpin Usernames',
'author': '4ngryR4v3n - forked from the PushPin Report Generator module created by Tim Tomes (@la... |
"""Revocation registry admin routes."""
import logging
from asyncio import shield
from aiohttp import web
from aiohttp_apispec import (
docs,
match_info_schema,
querystring_schema,
request_schema,
response_schema,
)
from marshmallow import fields, Schema, validate
from ..messaging.credential_de... | """Revocation registry admin routes."""
import logging
from asyncio import shield
from aiohttp import web
from aiohttp_apispec import (
docs,
match_info_schema,
querystring_schema,
request_schema,
response_schema,
)
from marshmallow import fields, Schema, validate
from ..messaging.credential_de... |
import logging
import os
import re
from subprocess import CalledProcessError
from qhub.provider import terraform
from qhub.utils import timer, check_cloud_credentials
from qhub.provider.dns.cloudflare import update_record
from qhub.state import terraform_state_sync
logger = logging.getLogger(__name__)
def deploy_co... | import logging
import os
import re
from subprocess import CalledProcessError
from qhub.provider import terraform
from qhub.utils import timer, check_cloud_credentials
from qhub.provider.dns.cloudflare import update_record
from qhub.state import terraform_state_sync
logger = logging.getLogger(__name__)
def deploy_co... |
from copyleaks.consts import Consts
import requests
'''
The MIT License(MIT)
Copyright(c) 2016 Copyleaks LTD (https://copyleaks.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without... | from copyleaks.consts import Consts
import requests
'''
The MIT License(MIT)
Copyright(c) 2016 Copyleaks LTD (https://copyleaks.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without... |
# Copyright (c) AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from enum import Enum
from typing import NamedTuple, Optional
from pathos.helpers import mp
from math import sqrt
from stable_baselines3 import PPO
fr... | # Copyright (c) AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from enum import Enum
from typing import NamedTuple, Optional
from pathos.helpers import mp
from math import sqrt
from stable_baselines3 import PPO
fr... |
import argparse
from utils.utils import verify_free_gpu_memory
from utils.codifications import Chromosome, Fitness
from time import sleep, time
import os
parser = argparse.ArgumentParser(description='Train a gen of a CNN.')
parser.add_argument('-gf', '--gen_file', type=str, required=True,
help='t... | import argparse
from utils.utils import verify_free_gpu_memory
from utils.codifications import Chromosome, Fitness
from time import sleep, time
import os
parser = argparse.ArgumentParser(description='Train a gen of a CNN.')
parser.add_argument('-gf', '--gen_file', type=str, required=True,
help='t... |
from typing import Dict, List, Union
import pytest
from zoo.repos import tasks as uut
from zoo.repos.models import Repository
from zoo.repos.zoo_yml import parse
from zoo.services.models import Environment, Service
pytestmark = pytest.mark.django_db
@pytest.fixture()
def generate_repositories(repository_factory):
... | from typing import Dict, List, Union
import pytest
from zoo.repos import tasks as uut
from zoo.repos.models import Repository
from zoo.repos.zoo_yml import parse
from zoo.services.models import Environment, Service
pytestmark = pytest.mark.django_db
@pytest.fixture()
def generate_repositories(repository_factory):
... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import yaml
from datetime import datetime
from tqdm import tqdm
from usal_echo import usr_dir
from usal_echo.d05_measurement.meas_utils import *
from usal_echo.d00_utils.log_utils import setup_logging
from usal_echo.d00_utils.db_utils import dbReadWriteSegment... | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import yaml
from datetime import datetime
from tqdm import tqdm
from usal_echo import usr_dir
from usal_echo.d05_measurement.meas_utils import *
from usal_echo.d00_utils.log_utils import setup_logging
from usal_echo.d00_utils.db_utils import dbReadWriteSegment... |
import os
from telethon import version
from telethon.errors.rpcerrorlist import (
MediaEmptyError,
WebpageCurlFailedError,
WebpageMediaEmptyError,
)
from telethon.events import CallbackQuery
from userbot import *
from userbot import CMD_HELP, CMD_HELP_BOT
from userbot.config import Config
from userbot.help... | import os
from telethon import version
from telethon.errors.rpcerrorlist import (
MediaEmptyError,
WebpageCurlFailedError,
WebpageMediaEmptyError,
)
from telethon.events import CallbackQuery
from userbot import *
from userbot import CMD_HELP, CMD_HELP_BOT
from userbot.config import Config
from userbot.help... |
#!/usr/bin/python3
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# ... | #!/usr/bin/python3
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# ... |
import boto3
from botocore.exceptions import ClientError
import json
import os
import time
from datetime import datetime, timezone
from dateutil import tz
from antiope.aws_account import *
from common import *
import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, os.getenv('LOG_LEVEL', default=... | import boto3
from botocore.exceptions import ClientError
import json
import os
import time
from datetime import datetime, timezone
from dateutil import tz
from antiope.aws_account import *
from common import *
import logging
logger = logging.getLogger()
logger.setLevel(getattr(logging, os.getenv('LOG_LEVEL', default=... |
import json
import glob
import os
import argparse
import sys
import re
class QueryAttackEval:
def __init__(self, args):
self.args = args
# this line is only to protect the object and should never trigger if running from this script
assert(self.args.technique or self.args.procedure or self.args.search)
def g... | import json
import glob
import os
import argparse
import sys
import re
class QueryAttackEval:
def __init__(self, args):
self.args = args
# this line is only to protect the object and should never trigger if running from this script
assert(self.args.technique or self.args.procedure or self.args.search)
def g... |
# my program
from custom_module import my_greeting
my_greeting()
numbers = [19, 3, 15, 7, 11]
print('\n Creating a bar chart from numbers:')
print(f'Index{'Value':>8} Bar')
for index, value in enumerate(numbers):
print(f'{index:>5}{value:>8} {'*' * value}')
| # my program
from custom_module import my_greeting
my_greeting()
numbers = [19, 3, 15, 7, 11]
print('\n Creating a bar chart from numbers:')
print(f'Index{"Value":>8} Bar')
for index, value in enumerate(numbers):
print(f'{index:>5}{value:>8} {"*" * value}')
|
#!/usr/bin/env python3
#
import argparse
import asyncio
import logging
import math
from motor.frameworks.asyncio import is_event_loop
import pymongo
import sys
import time
import pickle
from common import Cluster, yes_no
from copy import deepcopy
from pymongo import errors as pymongo_errors
from tqdm import tqdm
# E... | #!/usr/bin/env python3
#
import argparse
import asyncio
import logging
import math
from motor.frameworks.asyncio import is_event_loop
import pymongo
import sys
import time
import pickle
from common import Cluster, yes_no
from copy import deepcopy
from pymongo import errors as pymongo_errors
from tqdm import tqdm
# E... |
"""Get node elevations and calculate edge grades."""
import multiprocessing as mp
import time
from hashlib import sha1
from pathlib import Path
import networkx as nx
import numpy as np
import pandas as pd
import requests
from . import downloader
from . import utils
from . import utils_graph
# rasterio and gdal are ... | """Get node elevations and calculate edge grades."""
import multiprocessing as mp
import time
from hashlib import sha1
from pathlib import Path
import networkx as nx
import numpy as np
import pandas as pd
import requests
from . import downloader
from . import utils
from . import utils_graph
# rasterio and gdal are ... |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import... | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import... |
"""iffuci.tk pastebin site
Code written by @loxxi {iffuci}
Syntax: .iffuci"""
from datetime import datetime
import os
import requests
from uniborg.util import admin_cmd
def progress(current, total):
logger.info("Downloaded {} of {}\nCompleted {}".format(current, total, (current / total) * 100))
@borg.on(admin_c... | """iffuci.tk pastebin site
Code written by @loxxi {iffuci}
Syntax: .iffuci"""
from datetime import datetime
import os
import requests
from uniborg.util import admin_cmd
def progress(current, total):
logger.info("Downloaded {} of {}\nCompleted {}".format(current, total, (current / total) * 100))
@borg.on(admin_c... |
"""
Lightning supports model training on a cluster managed by SLURM in the following cases:
1. Training on a single cpu or single GPU.
2. Train on multiple GPUs on the same node using DataParallel or DistributedDataParallel
3. Training across multiple GPUs on multiple different nodes via DistributedDataParallel.
.. n... | """
Lightning supports model training on a cluster managed by SLURM in the following cases:
1. Training on a single cpu or single GPU.
2. Train on multiple GPUs on the same node using DataParallel or DistributedDataParallel
3. Training across multiple GPUs on multiple different nodes via DistributedDataParallel.
.. n... |
import re
import os
import json
import sys
import traceback
from hashlib import sha1
from queue import Queue, Empty
from threading import Thread
from itertools import zip_longest
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from collections import defaultdict
from google.ads.googleads.client import... | import re
import os
import json
import sys
import traceback
from hashlib import sha1
from queue import Queue, Empty
from threading import Thread
from itertools import zip_longest
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from collections import defaultdict
from google.ads.googleads.client import... |
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by ... | #
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by ... |
from pathlib import Path
from warnings import warn
import pandas as pd
import numpy as np
from pynwb import NWBFile, TimeSeries
from hdmf.backends.hdf5.h5_utils import H5DataIO
from ndx_events import Events
from pynwb.behavior import Position, SpatialSeries
from nwb_conversion_tools.basedatainterface import BaseDataI... | from pathlib import Path
from warnings import warn
import pandas as pd
import numpy as np
from pynwb import NWBFile, TimeSeries
from hdmf.backends.hdf5.h5_utils import H5DataIO
from ndx_events import Events
from pynwb.behavior import Position, SpatialSeries
from nwb_conversion_tools.basedatainterface import BaseDataI... |
import re
from io import BytesIO
from pathlib import Path
from base64 import b64encode
from typing import Type, Union, Tuple, Mapping, Iterable, Optional
from nonebot.typing import overrides
from nonebot.adapters import Message as BaseMessage, MessageSegment as BaseMessageSegment
from .utils import log, escape, unesc... | import re
from io import BytesIO
from pathlib import Path
from base64 import b64encode
from typing import Type, Union, Tuple, Mapping, Iterable, Optional
from nonebot.typing import overrides
from nonebot.adapters import Message as BaseMessage, MessageSegment as BaseMessageSegment
from .utils import log, escape, unesc... |
import pymysql
import os
from datetime import datetime
from lib.webexception import WebException
from http import HTTPStatus
from lib.services.dynamodb_service import get_session_username
from lib.services.rds_service import getResult, insertComment
def get_talent_detail(request, response):
data = request.data
... | import pymysql
import os
from datetime import datetime
from lib.webexception import WebException
from http import HTTPStatus
from lib.services.dynamodb_service import get_session_username
from lib.services.rds_service import getResult, insertComment
def get_talent_detail(request, response):
data = request.data
... |
import os
import sys
import sqlite3
import logging
from PyQt5 import QtCore
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPalette
from multiprocessing import Process, Queue
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utility.setting import *
from ... | import os
import sys
import sqlite3
import logging
from PyQt5 import QtCore
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPalette
from multiprocessing import Process, Queue
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utility.setting import *
from ... |
#!/usr/bin/env python3
#
# real-time data processor
import os
import ast
from textwrap import dedent
import socket
import threading
import multiprocessing as mp
from queue import Empty
from time import sleep
from astropy.time import Time, TimeDelta
import numpy as np
import yaml
import h5py
from darc import DARCBase
... | #!/usr/bin/env python3
#
# real-time data processor
import os
import ast
from textwrap import dedent
import socket
import threading
import multiprocessing as mp
from queue import Empty
from time import sleep
from astropy.time import Time, TimeDelta
import numpy as np
import yaml
import h5py
from darc import DARCBase
... |
import argparse
from pathlib import Path
import imageio
import yaml
import warnings
from hylfm.datasets.base import TensorInfo, get_dataset_from_info, N5CachedDatasetFromInfoSubset
from hylfm.datasets.heart_utils import get_transformations, idx2z_slice_241
def get_tensor_info(tag: str, name: str, meta: dict):
m... | import argparse
from pathlib import Path
import imageio
import yaml
import warnings
from hylfm.datasets.base import TensorInfo, get_dataset_from_info, N5CachedDatasetFromInfoSubset
from hylfm.datasets.heart_utils import get_transformations, idx2z_slice_241
def get_tensor_info(tag: str, name: str, meta: dict):
m... |
from aggregation_builder import AggregationQueryBuilder
from aggregation_builder.operators import *
import unittest
import datetime
class OtherOperatorsTests(unittest.TestCase):
def test_text(self):
query = [
{'$match': {'$text': {'$search': "cake"}}},
{'$group': {'_id': {'$meta': ... | from aggregation_builder import AggregationQueryBuilder
from aggregation_builder.operators import *
import unittest
import datetime
class OtherOperatorsTests(unittest.TestCase):
def test_text(self):
query = [
{'$match': {'$text': {'$search': "cake"}}},
{'$group': {'_id': {'$meta': ... |
import os
import subprocess
import sys
import time
import itertools
from difflib import Differ
from pathlib import Path
from clint.textui import colored
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
class Runner:
def __init__(self, filename):
self.sr... | import os
import subprocess
import sys
import time
import itertools
from difflib import Differ
from pathlib import Path
from clint.textui import colored
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
class Runner:
def __init__(self, filename):
self.sr... |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_custom_receipe.ipynb (unless otherwise specified).
__all__ = ['fastai_recipe', 'create_folders', 'load_fastai_model', 'save_base64_image', 'score_stream',
'predict_folder', 'predict_all_subfolders', 'fastai_jsonl_recipe']
# Cell
import numpy as np
import c... | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_custom_receipe.ipynb (unless otherwise specified).
__all__ = ['fastai_recipe', 'create_folders', 'load_fastai_model', 'save_base64_image', 'score_stream',
'predict_folder', 'predict_all_subfolders', 'fastai_jsonl_recipe']
# Cell
import numpy as np
import c... |
#!/usr/bin/env python3.7
import argparse
import string
import sys
import textwrap
import typing as t
from dataclasses import dataclass
from pathlib import Path
from urllib.parse import urlparse
import json5
import jsonref
PlainJSONType = t.Union[dict, list, t.AnyStr, float, bool]
JSONType = t.Union[PlainJSONType, t.... | #!/usr/bin/env python3.7
import argparse
import string
import sys
import textwrap
import typing as t
from dataclasses import dataclass
from pathlib import Path
from urllib.parse import urlparse
import json5
import jsonref
PlainJSONType = t.Union[dict, list, t.AnyStr, float, bool]
JSONType = t.Union[PlainJSONType, t.... |
import argparse
import os
from shutil import copyfile, rmtree
import click
import torch
# import sys
# sys.path.insert(1, '../confidnet')
from confidnet.loaders import get_loader
from confidnet.learners import get_learner
from confidnet.utils.logger import get_logger
from confidnet.utils.misc import load_yaml
from c... | import argparse
import os
from shutil import copyfile, rmtree
import click
import torch
# import sys
# sys.path.insert(1, '../confidnet')
from confidnet.loaders import get_loader
from confidnet.learners import get_learner
from confidnet.utils.logger import get_logger
from confidnet.utils.misc import load_yaml
from c... |
import inspect
import json
import logging
from json import JSONDecodeError
from pathlib import Path
from typing import Optional, TextIO, Union, ChainMap, List, Tuple, Dict, TYPE_CHECKING
import asyncio
import itertools
import sys
import jsonschema
from lightbus.exceptions import (
InvalidApiForSchemaCreation,
... | import inspect
import json
import logging
from json import JSONDecodeError
from pathlib import Path
from typing import Optional, TextIO, Union, ChainMap, List, Tuple, Dict, TYPE_CHECKING
import asyncio
import itertools
import sys
import jsonschema
from lightbus.exceptions import (
InvalidApiForSchemaCreation,
... |
import bisect
import io
import json
import hashlib
import logging
import os
import random
import struct
import sys
import subprocess
from BaseClasses import CollectionState, ShopType, Region, Location
from Dungeons import dungeon_music_addresses
from Regions import location_table
from Text import MultiByteTextMapper, ... | import bisect
import io
import json
import hashlib
import logging
import os
import random
import struct
import sys
import subprocess
from BaseClasses import CollectionState, ShopType, Region, Location
from Dungeons import dungeon_music_addresses
from Regions import location_table
from Text import MultiByteTextMapper, ... |
from pathlib import Path
from pprint import pprint
import keyword
import builtins
import textwrap
from ursina import color, lerp, application
def indentation(line):
    """Return the number of leading whitespace characters in *line*.

    A line consisting entirely of whitespace reports its full length.
    """
    # Count characters until the first non-whitespace one.
    for position, character in enumerate(line):
        if not character.isspace():
            return position
    return len(line)
def get_module_attributes(str):
attrs = list()
for l in str.split('\n'):
if len(l) == 0:
... | from pathlib import Path
from pprint import pprint
import keyword
import builtins
import textwrap
from ursina import color, lerp, application
def indentation(line):
return len(line) - len(line.lstrip())
def get_module_attributes(str):
attrs = list()
for l in str.split('\n'):
if len(l) == 0:
... |
import os
from pprint import pprint
import torch
import torch.optim as optim
import passport_generator
from dataset import prepare_dataset, prepare_wm
from experiments.base import Experiment
from experiments.trainer import Trainer, Tester
from experiments.trainer_private import TesterPrivate
from experiments.utils im... | import os
from pprint import pprint
import torch
import torch.optim as optim
import passport_generator
from dataset import prepare_dataset, prepare_wm
from experiments.base import Experiment
from experiments.trainer import Trainer, Tester
from experiments.trainer_private import TesterPrivate
from experiments.utils im... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import requests
import json
import sys
import base64
import datetime
import os
from azure.storage.blob import BlobServiceClient, ContentSettings
from cimetrics.env import get_env
# Always the same for metrics-devops
IMAGE_B... | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import requests
import json
import sys
import base64
import datetime
import os
from azure.storage.blob import BlobServiceClient, ContentSettings
from cimetrics.env import get_env
# Always the same for metrics-devops
IMAGE_B... |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2020 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of ... | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2020 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of ... |
from numpy.testing import run_module_suite
from spectractor import parameters
from spectractor.extractor.extractor import Spectractor
from spectractor.logbook import LogBook
from spectractor.config import load_config
import os
import numpy as np
def test_logbook():
logbook = LogBook('./ctiofulllogbook_jun2017_v5... | from numpy.testing import run_module_suite
from spectractor import parameters
from spectractor.extractor.extractor import Spectractor
from spectractor.logbook import LogBook
from spectractor.config import load_config
import os
import numpy as np
def test_logbook():
logbook = LogBook('./ctiofulllogbook_jun2017_v5... |
###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory ... | ###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory ... |
# -*- coding: utf-8 -*-
"""
mslib.retriever
~~~~~~~~~~~~~~~~~~~~
automation within mss to create for instance a number of the same plots
for several flights or several forecast steps
This file is part of mss.
:copyright: Copyright 2020 Joern Ungermann
:license: APACHE-2.0, see LICENSE fo... | # -*- coding: utf-8 -*-
"""
mslib.retriever
~~~~~~~~~~~~~~~~~~~~
automation within mss to create for instance a number of the same plots
for several flights or several forecast steps
This file is part of mss.
:copyright: Copyright 2020 Joern Ungermann
:license: APACHE-2.0, see LICENSE fo... |
import requests
from bs4 import BeautifulSoup
import pickle
import csv
import urllib.request
import json
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import datetime
import time
import argparse
import os
... | import requests
from bs4 import BeautifulSoup
import pickle
import csv
import urllib.request
import json
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import datetime
import time
import argparse
import os
... |
import json
import logging
import time
from pathlib import Path
from eosapi import Client
import click
import requests
from haiku_node.blockchain_helpers.accounts import (
AccountManager, make_default_accounts, create_public_data)
from haiku_node.blockchain.eos.mother import UnificationMother
from haiku_node.bl... | import json
import logging
import time
from pathlib import Path
from eosapi import Client
import click
import requests
from haiku_node.blockchain_helpers.accounts import (
AccountManager, make_default_accounts, create_public_data)
from haiku_node.blockchain.eos.mother import UnificationMother
from haiku_node.bl... |
"""Jira issues collector."""
import itertools
import re
from base_collectors import SourceCollector
from collector_utilities.type import URL, Value
from model import Entities, Entity, SourceMeasurement, SourceResponses
class JiraIssues(SourceCollector):
"""Jira collector for issues."""
SPRINT_NAME_RE = re.... | """Jira issues collector."""
import itertools
import re
from base_collectors import SourceCollector
from collector_utilities.type import URL, Value
from model import Entities, Entity, SourceMeasurement, SourceResponses
class JiraIssues(SourceCollector):
"""Jira collector for issues."""
SPRINT_NAME_RE = re.... |
# tullinge/booking
# https://github.com/tullinge/booking
# imports
from flask import Blueprint, render_template, redirect, request, session, jsonify
# components import
from components.decorators import (
login_required,
user_setup_completed,
user_not_setup,
booking_blocked,
)
from components.core imp... | # tullinge/booking
# https://github.com/tullinge/booking
# imports
from flask import Blueprint, render_template, redirect, request, session, jsonify
# components import
from components.decorators import (
login_required,
user_setup_completed,
user_not_setup,
booking_blocked,
)
from components.core imp... |
import asyncio
import synapse.lib.cell as s_cell
import synapse.lib.coro as s_coro
import synapse.lib.stormsvc as s_stormsvc
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.daemon as s_daemon
import synapse.telepath as s_telepath
import synapse.tests.utils as s_t_utils
class Foo:
de... | import asyncio
import synapse.lib.cell as s_cell
import synapse.lib.coro as s_coro
import synapse.lib.stormsvc as s_stormsvc
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.daemon as s_daemon
import synapse.telepath as s_telepath
import synapse.tests.utils as s_t_utils
class Foo:
de... |
# encoding=utf-8
# Author: Yu-Lun Chiang
# Description: Test NewsCrawler
import logging
import pytest
from collections import namedtuple
from Sanga.media import ftvnews
from Sanga.struct import NewsStruct
logger = logging.getLogger(__name__)
TEST_DATA = namedtuple(
typename="TEST_DATA",
field_names=[
... | # encoding=utf-8
# Author: Yu-Lun Chiang
# Description: Test NewsCrawler
import logging
import pytest
from collections import namedtuple
from Sanga.media import ftvnews
from Sanga.struct import NewsStruct
logger = logging.getLogger(__name__)
TEST_DATA = namedtuple(
typename="TEST_DATA",
field_names=[
... |
import requests
from .settings import ADDONS
def is_available(operation):
"""Checks wether the service for the operation is defined and listening"""
if operation in ADDONS:
health = ADDONS[operation]["health"]
try:
response = requests.request(health["method"], health["uri"])
... | import requests
from .settings import ADDONS
def is_available(operation):
"""Checks wether the service for the operation is defined and listening"""
if operation in ADDONS:
health = ADDONS[operation]["health"]
try:
response = requests.request(health["method"], health["uri"])
... |
import logging
from copy import deepcopy
from pathlib import Path
import click.testing
import message_ix
import pandas as pd
import pytest
from ixmp import Platform
from ixmp import config as ixmp_config
from message_ix_models import cli, util
from message_ix_models.util._logging import mark_time, preserve_log_level
... | import logging
from copy import deepcopy
from pathlib import Path
import click.testing
import message_ix
import pandas as pd
import pytest
from ixmp import Platform
from ixmp import config as ixmp_config
from message_ix_models import cli, util
from message_ix_models.util._logging import mark_time, preserve_log_level
... |
import discord
from aiohttp import request
from discord.ext import commands
import database as db
from functions import update_db
import io
import json
import contextlib
import textwrap
class Owner(commands.Cog):
"""A private cog which only works for me."""
def __init__(self, bot):
self.bot = bot
... | import discord
from aiohttp import request
from discord.ext import commands
import database as db
from functions import update_db
import io
import json
import contextlib
import textwrap
class Owner(commands.Cog):
"""A private cog which only works for me."""
def __init__(self, bot):
self.bot = bot
... |
#!/usr/bin/env python3
import os
import re
base_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
config_h = os.path.join(base_path, 'include', 'spdlog', 'version.h')
data = {'MAJOR': 0, 'MINOR': 0, 'PATCH': 0}
reg = re.compile(r'^\s*#define\s+SPDLOG_VER_([A-Z]+)\s+([0-9]+).*$')
with open(config_... | #!/usr/bin/env python3
import os
import re
base_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
config_h = os.path.join(base_path, 'include', 'spdlog', 'version.h')
data = {'MAJOR': 0, 'MINOR': 0, 'PATCH': 0}
reg = re.compile(r'^\s*#define\s+SPDLOG_VER_([A-Z]+)\s+([0-9]+).*$')
with open(config_... |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# ... | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# ... |
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %%
import os
from functools import partial
# %%
from matplotlib import pyplot as plt
import matplotlib
import pandas as pd
from numpy import unique
# %%
from model_analysis import *
# %% [markdown]
# ### Loading data
# %%
ru... | # To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %%
import os
from functools import partial
# %%
from matplotlib import pyplot as plt
import matplotlib
import pandas as pd
from numpy import unique
# %%
from model_analysis import *
# %% [markdown]
# ### Loading data
# %%
ru... |
'''
Any file within the no_import_common_class folder is for methods that can be
imported safely (without circular dependencies) into the classes in
the common class folder.
These methods are specific to category pages
'''
import constants.file_paths as file_paths
RESUME = 'resume'
NOT_RESUME = ('exer... | '''
Any file within the no_import_common_class folder is for methods that can be
imported safely (without circular dependencies) into the classes in
the common class folder.
These methods are specific to category pages
'''
import constants.file_paths as file_paths
RESUME = 'resume'
NOT_RESUME = ('exer... |
###############################################################################
# Copyright (c) 2018-2021 Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory
#
# Written by J. Chavez, S. Czyz, G. Kosinovsky, V. Mozin, S. Sangiorgio.
# RASE-support@llnl.gov.
#
# LLNL-CODE-... | ###############################################################################
# Copyright (c) 2018-2021 Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory
#
# Written by J. Chavez, S. Czyz, G. Kosinovsky, V. Mozin, S. Sangiorgio.
# RASE-support@llnl.gov.
#
# LLNL-CODE-... |
import asyncio
import websockets
from ftx_python.stream.channel import Channel
from ftx_python.stream.subscription import Subscription
import json
from typing import Union, Dict, DefaultDict
import warnings
import time
import hmac
from collections import defaultdict
from itertools import zip_longest
import zl... | import asyncio
import websockets
from ftx_python.stream.channel import Channel
from ftx_python.stream.subscription import Subscription
import json
from typing import Union, Dict, DefaultDict
import warnings
import time
import hmac
from collections import defaultdict
from itertools import zip_longest
import zl... |
import sys
import webbrowser
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import requests
if __name__ == "__main__":
print("Googling.....")
url = "https://www.google.com/search?q=" + " ".join(sys.argv[1:])
res = requests.get(url, headers={"UserAgent": UserAgent().random})
# res.... | import sys
import webbrowser
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import requests
if __name__ == "__main__":
print("Googling.....")
url = "https://www.google.com/search?q=" + " ".join(sys.argv[1:])
res = requests.get(url, headers={"UserAgent": UserAgent().random})
# res.... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Autogen with
# import json
#
# with open("/datasets/lvis/lvis_v1_val_headv1.json", "r") as f:
# a = json.load(f)
# c = a["categories"]
# for x in c:
# del x["image_count"]
# del x["instance_count"]
# LVIS_CATEGORIES = repr(c) + " # noqa"... | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Autogen with
# import json
#
# with open("/datasets/lvis/lvis_v1_val_headv1.json", "r") as f:
# a = json.load(f)
# c = a["categories"]
# for x in c:
# del x["image_count"]
# del x["instance_count"]
# LVIS_CATEGORIES = repr(c) + " # noqa"... |
#! /usr/bin/env python
import sys
import argparse
import re
from pprint import pprint
from datetime import date
from pathlib import Path
from shutil import copytree, copy2
from typing import TypeVar, List
import pandas as pd
import numpy as np
import time
import json
from hubmap_commons.globus_groups import get_globu... | #! /usr/bin/env python
import sys
import argparse
import re
from pprint import pprint
from datetime import date
from pathlib import Path
from shutil import copytree, copy2
from typing import TypeVar, List
import pandas as pd
import numpy as np
import time
import json
from hubmap_commons.globus_groups import get_globu... |
import contextlib
import datetime
import io
import os
import marshal
import pathlib
import shutil
import signal
import subprocess
import sys
import tempfile
import textwrap
import time
import typing
import components
from components._impl.workers import base
from components._impl.workers import subprocess_rpc
class ... | import contextlib
import datetime
import io
import os
import marshal
import pathlib
import shutil
import signal
import subprocess
import sys
import tempfile
import textwrap
import time
import typing
import components
from components._impl.workers import base
from components._impl.workers import subprocess_rpc
class ... |
from ._private.address import get_bus_address, parse_address
from .message import Message
from .constants import BusType, MessageFlag, MessageType, ErrorType, NameFlag, RequestNameReply, ReleaseNameReply
from .service import ServiceInterface
from .validators import assert_object_path_valid, assert_bus_name_valid
from .... | from ._private.address import get_bus_address, parse_address
from .message import Message
from .constants import BusType, MessageFlag, MessageType, ErrorType, NameFlag, RequestNameReply, ReleaseNameReply
from .service import ServiceInterface
from .validators import assert_object_path_valid, assert_bus_name_valid
from .... |
# -------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ----------------------------------------------------------------------... | # -------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ----------------------------------------------------------------------... |
import asyncio
import re
from datetime import timedelta
from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import arrow
import dateutil.parser
import discord.errors
import regex
from async_rediscache import RedisCache
from dateutil.relativedelta import relativedelta
from discord import Col... | import asyncio
import re
from datetime import timedelta
from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import arrow
import dateutil.parser
import discord.errors
import regex
from async_rediscache import RedisCache
from dateutil.relativedelta import relativedelta
from discord import Col... |
"""Helper for listing a summary of finished prums and progress on open prums.
Projecta are small bite-sized project quanta that typically will result in
one manuscript.
"""
from gooey import GooeyParser
import datetime
import dateutil.parser as date_parser
from regolith.helpers.basehelper import SoutHelperBase
... | """Helper for listing a summary of finished prums and progress on open prums.
Projecta are small bite-sized project quanta that typically will result in
one manuscript.
"""
from gooey import GooeyParser
import datetime
import dateutil.parser as date_parser
from regolith.helpers.basehelper import SoutHelperBase
... |
#!/usr/bin/python3 -i
#
# Copyright 2013-2022 The Khronos Group Inc.
#
# SPDX-License-Identifier: Apache-2.0
"""Types and classes for manipulating an API registry."""
import copy
import re
import sys
import xml.etree.ElementTree as etree
from collections import defaultdict, deque, namedtuple
from generator import Out... | #!/usr/bin/python3 -i
#
# Copyright 2013-2022 The Khronos Group Inc.
#
# SPDX-License-Identifier: Apache-2.0
"""Types and classes for manipulating an API registry."""
import copy
import re
import sys
import xml.etree.ElementTree as etree
from collections import defaultdict, deque, namedtuple
from generator import Out... |
#!/usr/bin/env python3
import xml
import urllib.request
import xml.etree.ElementTree as ET
BLOG_URL = "http://www.xkyle.com"
RSS_URL = f"{BLOG_URL}/index.xml"
def get_blog_rssxml():
    """Download the blog's RSS feed and return the raw response bytes.

    Uses the module-level RSS_URL; the connection is always closed,
    even if reading fails.
    """
    response = urllib.request.urlopen(RSS_URL)
    try:
        return response.read()
    finally:
        # Equivalent to the context-manager form: urlopen responses
        # close themselves on __exit__.
        response.close()
def print_blog_posts():
rssxml = get_blog_rs... | #!/usr/bin/env python3
import xml
import urllib.request
import xml.etree.ElementTree as ET
BLOG_URL = "http://www.xkyle.com"
RSS_URL = f"{BLOG_URL}/index.xml"
def get_blog_rssxml():
with urllib.request.urlopen(RSS_URL) as response:
return response.read()
def print_blog_posts():
rssxml = get_blog_rs... |
# Copyright (c) 2020 the original author or authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... | # Copyright (c) 2020 the original author or authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... |
import logging
import pytest
import botocore.exceptions as boto3exception
import json
import uuid
from ocs_ci.framework import config
from ocs_ci.ocs.exceptions import (
NoBucketPolicyResponse,
InvalidStatusCode,
UnexpectedBehaviour,
)
from ocs_ci.framework.testlib import MCGTest, tier1, tier2, tier3, ski... | import logging
import pytest
import botocore.exceptions as boto3exception
import json
import uuid
from ocs_ci.framework import config
from ocs_ci.ocs.exceptions import (
NoBucketPolicyResponse,
InvalidStatusCode,
UnexpectedBehaviour,
)
from ocs_ci.framework.testlib import MCGTest, tier1, tier2, tier3, ski... |
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpRequest, HttpResponse
from django.db.models import Sum, Max, Count, Q, F
from django.core.paginator import Paginator
from django.contrib import messages
imp... | from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpRequest, HttpResponse
from django.db.models import Sum, Max, Count, Q, F
from django.core.paginator import Paginator
from django.contrib import messages
imp... |
from sys import argv, exit
from getopt import getopt, GetoptError
# Constant variables
GRID_SIZE = 20 # Grid size for rows and columns
GRID_CHAR = ' ' # Character to fill blanks in the grid
ERRORS = [
'No intersections',
'Illegal adjacencies',
'Out of bounds'
] # List containing error strings
def main(... | from sys import argv, exit
from getopt import getopt, GetoptError
# Constant variables
GRID_SIZE = 20 # Grid size for rows and columns
GRID_CHAR = ' ' # Character to fill blanks in the grid
ERRORS = [
'No intersections',
'Illegal adjacencies',
'Out of bounds'
] # List containing error strings
def main(... |
"""
A Allen-Cahn equation
.. codeauthor:: David Zwicker <david.zwicker@ds.mpg.de>
"""
from typing import Callable # @UnusedImport
import numpy as np
from ..fields import ScalarField
from ..grids.boundaries.axes import BoundariesData
from ..tools.docstrings import fill_in_docstring
from ..tools.numba import jit, n... | """
A Allen-Cahn equation
.. codeauthor:: David Zwicker <david.zwicker@ds.mpg.de>
"""
from typing import Callable # @UnusedImport
import numpy as np
from ..fields import ScalarField
from ..grids.boundaries.axes import BoundariesData
from ..tools.docstrings import fill_in_docstring
from ..tools.numba import jit, n... |
#!/usr/bin/env python3
"""Library of Python tools -- Hugues Hoppe.
# pylint: disable=line-too-long
Useful commands to test and polish this file:
bash -c 'f=__init__.py; true && env python3 $f; env mypy --strict "$f"; autopep8 -a -a -a --max-line-length 80 --indent-size 2 --ignore E265 --diff "$f"; pylint --indent-str... | #!/usr/bin/env python3
"""Library of Python tools -- Hugues Hoppe.
# pylint: disable=line-too-long
Useful commands to test and polish this file:
bash -c 'f=__init__.py; true && env python3 $f; env mypy --strict "$f"; autopep8 -a -a -a --max-line-length 80 --indent-size 2 --ignore E265 --diff "$f"; pylint --indent-str... |
import functools
import os
import random
import warnings
from collections import OrderedDict
from datetime import datetime
import numpy as np
import torch
import torch.backends.cudnn as torchcudnn
from openpyxl import load_workbook, Workbook
from thop import profile
from torch.autograd.variable import Variable
class... | import functools
import os
import random
import warnings
from collections import OrderedDict
from datetime import datetime
import numpy as np
import torch
import torch.backends.cudnn as torchcudnn
from openpyxl import load_workbook, Workbook
from thop import profile
from torch.autograd.variable import Variable
class... |
import numpy as np
from ..visualization import Viewer
from ..utils import Subject, Observer
import copy
class Clipping(object):
class __Flip(object):
def __init__(self):
self.x = False
self.y = False
self.z = False
def __init__(self):
self.min_x = None
... | import numpy as np
from ..visualization import Viewer
from ..utils import Subject, Observer
import copy
class Clipping(object):
class __Flip(object):
def __init__(self):
self.x = False
self.y = False
self.z = False
def __init__(self):
self.min_x = None
... |
# coding=utf-8
# -*- coding: utf-8 -*-
# visual.py
# =====================================4
# This file contains components for the qt
# to establish visual outputs of price data
# loop result and strategy optimization
# results as well
# ======================================
import mplfinance as mpf
from mplfinance... | # coding=utf-8
# -*- coding: utf-8 -*-
# visual.py
# =====================================4
# This file contains components for the qt
# to establish visual outputs of price data
# loop result and strategy optimization
# results as well
# ======================================
import mplfinance as mpf
from mplfinance... |
import asyncio
import os
import platform
from pathlib import Path
from . import urlscan, utils
def main():
parser = utils.create_arg_parser()
args = parser.parse_args()
utils.validate_arguments(args)
api_key = os.environ["URLSCAN_API_KEY"]
data_dir = Path(os.getenv("URLSCAN_DATA_DIR", "."))
... | import asyncio
import os
import platform
from pathlib import Path
from . import urlscan, utils
def main():
parser = utils.create_arg_parser()
args = parser.parse_args()
utils.validate_arguments(args)
api_key = os.environ["URLSCAN_API_KEY"]
data_dir = Path(os.getenv("URLSCAN_DATA_DIR", "."))
... |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/60_medical.imaging.ipynb (unless otherwise specified).
__all__ = ['DcmDataset', 'DcmTag', 'DcmMultiValue', 'dcmread', 'get_dicom_files', 'DicomSegmentationDataLoaders',
'get_dicom_files', 'TensorDicom', 'PILDicom', 'pixels', 'scaled_px', 'array_freqhist_bins',... | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/60_medical.imaging.ipynb (unless otherwise specified).
__all__ = ['DcmDataset', 'DcmTag', 'DcmMultiValue', 'dcmread', 'get_dicom_files', 'DicomSegmentationDataLoaders',
'get_dicom_files', 'TensorDicom', 'PILDicom', 'pixels', 'scaled_px', 'array_freqhist_bins',... |
#!/usr/bin/env python3
import os, sys, datetime, time, base64, logging, signal, re, ssl, traceback, threading
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
from socketserver import ThreadingMixIn
from http.server import BaseHTTPRequestHandler, HTTPServer
from poshc2.server.I... | #!/usr/bin/env python3
import os, sys, datetime, time, base64, logging, signal, re, ssl, traceback, threading
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
from socketserver import ThreadingMixIn
from http.server import BaseHTTPRequestHandler, HTTPServer
from poshc2.server.I... |
import multiprocessing
import os
import itertools
import importlib
import time
import cache as cachelib
import numpy as np
import random
from multiprocessing import Pool, cpu_count
from io import StringIO
import statistics
import argparse
import sys
import json
parser = argparse.ArgumentParser(descr... | import multiprocessing
import os
import itertools
import importlib
import time
import cache as cachelib
import numpy as np
import random
from multiprocessing import Pool, cpu_count
from io import StringIO
import statistics
import argparse
import sys
import json
parser = argparse.ArgumentParser(descr... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.