| Instruction (string, 362–7.83k chars) | output_code (string, 1–945 chars) |
|---|---|
Based on the snippet: <|code_start|> # leaf nodes are strings which are file contents
scaffold_structure = {
module_name: {
'graphql_schema': {
'__init__.py': GRAPHQL_INIT_SCAFFOLD,
},
'pent': {
'__init__.py': PENT_INIT_SCAFFOLD,
... | 'autopents.py': print_autopents_file(document_ast), |
Predict the next line after this snippet: <|code_start|>
def test_reverse_dictionary() -> None:
assert reverse_dict({1: '1', 2: '2'}) == {'2': 2, '1': 1}
def test_reverse_dictionary_dup_key() -> None:
assert reverse_dict({1: '1', 2: '1'})
@pytest.mark.asyncio
async def test_async_list() -> None:
async ... | list_of_one = await async_list(list_of_gens) |
Next line prediction: <|code_start|>
def test_reverse_dictionary() -> None:
assert reverse_dict({1: '1', 2: '2'}) == {'2': 2, '1': 1}
def test_reverse_dictionary_dup_key() -> None:
assert reverse_dict({1: '1', 2: '1'})
@pytest.mark.asyncio
async def test_async_list() -> None:
async def gen_num(num: int... | one, two = await async_tuple(gen_num(1), gen_num(2)) |
Using the snippet: <|code_start|>
list_of_one = await async_list(list_of_gens)
assert list_of_one == [1]
list_of_two = await async_list([gen_num(1), gen_num(2)])
assert list_of_two == [1, 2]
@pytest.mark.asyncio
async def test_async_tuple() -> None:
async def gen_num(num: int) -> int:
re... | assert is_camel_case('testFoo') is True |
Given the following code snippet before the placeholder: <|code_start|> assert list_of_two == [1, 2]
@pytest.mark.asyncio
async def test_async_tuple() -> None:
async def gen_num(num: int) -> int:
return num
one, two = await async_tuple(gen_num(1), gen_num(2))
assert one == 1
assert two ==... | assert to_snake_case('foo') == 'foo' |
Using the snippet: <|code_start|>
pytestmark = pytest.mark.asyncio
class EnumTest(Enum):
VALUE_ONE = 'VALUE_ONE'
VALUE_TWO = 'VALUE_TWO'
def define_schema(resolver: Callable, enumType: GraphQLObjectType) -> GraphQLSchema:
def resolve_input(_obj: Any, args: dict, *_: Any) -> Any:
assert isinstanc... | GraphQLEnumTest = GraphQLPythonEnumType(EnumTest) |
Using the snippet: <|code_start|>
def create_graphql_app(
root_object: PentContextfulObject, schema: GraphQLSchema, debug: bool=True
) -> Sanic:
""" Creates a Sanic app and adds a graphql/graphiql endpoint """
app = Sanic(__name__)
app.debug = debug
if debug:
# hack from https://github.... | root_object.context.loader = PentLoader(root_object.context) |
Predict the next line for this snippet: <|code_start|>
def enum_types(self) -> List[GrappleTypeDef]:
return [t for t in self._types if t.type_varietal == TypeVarietal.ENUM]
def is_enum(self, name: str) -> bool:
ttype = self.type_named(name)
return ttype and ttype.type_varietal == TypeVa... | check.isinst(dir_node, Directive) |
Next line prediction: <|code_start|> edge_name: str
edge_id: int
field: str
class DeletePentData(NamedTuple):
type: str
FieldVarietalsUnion = Union[EdgeToStoredIdData, DeletePentData]
class GrappleFieldData(NamedTuple):
name: str
type_ref: GrappleTypeRef
args: Any
field_varietal: Fi... | if is_camel_case(name): |
Using the snippet: <|code_start|> edge_id: int
field: str
class DeletePentData(NamedTuple):
type: str
FieldVarietalsUnion = Union[EdgeToStoredIdData, DeletePentData]
class GrappleFieldData(NamedTuple):
name: str
type_ref: GrappleTypeRef
args: Any
field_varietal: FieldVarietal
field_... | name = to_snake_case(name) |
Predict the next line for this snippet: <|code_start|> except pymysql.err.OperationalError as error:
print('Could not connect to local mysql instance:')
print('All tests will run locally')
print(error)
MagnusConn.is_up = False
else:
MagnusConn.i... | pent_context: PentContext, |
Given snippet: <|code_start|> print('All tests will run locally')
print(error)
MagnusConn.is_up = False
else:
MagnusConn.is_up = True
conn.close() # type: ignore
return MagnusConn.is_up
@staticmethod
def get_conn_info(db_name: str) -> ... | root_value: PentContextfulObject=None, |
Predict the next line after this snippet: <|code_start|>
class MagnusConn:
is_up = None
@staticmethod
<|code_end|>
using the current file's imports:
import traceback
import pymysql
import pymysql.cursors
from typing import Callable, List, Dict, Any
from graphql import GraphQLSchema, graphql
from graphscal... | def get_unittest_conn_info() -> ConnectionInfo: |
Next line prediction: <|code_start|>
class MagnusConn:
is_up = None
@staticmethod
def get_unittest_conn_info() -> ConnectionInfo:
return MagnusConn.get_conn_info('graphscale-unittest')
@staticmethod
def is_db_unittest_up() -> bool:
"""Tests to see if the unittest-mysql is up and... | conn = pymysql_conn_from_info(conn_info) |
Based on the snippet: <|code_start|>
def db_mem_fixture(*, mem: Callable, db: Callable) -> List[Callable]:
fixture_funcs = []
if MagnusConn.is_db_unittest_up():
fixture_funcs.append(db)
fixture_funcs.append(mem)
return fixture_funcs
async def async_test_graphql(
query: str,
pent_contex... | print_error('GRAPHQL ERROR') |
Given the following code snippet before the placeholder: <|code_start|>
class GraphQLArg(NamedTuple):
name: str
arg_type: str
value: Any
class InProcessGraphQLClient:
def __init__(self, root_value: PentContextfulObject, graphql_schema: GraphQLSchema) -> None:
self.root_value = root_value
... | def context(self) -> PentContext: |
Here is a snippet: <|code_start|>
class GraphQLArg(NamedTuple):
name: str
arg_type: str
value: Any
class InProcessGraphQLClient:
<|code_end|>
. Write the next line using the current file imports:
from typing import Any, Dict, NamedTuple, cast
from graphql import graphql as graphql_main
from graphql im... | def __init__(self, root_value: PentContextfulObject, graphql_schema: GraphQLSchema) -> None: |
Continue the code snippet: <|code_start|>
def pythonify_dict(input_data: Dict[str, Any]) -> Dict[str, Any]:
data = {}
for name, value in input_data.items():
python_name = to_snake_case(name)
data[python_name] = pythonify_dict(value) if isinstance(value, dict) else value
return data
def p... | check.invariant(callable(prop), 'must be async function') |
Based on the snippet: <|code_start|>
def pythonify_dict(input_data: Dict[str, Any]) -> Dict[str, Any]:
data = {}
for name, value in input_data.items():
python_name = to_snake_case(name)
data[python_name] = pythonify_dict(value) if isinstance(value, dict) else value
return data
def proces... | @async_field_error_boundary |
Continue the code snippet: <|code_start|> @async_field_error_boundary
async def mutation_resolver(obj: Any, args: Dict[str, Any], context: PentContext,
*_: Any) -> Any:
args = process_args(args)
pent_data_cls = context.cls_from_name(pent_data_cls_name)
pent... | @field_error_boundary |
Given the following code snippet before the placeholder: <|code_start|>
def pythonify_dict(input_data: Dict[str, Any]) -> Dict[str, Any]:
data = {}
for name, value in input_data.items():
python_name = to_snake_case(name)
data[python_name] = pythonify_dict(value) if isinstance(value, dict) else... | async def mutation_resolver(obj: Any, args: Dict[str, Any], context: PentContext, |
Given the following code snippet before the placeholder: <|code_start|>
def pythonify_dict(input_data: Dict[str, Any]) -> Dict[str, Any]:
data = {}
for name, value in input_data.items():
<|code_end|>
, predict the next line using imports from the current file:
from typing import Any, Callable, Dict
from grap... | python_name = to_snake_case(name) |
Based on the snippet: <|code_start|>
@fixture
def mock_client(request):
client = MockClient()
request.addfinalizer(client.on_stop)
return client
@fixture
def mock_transport():
<|code_end|>
, predict the immediate next line with the help of imports:
import sys
import asyncio
from concurrent.futures im... | return MockTransport() |
Given the code snippet: <|code_start|>
@fixture
def mock_transport():
return MockTransport()
@fixture
def broken_transport():
return MockTransport(broken=True)
@fixture(scope="session")
def executor(request):
loop = asyncio.get_event_loop()
print("Loop: {0}".format(loop))
ex = ThreadPoolExecuto... | return SimpleMovingAverage({ |
Using the snippet: <|code_start|>
logger = logging.getLogger(__name__)
def with_retry(fn):
def wrapper(*args, **kwargs):
for i in range(0, 10):
try:
result = fn(*args, **kwargs)
return result
except Exception as ex:
time.sleep(1)
... | cfg = load("config.yaml") |
Given the following code snippet before the placeholder: <|code_start|>
logger = logging.getLogger(__name__)
try:
load_dotenv(find_dotenv())
except Exception as ex:
logger.error("Error while loading .env: '{}'. Ignoring.".format(ex))
def main(args):
if args.debug:
logging.basicConfig(level=log... | cfg = load(args.config) |
Using the snippet: <|code_start|>
logger = logging.getLogger(__name__)
try:
load_dotenv(find_dotenv())
except Exception as ex:
logger.error("Error while loading .env: '{}'. Ignoring.".format(ex))
def main(args):
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.... | start_loop(cfg, args.noop) |
Using the snippet: <|code_start|>
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_lo... | main(args) |
Based on the snippet: <|code_start|>
class ConfigBase(object):
"""
Base config class
"""
def is_valid(self):
return True
class InstanceMixin(object):
"""
Mixin used for configs which are loaded dynamically
"""
@property
def module(self):
return self._data["mod... | self._instance = dynamic_import(self.module)(self._data) |
Given snippet: <|code_start|>
Fork responds to node names as functions. The function arguments are the same as node
constructor (__init__ method) arguments. Each call will append new node to the fork and
will connect the new node to the previous node in the fork.
To configure current no... | node_dict = node_dictionary() |
Continue the code snippet: <|code_start|> self.logger.info("initializing stream")
self.logger.debug("sorting nodes")
sorted_nodes = self.sorted_nodes()
self.pipes = []
self.logger.debug("flushing pipes")
for node in sorted_nodes:
node.inputs = []
n... | if isinstance(node, TargetNode): |
Next line prediction: <|code_start|> self.logger.debug("finalizing node %s" % node_label(node))
node.finalize()
def node_label(node):
"""Debug label for a node: node identifier with python object id."""
return "%s(%s)" % (node.identifier() or str(type(node)), id(node))
class _StreamNode... | except NodeFinished: |
Next line prediction: <|code_start|> return self._closed
def done_sending(self):
"""Close pipe from sender side"""
self._flush(True)
def done_receiving(self):
"""Close pipe from either side"""
self._note("C not_empty acq? r")
self.not_empty.acquire()
self... | self.logger = get_logger() |
Based on the snippet: <|code_start|>
class Graph(object):
"""Data processing stream"""
def __init__(self, nodes=None, connections=None):
"""Creates a node graph with connections.
:Parameters:
* `nodes` - dictionary with keys as node names and values as nodes
* `connectio... | self.logger = get_logger() |
Predict the next line after this snippet: <|code_start|>
:Arguments:
- `limit`: read only specified number of records from dataset to guess field properties
- `collapse`: whether records are collapsed into flat structure or not
Returns: tuple with Field objects. Order of fields ... | record = collapse_record(record) |
Next line prediction: <|code_start|> def probe_record(record, parent = None):
for key, value in record.items():
full_key = parent + "." + key if parent else key
if self.expand and type(value) == dict:
probe_record(value, full_key)
... | field = Field(probe.field) |
Here is a snippet: <|code_start|>RNNCell = tf.nn.rnn_cell.RNNCell
LSTMStateTuple = tf.nn.rnn_cell.LSTMStateTuple
def _conv2d(x, W, strides=None):
if strides is None:
strides = [1, 1]
return tf.nn.conv2d(x, W, strides=[1] + strides + [1], padding="SAME")
def dynamic_conv_rnn(cell, inputs, sequence_length=Non... | input_shape = smart_shape(inputs) |
Given the following code snippet before the placeholder: <|code_start|>
_registered_datasets = {}
def register_dataset(name, **args):
name = name.lower()
def _register(dataset):
_registered_datasets[name] = (dataset, args)
return dataset
return _register
def load_dataset(config, subset, session, name... | import_submodules("datasets") |
Predict the next line for this snippet: <|code_start|>
DEFAULT_PATH = "/home/" + username() + "/data/KITTI_instance/"
NAME = "KITTI_instance"
@register_dataset(NAME)
<|code_end|>
with the help of current file imports:
from datasets.Loader import register_dataset
from datasets.Mapillary.MapillaryLike_instance import... | class KittiInstanceDataset(MapillaryLikeInstanceDataset): |
Predict the next line after this snippet: <|code_start|>
def __init__(self, name, inputs, tower_setup, initial_weights, hack_gradient_magnitude=1.0):
super().__init__()
assert len(initial_weights) == len(inputs)
with tf.variable_scope(name):
initializer = tf.constant_initializer(initial_weights)
... | inp, dim = prepare_input(inputs) |
Predict the next line after this snippet: <|code_start|>
NUM_CLASSES = 2
class MapillaryLikeInstanceDataset(FileListDataset):
def __init__(self, config, subset, name, default_path, data_list_path, id_divisor, cat_ids_to_use):
super().__init__(config, name, subset, default_path, NUM_CLASSES)
self.validatio... | print("{} ({}): using data_dir:".format(self.name, self.subset), self.data_dir, file=log.v5) |
Given the code snippet: <|code_start|>
class FeedDataset(AbstractDataset):
def __init__(self, config, subset, data_keys_to_use, num_classes=2):
super().__init__(config, subset, num_classes)
self._data_keys_to_use = data_keys_to_use
self._batch_size = -1
if subset == "val":
self._batch_size = ... | DataKeys.IMAGES: (tf.float32, (None, None, 3)), |
Next line prediction: <|code_start|>
class Augmentor:
def apply_before_resize(self, tensors):
return tensors
def apply_after_resize(self, tensors):
return tensors
def batch_apply_before_resize(self, tensors_batch):
return tensors_batch
def batch_apply_after_resize(self, tensors_batch):
retu... | img = tensors[DataKeys.IMAGES] |
Based on the snippet: <|code_start|>
class Augmentor:
def apply_before_resize(self, tensors):
return tensors
def apply_after_resize(self, tensors):
return tensors
def batch_apply_before_resize(self, tensors_batch):
return tensors_batch
def batch_apply_after_resize(self, tensors_batch):
retur... | gamma = tf.log(0.5 + 1 / math.sqrt(2) * factor) / tf.log(0.5 - 1 / math.sqrt(2) * factor) |
Continue the code snippet: <|code_start|> return aug_tensors
def _sample_factor(self):
return tf.random_uniform(shape=[], minval=self.gamma_range[0], maxval=self.gamma_range[1], dtype=tf.float32)
def batch_apply_after_resize(self, tensors_batch):
factor = self._sample_factor()
return [self.apply_af... | flipped = flip_coords_horizontal_y0x0y1x1(val, tf.shape(tensors[DataKeys.IMAGES])[1]) |
Predict the next line for this snippet: <|code_start|> def _sample_factor(self):
return tf.random_uniform(shape=[], minval=self.gamma_range[0], maxval=self.gamma_range[1], dtype=tf.float32)
def batch_apply_after_resize(self, tensors_batch):
factor = self._sample_factor()
return [self.apply_after_resize(... | flipped = flip_coords_horizontal_x0y0x1y1(val, tf.shape(tensors[DataKeys.IMAGES])[1]) |
Based on the snippet: <|code_start|>
NAME = "MOTS_challenge_feed_test"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/test/"
SEQ_IDS_TRAIN = []
SEQ_IDS_VAL = ["%04d" % idx for idx in [1, 6, 7, 12]]
TIMESTEPS_PER_SEQ = {"0001": 450, "0006": 1194, "0007": 500, "0012": 900}
<|code_end|>
, predict the immed... | @register_dataset(NAME) |
Based on the snippet: <|code_start|>
NAME = "MOTS_challenge_feed_test"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/test/"
SEQ_IDS_TRAIN = []
SEQ_IDS_VAL = ["%04d" % idx for idx in [1, 6, 7, 12]]
TIMESTEPS_PER_SEQ = {"0001": 450, "0006": 1194, "0007": 500, "0012": 900}
@register_dataset(NAME)
<|code_e... | class MOTSSegtrackFeedDataset(KittiSegtrackLikeFeedDataset): |
Using the snippet: <|code_start|>
NAME = "Mapillary_detection"
DATA_LIST_PATH = "datasets/Mapillary/"
DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/mapillary_quarter/"
N_MAX_DETECTIONS = 300
ID_DIVISOR = 256
CLASS_IDS_WITH_INSTANCES = [0, 1, 8, 19, 20, 21, 22, 23, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, ... | @register_dataset(NAME) |
Continue the code snippet: <|code_start|>
NAME = "Mapillary_detection"
DATA_LIST_PATH = "datasets/Mapillary/"
DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/mapillary_quarter/"
N_MAX_DETECTIONS = 300
ID_DIVISOR = 256
CLASS_IDS_WITH_INSTANCES = [0, 1, 8, 19, 20, 21, 22, 23, 32, 33, 34, 35, 36, 37, 38, 39, 40, ... | class MapillaryDetectionDataset(MapillaryLikeDetectionFileListDataset): |
Predict the next line after this snippet: <|code_start|>
class BboxDetectionRefinementForwarder(Forwarder):
def __init__(self, engine):
super().__init__(engine)
self.model_name = self.config.string("model")
def forward(self):
out_folder = "forwarded/" + self.model_name + "/detection_bbox_refined/"
... | extraction_keys = [Extractions.DET_MASKS, DataKeys.IMAGE_FILENAMES, DataKeys.IDS] |
Next line prediction: <|code_start|>
class BboxDetectionRefinementForwarder(Forwarder):
def __init__(self, engine):
super().__init__(engine)
self.model_name = self.config.string("model")
def forward(self):
out_folder = "forwarded/" + self.model_name + "/detection_bbox_refined/"
tf.gfile.MakeDirs(... | extraction_keys = [Extractions.DET_MASKS, DataKeys.IMAGE_FILENAMES, DataKeys.IDS] |
Here is a snippet: <|code_start|> tensors, crop_offset = random_crop_tensors(tensors, crop_size, crop_offset)
resized_and_cropped_tensors.append(tensors)
return resized_and_cropped_tensors
def fixed_resize_and_crop(tensors, size):
assert len(size) in (1, 2)
if len(size) == 2:
assert size[0] == size[... | cropped, offset_ = random_crop_image(val, size, offset_) |
Here is a snippet: <|code_start|>def random_resize_and_crop(tensors, size):
assert len(size) in (1, 2)
if len(size) == 2:
assert size[0] == size[1]
crop_size = size
else:
crop_size = [size, size]
tensors, _, _ = resize_random_scale_with_min_size(tensors, min_size=crop_size)
tensors, _ = random_cro... | crop_offset = get_crop_offset(scaled_size, crop_size) |
Continue the code snippet: <|code_start|> self.load_init_savers = None
try:
self.load_init = config.string("load_init", "")
if self.load_init == "":
self.load_init = []
else:
self.load_init = [self.load_init]
except TypeError:
self.load_init = config.string_list("load_... | print("loading model from", fn, file=log.v1) |
Using the snippet: <|code_start|>
# See also savitar1
class FullyConnected(Layer):
def __init__(self, name, inputs, n_features, tower_setup, activation="relu", dropout=0.0, batch_norm=False,
batch_norm_decay=Layer.BATCH_NORM_DECAY_DEFAULT, l2=Layer.L2_DEFAULT, W_initializer=None):
super(FullyConn... | inp, n_features_inp = prepare_input(inputs) |
Given snippet: <|code_start|>
# See also savitar1
class FullyConnected(Layer):
def __init__(self, name, inputs, n_features, tower_setup, activation="relu", dropout=0.0, batch_norm=False,
batch_norm_decay=Layer.BATCH_NORM_DECAY_DEFAULT, l2=Layer.L2_DEFAULT, W_initializer=None):
super(FullyConnecte... | inp = apply_dropout(inp, dropout) |
Given snippet: <|code_start|>
NAME = "MOTS_challenge_feed"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/train/"
SEQ_IDS_TRAIN = []
SEQ_IDS_VAL = ["%04d" % idx for idx in [2, 5, 9, 11]]
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import glob
from datasets.Loade... | @register_dataset(NAME) |
Next line prediction: <|code_start|>
NAME = "MOTS_challenge_feed"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/train/"
SEQ_IDS_TRAIN = []
SEQ_IDS_VAL = ["%04d" % idx for idx in [2, 5, 9, 11]]
@register_dataset(NAME)
<|code_end|>
. Use current file imports:
(import glob
from datasets.Loader import re... | class MOTSSegtrackFeedDataset(KittiSegtrackLikeFeedDataset): |
Using the snippet: <|code_start|>
class Layer:
BATCH_NORM_DECAY_DEFAULT = 0.95
BATCH_NORM_EPSILON = 1e-5
L2_DEFAULT = 1e-4
def __init__(self):
self.summaries = []
self.regularizers = []
self.losses = []
self.update_ops = []
self.outputs = []
self.placeholders = []
self.measures = ... | beta, gamma, moving_mean, moving_var = create_batch_norm_vars(n_features, tower_setup, scope_name) |
Given the following code snippet before the placeholder: <|code_start|>
DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/mapillary/"
NAME = "mapillary_instance"
@register_dataset("mapillary_instance_full", resolution="full")
@register_dataset("mapillary_instance_half", resolution="half")
@register_dataset("ma... | class MapillaryInstanceDataset(MapillaryLikeInstanceDataset): |
Next line prediction: <|code_start|> self.gt_data = {seq: [] for seq in self.seq_ids}
self.visibility_threshold = config.float("visibility_threshold", 0.5) # Boxes with visibility < this are ignored
parent_folder = os.path.join(self.data_dir, "train")
for seq in self.seq_ids:
gt_filename = os.pat... | return_dict[DataKeys.BBOXES_y0x0y1x1] = bboxes |
Here is a snippet: <|code_start|>
NAME = "KITTI_segtrack_amodal"
@register_dataset(NAME)
class KittiSegtrackAmodalDataset(KittiSegtrackDataset):
def __init__(self, config, subset):
super().__init__(config, subset, NAME)
def postproc_annotation(self, ann_filename, ann):
ann = super().postproc_annotation(... | ann = {DataKeys.SEGMENTATION_LABELS: ann} |
Predict the next line after this snippet: <|code_start|>
NAME = "MOTS_challenge"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/train/"
SEQ_IDS_TRAIN = ["%04d" % idx for idx in [2, 5, 9, 11]]
SEQ_IDS_VAL = ["%04d" % idx for idx in [2, 5, 9, 11]]
TIMESTEPS_PER_SEQ = {"0002": 600, "0005": 837, "0009": 525,... | class MotsChallengeDataset(KittiSegtrackDataset): |
Predict the next line after this snippet: <|code_start|>
NAME = "MOTS_challenge"
DEFAULT_PATH = "/globalwork/voigtlaender/data/MOTS_challenge/train/"
SEQ_IDS_TRAIN = ["%04d" % idx for idx in [2, 5, 9, 11]]
SEQ_IDS_VAL = ["%04d" % idx for idx in [2, 5, 9, 11]]
TIMESTEPS_PER_SEQ = {"0002": 600, "0005": 837, "0009": 525,... | @register_dataset(NAME) |
Predict the next line for this snippet: <|code_start|>
class MOTFeedDataset(KittiSegtrackLikeFeedDataset):
def __init__(self, config, subset, name, default_path, seq_ids_train, seq_ids_val):
super().__init__(config, subset, name, default_path, seq_ids_train, seq_ids_val, False)
def get_filenames_for_video_id... | @register_dataset("MOT17_feed") |
Given the code snippet: <|code_start|> use_masks_for_ignore=False, crowd_masks=None):
"""
Label each anchor as fg/bg/ignore.
Args:
anchors: Ax4 float
gt_boxes: Bx4 float
crowd_boxes: Cx4 float
Returns:
anchor_labels: (A,) int. Each element is {-1, 0, 1}
anchor... | box_ious = calculate_ious(anchors, gt_boxes) # NA x NB |
Given the following code snippet before the placeholder: <|code_start|>
def make_disjoint(tracks, strategy):
def get_max_y(obj):
_, y, _, h = cocomask.toBbox(obj.mask)
return y + h
for frame, objects in enumerate(tracks):
if len(objects) == 0:
continue
if strategy == "y_pos":
objects... | objects_disjoint.append(TrackElement(box=obj.box, track_id=obj.track_id, class_=obj.class_, score=obj.score, |
Here is a snippet: <|code_start|># encoding: utf-8
# pylint: disable=invalid-name,missing-docstring
def test_User_repr(user_instance):
assert len(str(user_instance)) > 0
def test_User_auth(user_instance):
assert user_instance.is_authenticated
assert not user_instance.is_anonymous
@pytest.mark.parame... | (models.User.StaticRoles.INTERNAL.mask |
Given the code snippet: <|code_start|>
@pytest.yield_fixture()
def patch_User_password_scheme():
# pylint: disable=invalid-name,protected-access
"""
By default, the application uses ``bcrypt`` to store passwords securely.
However, ``bcrypt`` is a slow hashing algorithm (by design), so it is
bett... | _user_instance = utils.generate_user_instance(user_id=user_id) |
Based on the snippet: <|code_start|># encoding: utf-8
# pylint: disable=missing-docstring,redefined-outer-name
@pytest.yield_fixture()
def patch_User_password_scheme():
# pylint: disable=invalid-name,protected-access
"""
By default, the application uses ``bcrypt`` to store passwords securely.
Howev... | password_field_context = models.User.password.property.columns[0].type.context |
Given the code snippet: <|code_start|># encoding: utf-8
# pylint: disable=missing-docstring
def test_new_team_creation(flask_app_client, db, regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
resp... | team = models.Team.query.get(response.json['id']) |
Predict the next line after this snippet: <|code_start|># encoding: utf-8
# pylint: disable=too-few-public-methods,invalid-name,abstract-method,method-hidden
"""
RESTful API Rules
-----------------------
"""
class DenyAbortMixin(object):
"""
A helper permissions mixin raising an HTTP Error (specified in
... | return abort(code=self.DENY_ABORT_HTTP_CODE, message=self.DENY_ABORT_MESSAGE) |
Based on the snippet: <|code_start|># encoding: utf-8
"""
Webargs Parser wrapper module
-----------------------------
"""
class CustomWebargsParser(FlaskParser):
"""
This custom Webargs Parser aims to overload :meth:``handle_error`` in order
to call our custom :func:``abort`` function.
See the follo... | abort(status_code, messages=error.messages) |
Given snippet: <|code_start|>from __future__ import unicode_literals
User = get_user_model()
class UserProfileInline(admin.StackedInline):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.contrib import admin
from authtools.admin import NamedUserAdmin
from .models impo... | model = Profile |
Next line prediction: <|code_start|> class MyDoc(object):
__database__ = Dingus()
__collection__ = Dingus()
self.document = MyDoc
self.returned = self.connection.connect_document(self.document)
def should_return_subclass_of_original(self):
assert issubclass(s... | assert mod.DocumentProxy.calls( |
Based on the snippet: <|code_start|>
self.field_validator(path, value, sub_structure)
def _check_for_unknown_fields(body, structure, path):
"""Check `body` for any keys not present in `structure`.
This only checks the first level of keys. Any keys from :class:`dict`s in
the `body`\ 's values wil... | raise ValidationError(err) |
Using the snippet: <|code_start|>==========
"""
class Connection(MongoClient):
"""A connection to a MongoDB database.
This is a wrapper for a :class:`pymongo.mongo_client.MongoClient`.
"""
def connect_document(self, document):
"""Connect a document by creating a new type and injecting th... | return DocumentProxy(self, get_concrete_classes()) |
Predict the next line for this snippet: <|code_start|>
def run_ensure_indexes():
"""Run ensure_indexes.py with the example documents.
"""
os.environ['PYTHONPATH'] = os.getcwd()
process_args = [
'python',
'scalymongo/manage/ensure_indexes.py',
'tests.acceptance.manage.ensure_ind... | class TestEnsureIndex(BaseAcceptanceTest): |
Given the code snippet: <|code_start|> self.spec = {'foo': Dingus(), 'bar': Dingus()}
self.MyDoc.check_query_sharding(self.spec)
def should_not_crash(self):
pass
class WhenShardIndexNotSpecified(BaseDocumentSubclassTest):
def setup(self):
BaseDocumentSubclassTest.setup(self)
... | GlobalQueryException, self.MyDoc.check_query_sharding, self.spec) |
Using the snippet: <|code_start|> ):
def setup(self):
BaseModify.setup(self)
self.my_doc.modify(self.update)
class TestModifyWithExplicitQuerySpec(
BaseModify,
PropertyModifyWithExplicitQuerySpec,
PropertyModifyUpdatesLocalCopy,
):
def setup(self):
BaseModify.setup... | ModifyFailedError, |
Predict the next line after this snippet: <|code_start|>
class BaseCursorTestCase(DingusWhitelistTestCase):
module = mod
additional_mocks = ['wrapped_cursor']
def setup(self):
DingusWhitelistTestCase.setup(self)
self.document_type = DeterministicDingus('document_type')
<|code_end|>
usi... | self.cursor = Cursor(self.wrapped_cursor, self.document_type) |
Using the snippet: <|code_start|>## validate_single_field ##
class DescribeValidateSingleField(
DingusTestCase(validate_single_field, ['ValidationError'])):
def setup(self):
super(DescribeValidateSingleField, self).setup()
self.path = Dingus('path')
self.value = Dingus('value')
... | assert_raises_with_message( |
Given the code snippet: <|code_start|> def should_print_help(self):
assert self.parser.calls('print_help')
####
##
## main
##
####
class DescribeParseArguments(DingusWhitelistTestCase):
module = mod
module_mocks = [
'parse_arguments', '__import__', 'Connection', 'ensure_indexes']
addi... | assert self.module.ensure_indexes.calls( |
Next line prediction: <|code_start|>
additional_mocks = ['arguments']
def setup(self):
BaseParseArgumentsTestCase.setup(self)
self.parser.parse_args.return_value = (self.options, self.arguments)
assert_raises(SystemExit, parse_arguments)
def should_print_help(self):
assert... | main() |
Next line prediction: <|code_start|>
def should_add_background_option(self):
assert self.parser.calls(
'add_option', '--background', action='store_true',
help='create indexes as a non-blocking operation [default]',
)
def should_add_no_background_option(self):
ass... | self.returned = parse_arguments() |
Continue the code snippet: <|code_start|>
"""
parser = OptionParser()
parser.usage = '%prog [options] MODULE ENDPOINT'
parser.add_option(
'--background', action='store_true',
help='create indexes as a non-blocking operation [default]',
)
parser.add_option(
'--no-backgroun... | connection = Connection(endpoint) |
Predict the next line for this snippet: <|code_start|>
class BaseAcceptanceTest(object):
@classmethod
def setup_class(cls):
<|code_end|>
with the help of current file imports:
from scalymongo import Connection
and context from other files:
# Path: scalymongo/connection.py
# class Connection(MongoClient):
... | cls.connection = Connection() |
Given snippet: <|code_start|>
class BaseStructureWalker(object):
@classmethod
def setup_class(self):
self.field_validator = Dingus('field_validator')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from dingus import Dingus
from scalymongo.structure_walker import ... | self.structure_walker = StructureWalker(self.field_validator) |
Predict the next line for this snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law o... | return init_net(net, gpu_ids) |
Given snippet: <|code_start|># limitations under the License.
class LayeredVideoDataset(BaseDataset):
"""A dataset class for video layers.
It assumes that the directory specified by 'dataroot' contains metadata.json, and the directories iuv, rgb_256, and rgb_512.
The 'iuv' directory should contain direc... | self.image_paths = sorted(make_dataset(rgbdir, opt.max_dataset_size)) |
Predict the next line after this snippet: <|code_start|> sys.exit("There was an error parsing " +
compounds_file + "\n" + "I/O error({0}): {1}".format(e.errno, e.strerror))
return cpds
def reactions(organism_type="", rctf='Biochemistry/reactions.master.tsv', verbose=False):
"""
Pa... | locations = location() |
Given snippet: <|code_start|> lambda_d_factor = h.fixed("dip_vae.lambda_d_factor", 1.)
dip_type = h.fixed("dip_vae.dip_type", "ii")
config_dip_vae_ii = h.zipit(
[model_name, model_fn, lambda_od, lambda_d_factor, dip_type])
# BetaTCVAE config.
model_name = h.fixed("model.name", "beta_tc_vae")
model_fn ... | class AbstractReasoningStudyV1(study.Study): |
Using the snippet: <|code_start|> model_fn = h.fixed("model.model", "@beta_tc_vae()")
betas = h.sweep("beta_tc_vae.beta", h.discrete([1., 2., 4., 6., 8., 10.]))
config_beta_tc_vae = h.zipit([model_name, model_fn, betas])
all_models = h.chainit([
config_beta_vae, config_factor_vae, config_dip_vae_i, config_... | model_config_file = resources.get_file( |
Using the snippet: <|code_start|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for t... | result = self.evaluate(relational_layers.repeat(tf.constant(a), 3, axis=-2)) |
Next line prediction: <|code_start|> num_train=gin.REQUIRED,
num_test_points_per_class=gin.REQUIRED,
batch_size=16):
"""Computes unfairness scores.
We first compute either the mean or maximum total variation for a given
sensitive and target variable. ... | mus_train, ys_train = utils.generate_batch_factor_code( |
Given the code snippet: <|code_start|>
@gin.configurable("evaluation", blacklist=["model_dirs", "output_dir"])
def evaluate(model_dirs,
output_dir,
evaluation_fn=gin.REQUIRED,
random_seed=gin.REQUIRED,
name=""):
"""Loads a trained estimator and evaluates it accordin... | dataset = named_data.get_named_ground_truth_data() |
Given the code snippet: <|code_start|>(https://arxiv.org/abs/1905.12614)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
FLAGS = flags.FLAGS
@gin.configurable("evaluation", blacklist=["model_dirs", "output_dir"])
def evaluate(model_dirs,
... | gin_dict = results.gin_dict(gin_config_file) |
Based on the snippet: <|code_start|># coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.ap... | ground_truth_data = dummy_data.DummyData() |
Predict the next line for this snippet: <|code_start|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is... | _ = fairness.compute_fairness(ground_truth_data, representation_function, |