| Instruction (string, length 362–7.83k) | output_code (string, length 1–945) |
|---|---|
Next line prediction: <|code_start|>
MAP = {
'C': ['C', 'C/C++ Header'],
'C++': ['C++', 'C/C++ Header'],
'Objective C': ['Objective C', 'C/C++ Header']
}
def run(project_id, repo_path, cursor, **options):
threshold = options.get('threshold', 0)
query = 'SELECT language FROM projects WHERE id = %d' % project_id
cursor.execute(query)
record = cursor.fetchone()
language = record[0]
languages = MAP[language] if language in MAP else [language]
<|code_end|>
. Use current file imports:
(import sys
from lib import utilities)
and context including class names, function names, or small code snippets from other files:
# Path: lib/utilities.py
# TOKENIZER = None
# ACK_LANGUAGE_MAP = {
# 'c': 'cc',
# 'c++': 'cpp',
# 'c#': 'csharp',
# 'objective-c': 'objc',
# 'ojective-c++': 'objcpp',
# }
# def get_cache_hits():
# def get_loc(path, files=None):
# def search(
# pattern, path, recursive=True, whole=False, ignorecase=False, include=None,
# exclude=None
# ):
# def url_to_json(url, headers={}):
# def get_repo_path(repo_id, repositories_dir):
# def clone(owner, name, directory, date=None):
# def read(jsonfile):
# def parse_datetime_delta(datetime_delta):
# def is_cloneable(owner, name):
# def get_files(path, language):
. Output only the next line. | _sloc = utilities.get_loc(repo_path) |
Given the following code snippet before the placeholder: <|code_start|>
class DatabaseTestCase(unittest.TestCase):
def setUp(self):
path = (
os.path.join(
os.path.abspath(
os.path.join(
os.path.dirname(os.path.realpath(__file__)),
os.pardir
)
),
'config.json'
)
)
settings = None
with open(path, 'r') as file_:
settings = json.load(file_)['options']['datasource']
<|code_end|>
, predict the next line using imports from the current file:
import json
import os
import pickle
import unittest
import mysql.connector as mysql
from lib import database
and context including class names, function names, and sometimes code from other files:
# Path: lib/database.py
# class DatabaseError(Exception):
# class Database(object):
# def __init__(self, value):
# def __str__(self):
# def __init__(self, settings):
# def connect(self):
# def disconnect(self):
# def get(self, query):
# def post(self, query, data=None):
# def cursor(self):
# def _connected(self):
# def __getstate__(self):
# def __setstate__(self, state):
. Output only the next line. | self.database = database.Database(settings) |
Predict the next line after this snippet: <|code_start|> yield kwargs
def chunker(sweeper, num_chunks=10, confirm=True):
chunks = [ [] for _ in range(num_chunks) ]
print('computing chunks')
configs = [config for config in sweeper]
random.shuffle(configs, random.random)
for i, config in enumerate(configs):
chunks[i % num_chunks].append(config)
print('num chunks: ', num_chunks)
print('chunk sizes: ', [len(chunk) for chunk in chunks])
print('total jobs: ', sum([len(chunk) for chunk in chunks]))
resp = 'y'
if confirm:
print('continue?(y/n)')
resp = str(input())
if resp == 'y':
return chunks
else:
return []
def run_sweep_doodad(target, params, run_mode, mounts, test_one=False, docker_image='python:3', return_output=False, verbose=False):
# build archive
target_dir = os.path.dirname(target)
target_mount_dir = os.path.join('target', os.path.basename(target_dir))
<|code_end|>
using the current file's imports:
import math
import os
import itertools
import multiprocessing
import random
import hashlib
import doodad
from datetime import datetime
from doodad import mount
from doodad.launch import launch_api
from doodad.darchive import archive_builder_docker as archive_builder
and any relevant context from other files:
# Path: doodad/mount.py
# class Mount(object):
# class MountLocal(Mount):
# class MountGit(Mount):
# class MountS3(Mount):
# class MountGCP(Mount):
# def __init__(self, mount_point=None, pythonpath=False, output=False):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def writeable(self):
# def name(self):
# def __str__(self):
# def __init__(self, local_dir, mount_point=None, cleanup=True,
# filter_ext=('.pyc', '.log', '.git', '.mp4'),
# filter_dir=('data', '.git'),
# **kwargs):
# def ignore_patterns(self, dirname, contents):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __str__(self):
# def docker_mount_dir(self):
# def __init__(self, git_url, branch=None,
# ssh_identity=None, **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# s3_path,
# sync_interval=15,
# output=True,
# dry=False,
# include_types=('*.txt', '*.csv', '*.json', '*.gz', '*.tar', '*.log', '*.pkl'),
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# gcp_path=None,
# sync_interval=15,
# output=True,
# dry=False,
# exclude_regex='*.tmp',
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
#
# Path: doodad/launch/launch_api.py
# def run_command(
# command,
# cli_args=None,
# mode=launch_mode.LocalMode(),
# mounts=tuple(),
# return_output=False,
# verbose=False,
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# ):
# def run_python(
# target,
# target_mount_dir='target',
# mounts=tuple(),
# docker_image='python:3',
# **kwargs
# ):
# def make_python_command(
# target,
# python_cmd='python',
# ):
#
# Path: doodad/darchive/archive_builder_docker.py
# THIS_FILE_DIR = os.path.dirname(__file__)
# MAKESELF_PATH = os.path.join(THIS_FILE_DIR, 'makeself.sh')
# MAKESELF_HEADER_PATH = os.path.join(THIS_FILE_DIR, 'makeself-header.sh')
# BEGIN_HEADER = '--- BEGIN DAR OUTPUT ---'
# DAR_PAYLOAD_MOUNT = 'dar_payload'
# FINAL_SCRIPT = './final_script.sh'
# def build_archive(archive_filename='runfile.dar',
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# payload_script='',
# mounts=(),
# use_gpu_image=False,
# verbose=False):
# def write_metadata(arch_dir):
# def write_docker_hook(
# arch_dir, image_name, mounts, script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def write_singularity_hook(arch_dir, image_name, mounts,
# script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def create_bind_flag(mnt):
# def write_run_script(arch_dir, mounts, payload_script, verbose=False):
# def compile_archive(archive_dir, output_file, script_name, verbose=False):
# def run_archive(filename, cli_args='', encoding='utf-8', shell_interpreter='sh', timeout=None, get_output=True):
# def _strip_stdout(output):
# def temp_archive_file():
. Output only the next line. | target_mount = mount.MountLocal(local_dir=target_dir, mount_point=target_mount_dir) |
Given snippet: <|code_start|>def chunker(sweeper, num_chunks=10, confirm=True):
chunks = [ [] for _ in range(num_chunks) ]
print('computing chunks')
configs = [config for config in sweeper]
random.shuffle(configs, random.random)
for i, config in enumerate(configs):
chunks[i % num_chunks].append(config)
print('num chunks: ', num_chunks)
print('chunk sizes: ', [len(chunk) for chunk in chunks])
print('total jobs: ', sum([len(chunk) for chunk in chunks]))
resp = 'y'
if confirm:
print('continue?(y/n)')
resp = str(input())
if resp == 'y':
return chunks
else:
return []
def run_sweep_doodad(target, params, run_mode, mounts, test_one=False, docker_image='python:3', return_output=False, verbose=False):
# build archive
target_dir = os.path.dirname(target)
target_mount_dir = os.path.join('target', os.path.basename(target_dir))
target_mount = mount.MountLocal(local_dir=target_dir, mount_point=target_mount_dir)
mounts = list(mounts) + [target_mount]
target_full_path = os.path.join(target_mount.mount_point, os.path.basename(target))
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import math
import os
import itertools
import multiprocessing
import random
import hashlib
import doodad
from datetime import datetime
from doodad import mount
from doodad.launch import launch_api
from doodad.darchive import archive_builder_docker as archive_builder
and context:
# Path: doodad/mount.py
# class Mount(object):
# class MountLocal(Mount):
# class MountGit(Mount):
# class MountS3(Mount):
# class MountGCP(Mount):
# def __init__(self, mount_point=None, pythonpath=False, output=False):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def writeable(self):
# def name(self):
# def __str__(self):
# def __init__(self, local_dir, mount_point=None, cleanup=True,
# filter_ext=('.pyc', '.log', '.git', '.mp4'),
# filter_dir=('data', '.git'),
# **kwargs):
# def ignore_patterns(self, dirname, contents):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __str__(self):
# def docker_mount_dir(self):
# def __init__(self, git_url, branch=None,
# ssh_identity=None, **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# s3_path,
# sync_interval=15,
# output=True,
# dry=False,
# include_types=('*.txt', '*.csv', '*.json', '*.gz', '*.tar', '*.log', '*.pkl'),
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# gcp_path=None,
# sync_interval=15,
# output=True,
# dry=False,
# exclude_regex='*.tmp',
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
#
# Path: doodad/launch/launch_api.py
# def run_command(
# command,
# cli_args=None,
# mode=launch_mode.LocalMode(),
# mounts=tuple(),
# return_output=False,
# verbose=False,
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# ):
# def run_python(
# target,
# target_mount_dir='target',
# mounts=tuple(),
# docker_image='python:3',
# **kwargs
# ):
# def make_python_command(
# target,
# python_cmd='python',
# ):
#
# Path: doodad/darchive/archive_builder_docker.py
# THIS_FILE_DIR = os.path.dirname(__file__)
# MAKESELF_PATH = os.path.join(THIS_FILE_DIR, 'makeself.sh')
# MAKESELF_HEADER_PATH = os.path.join(THIS_FILE_DIR, 'makeself-header.sh')
# BEGIN_HEADER = '--- BEGIN DAR OUTPUT ---'
# DAR_PAYLOAD_MOUNT = 'dar_payload'
# FINAL_SCRIPT = './final_script.sh'
# def build_archive(archive_filename='runfile.dar',
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# payload_script='',
# mounts=(),
# use_gpu_image=False,
# verbose=False):
# def write_metadata(arch_dir):
# def write_docker_hook(
# arch_dir, image_name, mounts, script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def write_singularity_hook(arch_dir, image_name, mounts,
# script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def create_bind_flag(mnt):
# def write_run_script(arch_dir, mounts, payload_script, verbose=False):
# def compile_archive(archive_dir, output_file, script_name, verbose=False):
# def run_archive(filename, cli_args='', encoding='utf-8', shell_interpreter='sh', timeout=None, get_output=True):
# def _strip_stdout(output):
# def temp_archive_file():
which might include code, classes, or functions. Output only the next line. | command = launch_api.make_python_command( |
Next line prediction: <|code_start|> print('num chunks: ', num_chunks)
print('chunk sizes: ', [len(chunk) for chunk in chunks])
print('total jobs: ', sum([len(chunk) for chunk in chunks]))
resp = 'y'
if confirm:
print('continue?(y/n)')
resp = str(input())
if resp == 'y':
return chunks
else:
return []
def run_sweep_doodad(target, params, run_mode, mounts, test_one=False, docker_image='python:3', return_output=False, verbose=False):
# build archive
target_dir = os.path.dirname(target)
target_mount_dir = os.path.join('target', os.path.basename(target_dir))
target_mount = mount.MountLocal(local_dir=target_dir, mount_point=target_mount_dir)
mounts = list(mounts) + [target_mount]
target_full_path = os.path.join(target_mount.mount_point, os.path.basename(target))
command = launch_api.make_python_command(
target_full_path
)
print('Launching jobs with mode %s' % run_mode)
results = []
njobs = 0
<|code_end|>
. Use current file imports:
(import math
import os
import itertools
import multiprocessing
import random
import hashlib
import doodad
from datetime import datetime
from doodad import mount
from doodad.launch import launch_api
from doodad.darchive import archive_builder_docker as archive_builder)
and context including class names, function names, or small code snippets from other files:
# Path: doodad/mount.py
# class Mount(object):
# class MountLocal(Mount):
# class MountGit(Mount):
# class MountS3(Mount):
# class MountGCP(Mount):
# def __init__(self, mount_point=None, pythonpath=False, output=False):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def writeable(self):
# def name(self):
# def __str__(self):
# def __init__(self, local_dir, mount_point=None, cleanup=True,
# filter_ext=('.pyc', '.log', '.git', '.mp4'),
# filter_dir=('data', '.git'),
# **kwargs):
# def ignore_patterns(self, dirname, contents):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __str__(self):
# def docker_mount_dir(self):
# def __init__(self, git_url, branch=None,
# ssh_identity=None, **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# s3_path,
# sync_interval=15,
# output=True,
# dry=False,
# include_types=('*.txt', '*.csv', '*.json', '*.gz', '*.tar', '*.log', '*.pkl'),
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
# def __init__(self,
# gcp_path=None,
# sync_interval=15,
# output=True,
# dry=False,
# exclude_regex='*.tmp',
# **kwargs):
# def dar_build_archive(self, deps_dir):
# def dar_extract_command(self):
#
# Path: doodad/launch/launch_api.py
# def run_command(
# command,
# cli_args=None,
# mode=launch_mode.LocalMode(),
# mounts=tuple(),
# return_output=False,
# verbose=False,
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# ):
# def run_python(
# target,
# target_mount_dir='target',
# mounts=tuple(),
# docker_image='python:3',
# **kwargs
# ):
# def make_python_command(
# target,
# python_cmd='python',
# ):
#
# Path: doodad/darchive/archive_builder_docker.py
# THIS_FILE_DIR = os.path.dirname(__file__)
# MAKESELF_PATH = os.path.join(THIS_FILE_DIR, 'makeself.sh')
# MAKESELF_HEADER_PATH = os.path.join(THIS_FILE_DIR, 'makeself-header.sh')
# BEGIN_HEADER = '--- BEGIN DAR OUTPUT ---'
# DAR_PAYLOAD_MOUNT = 'dar_payload'
# FINAL_SCRIPT = './final_script.sh'
# def build_archive(archive_filename='runfile.dar',
# docker_image='ubuntu:18.04',
# singularity_image=None,
# container_type='docker',
# extra_container_flags='',
# payload_script='',
# mounts=(),
# use_gpu_image=False,
# verbose=False):
# def write_metadata(arch_dir):
# def write_docker_hook(
# arch_dir, image_name, mounts, script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def write_singularity_hook(arch_dir, image_name, mounts,
# script_name,
# extra_flags='',
# verbose=False, use_nvidia_docker=False):
# def create_bind_flag(mnt):
# def write_run_script(arch_dir, mounts, payload_script, verbose=False):
# def compile_archive(archive_dir, output_file, script_name, verbose=False):
# def run_archive(filename, cli_args='', encoding='utf-8', shell_interpreter='sh', timeout=None, get_output=True):
# def _strip_stdout(output):
# def temp_archive_file():
. Output only the next line. | with archive_builder.temp_archive_file() as archive_file: |
Using the snippet: <|code_start|> verbose=verbose,
use_nvidia_docker=use_gpu_image)
elif container_type == 'docker':
write_docker_hook(archive_dir, docker_image, mounts,
extra_flags=extra_container_flags,
script_name=FINAL_SCRIPT,
verbose=verbose,
use_nvidia_docker=use_gpu_image)
else:
raise NotImplementedError()
write_metadata(archive_dir)
# create the self-extracting archive
compile_archive(archive_dir, archive_filename, FINAL_SCRIPT,
verbose=verbose)
finally:
shutil.rmtree(work_dir)
return archive_filename
def write_metadata(arch_dir):
with open(os.path.join(arch_dir, 'METADATA'), 'w') as f:
f.write('doodad_version=%s\n' % doodad.__version__)
f.write('unix_timestamp=%d\n' % time.time())
f.write('uuid=%s\n' % uuid.uuid4())
def write_docker_hook(
arch_dir, image_name, mounts, script_name,
extra_flags='',
verbose=False, use_nvidia_docker=False):
docker_hook_file = os.path.join(arch_dir, script_name)
<|code_end|>
, determine the next line of code. You have imports:
import os
import pathlib
import sys
import tempfile
import shutil
import time
import subprocess
import uuid
import contextlib
import uuid
import doodad
from doodad.utils import cmd_builder
and context (class names, function names, or code) available:
# Path: doodad/utils/cmd_builder.py
# class CommandBuilder(object):
# def __init__(self):
# def append(self, cmd, *args):
# def echo(self, msg):
# def to_string(self, separator=';'):
# def __str__(self):
# def __iter__(self):
# def dump_script(self):
. Output only the next line. | builder = cmd_builder.CommandBuilder() |
Given snippet: <|code_start|>
if atk_check.result.success:
hit_event = events.HitEvent(**atk_event.kwargs)
hit_event.def_mod = atk_check.result.dos
event.dispatch(hit_event)
else:
event.dispatch(events.MessageEvent("You missed!"))
@event.event_handler(events.HitEvent)
def hit_handler(self, hit_event):
def_check = events.SkillCheckEvent(
hit_event.defender, 'melee.shield')
event.dispatch(def_check)
if def_check.result.success:
event.dispatch(events.MessageEvent("It blocked your attack!"))
else:
dmg = self._roll_damage(hit_event.entity, hit_event.def_mod)
dmg_event = events.DamageEvent(**hit_event.kwargs)
dmg_event.dmg.bludgeoning = dmg
event.dispatch(dmg_event)
def _roll_damage(self, attacker, attack_dos):
# TODO: Actually get attributes; for now, fudge it and pretend Str=10
strength = 10
n = max(1, int(strength/5)-1)
m = 6 + 4 * n
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from roglick.engine.ecs import SystemBase, exceptions
from roglick.engine import event, random
from roglick import events
and context:
# Path: roglick/engine/ecs/exceptions.py
# class NoComponentForEntityError(Exception):
# class NotAComponentError(Exception):
# def __init__(self, entity, component_type):
# def __init__(self, obj):
#
# Path: roglick/engine/ecs/base.py
# class SystemBase(object):
# """Systems are the workhorses of the ECS architecture.
#
# A System takes care of the actual execution of game logic, each one being
# responsible for implementing a concrete subset of the game's overall rules.
# """
# def __init__(self):
# self._entity_manager = None
# self._world = None
#
# def set_entity_manager(self, entity_manager):
# self._entity_manager = entity_manager
#
# def set_world_manager(self, world_manager):
# self._world = world_manager
#
# def execute(self):
# """Execute this system on its EntityManager."""
# pass
#
# Path: roglick/engine/event.py
# class Event(object):
# def __init__(self, entity=None):
# def stop(self):
# def propagate(self):
# def __repr__(self):
# def register(handler, event_class=None):
# def dispatch(event):
# def event_handler(*events):
# def decorator(meth):
#
# Path: roglick/engine/random.py
# class Random(object):
# def __init__(self, seed=None, generator=None):
# def generator(self):
# def get_int(self, min=None, max=None):
# def flip_coin(self):
# def roll_dice(self, num=1, sides=6):
# def one_in(self, odds):
# def probability(self, prob):
# def choice(self, choices):
# def shuffle(self, deck):
# def get_int(*args, **kwargs):
# def flip_coin():
# def roll_dice(*args, **kwargs):
# def one_in(*args, **kwargs):
# def probability(*args, **kwargs):
# def choice(*args, **kwargs):
# def shuffle(*args, **kwargs):
#
# Path: roglick.py
which might include code, classes, or functions. Output only the next line. | dmg = random.roll_dice(n) + strength - m |
Here is a snippet: <|code_start|> self._name = name
self._key = key
self._attr = attr
self._parent = parent
@property
def name(self):
return self._name
@property
def key(self):
return self._key
@property
def attr(self):
return self._attr
@property
def parent(self):
if self._parent is None:
return None
else:
return skill_tree[self._parent]
def __repr__(self):
return "{cls}({skill})".format(
cls=self.__class__.__name__,
skill=self.name)
<|code_end|>
. Write the next line using the current file imports:
from roglick.engine import file_obj
and context from other files:
# Path: roglick/engine/file_obj.py
# class FileObj(object):
# class MultiFileObj(FileObj):
# def __init__(self, conf_file, obj_key=None):
# def keys(self):
# def random(self, rand=None):
# def _load_file(self, conf_file, obj_key=None):
# def _make_key(self, item):
# def _process_data(self, data):
# def _process_item(self, item):
# def __getitem__(self, key):
# def __getattr__(self, key):
# def __len__(self):
# def __iter__(self):
# def __contains__(self, item):
# def _load_file(self, file_pattern, obj_key=None):
, which may include functions, classes, or code. Output only the next line. | class SkillTree(file_obj.FileObj): |
Next line prediction: <|code_start|>
class EntityManager(object):
"""The EntityManager is responsible for creating and maintaining Entities
It accomplishes its job by creating monotonically increasing Entity IDs, as
well as maintaining a list of all the Component managers which, in turn,
keep track of all the Components.
"""
def __init__(self):
self._next_eid = 0
self._components = {}
# Since it's common enough, we'll go ahead and create an Entity for the
# player's character right away; no harm if it's not used
self._pc = self.create_entity()
@property
def pc(self):
return self._pc
def create_entity(self):
"""Creates and returns a new Entity."""
<|code_end|>
. Use current file imports:
(from .base import Entity
from .component import ComponentBase
from .exceptions import NoComponentForEntityError, NotAComponentError)
and context including class names, function names, or small code snippets from other files:
# Path: roglick/engine/ecs/base.py
# class Entity(object):
# """An Entity is really little more than a lookup key for Components"""
# __slots__ = ("_eid",)
#
# def __init__(self, eid):
# self._eid = eid
#
# def __repr__(self):
# return "{name}({eid})".format(
# name=type(self).__name__,
# eid=self._eid)
#
# def __hash__(self):
# return self._eid
#
# def __eq__(self, rhs):
# return hash(self) == hash(rhs)
#
# Path: roglick/engine/ecs/component.py
# class ComponentBase(object, metaclass=ComponentMeta):
# """Base class for Components to inherit from.
#
# Components should primarily just be data containers; logic should live
# elsewhere, mostly in Systems."""
# __slots__ = ()
# _properties = ()
#
# def __init__(self, *args, **kwargs):
# """A generic init method for initializing properties.
#
# Properties can be set on initialization via either positional or
# keyword arguments. The _properties property should be a tuple of
# 2-tuples in the form (k,v), where k is the property name and v is the
# property's default value; positional initialization follows the same
# order as _properties.
# """
# # Start by initializing our properties to default values
# for k,v in self._properties:
# setattr(self, k, v)
#
# # For any positional arguments, assign those values to our properties
# # This is done in order of our __slots__ property
# for k,v in zip(self.__slots__, args):
# setattr(self, k, v)
# # For any keyword arguments, assign those values to our properties
# # Keywords must of course match one of our properties
# for k in kwargs:
# setattr(self, k, kwargs[k])
#
# def __repr__(self):
# values = []
# for k, default in self._properties:
# values.append("{k}={v}".format(k=k, v=getattr(self, k)))
#
# values = ", ".join(values)
#
# return "{cls}({values})".format(
# cls = self.__class__.__name__,
# values = values)
#
# Path: roglick/engine/ecs/exceptions.py
# class NoComponentForEntityError(Exception):
# """Exception raised when an Entity does not have this Component"""
# def __init__(self, entity, component_type):
# super().__init__("{entity} has no {component_type}".format(
# entity=entity, component_type=component_type))
#
# class NotAComponentError(Exception):
# """Exception raised when using a component that doesn't subclass Component
# """
# def __init__(self, obj):
# super().__init__("{obj} is of type {kind} instead of Component".format(
# obj=obj, kind=type(obj).__name__))
. Output only the next line. | entity = Entity(self._next_eid) |
Given the following code snippet before the placeholder: <|code_start|> @property
def pc(self):
return self._pc
def create_entity(self):
"""Creates and returns a new Entity."""
entity = Entity(self._next_eid)
self._next_eid += 1
return entity
def destroy_entity(self, entity):
"""Remove an Entity and all its Components.
Since our only storage of Entities is as dictionary keys, it's
sufficient to remove all Components for the Entity; if an Entity
doesn't index any Components, for all intents and purposes it doesn't
exist.
"""
# We can't iterate directly over the dict, as we're modifying it
ctypes = self._components.keys()
for ctype in ctypes:
# We already have logic to do this, so be DRY
self.remove_component(entity, ctype)
def set_component(self, entity, component):
"""Set the given Component for the Entity.
This method will either add a new Component, or overwrite an existing
one.
"""
<|code_end|>
, predict the next line using imports from the current file:
from .base import Entity
from .component import ComponentBase
from .exceptions import NoComponentForEntityError, NotAComponentError
and context including class names, function names, and sometimes code from other files:
# Path: roglick/engine/ecs/base.py
# class Entity(object):
# """An Entity is really little more than a lookup key for Components"""
# __slots__ = ("_eid",)
#
# def __init__(self, eid):
# self._eid = eid
#
# def __repr__(self):
# return "{name}({eid})".format(
# name=type(self).__name__,
# eid=self._eid)
#
# def __hash__(self):
# return self._eid
#
# def __eq__(self, rhs):
# return hash(self) == hash(rhs)
#
# Path: roglick/engine/ecs/component.py
# class ComponentBase(object, metaclass=ComponentMeta):
# """Base class for Components to inherit from.
#
# Components should primarily just be data containers; logic should live
# elsewhere, mostly in Systems."""
# __slots__ = ()
# _properties = ()
#
# def __init__(self, *args, **kwargs):
# """A generic init method for initializing properties.
#
# Properties can be set on initialization via either positional or
# keyword arguments. The _properties property should be a tuple of
# 2-tuples in the form (k,v), where k is the property name and v is the
# property's default value; positional initialization follows the same
# order as _properties.
# """
# # Start by initializing our properties to default values
# for k,v in self._properties:
# setattr(self, k, v)
#
# # For any positional arguments, assign those values to our properties
# # This is done in order of our __slots__ property
# for k,v in zip(self.__slots__, args):
# setattr(self, k, v)
# # For any keyword arguments, assign those values to our properties
# # Keywords must of course match one of our properties
# for k in kwargs:
# setattr(self, k, kwargs[k])
#
# def __repr__(self):
# values = []
# for k, default in self._properties:
# values.append("{k}={v}".format(k=k, v=getattr(self, k)))
#
# values = ", ".join(values)
#
# return "{cls}({values})".format(
# cls = self.__class__.__name__,
# values = values)
#
# Path: roglick/engine/ecs/exceptions.py
# class NoComponentForEntityError(Exception):
# """Exception raised when an Entity does not have this Component"""
# def __init__(self, entity, component_type):
# super().__init__("{entity} has no {component_type}".format(
# entity=entity, component_type=component_type))
#
# class NotAComponentError(Exception):
# """Exception raised when using a component that doesn't subclass Component
# """
# def __init__(self, obj):
# super().__init__("{obj} is of type {kind} instead of Component".format(
# obj=obj, kind=type(obj).__name__))
. Output only the next line. | if not isinstance(component, ComponentBase): |
Given the code snippet: <|code_start|> exist.
"""
# We can't iterate directly over the dict, as we're modifying it
ctypes = self._components.keys()
for ctype in ctypes:
# We already have logic to do this, so be DRY
self.remove_component(entity, ctype)
def set_component(self, entity, component):
"""Set the given Component for the Entity.
This method will either add a new Component, or overwrite an existing
one.
"""
if not isinstance(component, ComponentBase):
# Maybe an unnecessary throwback to my C++ version...
raise NotAComponentError(component)
ctype = type(component)
# We need a dictionary for each type we store, so make sure we have one
if ctype not in self._components:
self._components[ctype] = {}
self._components[ctype][entity] = component
def get_component(self, entity, component_type):
"""Returns the Entity's Component or raises NoComponentForENtityError"""
try:
return self._components[component_type][entity]
except KeyError:
<|code_end|>
, generate the next line using the imports in this file:
from .base import Entity
from .component import ComponentBase
from .exceptions import NoComponentForEntityError, NotAComponentError
and context (functions, classes, or occasionally code) from other files:
# Path: roglick/engine/ecs/base.py
# class Entity(object):
# """An Entity is really little more than a lookup key for Components"""
# __slots__ = ("_eid",)
#
# def __init__(self, eid):
# self._eid = eid
#
# def __repr__(self):
# return "{name}({eid})".format(
# name=type(self).__name__,
# eid=self._eid)
#
# def __hash__(self):
# return self._eid
#
# def __eq__(self, rhs):
# return hash(self) == hash(rhs)
#
# Path: roglick/engine/ecs/component.py
# class ComponentBase(object, metaclass=ComponentMeta):
# """Base class for Components to inherit from.
#
# Components should primarily just be data containers; logic should live
# elsewhere, mostly in Systems."""
# __slots__ = ()
# _properties = ()
#
# def __init__(self, *args, **kwargs):
# """A generic init method for initializing properties.
#
# Properties can be set on initialization via either positional or
# keyword arguments. The _properties property should be a tuple of
# 2-tuples in the form (k,v), where k is the property name and v is the
# property's default value; positional initialization follows the same
# order as _properties.
# """
# # Start by initializing our properties to default values
# for k,v in self._properties:
# setattr(self, k, v)
#
# # For any positional arguments, assign those values to our properties
# # This is done in order of our __slots__ property
# for k,v in zip(self.__slots__, args):
# setattr(self, k, v)
# # For any keyword arguments, assign those values to our properties
# # Keywords must of course match one of our properties
# for k in kwargs:
# setattr(self, k, kwargs[k])
#
# def __repr__(self):
# values = []
# for k, default in self._properties:
# values.append("{k}={v}".format(k=k, v=getattr(self, k)))
#
# values = ", ".join(values)
#
# return "{cls}({values})".format(
# cls = self.__class__.__name__,
# values = values)
#
# Path: roglick/engine/ecs/exceptions.py
# class NoComponentForEntityError(Exception):
# """Exception raised when an Entity does not have this Component"""
# def __init__(self, entity, component_type):
# super().__init__("{entity} has no {component_type}".format(
# entity=entity, component_type=component_type))
#
# class NotAComponentError(Exception):
# """Exception raised when using a component that doesn't subclass Component
# """
# def __init__(self, obj):
# super().__init__("{obj} is of type {kind} instead of Component".format(
# obj=obj, kind=type(obj).__name__))
. Output only the next line. | raise NoComponentForEntityError(entity, component_type) |
Continue the code snippet: <|code_start|> return self._pc
def create_entity(self):
"""Creates and returns a new Entity."""
entity = Entity(self._next_eid)
self._next_eid += 1
return entity
def destroy_entity(self, entity):
"""Remove an Entity and all its Components.
Since our only storage of Entities is as dictionary keys, it's
sufficient to remove all Components for the Entity; if an Entity
doesn't index any Components, for all intents and purposes it doesn't
exist.
"""
# We can't iterate directly over the dict, as we're modifying it
ctypes = self._components.keys()
for ctype in ctypes:
# We already have logic to do this, so be DRY
self.remove_component(entity, ctype)
def set_component(self, entity, component):
"""Set the given Component for the Entity.
This method will either add a new Component, or overwrite an existing
one.
"""
if not isinstance(component, ComponentBase):
# Maybe an unnecessary throwback to my C++ version...
<|code_end|>
. Use current file imports:
from .base import Entity
from .component import ComponentBase
from .exceptions import NoComponentForEntityError, NotAComponentError
and context (classes, functions, or code) from other files:
# Path: roglick/engine/ecs/base.py
# class Entity(object):
# """An Entity is really little more than a lookup key for Components"""
# __slots__ = ("_eid",)
#
# def __init__(self, eid):
# self._eid = eid
#
# def __repr__(self):
# return "{name}({eid})".format(
# name=type(self).__name__,
# eid=self._eid)
#
# def __hash__(self):
# return self._eid
#
# def __eq__(self, rhs):
# return hash(self) == hash(rhs)
#
# Path: roglick/engine/ecs/component.py
# class ComponentBase(object, metaclass=ComponentMeta):
# """Base class for Components to inherit from.
#
# Components should primarily just be data containers; logic should live
# elsewhere, mostly in Systems."""
# __slots__ = ()
# _properties = ()
#
# def __init__(self, *args, **kwargs):
# """A generic init method for initializing properties.
#
# Properties can be set on initialization via either positional or
# keyword arguments. The _properties property should be a tuple of
# 2-tuples in the form (k,v), where k is the property name and v is the
# property's default value; positional initialization follows the same
# order as _properties.
# """
# # Start by initializing our properties to default values
# for k,v in self._properties:
# setattr(self, k, v)
#
# # For any positional arguments, assign those values to our properties
# # This is done in order of our __slots__ property
# for k,v in zip(self.__slots__, args):
# setattr(self, k, v)
# # For any keyword arguments, assign those values to our properties
# # Keywords must of course match one of our properties
# for k in kwargs:
# setattr(self, k, kwargs[k])
#
# def __repr__(self):
# values = []
# for k, default in self._properties:
# values.append("{k}={v}".format(k=k, v=getattr(self, k)))
#
# values = ", ".join(values)
#
# return "{cls}({values})".format(
# cls = self.__class__.__name__,
# values = values)
#
# Path: roglick/engine/ecs/exceptions.py
# class NoComponentForEntityError(Exception):
# """Exception raised when an Entity does not have this Component"""
# def __init__(self, entity, component_type):
# super().__init__("{entity} has no {component_type}".format(
# entity=entity, component_type=component_type))
#
# class NotAComponentError(Exception):
# """Exception raised when using a component that doesn't subclass Component
# """
# def __init__(self, obj):
# super().__init__("{obj} is of type {kind} instead of Component".format(
# obj=obj, kind=type(obj).__name__))
. Output only the next line. | raise NotAComponentError(component) |
Continue the code snippet: <|code_start|>
class FileObj(object):
def __init__(self, conf_file, obj_key=None):
self._data = {}
self._load_file(conf_file, obj_key)
def keys(self):
return self._data.keys()
<|code_end|>
. Use current file imports:
import json
from glob import glob
from roglick.engine import random
and context (classes, functions, or code) from other files:
# Path: roglick/engine/random.py
# class Random(object):
# def __init__(self, seed=None, generator=None):
# def generator(self):
# def get_int(self, min=None, max=None):
# def flip_coin(self):
# def roll_dice(self, num=1, sides=6):
# def one_in(self, odds):
# def probability(self, prob):
# def choice(self, choices):
# def shuffle(self, deck):
# def get_int(*args, **kwargs):
# def flip_coin():
# def roll_dice(*args, **kwargs):
# def one_in(*args, **kwargs):
# def probability(*args, **kwargs):
# def choice(*args, **kwargs):
# def shuffle(*args, **kwargs):
. Output only the next line. | def random(self, rand=None): |
Given the following code snippet before the placeholder: <|code_start|>
class Mob(file_obj.MultiFileObj):
def __init__(self):
super().__init__('data/mobs/*.json')
def _process_item(self, item):
<|code_end|>
, predict the next line using imports from the current file:
from roglick.engine import colors,file_obj
and context including class names, function names, and sometimes code from other files:
# Path: roglick/engine/colors.py
# class RGBBlendedColor(libtcod.Color):
# class HSVBlendedColor(libtcod.Color):
# def __init__(self, color1, color2, blend=0.50):
# def __init__(self, color1, color2, blend=0.50):
# def RGBtoHSV(r, g, b):
# def HSVtoRGB(h, s, v):
#
# Path: roglick/engine/file_obj.py
# class FileObj(object):
# class MultiFileObj(FileObj):
# def __init__(self, conf_file, obj_key=None):
# def keys(self):
# def random(self, rand=None):
# def _load_file(self, conf_file, obj_key=None):
# def _make_key(self, item):
# def _process_data(self, data):
# def _process_item(self, item):
# def __getitem__(self, key):
# def __getattr__(self, key):
# def __len__(self):
# def __iter__(self):
# def __contains__(self, item):
# def _load_file(self, file_pattern, obj_key=None):
. Output only the next line. | item['sprite']['color'] = getattr(colors, item['sprite']['color']) |
Predict the next line for this snippet: <|code_start|>
class Terrain(file_obj.FileObj):
def __init__(self, key):
super().__init__('data/terrain.json', key)
def _process_item(self, item):
<|code_end|>
with the help of current file imports:
from roglick.engine import colors,file_obj
and context from other files:
# Path: roglick/engine/colors.py
# class RGBBlendedColor(libtcod.Color):
# class HSVBlendedColor(libtcod.Color):
# def __init__(self, color1, color2, blend=0.50):
# def __init__(self, color1, color2, blend=0.50):
# def RGBtoHSV(r, g, b):
# def HSVtoRGB(h, s, v):
#
# Path: roglick/engine/file_obj.py
# class FileObj(object):
# class MultiFileObj(FileObj):
# def __init__(self, conf_file, obj_key=None):
# def keys(self):
# def random(self, rand=None):
# def _load_file(self, conf_file, obj_key=None):
# def _make_key(self, item):
# def _process_data(self, data):
# def _process_item(self, item):
# def __getitem__(self, key):
# def __getattr__(self, key):
# def __len__(self):
# def __iter__(self):
# def __contains__(self, item):
# def _load_file(self, file_pattern, obj_key=None):
, which may contain function names, class names, or code. Output only the next line. | item['color'] = getattr(colors, item['color']) |
Continue the code snippet: <|code_start|>
class PositionComponent(ComponentBase):
_properties = (('x', 0), ('y', 0))
class SpriteComponent(ComponentBase):
<|code_end|>
. Use current file imports:
from roglick.engine import colors
from roglick.engine.ecs import ComponentBase
and context (classes, functions, or code) from other files:
# Path: roglick/engine/colors.py
# class RGBBlendedColor(libtcod.Color):
# class HSVBlendedColor(libtcod.Color):
# def __init__(self, color1, color2, blend=0.50):
# def __init__(self, color1, color2, blend=0.50):
# def RGBtoHSV(r, g, b):
# def HSVtoRGB(h, s, v):
#
# Path: roglick/engine/ecs/component.py
# class ComponentBase(object, metaclass=ComponentMeta):
# """Base class for Components to inherit from.
#
# Components should primarily just be data containers; logic should live
# elsewhere, mostly in Systems."""
# __slots__ = ()
# _properties = ()
#
# def __init__(self, *args, **kwargs):
# """A generic init method for initializing properties.
#
# Properties can be set on initialization via either positional or
# keyword arguments. The _properties property should be a tuple of
# 2-tuples in the form (k,v), where k is the property name and v is the
# property's default value; positional initialization follows the same
# order as _properties.
# """
# # Start by initializing our properties to default values
# for k,v in self._properties:
# setattr(self, k, v)
#
# # For any positional arguments, assign those values to our properties
# # This is done in order of our __slots__ property
# for k,v in zip(self.__slots__, args):
# setattr(self, k, v)
# # For any keyword arguments, assign those values to our properties
# # Keywords must of course match one of our properties
# for k in kwargs:
# setattr(self, k, kwargs[k])
#
# def __repr__(self):
# values = []
# for k, default in self._properties:
# values.append("{k}={v}".format(k=k, v=getattr(self, k)))
#
# values = ", ".join(values)
#
# return "{cls}({values})".format(
# cls = self.__class__.__name__,
# values = values)
. Output only the next line. | _properties = (('glyph', ' '), ('color', colors.white)) |
Given the code snippet: <|code_start|>
def smoothstep(a, b, x):
"""Basic S-curve interpolation function.
Based on reference implementation available at
https://en.wikipedia.org/wiki/Smoothstep
"""
x = clamp((x - a)/(b - a), 0.0, 1.0)
return x*x*(3 - 2*x)
def smootherstep(a, b, x):
"""Improved S-curve interpolation function.
Based on reference implementation of the improved algorithm proposed by
Ken Perlin that is available at https://en.wikipedia.org/wiki/Smoothstep
"""
x = clamp((x - a)/(b - a), 0.0, 1.0)
return x*x*x*(x*(x*6 - 15) + 10);
def lerp(a, b, x):
"""Linear interpolation function."""
return a + x * (b - a)
class PerlinNoise2D(object):
def __init__(self, seed=None):
self.p = [x for x in range(256)]
if seed is None:
<|code_end|>
, generate the next line using the imports in this file:
from roglick.engine import random
from roglick.utils import clamp
and context (functions, classes, or occasionally code) from other files:
# Path: roglick/engine/random.py
# class Random(object):
# def __init__(self, seed=None, generator=None):
# def generator(self):
# def get_int(self, min=None, max=None):
# def flip_coin(self):
# def roll_dice(self, num=1, sides=6):
# def one_in(self, odds):
# def probability(self, prob):
# def choice(self, choices):
# def shuffle(self, deck):
# def get_int(*args, **kwargs):
# def flip_coin():
# def roll_dice(*args, **kwargs):
# def one_in(*args, **kwargs):
# def probability(*args, **kwargs):
# def choice(*args, **kwargs):
# def shuffle(*args, **kwargs):
#
# Path: roglick/utils.py
# def clamp(val, min_val, max_val):
# return max(min_val, min(max_val, val))
. Output only the next line. | seed = random.get_int() |
Predict the next line after this snippet: <|code_start|>
def smoothstep(a, b, x):
"""Basic S-curve interpolation function.
Based on reference implementation available at
https://en.wikipedia.org/wiki/Smoothstep
"""
<|code_end|>
using the current file's imports:
from roglick.engine import random
from roglick.utils import clamp
and any relevant context from other files:
# Path: roglick/engine/random.py
# class Random(object):
# def __init__(self, seed=None, generator=None):
# def generator(self):
# def get_int(self, min=None, max=None):
# def flip_coin(self):
# def roll_dice(self, num=1, sides=6):
# def one_in(self, odds):
# def probability(self, prob):
# def choice(self, choices):
# def shuffle(self, deck):
# def get_int(*args, **kwargs):
# def flip_coin():
# def roll_dice(*args, **kwargs):
# def one_in(*args, **kwargs):
# def probability(*args, **kwargs):
# def choice(*args, **kwargs):
# def shuffle(*args, **kwargs):
#
# Path: roglick/utils.py
# def clamp(val, min_val, max_val):
# return max(min_val, min(max_val, val))
. Output only the next line. | x = clamp((x - a)/(b - a), 0.0, 1.0) |
Next line prediction: <|code_start|> 'include_files': [],
'packages': ['asyncio'],
'excludes': ['tkinter']
},
'bdist_mac': {
'iconfile': 'icon-256.icns',
'bundle_name': 'Crazyflie client',
},
},
'executables': [Executable("bin/cfclient", icon='bitcraze.ico')],
}
if platform.system() == 'Darwin':
cxfreeze_options['options']['build_exe']['include_files'] = [
('/usr/local/lib/libusb-1.0.0.dylib', 'libusb.dylib'),
('/usr/local/lib/libSDL2-2.0.0.dylib', 'libSDL2.dylib'),
]
else:
cxfreeze_options = {}
# except:
# pass
if sys.version_info < (3, 7):
raise "must use python 3.7 or greater"
def relative(lst, base=''):
return list(map(lambda x: base + os.path.basename(x), lst))
try:
<|code_end|>
. Use current file imports:
(from setuptools import setup, find_packages
from glob import glob
from gitversion import get_version
from cx_Freeze import setup, Executable # noqa
import json
import codecs
import sys
import os
import platform)
and context including class names, function names, or small code snippets from other files:
# Path: gitversion.py
# def get_version(**kwargs):
# ''' Calculate a valid PEP440 version number based on git history.
#
# If possible the version is computed from the output of ``git describe``.
# If that is successful, the version string is written to the file
# ``RELEASE-VERSION``.
#
# If ``git describe`` fails (most likely because we’re in an unpacked
# copy of an sdist rather than in a git working copy) then we fall back
# on reading the contents of the ``RELEASE-VERSION`` file.
#
# '''
# cached_version = get_cached_version()
# git_version = get_git_version(**kwargs)
#
# if git_version is None:
# if cached_version is None:
# raise RuntimeError('can not determine version number')
# return cached_version
#
# if cached_version != git_version:
# set_cached_version(git_version)
# return git_version
. Output only the next line. | VERSION = get_version() |
Here is a snippet: <|code_start|>
class TestStagDesnityRatio(unittest.TestCase):
"""Unit tests for isentropic.stag_density_ratio"""
def test_still(self):
"""Check that the ratio is 1 when Mach=0."""
self.assertEqual(1, isentropic.stag_density_ratio(0, 1.2))
def test_sonic(self):
"""Check the ratio when Mach=1."""
# Rocket Propulsion Elements, 8th edition, page 59.
self.assertAlmostEqual(1.61, isentropic.stag_density_ratio(1, 1.2), places=2)
class TestVelocity(unittest.TestCase):
"""Unit tests for isentropic.velocity."""
def test_equal_pressure(self):
"""Test that the velocity is the same if the pressure is the same."""
v_1 = 100.
p = 1e6
v_2 = isentropic.velocity(v_1, p_1=p, T_1=300, p_2=p, gamma=1.2, m_molar=20e-3)
self.assertEqual(v_1, v_2)
def test_rpe_3_2(self):
"""Test against example problem 3-2 from Rocket Propulsion Elements."""
v_1 = 0
p_1 = 2.068e6
T_1 = 2222.
gamma = 1.3
<|code_end|>
. Write the next line using the current file imports:
import unittest
from proptools import isentropic
from proptools.constants import R_univ
and context from other files:
# Path: proptools/isentropic.py
# def stag_temperature_ratio(M, gamma):
# def stag_pressure_ratio(M, gamma):
# def stag_density_ratio(M, gamma):
# def velocity(v_1, p_1, T_1, p_2, gamma, m_molar): # pylint: disable=too-many-arguments
#
# Path: proptools/constants.py
, which may include functions, classes, or code. Output only the next line. | m_molar = R_univ / 345.7 |
Using the snippet: <|code_start|>"""Unit tests for solid rocket motor equations."""
class TestBurnAreaRatio(unittest.TestCase):
"""Unit tests for solid.burn_area_ratio."""
def test_ex_12_3(self):
"""Test against example problem 12-3 from Rocket Propulsion Elements."""
gamma = 1.26 # Given ratio of specific heats [units: dimensionless].
rho_solid = 1510. # Given solid propellant density [units: kilogram meter**-3].
p_c = 6.895e6 # Given chamber pressure [units: pascal].
n = 0.5 # Burn rate exponent, guess
a = 2.54e-3 * (p_c)**(-n) # Burn rate coefficient, from given burn rate of
# 0.1 inch second**-1 at 1000 psi [units: meter second**-1 pascal**-n].
c_star = 1209. # Given characteristic velocity [units: meter second**-1].
K_RPE = 1933. / 1.30 # Given burn area ratio [units: dimensionless].
<|code_end|>
, determine the next line of code. You have imports:
import unittest
import numpy as np
from proptools import solid
and context (class names, function names, or code) available:
# Path: proptools/solid.py
# def chamber_pressure(K, a, n, rho_solid, c_star):
# def burn_area_ratio(p_c, a, n, rho_solid, c_star):
# def burn_and_throat_area(F, p_c, p_e, a, n, rho_solid, c_star, gamma):
# def thrust_curve(A_b, x, A_t, A_e, p_a, a, n, rho_solid, c_star, gamma):
# C_F = nozzle.thrust_coef(p_c, p_e, gamma)
# F = nozzle.thrust(A_t, p_c, p_e, gamma, p_a, A_e / A_t)
. Output only the next line. | K = solid.burn_area_ratio(p_c, a, n, rho_solid, c_star) |
Given snippet: <|code_start|>def stag_density_ratio(M, gamma):
"""Stagnation density / static density ratio.
Arguments:
M (scalar): Mach number [units: dimensionless].
gamma (scalar): Heat capacity ratio [units: dimensionless].
Returns:
scalar: the stagnation density ratio :math:`\\rho_0 / \\rho` [units: dimensionless].
"""
return (1 + (gamma - 1) / 2 * M**2)**(1 / (gamma - 1))
def velocity(v_1, p_1, T_1, p_2, gamma, m_molar): # pylint: disable=too-many-arguments
"""Velocity relation between two points in an isentropic flow.
Given the velocity, pressure, and temperature at station 1 and the pressure at station 2,
find the velocity at station 2. See Rocket Propulsion Elements, 8th edition, equation 3-15b.
Arguments:
v_1 (scalar): Velocity at station 1 [units: meter second**-1].
p_1 (scalar): Pressure at station 1 [units: pascal].
T_1 (scalar): Temperature at station 1 [units kelvin].
p_2 (scalar): Pressure at station 2 [units: pascal].
gamma (scalar): Gas ratio of specific heats [units: dimensionless].
m_molar (scalar): Gas mean molar mass [units: kilogram mole**-1].
Returns:
scalar: velocity at station 2 [units: meter second**-1].
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from proptools.constants import R_univ
and context:
# Path: proptools/constants.py
which might include code, classes, or functions. Output only the next line. | return ((2 * gamma) / (gamma - 1) * R_univ * T_1 / m_molar |
Predict the next line for this snippet: <|code_start|>"""Plot the thrust curve of a solid rocket motor with a cylindrical propellant grain."""
# Grain geometry (Clinder with circular port)
r_in = 0.15 # Grain inner radius [units: meter].
r_out = 0.20 # Grain outer radius [units: meter].
length = 1.0 # Grain length [units: meter].
# Propellant properties
gamma = 1.26 # Exhaust gas ratio of specific heats [units: dimensionless].
rho_solid = 1510. # Solid propellant density [units: kilogram meter**-3].
n = 0.5 # Propellant burn rate exponent [units: dimensionless].
a = 2.54e-3 * (6.9e6)**(-n) # Burn rate coefficient, such that the propellant
# burns at 2.54 mm s**-1 at 6.9 MPa [units: meter second**-1 pascal**-n].
c_star = 1209. # Characteristic velocity [units: meter second**-1].
# Nozzle geometry
A_t = 839e-6 # Throat area [units: meter**2].
A_e = 8 * A_t # Exit area [units: meter**2].
p_a = 101e3 # Ambeint pressure during motor firing [units: pascal].
# Burning surface evolution
x = np.linspace(0, r_out - r_in) # Flame front progress steps [units: meter].
A_b = 2 * np.pi * (r_in + x) * length # Burn area at each flame progress step [units: meter**2].
# Compute thrust curve.
<|code_end|>
with the help of current file imports:
from matplotlib import pyplot as plt
from proptools import solid
import numpy as np
and context from other files:
# Path: proptools/solid.py
# def chamber_pressure(K, a, n, rho_solid, c_star):
# def burn_area_ratio(p_c, a, n, rho_solid, c_star):
# def burn_and_throat_area(F, p_c, p_e, a, n, rho_solid, c_star, gamma):
# def thrust_curve(A_b, x, A_t, A_e, p_a, a, n, rho_solid, c_star, gamma):
# C_F = nozzle.thrust_coef(p_c, p_e, gamma)
# F = nozzle.thrust(A_t, p_c, p_e, gamma, p_a, A_e / A_t)
, which may contain function names, class names, or code. Output only the next line. | t, p_c, F = solid.thrust_curve(A_b, x, A_t, A_e, p_a, a, n, rho_solid, c_star, gamma) |
Predict the next line after this snippet: <|code_start|>"""Plot C_F vs altitude."""
p_c = 10e6 # Chamber pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
p_e_1 = 100e3 # Nozzle exit pressure, 1st stage [units: pascal]
<|code_end|>
using the current file's imports:
import numpy as np
import skaero.atmosphere.coesa as atmo
from matplotlib import pyplot as plt
from proptools import nozzle
and any relevant context from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | exp_ratio_1 = nozzle.er_from_p(p_c, p_e_1, gamma) # Nozzle expansion ratio [units: dimensionless] |
Given the code snippet: <|code_start|>"""Find the chamber pressure and thrust of a solid rocket motor."""
# Propellant properties
gamma = 1.26 # Exhaust gas ratio of specific heats [units: dimensionless].
rho_solid = 1510. # Solid propellant density [units: kilogram meter**-3].
n = 0.5 # Propellant burn rate exponent [units: dimensionless].
a = 2.54e-3 * (6.9e6)**(-n) # Burn rate coefficient, such that the propellant
# burns at 2.54 mm s**-1 at 6.9 MPa [units: meter second**-1 pascal**-n].
c_star = 1209. # Characteristic velocity [units: meter second**-1].
# Motor geometry
A_t = 839e-6 # Throat area [units: meter**2].
A_b = 1.25 # Burn area [units: meter**2].
# Nozzle exit pressure [units: pascal].
p_e = 101e3
# Compute the chamber pressure [units: pascal].
<|code_end|>
, generate the next line using the imports in this file:
from proptools import solid, nozzle
and context (functions, classes, or occasionally code) from other files:
# Path: proptools/solid.py
# def chamber_pressure(K, a, n, rho_solid, c_star):
# def burn_area_ratio(p_c, a, n, rho_solid, c_star):
# def burn_and_throat_area(F, p_c, p_e, a, n, rho_solid, c_star, gamma):
# def thrust_curve(A_b, x, A_t, A_e, p_a, a, n, rho_solid, c_star, gamma):
# C_F = nozzle.thrust_coef(p_c, p_e, gamma)
# F = nozzle.thrust(A_t, p_c, p_e, gamma, p_a, A_e / A_t)
#
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | p_c = solid.chamber_pressure(A_b / A_t, a, n, rho_solid, c_star) |
Here is a snippet: <|code_start|>"""Find the chamber pressure and thrust of a solid rocket motor."""
# Propellant properties
gamma = 1.26 # Exhaust gas ratio of specific heats [units: dimensionless].
rho_solid = 1510. # Solid propellant density [units: kilogram meter**-3].
n = 0.5 # Propellant burn rate exponent [units: dimensionless].
a = 2.54e-3 * (6.9e6)**(-n) # Burn rate coefficient, such that the propellant
# burns at 2.54 mm s**-1 at 6.9 MPa [units: meter second**-1 pascal**-n].
c_star = 1209. # Characteristic velocity [units: meter second**-1].
# Motor geometry
A_t = 839e-6 # Throat area [units: meter**2].
A_b = 1.25 # Burn area [units: meter**2].
# Nozzle exit pressure [units: pascal].
p_e = 101e3
# Compute the chamber pressure [units: pascal].
p_c = solid.chamber_pressure(A_b / A_t, a, n, rho_solid, c_star)
# Compute the sea level thrust [units: newton].
<|code_end|>
. Write the next line using the current file imports:
from proptools import solid, nozzle
and context from other files:
# Path: proptools/solid.py
# def chamber_pressure(K, a, n, rho_solid, c_star):
# def burn_area_ratio(p_c, a, n, rho_solid, c_star):
# def burn_and_throat_area(F, p_c, p_e, a, n, rho_solid, c_star, gamma):
# def thrust_curve(A_b, x, A_t, A_e, p_a, a, n, rho_solid, c_star, gamma):
# C_F = nozzle.thrust_coef(p_c, p_e, gamma)
# F = nozzle.thrust(A_t, p_c, p_e, gamma, p_a, A_e / A_t)
#
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
, which may include functions, classes, or code. Output only the next line. | F = nozzle.thrust(A_t, p_c, p_e, gamma) |
Predict the next line after this snippet: <|code_start|> M[i] = y[1]
i += 1
choked = False
if not solver.successful() and abs(solver.y[1] - 1) < 1e-3:
choked = True
return (T_o, M, choked)
def main():
def f_f(x):
return 0
def f_q(x):
return 0
def f_A(x):
return 1 + x
mdot = 100
c_p = 2000
gamma = 1.4
R = c_p * (1 - 1 / gamma)
x = np.linspace(0, 1)
T_o_in = 300
for M_in in [0.2, 0.8, 1.2, 2]:
T_in = T_o_in * (1 + (gamma - 1) / 2 * M_in**2)**-1
v_in = M_in * (gamma * R * T_in)**0.5
rho_in = mdot / (v_in * f_A(0))
p_in = rho_in * R * T_in
p_o_in = p_in * (T_o_in / T_in)**(gamma / (gamma -1))
<|code_end|>
using the current file's imports:
from scipy.misc import derivative
from scipy.integrate import ode
from proptools import nozzle
from matplotlib import pyplot as plt
import numpy as np
and any relevant context from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | A_t = nozzle.throat_area(mdot, p_o_in, T_o_in, gamma, nozzle.R_univ / R) |
Here is a snippet: <|code_start|>
class TestStringMethods(unittest.TestCase):
def test_sample_8_3(self):
# Do sample problem 8-3 from Huzel and Huang.
stress = psi2pascal(38e3)
a = inch2meter(41.0)
b = inch2meter(29.4)
l_c = inch2meter(46.9)
E = psi2pascal(10.4e6)
v = 0.36
weld_eff = 1.0
p_to = psi2pascal(180) # Oxidizer max pressure 180 psi
p_tf = psi2pascal(170) # fuel max pressure 170 psi
rho = 0.101 * lbm2kilogram(1) / inch2meter(1)**3
# knuckle factor K = 0.80
<|code_end|>
. Write the next line using the current file imports:
import unittest
from proptools import tank_structure as ts
from proptools.units import inch2meter, psi2pascal, lbf2newton, lbm2kilogram
and context from other files:
# Path: proptools/tank_structure.py
# def crown_thickness(p_t, R, stress, weld_eff):
# def knuckle_thickness(p_t, a, b, stress, weld_eff):
# def cylinder_thickness(p_t, a, stress, weld_eff):
# def sphere_thickness(p_t, a, stress, weld_eff):
# def max_axial_load(p_t, a, t_c, l_c, E):
# def cylinder_mass(a, t_c, l_c, rho):
# def sphere_mass(a, t, rho):
# def ellipse_mass(a, b, t, rho):
# def cr_ex_press_sphere(a, t, E, v):
# def cr_ex_press_sphere_end(a, t, E):
# def cr_ex_press_ellipse_end(a, b, t, E, C_b=0.05):
# def cr_ex_press_cylinder(a, t_c, l_c, E, v):
# def sphere_volume(a):
# def ellipse_volume(a, b):
# def cylinder_volume(a, l_c):
# def knuckle_factor(ellipse_ratio):
# def ellipse_design_factor(ellipse_ratio):
# K = knuckle_factor(a / b)
#
# Path: proptools/units.py
# def inch2meter(x):
# return x * 0.0254
#
# def psi2pascal(x):
# return x * 6895
#
# def lbf2newton(x):
# return x * 4.448
#
# def lbm2kilogram(x):
# return x * 0.4536
, which may include functions, classes, or code. Output only the next line. | self.assertAlmostEqual(0.8, ts.knuckle_factor(a / b), delta=0.02) |
Here is a snippet: <|code_start|>
class TestStringMethods(unittest.TestCase):
def test_sample_8_3(self):
# Do sample problem 8-3 from Huzel and Huang.
stress = psi2pascal(38e3)
<|code_end|>
. Write the next line using the current file imports:
import unittest
from proptools import tank_structure as ts
from proptools.units import inch2meter, psi2pascal, lbf2newton, lbm2kilogram
and context from other files:
# Path: proptools/tank_structure.py
# def crown_thickness(p_t, R, stress, weld_eff):
# def knuckle_thickness(p_t, a, b, stress, weld_eff):
# def cylinder_thickness(p_t, a, stress, weld_eff):
# def sphere_thickness(p_t, a, stress, weld_eff):
# def max_axial_load(p_t, a, t_c, l_c, E):
# def cylinder_mass(a, t_c, l_c, rho):
# def sphere_mass(a, t, rho):
# def ellipse_mass(a, b, t, rho):
# def cr_ex_press_sphere(a, t, E, v):
# def cr_ex_press_sphere_end(a, t, E):
# def cr_ex_press_ellipse_end(a, b, t, E, C_b=0.05):
# def cr_ex_press_cylinder(a, t_c, l_c, E, v):
# def sphere_volume(a):
# def ellipse_volume(a, b):
# def cylinder_volume(a, l_c):
# def knuckle_factor(ellipse_ratio):
# def ellipse_design_factor(ellipse_ratio):
# K = knuckle_factor(a / b)
#
# Path: proptools/units.py
# def inch2meter(x):
# return x * 0.0254
#
# def psi2pascal(x):
# return x * 6895
#
# def lbf2newton(x):
# return x * 4.448
#
# def lbm2kilogram(x):
# return x * 0.4536
, which may include functions, classes, or code. Output only the next line. | a = inch2meter(41.0) |
Using the snippet: <|code_start|> self.assertAlmostEqual(inch2meter(0.183), ts.cylinder_thickness(
p_tf, a, stress, weld_eff), delta=inch2meter(0.005))
# Ellipse design factor E' = 4.56
self.assertAlmostEqual(4.56, ts.ellipse_design_factor(a / b), delta = 0.005)
# Oxidizer tank end weighs 126.4 lmb
self.assertAlmostEqual(2 * lbm2kilogram(126.4), ts.ellipse_mass(
a, b, inch2meter(0.145), rho), delta=lbm2kilogram(0.2))
# Cylindrical section weighs 223.3 lbm
self.assertAlmostEqual(lbm2kilogram(223.3), ts.cylinder_mass(
a, inch2meter(0.183), l_c, rho), delta=lbm2kilogram(0.2))
# Critical external pressure for ox tank ends = 13.4 psi.
self.assertAlmostEqual(psi2pascal(13.4), ts.cr_ex_press_ellipse_end(
a, b, inch2meter(0.145), E, C_b=0.10), delta=1e3)
# Critical external loading for fuel tank cylinder 10.8 psi
self.assertAlmostEqual(psi2pascal(10.8), ts.cr_ex_press_cylinder(
a, inch2meter(0.183), l_c, E, v), delta=1e3)
def test_sample_8_4(self):
# Do sample problem 8-4 from Huzel and Huang.
a = inch2meter(41.0)
l_c = inch2meter(46.9)
E = psi2pascal(10.4e6)
t_c = inch2meter(0.183)
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from proptools import tank_structure as ts
from proptools.units import inch2meter, psi2pascal, lbf2newton, lbm2kilogram
and context (class names, function names, or code) available:
# Path: proptools/tank_structure.py
# def crown_thickness(p_t, R, stress, weld_eff):
# def knuckle_thickness(p_t, a, b, stress, weld_eff):
# def cylinder_thickness(p_t, a, stress, weld_eff):
# def sphere_thickness(p_t, a, stress, weld_eff):
# def max_axial_load(p_t, a, t_c, l_c, E):
# def cylinder_mass(a, t_c, l_c, rho):
# def sphere_mass(a, t, rho):
# def ellipse_mass(a, b, t, rho):
# def cr_ex_press_sphere(a, t, E, v):
# def cr_ex_press_sphere_end(a, t, E):
# def cr_ex_press_ellipse_end(a, b, t, E, C_b=0.05):
# def cr_ex_press_cylinder(a, t_c, l_c, E, v):
# def sphere_volume(a):
# def ellipse_volume(a, b):
# def cylinder_volume(a, l_c):
# def knuckle_factor(ellipse_ratio):
# def ellipse_design_factor(ellipse_ratio):
# K = knuckle_factor(a / b)
#
# Path: proptools/units.py
# def inch2meter(x):
# return x * 0.0254
#
# def psi2pascal(x):
# return x * 6895
#
# def lbf2newton(x):
# return x * 4.448
#
# def lbm2kilogram(x):
# return x * 0.4536
. Output only the next line. | self.assertAlmostEqual(lbf2newton(823900), ts.max_axial_load( |
Given the code snippet: <|code_start|>
class TestStringMethods(unittest.TestCase):
def test_sample_8_3(self):
# Do sample problem 8-3 from Huzel and Huang.
stress = psi2pascal(38e3)
a = inch2meter(41.0)
b = inch2meter(29.4)
l_c = inch2meter(46.9)
E = psi2pascal(10.4e6)
v = 0.36
weld_eff = 1.0
p_to = psi2pascal(180) # Oxidizer max pressure 180 psi
p_tf = psi2pascal(170) # fuel max pressure 170 psi
<|code_end|>
, generate the next line using the imports in this file:
import unittest
from proptools import tank_structure as ts
from proptools.units import inch2meter, psi2pascal, lbf2newton, lbm2kilogram
and context (functions, classes, or occasionally code) from other files:
# Path: proptools/tank_structure.py
# def crown_thickness(p_t, R, stress, weld_eff):
# def knuckle_thickness(p_t, a, b, stress, weld_eff):
# def cylinder_thickness(p_t, a, stress, weld_eff):
# def sphere_thickness(p_t, a, stress, weld_eff):
# def max_axial_load(p_t, a, t_c, l_c, E):
# def cylinder_mass(a, t_c, l_c, rho):
# def sphere_mass(a, t, rho):
# def ellipse_mass(a, b, t, rho):
# def cr_ex_press_sphere(a, t, E, v):
# def cr_ex_press_sphere_end(a, t, E):
# def cr_ex_press_ellipse_end(a, b, t, E, C_b=0.05):
# def cr_ex_press_cylinder(a, t_c, l_c, E, v):
# def sphere_volume(a):
# def ellipse_volume(a, b):
# def cylinder_volume(a, l_c):
# def knuckle_factor(ellipse_ratio):
# def ellipse_design_factor(ellipse_ratio):
# K = knuckle_factor(a / b)
#
# Path: proptools/units.py
# def inch2meter(x):
# return x * 0.0254
#
# def psi2pascal(x):
# return x * 6895
#
# def lbf2newton(x):
# return x * 4.448
#
# def lbm2kilogram(x):
# return x * 0.4536
. Output only the next line. | rho = 0.101 * lbm2kilogram(1) / inch2meter(1)**3 |
Using the snippet: <|code_start|>"""Unit tests for nozzle flow."""
class TestMassFlow(unittest.TestCase):
"""Unit tests for nozzle.mass_flow."""
def test_rpe_3_3(self):
"""Test against example problem 3-3 from Rocket Propulsion Elements."""
T_c = 2800.
gamma = 1.2
m_molar = R_univ / 360.
p_c = 2.039e6
A_t = 13.32e-4
<|code_end|>
, determine the next line of code. You have imports:
import unittest
import numpy as np
from proptools import nozzle
from proptools.constants import R_univ
and context (class names, function names, or code) available:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
#
# Path: proptools/constants.py
. Output only the next line. | m_dot = nozzle.mass_flow(A_t, p_c, T_c, gamma, m_molar) |
Predict the next line after this snippet: <|code_start|>"""Unit tests for nozzle flow."""
class TestMassFlow(unittest.TestCase):
"""Unit tests for nozzle.mass_flow."""
def test_rpe_3_3(self):
"""Test against example problem 3-3 from Rocket Propulsion Elements."""
T_c = 2800.
gamma = 1.2
<|code_end|>
using the current file's imports:
import unittest
import numpy as np
from proptools import nozzle
from proptools.constants import R_univ
and any relevant context from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
#
# Path: proptools/constants.py
. Output only the next line. | m_molar = R_univ / 360. |
Next line prediction: <|code_start|>"""Unit tests for convection models."""
class TestTaw(unittest.TestCase):
"""Unit tests for convection.adiabatic_wall_temperature"""
def test_ssme(self):
"""Test against the SSME example from 16.512 Lecture 7."""
# The T_aw given in the example (3400 K) does not seem to be correct.
# For now, I am not going to implement this test, for want of a reference
# example to compare to.
pass
class TestLongTube(unittest.TestCase):
"""Unit tests for convection.long_tube_coeff."""
def test_1(self):
"""No examples are given in Hill & Peterson, I had to come up with this."""
# Setup
Re = 1e6
Pr = 0.7
mass_flux = 1.
mu = 2e-5
c_p = 1000.
k = mu * c_p / Pr
D = Re * mu / mass_flux
# Action
<|code_end|>
. Use current file imports:
(import unittest
from math import pi
from proptools import convection, nozzle)
and context including class names, function names, or small code snippets from other files:
# Path: proptools/convection.py
# def adiabatic_wall_temperature(T_c, M, gamma, r=0.9, Pr=None):
# def long_tube_coeff(mass_flux, D, c_p, mu, k):
# def bartz(p_c, c_star, D_t, D, c_p, mu_e, Pr, sigma=1.):
# def bartz_sigma_sanchez(T_e, T_avg, w=0.6):
# def bartz_sigma_huzel(T_c, T_w, M, gamma):
# def film_adiabatic_wall_temperature(eta_film, T_aw, T_f):
# def film_efficiency(x, D, m_dot_core, m_dot_film, mu_core, Pr_film=1, film_param=1, cp_ratio=1):
# def rannie_transpiration_cooling(cool_flux_fraction, Pr_film, Re_bulk):
# G = cool_flux_fraction
# R = Re_bulk**0.1
#
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | h = convection.long_tube_coeff(mass_flux, D, c_p, mu, k) |
Given the following code snippet before the placeholder: <|code_start|>
class TestBartzSigmaHuzel(unittest.TestCase):
"""Unit tests for convection.bartz_sigma_huzel."""
def test_huzel_43_a1_throat(self):
"""Test against Huzel and Huang example problem 4-3, for the A-1 engine
at the throat."""
# Values given in the problem statement
T_c = 3411. # 6140 rankine
T_w = 0.8 * T_c # "Since the carbon-deposit approached to the gas temperature,
# a T_w/T_c value of 0.8 is used to determine the sigma values."
M = 1. # Mach=1 at throat
gamma = 1.2
# Answer given for the correction factor
sigma_huzel = 1.
sigma = convection.bartz_sigma_huzel(T_c, T_w, M, gamma)
self.assertTrue(abs(sigma - sigma_huzel) < 0.05)
def test_huzel_43_a1_exit(self):
"""Test against Huzel and Huang example problem 4-3, for the A-1 engine
at the exit."""
# Values given in the problem statement
T_c = 3411. # 6140 rankine
T_w = 0.8 * T_c # "Since the carbon-deposit approached to the gas temperature,
# a T_w/T_c value of 0.8 is used to determine the sigma values."
gamma = 1.2
<|code_end|>
, predict the next line using imports from the current file:
import unittest
from math import pi
from proptools import convection, nozzle
and context including class names, function names, and sometimes code from other files:
# Path: proptools/convection.py
# def adiabatic_wall_temperature(T_c, M, gamma, r=0.9, Pr=None):
# def long_tube_coeff(mass_flux, D, c_p, mu, k):
# def bartz(p_c, c_star, D_t, D, c_p, mu_e, Pr, sigma=1.):
# def bartz_sigma_sanchez(T_e, T_avg, w=0.6):
# def bartz_sigma_huzel(T_c, T_w, M, gamma):
# def film_adiabatic_wall_temperature(eta_film, T_aw, T_f):
# def film_efficiency(x, D, m_dot_core, m_dot_film, mu_core, Pr_film=1, film_param=1, cp_ratio=1):
# def rannie_transpiration_cooling(cool_flux_fraction, Pr_film, Re_bulk):
# G = cool_flux_fraction
# R = Re_bulk**0.1
#
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | M = nozzle.mach_from_er(5., gamma) # Expansion ratio of 5 |
Predict the next line for this snippet: <|code_start|>"""Unit tests for valve flow."""
class TestValveGasCv(unittest.TestCase):
def test_methane_example(self):
"""Test the methane example from
http://www.idealvalve.com/pdf/Flow-Calculation-for-Gases.pdf
"""
# Setup
p_1 = 790.83e3 # Inlet pressure, 100 psig [units: pascal]
p_2 = 101e3 # Outlet pressure [units: pascal]
T = 294.3 # gas temperature, 70 F [units: kelvin]
m_molar = 0.01604 # Methane molar mass [units: kilogram mole**-1]
flow_scfh = 600
<|code_end|>
with the help of current file imports:
import unittest
from proptools import valve
and context from other files:
# Path: proptools/valve.py
# def m_dot_to_scfh(m_dot, m_molar):
# def scfh_to_m_dot(flow_scfh, m_molar):
# def valve_gas_cv(m_dot, p_1, p_2, m_molar, T):
# def valve_gas_pressure(cv, m_dot, p_1, m_molar, T):
# def valve_gas_mass_flow(cv, p_1, p_2, m_molar, T):
# def demo_plots():
# T = 300 # units: kelvin
, which may contain function names, class names, or code. Output only the next line. | m_dot = valve.scfh_to_m_dot(flow_scfh, m_molar) |
Based on the snippet: <|code_start|>"""Compute the expansion ratio for a given pressure ratio."""
p_c = 10e6 # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
# Solve for the expansion ratio [units: dimensionless]
<|code_end|>
, predict the immediate next line with the help of imports:
from proptools import nozzle
and context (classes, functions, sometimes code) from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | exp_ratio = nozzle.er_from_p(p_c, p_e, gamma) |
Next line prediction: <|code_start|>"""Compute the pressure ratio from a given expansion ratio."""
p_c = 10e6 # Chamber pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
exp_ratio = 11.9 # Expansion ratio [units: dimensionless]
# Solve for the exit pressure [units: pascal].
<|code_end|>
. Use current file imports:
(from scipy.optimize import fsolve
from proptools import nozzle)
and context including class names, function names, or small code snippets from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | p_e = p_c * nozzle.pressure_from_er(exp_ratio, gamma) |
Predict the next line after this snippet: <|code_start|>"""Plot thrust vs ambient pressure."""
p_c = 10e6 # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
p_a = np.linspace(0, 100e3) # Ambient pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
A_t = np.pi * (0.1 / 2)**2 # Throat area [units: meter**2]
# Compute thrust [units: newton]
<|code_end|>
using the current file's imports:
import numpy as np
import skaero.atmosphere.coesa as atmo
from matplotlib import pyplot as plt
from proptools import nozzle
and any relevant context from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | F = nozzle.thrust(A_t, p_c, p_e, gamma, |
Predict the next line after this snippet: <|code_start|>"""Estimate exit velocity."""
# Declare engine design parameters
p_c = 10e6 # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
m_molar = 20e-3 # Exhaust molar mass [units: kilogram mole**1]
T_c = 3000. # Chamber temperature [units: kelvin]
# Compute the exit velocity
<|code_end|>
using the current file's imports:
from proptools import isentropic
and any relevant context from other files:
# Path: proptools/isentropic.py
# def stag_temperature_ratio(M, gamma):
# def stag_pressure_ratio(M, gamma):
# def stag_density_ratio(M, gamma):
# def velocity(v_1, p_1, T_1, p_2, gamma, m_molar): # pylint: disable=too-many-arguments
. Output only the next line. | v_e = isentropic.velocity(v_1=0, p_1=p_c, T_1=T_c, p_2=p_e, gamma=gamma, m_molar=m_molar) |
Given the following code snippet before the placeholder: <|code_start|>"""Film cooling example"""
T_aw = 3200 # Adiabatic wall temperature of core flow [units: kelvin].
T_f = 1600 # Film temperature [units: kelvin].
T_w = 700 # Wall temperature [units: kelvin].
x = np.linspace(0, 1) # Distance downstream [units: meter].
D = 0.5 # Diameter [units: meter].
m_dot_core = np.pi / 4 * D**2 * 5.33 * 253 # Core mass flow [units: kilogram second**-1].
m_dot_film = (1./99) * m_dot_core
mu_core = 2e-5 / 0.66 # Dynamic viscosity of the core fluid [units: pascal second].
Pr_film = 0.8
film_param = 1.265
cp_ratio = 0.8
eta = np.array([
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
from matplotlib import pyplot as plt
from proptools import convection
and context including class names, function names, and sometimes code from other files:
# Path: proptools/convection.py
# def adiabatic_wall_temperature(T_c, M, gamma, r=0.9, Pr=None):
# def long_tube_coeff(mass_flux, D, c_p, mu, k):
# def bartz(p_c, c_star, D_t, D, c_p, mu_e, Pr, sigma=1.):
# def bartz_sigma_sanchez(T_e, T_avg, w=0.6):
# def bartz_sigma_huzel(T_c, T_w, M, gamma):
# def film_adiabatic_wall_temperature(eta_film, T_aw, T_f):
# def film_efficiency(x, D, m_dot_core, m_dot_film, mu_core, Pr_film=1, film_param=1, cp_ratio=1):
# def rannie_transpiration_cooling(cool_flux_fraction, Pr_film, Re_bulk):
# G = cool_flux_fraction
# R = Re_bulk**0.1
. Output only the next line. | convection.film_efficiency(x_, D, m_dot_core, m_dot_film, |
Given snippet: <|code_start|>"""Effect of expansion ratio on thrust coefficient."""
p_c = 10e6 # Chamber pressure [units: pascal]
p_a = 100e3 # Ambient pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
p_e = np.linspace(0.4 * p_a, 2 * p_a) # Exit pressure [units: pascal]
# Compute the expansion ratio and thrust coefficient for each p_e
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import numpy as np
from matplotlib import pyplot as plt
from proptools import nozzle
and context:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
which might include code, classes, or functions. Output only the next line. | exp_ratio = nozzle.er_from_p(p_c, p_e, gamma) |
Given snippet: <|code_start|>"""Generic electric propulsion design equations."""
from __future__ import division
def thrust(I_b, V_b, m_ion):
"""Thrust of an electric thruster.
Compute the ideal thrust of an electric thruster from the beam current and voltage,
assuming singly charged ions and no beam divergence.
Reference: Goebel and Katz, equation 2.3-8.
Arguments:
I_b (scalar): Beam current [units: ampere].
V_b (scalar): Beam voltage [units: volt].
m_ion (scalar): Ion mass [units: kilogram].
Returns:
scalar: Thrust force [units: newton].
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import numpy as np
from scipy.optimize import minimize
from proptools.constants import charge, amu_kg, g
and context:
# Path: proptools/constants.py
which might include code, classes, or functions. Output only the next line. | return (2 * (m_ion / charge) * V_b)**0.5 * I_b |
Here is a snippet: <|code_start|> If only ``V_b`` and ``m_ion`` are provided, the ideal specific impulse will be computed.
If ``divergence_correction``, ``double_fraction``, or ``mass_utilization`` are provided,
the specific impulse will be reduced by the corresponding efficiency factors.
Reference: Goebel and Katz, equation 2.4-8.
Arguments:
V_b (scalar): Beam voltage [units: volt].
m_ion (scalar): Ion mass [units: kilogram].
divergence_correction (scalar in (0, 1])): Thrust correction factor for beam divergence
[units: dimensionless].
double_fraction (scalar in [0, 1]): The doubly-charged ion current over the singly-charged
ion current, :math:`I^{++} / I^+` [units: dimensionless].
mass_utilization (scalar in (0, 1])): Mass utilization efficiency [units: dimensionless].
Returns:
scalar: the specific impulse [units: second].
"""
# Check inputs
if divergence_correction < 0 or divergence_correction > 1:
raise ValueError('divergence_correction {:.f} is not in [0, 1]'.format(
divergence_correction))
if mass_utilization < 0 or mass_utilization > 1:
raise ValueError('mass_utilization {:.f} is not in [0, 1]'.format(mass_utilization))
# Compute the efficiency factor
efficiency = (divergence_correction * double_ion_thrust_correction(double_fraction)
* mass_utilization)
# Compute the ideal specific impulse
<|code_end|>
. Write the next line using the current file imports:
import numpy as np
from scipy.optimize import minimize
from proptools.constants import charge, amu_kg, g
and context from other files:
# Path: proptools/constants.py
, which may include functions, classes, or code. Output only the next line. | I_sp_ideal = 1 / g * (2 * (charge / m_ion) * V_b)**0.5 |
Continue the code snippet: <|code_start|> gamma: working gas ratio of specific heats [units: none].
c_p: working gas heat capacity at const pressure
[units: joule kilogram**-1 kelvin**-1].
'''
# Turbine specific enthalpy drop [units: joule kilogram**-1]
dh_turb_ideal = turbine_enthalpy(p_o, p_te, T_o, gamma, c_p)
dh_turb = eta * dh_turb_ideal
# Turbine exit temperature
T_te = T_o - (dh_turb / c_p)
return T_te
def gg_dump_isp(p_o, p_te, p_ne, T_o, eta, gamma, c_p, m_molar):
'''Get the specific impulse of a Gas Generator turbine exhaust dump.
Arguments:
p_o: turbine inlet stagnation pressure [units: pascal].
p_te: turbine exit pressure [units: pascal].
p_ne: Dump nozzle exit pressure [units: pascal].
T_o: turbine inlet stagnation temperature [units: kelvin].
eta: turbine efficiency.
gamma: working gas ratio of specific heats [units: none].
c_p: working gas heat capacity at const pressure
[units: joule kilogram**-1 kelvin**-1].
m_molar: working gas molar mass [units: kilogram mole**-1].
'''
T_te = turbine_exit_temperature(p_o, p_te, T_o, eta, gamma, c_p)
# Dump nozzle thrust coefficient and characteristic velocity.
# Assume optimal expansion.
<|code_end|>
. Use current file imports:
from proptools import nozzle
from scipy.interpolate import RectBivariateSpline, interp1d
from matplotlib import pyplot as plt
import math
import numpy as np
and context (classes, functions, or code) from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | C_f = nozzle.thrust_coef(p_c=p_te, p_e=p_ne, gamma=gamma) |
Using the snippet: <|code_start|>"""Ideal characteristic velocity."""
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
m_molar = 20e-3 # Exhaust molar mass [units: kilogram mole**1]
T_c = 3000. # Chamber temperature [units: kelvin]
# Compute the characteristic velocity [units: meter second**-1]
<|code_end|>
, determine the next line of code. You have imports:
from proptools import nozzle
and context (class names, function names, or code) available:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | c_star = nozzle.c_star(gamma, m_molar, T_c) |
Given the following code snippet before the placeholder: <|code_start|>"""Estimate specific impulse, thrust and mass flow."""
# Declare engine design parameters
p_c = 10e6 # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
m_molar = 20e-3 # Exhaust molar mass [units: kilogram mole**1]
T_c = 3000. # Chamber temperature [units: kelvin]
A_t = pi * (0.1 / 2)**2 # Throat area [units: meter**2]
# Predict engine performance
<|code_end|>
, predict the next line using imports from the current file:
from math import pi
from proptools import nozzle
and context including class names, function names, and sometimes code from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | C_f = nozzle.thrust_coef(p_c, p_e, gamma) # Thrust coefficient [units: dimensionless] |
Given the code snippet: <|code_start|>"""Plot thrust vs chamber pressure."""
p_c = np.linspace(1e6, 20e6) # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
p_a = 100e3 # Ambient pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
A_t = np.pi * (0.1 / 2)**2 # Throat area [units: meter**2]
# Compute thrust [units: newton]
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
from matplotlib import pyplot as plt
from proptools import nozzle
and context (functions, classes, or occasionally code) from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
. Output only the next line. | F = nozzle.thrust(A_t, p_c, p_e, gamma) |
Given snippet: <|code_start|> c_star (scalar): Propellant combustion characteristic velocity [units: meter second**-1].
Returns:
scalar: Ratio of burning area to throat area, :math:`K = A_b/A_t` [units: dimensionless].
"""
return p_c**(1 - n) / (rho_solid * a * c_star)
def burn_and_throat_area(F, p_c, p_e, a, n, rho_solid, c_star, gamma):
"""Given thrust and chamber pressure, and propellant properties, find the burn area and throat area.
Assumes that the exit pressure is matched (:math:`p_e = p_a`).
Arguments:
F (scalar): Thrust force [units: newton].
p_c (scalar): Chamber pressure [units: pascal].
p_e (scalar): Nozzle exit pressure [units: pascal].
a (scalar): Propellant burn rate coefficient [units: meter second**-1 pascal**-n].
n (scalar): Propellant burn rate exponent [units: none].
rho_solid (scalar): Solid propellant density [units: kilogram meter**-3].
c_star (scalar): Propellant combustion characteristic velocity [units: meter second**-1].
gamma (scalar): Exhaust gas ratio of specific heats [units: dimensionless].
Returns:
tuple: tuple containing:
A_b (scalar): Burn area [units: meter**2].
A_t (scalar): Throat area [units: meter**2].
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from scipy.integrate import cumtrapz
from proptools import nozzle
and context:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
which might include code, classes, or functions. Output only the next line. | C_F = nozzle.thrust_coef(p_c, p_e, gamma) |
Here is a snippet: <|code_start|>"""Check that the nozzle is choked and find the mass flow."""
# Declare engine design parameters
p_c = 10e6 # Chamber pressure [units: pascal]
p_e = 100e3 # Exit pressure [units: pascal]
gamma = 1.2 # Exhaust heat capacity ratio [units: dimensionless]
m_molar = 20e-3 # Exhaust molar mass [units: kilogram mole**-1]
T_c = 3000. # Chamber temperature [units: kelvin]
A_t = pi * (0.1 / 2)**2 # Throat area [units: meter**2]
# Check choking
<|code_end|>
. Write the next line using the current file imports:
from math import pi
from proptools import nozzle
and context from other files:
# Path: proptools/nozzle.py
# def thrust_coef(p_c, p_e, gamma, p_a=None, er=None):
# def c_star(gamma, m_molar, T_c):
# def er_from_p(p_c, p_e, gamma):
# def pressure_from_er(er, gamma):
# def throat_area(m_dot, p_c, T_c, gamma, m_molar):
# def mass_flow(A_t, p_c, T_c, gamma, m_molar):
# def thrust(A_t, p_c, p_e, gamma, p_a=None, er=None):
# def mach_from_er(er, gamma):
# def mach_from_pr(p_c, p_e, gamma):
# def is_choked(p_c, p_e, gamma):
# def mach_from_area_subsonic(area_ratio, gamma):
# def area_from_mach(M, gamma):
# C_F = (2 * gamma**2 / (gamma - 1) \
# * (2 / (gamma + 1))**((gamma + 1) / (gamma - 1)) \
# * (1 - (p_e / p_c)**((gamma - 1) / gamma))
# )**0.5
# R = R_univ / m_molar
# X = np.zeros((n,))
# M = np.zeros((n,))
# B = (y+1)/(y-1)
# X[0] = (u*k)**(B/(1-B))
# M[0] = X[0]
# P = 2 / (gamma + 1)
# Q = 1 - P
# E = 1 / Q
# R = area_ratio**2
# X = fsolve(
# lambda X: (P + Q * X)**E - R * X,
# X_init
# )
, which may include functions, classes, or code. Output only the next line. | if nozzle.is_choked(p_c, p_e, gamma): |
Given snippet: <|code_start|>try:
except ImportError:
logger = my_logger.get_logger('FormulaCalcTest')
class FormulaCalcTest(asynctest.TestCase):
loop = None # make pycharm happy
def setUp(self):
super(FormulaCalcTest, self).setUp()
self.redis_client = redis.StrictRedis(db=config.getint('REDIS', 'db', fallback=1), decode_responses=True)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import asyncio
import datetime
import ujson as json
import json
import asynctest
import redis
import pandas as pd
import numpy as np
import pydatacoll.utils.logger as my_logger
import pydatacoll.plugins.formula_calc as formula_calc
from test.mock_device import mock_data
from pydatacoll.utils.read_config import *
and context:
# Path: test/mock_device/mock_data.py
# def generate():
which might include code, classes, or functions. Output only the next line. | mock_data.generate() |
Here is a snippet: <|code_start|> config.getint('REDIS', 'port', fallback=6379)),
db=config.getint('REDIS', 'db', fallback=1))
sub_client = await aioredis.create_redis((config.get('REDIS', 'host', fallback='127.0.0.1'),
config.getint('REDIS', 'port', fallback=6379)),
db=config.getint('REDIS', 'db', fallback=1))
res = await sub_client.subscribe('channel:foo')
ch1 = res[0]
async def reader(ch):
while await ch.wait_message():
msg = await ch.get_json()
logger.debug("channel[%s] Got Message:%s", ch.name.decode(), msg)
logger.debug('quit reader!')
tsk = asyncio.ensure_future(reader(ch1))
res = await pub_client.publish_json('channel:foo', ["Hello", "world"])
self.assertEqual(res, 1)
await sub_client.unsubscribe('channel:foo')
await tsk
sub_client.close()
pub_client.close()
class InterfaceTest(asynctest.TestCase):
loop = None # make pycharm happy
def setUp(self):
self.redis_client = redis.StrictRedis(db=1, decode_responses=True)
<|code_end|>
. Write the next line using the current file imports:
import asyncio
import aiohttp
import aioredis
import asynctest
import redis
import ujson as json
import json
import pydatacoll.utils.logger as my_logger
from pydatacoll.resources.protocol import *
from pydatacoll.resources.redis_key import *
from test.mock_device import mock_data, iec104device
from pydatacoll import api_server
from pydatacoll.utils.read_config import *
and context from other files:
# Path: test/mock_device/mock_data.py
# def generate():
#
# Path: test/mock_device/iec104device.py
# class IEC104Device:
# def __init__(self, device, reader, writer):
# async def receive(self):
# def connection_lost(self):
# def inc_ssn(self):
# def inc_rsn(self):
# def start_timer(self, timer_id):
# def stop_timer(self, timer_id):
# def on_timer0(self):
# def on_timer1(self):
# def on_timer2(self):
# def on_timer3(self):
# async def handle_u(self, frame):
# async def handle_i(self, frame):
# async def send_frame(self, frame, check=True):
# async def check_to_send(self, frame):
# def save_frame(self, frame, send=True):
# async def generate_call_all_data(self):
# def generate_call_power_data(self):
# def create_servers(io_loop):
# def run_server():
#
# Path: pydatacoll/api_server.py
# HANDLER_TIME_OUT = config.getint('SERVER', 'web_timeout', fallback=10)
# class APIServer(ParamFunctionContainer):
# def __init__(self, port: int = None, production: bool = None, io_loop: asyncio.AbstractEventLoop = None):
# def _add_router(self):
# def _install_plugins(self):
# def _uninstall_plugins(self):
# def stop_server(self):
# def found_and_delete(self, match: str):
# async def _read_data(request):
# async def get_index(self, request):
# async def get_redis_key(self, _):
# async def get_device_protocol_list(self, _):
# async def get_term_protocol_list(self, _):
# async def get_formula_list(self, _):
# async def get_formula(self, request):
# async def get_device_list(self, _):
# async def get_device(self, request):
# async def get_term_list(self, _):
# async def get_term(self, request):
# async def get_item_list(self, _):
# async def get_item(self, request):
# async def get_device_term_list(self, request):
# async def get_term_item_list(self, request):
# async def get_term_item(self, request):
# async def get_data_list(self, request):
# async def get_data(self, request):
# async def create_formula(self, request):
# async def create_formula_batch(self, request):
# async def update_formula(self, request):
# async def del_formula(self, request):
# async def del_formula_batch(self, request):
# async def create_device(self, request):
# async def create_device_batch(self, request):
# async def update_device(self, request):
# async def del_device(self, request):
# async def del_device_batch(self, request):
# async def create_term(self, request):
# async def create_term_batch(self, request):
# async def update_term(self, request):
# async def del_term(self, request):
# async def del_term_batch(self, request):
# async def create_item(self, request):
# async def create_item_batch(self, request):
# async def update_item(self, request):
# async def del_item(self, request):
# async def del_item_batch(self, request):
# async def create_term_item(self, request):
# async def create_term_item_batch(self, request):
# async def update_term_item(self, request):
# async def del_term_item(self, request):
# async def del_term_item_batch(self, request):
# async def device_call(self, request):
# async def reader(ch):
# async def device_ctrl(self, request):
# async def reader(ch):
# async def formula_check(self, request):
# async def reader(ch):
# async def sql_check(self, request):
# async def reader(ch):
# def main():
, which may include functions, classes, or code. Output only the next line. | mock_data.generate() |
Using the snippet: <|code_start|> sub_client = await aioredis.create_redis((config.get('REDIS', 'host', fallback='127.0.0.1'),
config.getint('REDIS', 'port', fallback=6379)),
db=config.getint('REDIS', 'db', fallback=1))
res = await sub_client.subscribe('channel:foo')
ch1 = res[0]
async def reader(ch):
while await ch.wait_message():
msg = await ch.get_json()
logger.debug("channel[%s] Got Message:%s", ch.name.decode(), msg)
logger.debug('quit reader!')
tsk = asyncio.ensure_future(reader(ch1))
res = await pub_client.publish_json('channel:foo', ["Hello", "world"])
self.assertEqual(res, 1)
await sub_client.unsubscribe('channel:foo')
await tsk
sub_client.close()
pub_client.close()
class InterfaceTest(asynctest.TestCase):
loop = None # make pycharm happy
def setUp(self):
self.redis_client = redis.StrictRedis(db=1, decode_responses=True)
mock_data.generate()
self.server_list = list()
<|code_end|>
, determine the next line of code. You have imports:
import asyncio
import aiohttp
import aioredis
import asynctest
import redis
import ujson as json
import json
import pydatacoll.utils.logger as my_logger
from pydatacoll.resources.protocol import *
from pydatacoll.resources.redis_key import *
from test.mock_device import mock_data, iec104device
from pydatacoll import api_server
from pydatacoll.utils.read_config import *
and context (class names, function names, or code) available:
# Path: test/mock_device/mock_data.py
# def generate():
#
# Path: test/mock_device/iec104device.py
# class IEC104Device:
# def __init__(self, device, reader, writer):
# async def receive(self):
# def connection_lost(self):
# def inc_ssn(self):
# def inc_rsn(self):
# def start_timer(self, timer_id):
# def stop_timer(self, timer_id):
# def on_timer0(self):
# def on_timer1(self):
# def on_timer2(self):
# def on_timer3(self):
# async def handle_u(self, frame):
# async def handle_i(self, frame):
# async def send_frame(self, frame, check=True):
# async def check_to_send(self, frame):
# def save_frame(self, frame, send=True):
# async def generate_call_all_data(self):
# def generate_call_power_data(self):
# def create_servers(io_loop):
# def run_server():
#
# Path: pydatacoll/api_server.py
# HANDLER_TIME_OUT = config.getint('SERVER', 'web_timeout', fallback=10)
# class APIServer(ParamFunctionContainer):
# def __init__(self, port: int = None, production: bool = None, io_loop: asyncio.AbstractEventLoop = None):
# def _add_router(self):
# def _install_plugins(self):
# def _uninstall_plugins(self):
# def stop_server(self):
# def found_and_delete(self, match: str):
# async def _read_data(request):
# async def get_index(self, request):
# async def get_redis_key(self, _):
# async def get_device_protocol_list(self, _):
# async def get_term_protocol_list(self, _):
# async def get_formula_list(self, _):
# async def get_formula(self, request):
# async def get_device_list(self, _):
# async def get_device(self, request):
# async def get_term_list(self, _):
# async def get_term(self, request):
# async def get_item_list(self, _):
# async def get_item(self, request):
# async def get_device_term_list(self, request):
# async def get_term_item_list(self, request):
# async def get_term_item(self, request):
# async def get_data_list(self, request):
# async def get_data(self, request):
# async def create_formula(self, request):
# async def create_formula_batch(self, request):
# async def update_formula(self, request):
# async def del_formula(self, request):
# async def del_formula_batch(self, request):
# async def create_device(self, request):
# async def create_device_batch(self, request):
# async def update_device(self, request):
# async def del_device(self, request):
# async def del_device_batch(self, request):
# async def create_term(self, request):
# async def create_term_batch(self, request):
# async def update_term(self, request):
# async def del_term(self, request):
# async def del_term_batch(self, request):
# async def create_item(self, request):
# async def create_item_batch(self, request):
# async def update_item(self, request):
# async def del_item(self, request):
# async def del_item_batch(self, request):
# async def create_term_item(self, request):
# async def create_term_item_batch(self, request):
# async def update_term_item(self, request):
# async def del_term_item(self, request):
# async def del_term_item_batch(self, request):
# async def device_call(self, request):
# async def reader(ch):
# async def device_ctrl(self, request):
# async def reader(ch):
# async def formula_check(self, request):
# async def reader(ch):
# async def sql_check(self, request):
# async def reader(ch):
# def main():
. Output only the next line. | self.server_list = iec104device.create_servers(self.loop) |
Predict the next line after this snippet: <|code_start|> config.getint('REDIS', 'port', fallback=6379)),
db=config.getint('REDIS', 'db', fallback=1))
res = await sub_client.subscribe('channel:foo')
ch1 = res[0]
async def reader(ch):
while await ch.wait_message():
msg = await ch.get_json()
logger.debug("channel[%s] Got Message:%s", ch.name.decode(), msg)
logger.debug('quit reader!')
tsk = asyncio.ensure_future(reader(ch1))
res = await pub_client.publish_json('channel:foo', ["Hello", "world"])
self.assertEqual(res, 1)
await sub_client.unsubscribe('channel:foo')
await tsk
sub_client.close()
pub_client.close()
class InterfaceTest(asynctest.TestCase):
loop = None # make pycharm happy
def setUp(self):
self.redis_client = redis.StrictRedis(db=1, decode_responses=True)
mock_data.generate()
self.server_list = list()
self.server_list = iec104device.create_servers(self.loop)
<|code_end|>
using the current file's imports:
import asyncio
import aiohttp
import aioredis
import asynctest
import redis
import ujson as json
import json
import pydatacoll.utils.logger as my_logger
from pydatacoll.resources.protocol import *
from pydatacoll.resources.redis_key import *
from test.mock_device import mock_data, iec104device
from pydatacoll import api_server
from pydatacoll.utils.read_config import *
and any relevant context from other files:
# Path: test/mock_device/mock_data.py
# def generate():
#
# Path: test/mock_device/iec104device.py
# class IEC104Device:
# def __init__(self, device, reader, writer):
# async def receive(self):
# def connection_lost(self):
# def inc_ssn(self):
# def inc_rsn(self):
# def start_timer(self, timer_id):
# def stop_timer(self, timer_id):
# def on_timer0(self):
# def on_timer1(self):
# def on_timer2(self):
# def on_timer3(self):
# async def handle_u(self, frame):
# async def handle_i(self, frame):
# async def send_frame(self, frame, check=True):
# async def check_to_send(self, frame):
# def save_frame(self, frame, send=True):
# async def generate_call_all_data(self):
# def generate_call_power_data(self):
# def create_servers(io_loop):
# def run_server():
#
# Path: pydatacoll/api_server.py
# HANDLER_TIME_OUT = config.getint('SERVER', 'web_timeout', fallback=10)
# class APIServer(ParamFunctionContainer):
# def __init__(self, port: int = None, production: bool = None, io_loop: asyncio.AbstractEventLoop = None):
# def _add_router(self):
# def _install_plugins(self):
# def _uninstall_plugins(self):
# def stop_server(self):
# def found_and_delete(self, match: str):
# async def _read_data(request):
# async def get_index(self, request):
# async def get_redis_key(self, _):
# async def get_device_protocol_list(self, _):
# async def get_term_protocol_list(self, _):
# async def get_formula_list(self, _):
# async def get_formula(self, request):
# async def get_device_list(self, _):
# async def get_device(self, request):
# async def get_term_list(self, _):
# async def get_term(self, request):
# async def get_item_list(self, _):
# async def get_item(self, request):
# async def get_device_term_list(self, request):
# async def get_term_item_list(self, request):
# async def get_term_item(self, request):
# async def get_data_list(self, request):
# async def get_data(self, request):
# async def create_formula(self, request):
# async def create_formula_batch(self, request):
# async def update_formula(self, request):
# async def del_formula(self, request):
# async def del_formula_batch(self, request):
# async def create_device(self, request):
# async def create_device_batch(self, request):
# async def update_device(self, request):
# async def del_device(self, request):
# async def del_device_batch(self, request):
# async def create_term(self, request):
# async def create_term_batch(self, request):
# async def update_term(self, request):
# async def del_term(self, request):
# async def del_term_batch(self, request):
# async def create_item(self, request):
# async def create_item_batch(self, request):
# async def update_item(self, request):
# async def del_item(self, request):
# async def del_item_batch(self, request):
# async def create_term_item(self, request):
# async def create_term_item_batch(self, request):
# async def update_term_item(self, request):
# async def del_term_item(self, request):
# async def del_term_item_batch(self, request):
# async def device_call(self, request):
# async def reader(ch):
# async def device_ctrl(self, request):
# async def reader(ch):
# async def formula_check(self, request):
# async def reader(ch):
# async def sql_check(self, request):
# async def reader(ch):
# def main():
. Output only the next line. | self.api_server = api_server.APIServer(io_loop=self.loop, port=8080) |
Using the snippet: <|code_start|> {'username': '',
'password': password},
# Invalid password
{'username': user_id,
'password': None}
]
for data in bad_data:
self.api.post("/sessions",
data=data,
status_code=422)
def test_invalid_token(self):
"""Try to do a request using invalid token."""
self.api.get("/users", token=u"There is no token!", status_code=401)
class PasswordVerificationTest(WebTest):
"""Test if password verfication and rehashing works."""
def test_verify_hash(self):
"""Test verify hash.
Also needs app context to access config.
"""
with self.app.app_context():
hashed = self.app.config['PASSWORD_CONTEXT'].hash("some_pw")
# Correct password
self.assertTrue(
<|code_end|>
, determine the next line of code. You have imports:
from bson import ObjectId
from passlib.context import CryptContext
from passlib.hash import pbkdf2_sha256
from amivapi.auth.sessions import verify_password
from amivapi.tests.utils import WebTest
and context (class names, function names, or code) available:
# Path: amivapi/auth/sessions.py
# def verify_password(user, plaintext):
# """Check password of user, rehash if necessary.
#
# It is possible that the password is None, e.g. if the user is authenticated
# via LDAP. In this case default to "not verified".
#
# Args:
# user (dict): the user in question.
# plaintext (string): password to check
#
# Returns:
# bool: True if password matches. False if it doesn't or if there is no
# password set and/or provided.
# """
# password_context = app.config['PASSWORD_CONTEXT']
#
# if (plaintext is None) or (user['password'] is None):
# return False
#
# is_valid = password_context.verify(plaintext, user['password'])
#
# if is_valid and password_context.needs_update(user['password']):
# # update password - hook will handle hashing
# update = {'password': plaintext}
# with admin_permissions():
# patch_internal("users", payload=update, _id=user['_id'])
# return is_valid
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
. Output only the next line. | verify_password({'password': hashed}, "some_pw") |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Sentry integration tests."""
# Get test dsn from environment
SENTRY_DSN = getenv('SENTRY_TEST_DSN')
class SentryIntegrationTest(WebTestNoAuth):
"""Raise an error to check if it gets sent to Sentry."""
def setUp(self, *args, **kwargs):
"""Extended setUp: Move environment variables to config."""
extra_config = {
'SENTRY_DSN': SENTRY_DSN,
'SENTRY_ENVIRONMENT': 'testing',
}
extra_config.update(kwargs)
super().setUp(*args, **extra_config)
<|code_end|>
, determine the next line of code. You have imports:
from os import getenv
from flask.signals import got_request_exception
from amivapi.tests.utils import WebTestNoAuth, skip_if_false
import warnings
and context (class names, function names, or code) available:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# def skip_if_false(condition, reason):
# """Decorator to mark tests to be skipped if condition is false."""
# def _skip(func):
# return func if condition else pytest.mark.skip(reason=reason)(func)
# return _skip
. Output only the next line. | @skip_if_false(SENTRY_DSN, "Sentry test requires environment variable " |
Given the following code snippet before the placeholder: <|code_start|>Send emails to users when they have a new entry on the blacklist or one of their
entries get resolved/deleted.
"""
def _get_email(item):
"""Retrieve the user email for a blacklist entry."""
id_field = current_app.config['ID_FIELD']
lookup = {id_field: item['user']}
user = current_app.data.find_one('users', None, **lookup)
return user['email']
@schedulable
def send_removed_mail(item):
"""Send scheduled email when a blacklist entry times out."""
_item = current_app.data.find_one('blacklist', None, {"_id": item['_id']})
# Check that the end date is still correct and has not changed again
if _item is None:
return # Entry was deleted, no mail to send anymore
if _item.get('end_time') is None:
return # Entry was patched to last indefinitely, so no mail to send.
if _item['end_time'].replace(tzinfo=None) != item['end_time']:
return # Entry was edited, so this is outdated.
email = _get_email(_item)
fields = {'reason': _item['reason']}
<|code_end|>
, predict the next line using imports from the current file:
from flask import current_app
from amivapi.utils import mail
from datetime import datetime
from amivapi.cron import schedulable, schedule_task
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/utils.py
# def mail(to, subject, text):
# """Send a mail to a list of recipients.
#
# The mail is sent from the address specified by `API_MAIL` in the config,
# and the subject formatted according to `API_MAIL_SUBJECT`.
#
#
# Args:
# to(list of strings): List of recipient addresses
# subject(string): Subject string
# text(string): Mail content
# """
# sender = app.config['API_MAIL']
# subject = app.config['API_MAIL_SUBJECT'].format(subject=subject)
#
# if app.config.get('TESTING', False):
# app.test_mails.append({
# 'subject': subject,
# 'from': sender,
# 'receivers': to,
# 'text': text
# })
# elif config.SMTP_SERVER and config.SMTP_PORT:
# msg = MIMEText(text)
# msg['Subject'] = subject
# msg['From'] = sender
# msg['To'] = ';'.join([to] if isinstance(to, str) else to)
#
# try:
# with smtplib.SMTP(config.SMTP_SERVER,
# port=config.SMTP_PORT,
# timeout=config.SMTP_TIMEOUT) as smtp:
# status_code, _ = smtp.starttls()
# if status_code != 220:
# app.logger.error("Failed to create secure "
# "SMTP connection!")
# return
#
# if config.SMTP_USERNAME and config.SMTP_PASSWORD:
# smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
# else:
# smtp.ehlo()
#
# try:
# smtp.sendmail(msg['From'], to, msg.as_string())
# except smtplib.SMTPRecipientsRefused:
# error = ("Failed to send mail:\n"
# "From: %s\nTo: %s\n"
# "Subject: %s\n\n%s")
# app.logger.error(error % (sender, str(to), subject, text))
# except smtplib.SMTPException as e:
# app.logger.error("SMTP error trying to send mails: %s" % e)
#
# Path: amivapi/cron.py
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
. Output only the next line. | mail(email, |
Continue the code snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Logic to send emails on blacklist changes.
Send emails to users when they have a new entry on the blacklist or one of their
entries get resolved/deleted.
"""
def _get_email(item):
"""Retrieve the user email for a blacklist entry."""
id_field = current_app.config['ID_FIELD']
lookup = {id_field: item['user']}
user = current_app.data.find_one('users', None, **lookup)
return user['email']
<|code_end|>
. Use current file imports:
from flask import current_app
from amivapi.utils import mail
from datetime import datetime
from amivapi.cron import schedulable, schedule_task
and context (classes, functions, or code) from other files:
# Path: amivapi/utils.py
# def mail(to, subject, text):
# """Send a mail to a list of recipients.
#
# The mail is sent from the address specified by `API_MAIL` in the config,
# and the subject formatted according to `API_MAIL_SUBJECT`.
#
#
# Args:
# to(list of strings): List of recipient addresses
# subject(string): Subject string
# text(string): Mail content
# """
# sender = app.config['API_MAIL']
# subject = app.config['API_MAIL_SUBJECT'].format(subject=subject)
#
# if app.config.get('TESTING', False):
# app.test_mails.append({
# 'subject': subject,
# 'from': sender,
# 'receivers': to,
# 'text': text
# })
# elif config.SMTP_SERVER and config.SMTP_PORT:
# msg = MIMEText(text)
# msg['Subject'] = subject
# msg['From'] = sender
# msg['To'] = ';'.join([to] if isinstance(to, str) else to)
#
# try:
# with smtplib.SMTP(config.SMTP_SERVER,
# port=config.SMTP_PORT,
# timeout=config.SMTP_TIMEOUT) as smtp:
# status_code, _ = smtp.starttls()
# if status_code != 220:
# app.logger.error("Failed to create secure "
# "SMTP connection!")
# return
#
# if config.SMTP_USERNAME and config.SMTP_PASSWORD:
# smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
# else:
# smtp.ehlo()
#
# try:
# smtp.sendmail(msg['From'], to, msg.as_string())
# except smtplib.SMTPRecipientsRefused:
# error = ("Failed to send mail:\n"
# "From: %s\nTo: %s\n"
# "Subject: %s\n\n%s")
# app.logger.error(error % (sender, str(to), subject, text))
# except smtplib.SMTPException as e:
# app.logger.error("SMTP error trying to send mails: %s" % e)
#
# Path: amivapi/cron.py
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
. Output only the next line. | @schedulable |
Based on the snippet: <|code_start|> return # Entry was patched to last indefinitely, so no mail to send.
if _item['end_time'].replace(tzinfo=None) != item['end_time']:
return # Entry was edited, so this is outdated.
email = _get_email(_item)
fields = {'reason': _item['reason']}
mail(email,
'Your blacklist entry has been removed!',
current_app.config['BLACKLIST_REMOVED'].format(**fields))
def notify_new_blacklist(items):
"""Send an email to a user who has a new blacklist entry."""
for item in items:
email = _get_email(item)
fields = {
'reason': item['reason'],
'reply_to': current_app.config['BLACKLIST_REPLY_TO']
}
if item['price']:
fields['price'] = item['price']/100 # convert Rappen to CHF
template = current_app.config['BLACKLIST_ADDED_EMAIL_W_PRICE']
else:
template = current_app.config['BLACKLIST_ADDED_EMAIL_WO_PRICE']
mail(email, 'You have been blacklisted!', template.format(**fields))
# If the end time is already known, schedule removal mail
if item['end_time'] and item['end_time'] > datetime.utcnow():
<|code_end|>
, predict the immediate next line with the help of imports:
from flask import current_app
from amivapi.utils import mail
from datetime import datetime
from amivapi.cron import schedulable, schedule_task
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/utils.py
# def mail(to, subject, text):
# """Send a mail to a list of recipients.
#
# The mail is sent from the address specified by `API_MAIL` in the config,
# and the subject formatted according to `API_MAIL_SUBJECT`.
#
#
# Args:
# to(list of strings): List of recipient addresses
# subject(string): Subject string
# text(string): Mail content
# """
# sender = app.config['API_MAIL']
# subject = app.config['API_MAIL_SUBJECT'].format(subject=subject)
#
# if app.config.get('TESTING', False):
# app.test_mails.append({
# 'subject': subject,
# 'from': sender,
# 'receivers': to,
# 'text': text
# })
# elif config.SMTP_SERVER and config.SMTP_PORT:
# msg = MIMEText(text)
# msg['Subject'] = subject
# msg['From'] = sender
# msg['To'] = ';'.join([to] if isinstance(to, str) else to)
#
# try:
# with smtplib.SMTP(config.SMTP_SERVER,
# port=config.SMTP_PORT,
# timeout=config.SMTP_TIMEOUT) as smtp:
# status_code, _ = smtp.starttls()
# if status_code != 220:
# app.logger.error("Failed to create secure "
# "SMTP connection!")
# return
#
# if config.SMTP_USERNAME and config.SMTP_PASSWORD:
# smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
# else:
# smtp.ehlo()
#
# try:
# smtp.sendmail(msg['From'], to, msg.as_string())
# except smtplib.SMTPRecipientsRefused:
# error = ("Failed to send mail:\n"
# "From: %s\nTo: %s\n"
# "Subject: %s\n\n%s")
# app.logger.error(error % (sender, str(to), subject, text))
# except smtplib.SMTPException as e:
# app.logger.error("SMTP error trying to send mails: %s" % e)
#
# Path: amivapi/cron.py
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
. Output only the next line. | schedule_task(item['end_time'], send_removed_mail, item) |
Here is a snippet: <|code_start|> u"male" if int(data['swissEduPersonGender']) == 1 else u"female"
# See file docstring for explanation of `deparmentNumber` field
# In some rare cases, the departmentNumber field is either empty
# or missing -> normalize to empty string
department_info = next(iter(
data.get('description') or data.get('departmentNumber') or []
), '')
department_map = current_app.config['LDAP_DEPARTMENT_MAP'].items()
department = (dept for phrase, dept in department_map
if phrase in department_info)
res['department'] = next(department, None) # None if no match
# Membership: One of our departments and VSETH member
is_member = ((res['department'] is not None) and
('VSETH Mitglied' in data['ou']))
res['membership'] = u"regular" if is_member else u"none"
# For members, send newsletter to True by default
if is_member:
res['send_newsletter'] = True
return res
def _create_or_update_user(ldap_data):
"""Try to find user in database. Update if it exists, create otherwise."""
query = {'nethz': ldap_data['nethz']}
db_data = current_app.data.driver.db['users'].find_one(query)
<|code_end|>
. Write the next line using the current file imports:
from eve.methods.patch import patch_internal
from eve.methods.post import post_internal
from flask import current_app
from nethz.ldap import AuthenticatedLdap
from amivapi.utils import admin_permissions
and context from other files:
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
, which may include functions, classes, or code. Output only the next line. | with admin_permissions(): |
Here is a snippet: <|code_start|> def create_user_lookup_filter(self, user_id):
"""Create a filter for item lookup.
Not a member: Can see only himself
A Member: Can see everyone
Note: Users will only see complete info for themselves.
But excluding other fields will be done in a hook later.
Args:
user_id (str): Id of the user. No public methods -> wont be None
Returns:
dict: The filter, will be combined with other filters in the hook.
Return None if no filters should be applied.
"""
# Find out if not member
collection = current_app.data.driver.db['users']
# set projection to only return membership
result = collection.find_one({'_id': ObjectId(user_id)},
{'membership': 1})
if result['membership'] == "none":
# Can't see others
return {'_id': user_id}
else:
# Can see everyone (fields will be filtered later)
return {}
<|code_end|>
. Write the next line using the current file imports:
from bson import ObjectId
from flask import current_app, g
from amivapi.auth import AmivTokenAuth
from amivapi.utils import on_post_hook
and context from other files:
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/utils.py
# def on_post_hook(func):
# """Wrapper for an Eve `on_post_METHOD_resource` hook.
#
# The hook receives only a flask response object, which is difficult to
# manipulate.
# This wrapper extracts the data as dict and set the data again after the
# wrapped function has manipulated it.
# The function is only called for successful requests, otherwise there
# is no payload.
#
# If we are in passthrough mode, e.g. for sending files, modifying the
# payload is not possible and the function is not called.
#
# The wrapped function can look like this:
#
# my_hook(payload):
# ...
#
# or, for hooks that don't specify the resource:
#
# my_hook(payload):
# ...
# """
# @wraps(func)
# def wrapped(*args):
# """This is the hook eve will see."""
# response = args[-1]
# if (response.status_code in range(200, 300) and
# not response.direct_passthrough):
# payload = json.loads(response.get_data(as_text=True))
# func(*args, payload)
# response.set_data(json.dumps(payload))
# return wrapped
, which may include functions, classes, or code. Output only the next line. | @on_post_hook |
Next line prediction: <|code_start|> 'nullable': False,
'unique_combination': ['event'],
'required': True,
'excludes': 'email',
},
'additional_fields': {
"description": "Additional signup information, must match the "
"schema defined in `additional_fields` of the "
"event.",
"example": None,
'nullable': True,
'default': None,
'type': 'string',
'json_event_field': True,
},
'email': {
"description": "If a user is signed up, this field is "
"read-only and shows the user's email. If the "
"event is *public*, this field can be used "
"*instead* of the `user` field to sign up "
"an unregistered person via email. "
"Cannot be modified with PATCH.",
"example": None,
'email_signup_must_be_allowed': True,
'no_user_mail': True,
'maxlength': 100,
'not_patchable': True,
'nullable': False,
<|code_end|>
. Use current file imports:
(from amivapi.settings import EMAIL_REGEX
from .authorization import EventAuth, EventSignupAuth)
and context including class names, function names, or small code snippets from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# Path: amivapi/events/authorization.py
# class EventAuth(AmivTokenAuth):
# """Auth for events."""
#
# def has_item_write_permission(self, user_id, item):
# """The group moderator is allowed to change things."""
# # Return true if a moderator exists and it is equal to the current user
# return item.get('moderator') and (
# user_id == str(get_id(item['moderator'])))
#
# class EventSignupAuth(AmivTokenAuth):
# def create_user_lookup_filter(self, user_id):
# """Users can see own signups and signups for moderated events.
# """
# # Find events the user moderates
# event_collection = current_app.data.driver.db['events']
# events = event_collection.find({'moderator': ObjectId(user_id)},
# {'_id': 1})
# moderated_events = [event['_id'] for event in events]
#
# return {'$or': [
# {'user': user_id},
# {'event': {'$in': moderated_events}}
# ]}
#
# def has_item_write_permission(self, user_id, item):
# """Users can modify their signups within the registration window.
# Moderators can not modify signups from other users.
# """
# if isinstance(item['event'], dict):
# event = item['event']
# else:
# # Event is not embedded, get the event first
# lookup = {current_app.config['ID_FIELD']: item['event']}
# event = current_app.data.find_one('events', None, **lookup)
#
# # Remove tzinfo to compare to utcnow (API only accepts UTC anyways)
# time_register_start = event['time_register_start'].replace(tzinfo=None)
# time_register_end = event['time_register_end'].replace(tzinfo=None)
#
# # Only the user itself can modify the item (not moderators), and only
# # within the signup window
# return (('user' in item) and
# (user_id == str(get_id(item['user']))) and
# (time_register_start <= dt.utcnow() <= time_register_end))
#
# def has_resource_write_permission(self, user_id):
# """Anyone can sign up. Further requirements are enforced with validators
# to allow precise error messages.
#
# Users may only sign themselves up and anyone may POST with an email
# address.
# """
# return True
. Output only the next line. | 'regex': EMAIL_REGEX, |
Given the following code snippet before the placeholder: <|code_start|> },
"required": ["SBB_Abo"]
}
```
> Currently, we only support JSON Schema
> [Draft 4](https://json-schema.org/specification-links.html#draft-4) and
> additionally require `additionalProperties` to be `false`.
An event can now provide the following object (again, as a string) with the
signup:
```
{
"SBB_Abo": "GA"
}
```
""")
description_signups = ("""
A signup to an [Event](#tag/Event).
""")
eventdomain = {
'events': {
'description': description_events,
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
, predict the next line using imports from the current file:
from amivapi.settings import EMAIL_REGEX
from .authorization import EventAuth, EventSignupAuth
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# Path: amivapi/events/authorization.py
# class EventAuth(AmivTokenAuth):
# """Auth for events."""
#
# def has_item_write_permission(self, user_id, item):
# """The group moderator is allowed to change things."""
# # Return true if a moderator exists and it is equal to the current user
# return item.get('moderator') and (
# user_id == str(get_id(item['moderator'])))
#
# class EventSignupAuth(AmivTokenAuth):
# def create_user_lookup_filter(self, user_id):
# """Users can see own signups and signups for moderated events.
# """
# # Find events the user moderates
# event_collection = current_app.data.driver.db['events']
# events = event_collection.find({'moderator': ObjectId(user_id)},
# {'_id': 1})
# moderated_events = [event['_id'] for event in events]
#
# return {'$or': [
# {'user': user_id},
# {'event': {'$in': moderated_events}}
# ]}
#
# def has_item_write_permission(self, user_id, item):
# """Users can modify their signups within the registration window.
# Moderators can not modify signups from other users.
# """
# if isinstance(item['event'], dict):
# event = item['event']
# else:
# # Event is not embedded, get the event first
# lookup = {current_app.config['ID_FIELD']: item['event']}
# event = current_app.data.find_one('events', None, **lookup)
#
# # Remove tzinfo to compare to utcnow (API only accepts UTC anyways)
# time_register_start = event['time_register_start'].replace(tzinfo=None)
# time_register_end = event['time_register_end'].replace(tzinfo=None)
#
# # Only the user itself can modify the item (not moderators), and only
# # within the signup window
# return (('user' in item) and
# (user_id == str(get_id(item['user']))) and
# (time_register_start <= dt.utcnow() <= time_register_end))
#
# def has_resource_write_permission(self, user_id):
# """Anyone can sign up. Further requirements are enforced with validators
# to allow precise error messages.
#
# Users may only sign themselves up and anyone may POST with an email
# address.
# """
# return True
. Output only the next line. | 'authentication': EventAuth, |
Using the snippet: <|code_start|> 'or unconfirmed email signups.',
'readonly': True,
'type': 'integer'
},
'moderator': {
'description': '`_id` of a user which will be the event '
'moderator, who can modify the event.',
'example': 'ed1ac3fa99034762f7b55e5a',
'type': 'objectid',
'data_relation': {
'resource': 'users',
'embeddable': True,
},
'nullable': True,
'default': None,
},
},
},
'eventsignups': {
'resource_title': 'Event Signups',
'item_title': 'Event Signup',
'description': description_signups,
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
, determine the next line of code. You have imports:
from amivapi.settings import EMAIL_REGEX
from .authorization import EventAuth, EventSignupAuth
and context (class names, function names, or code) available:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# Path: amivapi/events/authorization.py
# class EventAuth(AmivTokenAuth):
# """Auth for events."""
#
# def has_item_write_permission(self, user_id, item):
# """The group moderator is allowed to change things."""
# # Return true if a moderator exists and it is equal to the current user
# return item.get('moderator') and (
# user_id == str(get_id(item['moderator'])))
#
# class EventSignupAuth(AmivTokenAuth):
# def create_user_lookup_filter(self, user_id):
# """Users can see own signups and signups for moderated events.
# """
# # Find events the user moderates
# event_collection = current_app.data.driver.db['events']
# events = event_collection.find({'moderator': ObjectId(user_id)},
# {'_id': 1})
# moderated_events = [event['_id'] for event in events]
#
# return {'$or': [
# {'user': user_id},
# {'event': {'$in': moderated_events}}
# ]}
#
# def has_item_write_permission(self, user_id, item):
# """Users can modify their signups within the registration window.
# Moderators can not modify signups from other users.
# """
# if isinstance(item['event'], dict):
# event = item['event']
# else:
# # Event is not embedded, get the event first
# lookup = {current_app.config['ID_FIELD']: item['event']}
# event = current_app.data.find_one('events', None, **lookup)
#
# # Remove tzinfo to compare to utcnow (API only accepts UTC anyways)
# time_register_start = event['time_register_start'].replace(tzinfo=None)
# time_register_end = event['time_register_end'].replace(tzinfo=None)
#
# # Only the user itself can modify the item (not moderators), and only
# # within the signup window
# return (('user' in item) and
# (user_id == str(get_id(item['user']))) and
# (time_register_start <= dt.utcnow() <= time_register_end))
#
# def has_resource_write_permission(self, user_id):
# """Anyone can sign up. Further requirements are enforced with validators
# to allow precise error messages.
#
# Users may only sign themselves up and anyone may POST with an email
# address.
# """
# return True
. Output only the next line. | 'authentication': EventSignupAuth, |
Based on the snippet: <|code_start|> """Read permission allows get, but no writing."""
key = self.new_object("apikeys", permissions={'apikeys': 'read'})
token = key['token']
item_url = '/apikeys/%s' % key['_id']
self.api.get('/apikeys', token=token, status_code=200)
self.api.get(item_url, token=token, status_code=200)
self.api.post('/apikeys', data={}, token=token, status_code=403)
def test_readwrite_permission(self):
"""Readwrite permission allows everything."""
key = self.new_object("apikeys", permissions={'apikeys': 'readwrite'})
token = key['token']
item_url = '/apikeys/%s' % key['_id']
etag = {'If-Match': key['_etag']}
self.api.get('/apikeys', token=token, status_code=200)
self.api.get(item_url, token=token, status_code=200)
self.api.delete(item_url, headers=etag, token=token, status_code=204)
def test_different_resource(self):
"""Test that apikeys for different resources have no effect."""
key = self.new_object("apikeys", permissions={'users': 'readwrite'})
token = key['token']
self.api.get('/apikeys', token=token, status_code=403)
<|code_end|>
, predict the immediate next line with the help of imports:
from amivapi.tests.utils import WebTest, WebTestNoAuth
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | class ApiKeyModelTests(WebTestNoAuth): |
Given the following code snippet before the placeholder: <|code_start|> event = self.new_object('events', spots=1,
selection_strategy='fcfs')
user1 = self.new_object('users')
user2 = self.new_object('users')
user1_signup = self.api.post('/eventsignups', data={
'user': str(user1['_id']),
'event': str(event['_id'])
}, token=self.get_user_token(user1['_id']), status_code=201).json
self.assertTrue(user1_signup['accepted'])
# Check that a normal user cannot provide the accepted field
self.api.post('/eventsignups', data={
'user': str(user2['_id']),
'event': str(event['_id']),
'accepted': True
}, token=self.get_user_token(user2['_id']), status_code=422)
# Check that admins can always provide the accepted field
user2_signup = self.api.post('/eventsignups', data={
'user': str(user2['_id']),
'event': str(event['_id']),
'accepted': True
}, token=self.get_root_token(), status_code=201).json
self.assertTrue(user2_signup['accepted'])
<|code_end|>
, predict the next line using imports from the current file:
from amivapi.tests.utils import WebTestNoAuth, WebTest
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
. Output only the next line. | class EventsignupQueueTest(WebTestNoAuth): |
Predict the next line for this snippet: <|code_start|>"""Delete cascading system.
This adds an option 'cascade_delete' to data_relations in the schema. If it is
set to true, then deleting the referenced object will also delete the
referencing object. If false, the reference will be set to NULL, when the
referenced object is deleted.
"""
def cascade_delete(resource, item):
"""Cascade DELETE.
Hook to delete all objects, which have the 'cascade_delete' option set
in the data_relation and relate to the object, which was just deleted.
"""
domain = current_app.config['DOMAIN']
deleted_id = item[domain[resource]['id_field']]
for res, res_domain in domain.items():
# Filter schema of `res` to get all fields containing references
# to the resource of the deleted item
relations = ((field, field_def['data_relation'])
for field, field_def in res_domain['schema'].items()
if 'data_relation' in field_def and
field_def['data_relation'].get('resource') == resource)
for field, data_relation in relations:
# All items in `res` with reference to the deleted item
lookup = {field: deleted_id}
<|code_end|>
with the help of current file imports:
from eve.methods.delete import deleteitem_internal
from eve.methods.patch import patch_internal
from flask import current_app
from werkzeug.exceptions import NotFound
from amivapi.utils import admin_permissions
and context from other files:
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
, which may contain function names, class names, or code. Output only the next line. | with admin_permissions(): |
Based on the snippet: <|code_start|> 'not_patchable_unless_admin': True,
'nullable': True,
'default': None,
'example': 'itet'
},
'gender': {
'type': 'string',
'allowed': ['male', 'female'],
'not_patchable_unless_admin': True,
'required': True,
'example': 'male',
},
# Fields the user can modify himself
'password': {
'type': 'string',
'minlength': 7,
'maxlength': 100,
'empty': False,
'nullable': True,
'default': None,
'description': 'Leave empty to use just LDAP authentification. '
'People without LDAP should use this field.',
'session_younger_than': timedelta(minutes=1),
'example': "Hunter2",
'writeonly': True, # 'writeonly' only affects the docs
},
'email': {
'type': 'string',
'maxlength': 100,
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import timedelta
from textwrap import dedent
from amivapi.settings import EMAIL_REGEX
from .security import UserAuth
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# Path: amivapi/users/security.py
# class UserAuth(AmivTokenAuth):
# """Provides auth for /users resource.
#
# This is an example of how to implement the AmivTokenAuth.
#
# Main Goals:
#
# - Registered users can see nethz/name of everyone, full data of themselves
# - Registered users can change their own data (nobody else)
#
#
# We dont have to care about:
#
# - Admins, since for them no filters etc are applied
# - Unregistered users, since no methods are public.
#
# Since only admins can POST, we do not need to implement a custom
# `has_resource_write_permission` - the default is fine.
# """
#
# def has_item_write_permission(self, user_id, item):
# """Check if *user* is allowed to write *item*.
#
# This includes PATCH and DELETE.
#
# User can only write his own data.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return str(item['_id']) == user_id
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup.
#
# Not a member: Can see only himself
# A Member: Can see everyone
#
# Note: Users will only see complete info for themselves.
# But excluding other fields will be done in a hook later.
#
# Args:
# user_id (str): Id of the user. No public methods -> wont be None
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return None if no filters should be applied.
# """
# # Find out if not member
# collection = current_app.data.driver.db['users']
# # set projection to only return membership
# result = collection.find_one({'_id': ObjectId(user_id)},
# {'membership': 1})
#
# if result['membership'] == "none":
# # Can't see others
# return {'_id': user_id}
# else:
# # Can see everyone (fields will be filtered later)
# return {}
. Output only the next line. | 'regex': EMAIL_REGEX, |
Given the following code snippet before the placeholder: <|code_start|><br />
## Security
In addition to the usual
[permissions](#section/Authentication-and-Authorization/Authorization),
some further constraints are in place:
- Passwords are salted and hashed, and they are *never* returned by the API,
not even to admins. Furthermore, showing passwords can not be forced
by projections.
- **Users** can only view all of their own fields.
For other users, only `firstname`, `lastname` and `nethz` are visible.
- **Admins** can view the complete fields for all users.
- All fields synchronized with ETHZ (see above) *cannot be modified* by users.
""")
userdomain = {
'users': {
'description': description,
'additional_lookup': {'field': 'nethz',
'url': 'string'},
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
, predict the next line using imports from the current file:
from datetime import timedelta
from textwrap import dedent
from amivapi.settings import EMAIL_REGEX
from .security import UserAuth
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# Path: amivapi/users/security.py
# class UserAuth(AmivTokenAuth):
# """Provides auth for /users resource.
#
# This is an example of how to implement the AmivTokenAuth.
#
# Main Goals:
#
# - Registered users can see nethz/name of everyone, full data of themselves
# - Registered users can change their own data (nobody else)
#
#
# We dont have to care about:
#
# - Admins, since for them no filters etc are applied
# - Unregistered users, since no methods are public.
#
# Since only admins can POST, we do not need to implement a custom
# `has_resource_write_permission` - the default is fine.
# """
#
# def has_item_write_permission(self, user_id, item):
# """Check if *user* is allowed to write *item*.
#
# This includes PATCH and DELETE.
#
# User can only write his own data.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return str(item['_id']) == user_id
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup.
#
# Not a member: Can see only himself
# A Member: Can see everyone
#
# Note: Users will only see complete info for themselves.
# But excluding other fields will be done in a hook later.
#
# Args:
# user_id (str): Id of the user. No public methods -> wont be None
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return None if no filters should be applied.
# """
# # Find out if not member
# collection = current_app.data.driver.db['users']
# # set projection to only return membership
# result = collection.find_one({'_id': ObjectId(user_id)},
# {'membership': 1})
#
# if result['membership'] == "none":
# # Can't see others
# return {'_id': user_id}
# else:
# # Can see everyone (fields will be filtered later)
# return {}
. Output only the next line. | 'authentication': UserAuth, |
Predict the next line after this snippet: <|code_start|>(this would grant the API key rights to see all users and see
and modify/delete all sessions).
> **IMPORTANT: The most powerful permission**
>
> If you grant an API key `readwrite` permissions to API keys, this key will
> be able to create new API keys with any permissions and also modify it's
> own permissions!
>
> As a result, **`readwrite` permissions for api keys should only be assigned
> with great care**!
Just like [sessions](#tag/Session), API keys return a token which can be
sent in the header
[as described above](#section/Authentication-and-Authorization).
""")
apikeydomain = {
'apikeys': {
'resource_title': 'API Keys',
'item_title': 'API Key',
'description': description,
'public_methods': [],
'public_item_methods': [],
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
using the current file's imports:
from datetime import datetime as dt
from flask import abort, current_app, g
from amivapi.auth.auth import AdminOnlyAuth
from amivapi.utils import register_domain
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and any relevant context from other files:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
. Output only the next line. | 'authentication': AdminOnlyAuth, |
Using the snippet: <|code_start|> 'example': 'Xfh3abXzLoezpwO9WT7oRw',
},
'permissions': {
'description': 'The permissions the API key grants. The value '
'is an object with resources as keys and the '
'permissions as a values.',
'example': {
'users': 'read',
'beverages': 'readwrite',
},
'type': 'dict',
'keysrules': {'type': 'string',
'api_resources': True},
'valuesrules': {'type': 'string',
'allowed': ['read', 'readwrite']},
'required': True,
}
},
}
}
def generate_tokens(items):
for item in items:
item['token'] = token_urlsafe()
def init_apikeys(app):
"""Register API Key resource and add auth hook."""
<|code_end|>
, determine the next line of code. You have imports:
from datetime import datetime as dt
from flask import abort, current_app, g
from amivapi.auth.auth import AdminOnlyAuth
from amivapi.utils import register_domain
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and context (class names, function names, or code) available:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
. Output only the next line. | register_domain(app, apikeydomain) |
Based on the snippet: <|code_start|>Contains model for blacklist.
"""
blacklist_description = ("""
People normally get blacklisted if they don't appear to an event they signed up
for, but other cases could be possible (Bad behaviour, etc). Once on the
blacklist, they shouldn't be able to sign up for any event until they pay
for that event or do something else the board decides (e.g. help at an event).
An entry on the blacklist always has a reason and a start_date (normally the
date of the event where the user didn't appear), most often also a price the
user has to pay to be removed from the blacklist.
One person can have multiple blacklist entries and old entries are not deleted,
but marked with an end date.
Only users with admin-rights for the blacklist can see all entries and create
new ones/edit them. A single user only has the right to see his own blacklist
entries, but he cannot edit them.
""")
blacklist = {
'blacklist': {
'description': blacklist_description,
'resource_methods': ['POST', 'GET'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
, predict the immediate next line with the help of imports:
from amivapi.blacklist.authorization import BlacklistAuth
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/blacklist/authorization.py
# class BlacklistAuth(AmivTokenAuth):
# def create_user_lookup_filter(self, user_id):
# """Users can see their own signups."""
# return {'user': user_id}
#
# def has_item_write_permission(self, user_id, item):
# """Only admins have right permission, but we don't
# have to care about them"""
# return False
. Output only the next line. | 'authentication': BlacklistAuth, |
Next line prediction: <|code_start|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
""" Test that sessions get cleaned up after enough time passed. """
class TestSessionExpiry(WebTest):
def test_session_expiry(self):
with self.app.app_context(), freeze_time() as frozen_time:
self.new_object("users", nethz="pablo", password="password")
self.api.post('/sessions',
data={"username": "pablo", "password": "password"},
status_code=201)
frozen_time.tick(delta=self.app.config['SESSION_TIMEOUT'] -
timedelta(days=1))
<|code_end|>
. Use current file imports:
(from datetime import timedelta
from freezegun import freeze_time
from amivapi.cron import run_scheduled_tasks
from amivapi.tests.utils import WebTest)
and context including class names, function names, or small code snippets from other files:
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
. Output only the next line. | run_scheduled_tasks() |
Given the code snippet: <|code_start|> response = make_response(render_template("loginpage.html",
client_id=client_id,
user=user,
error_msg=error_msg))
response.set_cookie('token', token)
return response
description = ("""
Only registered OAuth clients are allowed to use the central login.
This whitelisting is necessary to prevent phishing attacks.
[More on OAuth with the API][1].
Only **admins** are allowed to access this resource.
[1]: #section/OAuth
""")
oauthclients_domain = {
'oauthclients': {
'resource_title': 'OAuth Clients',
'item_title': 'OAuth Client',
'description': description,
'public_methods': [],
'public_item_methods': [],
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
, generate the next line using the imports in this file:
from urllib.parse import urlencode
from bson import ObjectId
from cerberus import Validator
from eve.methods.post import post_internal
from flask import (
make_response,
abort,
Blueprint,
current_app,
g,
redirect,
render_template,
request,
)
from werkzeug.exceptions import Unauthorized
from amivapi.auth.auth import AdminOnlyAuth, authenticate_token
from amivapi.settings import REDIRECT_URI_REGEX
from amivapi.utils import register_domain
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# def authenticate_token(token):
# """Authenticate user and set g.current_token, g.current_session and
# g.current_user.
#
# See also the authenticate function.
# """
# # Set defaults
# g.current_token = g.current_session = g.current_user = None
#
# if token:
# g.current_token = token
#
# # Get session
# sessions = current_app.data.driver.db['sessions']
# session = sessions.find_one({'token': token})
#
# if session:
# # Update timestamp (remove microseconds to match mongo precision)
# new_time = dt.utcnow().replace(microsecond=0)
# sessions.update_one({'_id': session['_id']},
# {'$set': {
# '_updated': new_time
# }})
# session['_updated'] = new_time
#
# # Save user_id and session with updated timestamp in g
# g.current_session = session
# g.current_user = str(session['user']) # ObjectId to str
#
# Path: amivapi/settings.py
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
. Output only the next line. | 'authentication': AdminOnlyAuth, |
Given snippet: <|code_start|> client = db.find_one({'client_id': client_id})
if not client:
abort(422, 'Unknown client_id. Please contact the author of the page '
'that sent you here.')
if not redirect_uri:
redirect_uri = client['redirect_uri']
if not redirect_uri.startswith(client['redirect_uri']):
abort(422, "Redirect URI is not whitelisted for client_id!")
return redirect_uri
def oauth_redirect(redirect_uri, state):
"""Process login and redirect user.
Loads and validates all inputs from request. First check if the request
contains a cookie with token to use, otherwise check for login data in
form.
Returns:
flask.Response: Flask redirect response
Raises:
werkzeug.exceptions.Unauthorized: If the user cannot be authenticated
"""
# First check for token in cookie
token = request.cookies.get('token')
if token:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from urllib.parse import urlencode
from bson import ObjectId
from cerberus import Validator
from eve.methods.post import post_internal
from flask import (
make_response,
abort,
Blueprint,
current_app,
g,
redirect,
render_template,
request,
)
from werkzeug.exceptions import Unauthorized
from amivapi.auth.auth import AdminOnlyAuth, authenticate_token
from amivapi.settings import REDIRECT_URI_REGEX
from amivapi.utils import register_domain
and context:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# def authenticate_token(token):
# """Authenticate user and set g.current_token, g.current_session and
# g.current_user.
#
# See also the authenticate function.
# """
# # Set defaults
# g.current_token = g.current_session = g.current_user = None
#
# if token:
# g.current_token = token
#
# # Get session
# sessions = current_app.data.driver.db['sessions']
# session = sessions.find_one({'token': token})
#
# if session:
# # Update timestamp (remove microseconds to match mongo precision)
# new_time = dt.utcnow().replace(microsecond=0)
# sessions.update_one({'_id': session['_id']},
# {'$set': {
# '_updated': new_time
# }})
# session['_updated'] = new_time
#
# # Save user_id and session with updated timestamp in g
# g.current_session = session
# g.current_user = str(session['user']) # ObjectId to str
#
# Path: amivapi/settings.py
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
which might include code, classes, or functions. Output only the next line. | authenticate_token(token) |
Continue the code snippet: <|code_start|>def _append_url_params(url, **params):
"""Helper to add addtional parameters to an url query string."""
if '?' not in url:
url += '?'
return '%s&%s' % (url, urlencode(params))
def validate_oauth_authorization_request(response_type, client_id,
redirect_uri, scope, state):
"""Validate an OAuth authentication request for an implicit grant.
See https://tools.ietf.org/html/rfc6749#section-4.2.1
Returns:
str: The actual URL the client should be redirected to.
"""
oauth_schema = {
'response_type': {
'required': True,
'type': 'string',
'allowed': ['token']
},
'client_id': {
'required': True,
'type': 'string',
'empty': False
},
'redirect_uri': {
'type': 'string',
'nullable': True,
<|code_end|>
. Use current file imports:
from urllib.parse import urlencode
from bson import ObjectId
from cerberus import Validator
from eve.methods.post import post_internal
from flask import (
make_response,
abort,
Blueprint,
current_app,
g,
redirect,
render_template,
request,
)
from werkzeug.exceptions import Unauthorized
from amivapi.auth.auth import AdminOnlyAuth, authenticate_token
from amivapi.settings import REDIRECT_URI_REGEX
from amivapi.utils import register_domain
and context (classes, functions, or code) from other files:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# def authenticate_token(token):
# """Authenticate user and set g.current_token, g.current_session and
# g.current_user.
#
# See also the authenticate function.
# """
# # Set defaults
# g.current_token = g.current_session = g.current_user = None
#
# if token:
# g.current_token = token
#
# # Get session
# sessions = current_app.data.driver.db['sessions']
# session = sessions.find_one({'token': token})
#
# if session:
# # Update timestamp (remove microseconds to match mongo precision)
# new_time = dt.utcnow().replace(microsecond=0)
# sessions.update_one({'_id': session['_id']},
# {'$set': {
# '_updated': new_time
# }})
# session['_updated'] = new_time
#
# # Save user_id and session with updated timestamp in g
# g.current_session = session
# g.current_user = str(session['user']) # ObjectId to str
#
# Path: amivapi/settings.py
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
. Output only the next line. | 'regex': REDIRECT_URI_REGEX |
Continue the code snippet: <|code_start|>
'type': 'string',
'required': True,
'nullable': False,
'empty': False,
'unique': True,
'no_html': True,
'maxlength': 100,
},
'redirect_uri': {
'description': "URL the API uses for redirects. This should "
"be the OAuth endpoint of your application. "
"Avoid further redirection from this URL to"
"prevent phishing attacks. Must use HTTPS "
"(for development, HTTP is allowed for "
"`localhost` URLs).",
'example': 'https://admin.organisation.ch',
'type': 'string',
'required': True,
'unique': True,
'regex': REDIRECT_URI_REGEX,
}
}
}
}
def init_oauth(app):
"""Register oauthclient resource."""
<|code_end|>
. Use current file imports:
from urllib.parse import urlencode
from bson import ObjectId
from cerberus import Validator
from eve.methods.post import post_internal
from flask import (
make_response,
abort,
Blueprint,
current_app,
g,
redirect,
render_template,
request,
)
from werkzeug.exceptions import Unauthorized
from amivapi.auth.auth import AdminOnlyAuth, authenticate_token
from amivapi.settings import REDIRECT_URI_REGEX
from amivapi.utils import register_domain
and context (classes, functions, or code) from other files:
# Path: amivapi/auth/auth.py
# class AdminOnlyAuth(AmivTokenAuth):
# """Auth class to use if no access at all is given for non admins of a
# resource."""
# def create_user_lookup_filter(self, user):
# """If this hook gets called, the user is not an admin for this resource.
# Therefore no results should be given. To give a more precise error
# message, we abort. Otherwise normal users would just see an empty list.
# """
# return None
#
# def authenticate_token(token):
# """Authenticate user and set g.current_token, g.current_session and
# g.current_user.
#
# See also the authenticate function.
# """
# # Set defaults
# g.current_token = g.current_session = g.current_user = None
#
# if token:
# g.current_token = token
#
# # Get session
# sessions = current_app.data.driver.db['sessions']
# session = sessions.find_one({'token': token})
#
# if session:
# # Update timestamp (remove microseconds to match mongo precision)
# new_time = dt.utcnow().replace(microsecond=0)
# sessions.update_one({'_id': session['_id']},
# {'$set': {
# '_updated': new_time
# }})
# session['_updated'] = new_time
#
# # Save user_id and session with updated timestamp in g
# g.current_session = session
# g.current_user = str(session['user']) # ObjectId to str
#
# Path: amivapi/settings.py
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# def register_domain(app, domain):
# """Add all resources in a domain to the app.
#
# The domain has to be deep-copied first because eve will modify it
# (since it heavily relies on setdefault()), which can cause problems
# especially in test environments, since the defaults don't get properly
# erase sometimes.
#
# TODO: Make tests better maybe so this is no problem anymore?
#
# Args:
# app (Eve object): The app to extend
# domain (dict): The domain to be added to the app, will not be changed
# """
# domain_copy = deepcopy(domain)
#
# for resource, settings in domain_copy.items():
# # Add default for the resource title:
# # Capitalize like Eve does with item titles
# settings.setdefault('resource_title', resource.capitalize())
#
# app.register_resource(resource, settings)
# _better_schema_defaults(app, resource, settings)
. Output only the next line. | register_domain(app, oauthclients_domain) |
Given the following code snippet before the placeholder: <|code_start|> },
'uploader': {
'description': 'The user who uploaded the files (read-only).',
'example': 'ea059fa90df4703316da25d8',
'type': 'objectid',
'data_relation': {
'resource': 'users',
'embeddable': True,
},
# Must be nullable: e.g. if root user uploads there is no user
'nullable': True,
'readonly': True,
},
'author': {
'description': 'Original author of the uploaded files '
'(professor, assistant, copyright owner)',
'example': "Pablo",
'type': 'string',
'maxlength': 100,
'empty': False,
'nullable': True,
'default': None,
'no_html': True,
'allow_summary': True,
},
'department': {
<|code_end|>
, predict the next line using imports from the current file:
from amivapi.settings import DEPARTMENT_LIST
from .authorization import StudydocsAuth
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/settings.py
# DEPARTMENT_LIST = [
# 'itet',
# 'mavt',
# 'arch',
# 'baug',
# 'bsse',
# 'infk',
# 'matl',
# 'biol',
# 'chab',
# 'math',
# 'phys',
# 'erdw',
# 'usys',
# 'hest',
# 'mtec',
# 'gess'
# ]
#
# Path: amivapi/studydocs/authorization.py
# class StudydocsAuth(AmivTokenAuth):
# def has_item_write_permission(self, user_id, item):
# return str(get_id(item['uploader'])) == user_id
#
# def has_resource_write_permission(self, user_id):
# # All users can create studydocs
# return True
. Output only the next line. | 'example': DEPARTMENT_LIST[0], |
Continue the code snippet: <|code_start|> Prof. Okay: 5
...
lecture:
...
...
```
The summary is only computed for documents matching the current `where` query,
e.g. when searching for ITET documents, only professors related to ITET
documents will show up in the summary.
""")
class StudyDocValidator(object):
"""Custom Validator to register `allow_summary` property."""
def _validate_allow_summary(self, *args, **kwargs):
"""{'type': 'boolean'}"""
studydocdomain = {
'studydocuments': {
'resource_title': "Study Documents",
'item_title': "Study Document",
'description': description,
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH', 'DELETE'],
<|code_end|>
. Use current file imports:
from amivapi.settings import DEPARTMENT_LIST
from .authorization import StudydocsAuth
and context (classes, functions, or code) from other files:
# Path: amivapi/settings.py
# DEPARTMENT_LIST = [
# 'itet',
# 'mavt',
# 'arch',
# 'baug',
# 'bsse',
# 'infk',
# 'matl',
# 'biol',
# 'chab',
# 'math',
# 'phys',
# 'erdw',
# 'usys',
# 'hest',
# 'mtec',
# 'gess'
# ]
#
# Path: amivapi/studydocs/authorization.py
# class StudydocsAuth(AmivTokenAuth):
# def has_item_write_permission(self, user_id, item):
# return str(get_id(item['uploader'])) == user_id
#
# def has_resource_write_permission(self, user_id):
# # All users can create studydocs
# return True
. Output only the next line. | 'authentication': StudydocsAuth, |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Tests for purchases module"""
pdfpath = join(dirname(__file__), "../fixtures", 'test.pdf')
lenapath = join(dirname(__file__), "../fixtures", 'lena.png')
class JobOffersTest(utils.WebTestNoAuth):
"""Test basic functionality of joboffers"""
def test_add_joboffer_nomedia(self):
""" Usecase: A firm wants to post a joboffer on the website without any media
"""
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime, timedelta
from io import BytesIO
from os.path import dirname, join
from amivapi.settings import DATE_FORMAT
from amivapi.tests import utils
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/settings.py
# DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
#
# Path: amivapi/tests/utils.py
# class TestClient(FlaskClient):
# class TestResponse(Response):
# class WebTest(unittest.TestCase, FixtureMixin):
# class WebTestNoAuth(WebTest):
# def open(self, *args, **kwargs):
# def json(self):
# def setUp(self, **extra_config):
# def tearDown(self):
# def get_user_token(self, user_id, created=None):
# def get_root_token(self):
# def setUp(self, **extra_config):
# def authenticate_root(resource):
# def skip_if_false(condition, reason):
# def _skip(func):
. Output only the next line. | time_end = (datetime.utcnow() + timedelta(days=2)).strftime(DATE_FORMAT) |
Here is a snippet: <|code_start|> run_scheduled_tasks()
self.assertTrue(CronTest.has_run)
self.assertEqual(CronTest.received_arg, "arg")
def test_periodic_func(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
# We need to define the function in here to make sure the first
# call is scheduled to the frozen time
@periodic(timedelta(minutes=5), "arg")
def periodic_function(arg):
CronTest.run_count += 1
CronTest.received_arg = arg
self.assertEqual(CronTest.run_count, 0)
run_scheduled_tasks()
# Check the function has run and got the correct argument
self.assertEqual(CronTest.run_count, 1)
self.assertEqual(CronTest.received_arg, "arg")
# Now check that it is called at correct intervals
for _ in range(0, 32):
frozen_time.tick(delta=timedelta(minutes=1))
run_scheduled_tasks()
self.assertEqual(CronTest.run_count, 7)
# Do some cleanup! Else this will get called in other tests..
<|code_end|>
. Write the next line using the current file imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
, which may include functions, classes, or code. Output only the next line. | cron.periodic_functions.remove(periodic_function) |
Next line prediction: <|code_start|> "2016-01-01 00:00:00") as frozen_time:
# We need to define the function in here to make sure the first
# call is scheduled to the frozen time
@periodic(timedelta(minutes=5), "arg")
def periodic_function(arg):
CronTest.run_count += 1
CronTest.received_arg = arg
self.assertEqual(CronTest.run_count, 0)
run_scheduled_tasks()
# Check the function has run and got the correct argument
self.assertEqual(CronTest.run_count, 1)
self.assertEqual(CronTest.received_arg, "arg")
# Now check that it is called at correct intervals
for _ in range(0, 32):
frozen_time.tick(delta=timedelta(minutes=1))
run_scheduled_tasks()
self.assertEqual(CronTest.run_count, 7)
# Do some cleanup! Else this will get called in other tests..
cron.periodic_functions.remove(periodic_function)
def test_scheduling_unknown_function_fails(self):
with self.app.app_context():
def test_func():
pass
<|code_end|>
. Use current file imports:
(from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth)
and context including class names, function names, or small code snippets from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | with self.assertRaises(NotSchedulable): |
Given snippet: <|code_start|>
super().setUp()
def test_scheduled_function_gets_called(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
@schedulable
def scheduled_function(arg):
CronTest.has_run = True
CronTest.received_arg = arg
schedule_task(datetime(2016, 1, 1, 1, 0, 0),
scheduled_function,
"arg")
run_scheduled_tasks()
self.assertFalse(CronTest.has_run)
frozen_time.tick(delta=timedelta(hours=1))
run_scheduled_tasks()
self.assertTrue(CronTest.has_run)
self.assertEqual(CronTest.received_arg, "arg")
def test_periodic_func(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
# We need to define the function in here to make sure the first
# call is scheduled to the frozen time
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
which might include code, classes, or functions. Output only the next line. | @periodic(timedelta(minutes=5), "arg") |
Continue the code snippet: <|code_start|>""" Test scheduler """
class CronTest(WebTestNoAuth):
has_run = False
received_arg = None
run_count = 0
def setUp(self):
CronTest.has_run = False
CronTest.received_arg = None
CronTest.run_count = 0
super().setUp()
def test_scheduled_function_gets_called(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
@schedulable
def scheduled_function(arg):
CronTest.has_run = True
CronTest.received_arg = arg
schedule_task(datetime(2016, 1, 1, 1, 0, 0),
scheduled_function,
"arg")
<|code_end|>
. Use current file imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context (classes, functions, or code) from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | run_scheduled_tasks() |
Given the code snippet: <|code_start|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
""" Test scheduler """
class CronTest(WebTestNoAuth):
has_run = False
received_arg = None
run_count = 0
def setUp(self):
CronTest.has_run = False
CronTest.received_arg = None
CronTest.run_count = 0
super().setUp()
def test_scheduled_function_gets_called(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
<|code_end|>
, generate the next line using the imports in this file:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | @schedulable |
Given snippet: <|code_start|> # Check the function has run and got the correct argument
self.assertEqual(CronTest.run_count, 1)
self.assertEqual(CronTest.received_arg, "arg")
# Now check that it is called at correct intervals
for _ in range(0, 32):
frozen_time.tick(delta=timedelta(minutes=1))
run_scheduled_tasks()
self.assertEqual(CronTest.run_count, 7)
# Do some cleanup! Else this will get called in other tests..
cron.periodic_functions.remove(periodic_function)
def test_scheduling_unknown_function_fails(self):
with self.app.app_context():
def test_func():
pass
with self.assertRaises(NotSchedulable):
schedule_task(datetime.utcnow(), test_func)
def test_schedule_once_soon_works(self):
with self.app.app_context():
CronTest.run_count = 0
@schedulable
def inc():
CronTest.run_count += 1
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
which might include code, classes, or functions. Output only the next line. | schedule_once_soon(inc) |
Continue the code snippet: <|code_start|>#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
""" Test scheduler """
class CronTest(WebTestNoAuth):
has_run = False
received_arg = None
run_count = 0
def setUp(self):
CronTest.has_run = False
CronTest.received_arg = None
CronTest.run_count = 0
super().setUp()
def test_scheduled_function_gets_called(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
@schedulable
def scheduled_function(arg):
CronTest.has_run = True
CronTest.received_arg = arg
<|code_end|>
. Use current file imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context (classes, functions, or code) from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | schedule_task(datetime(2016, 1, 1, 1, 0, 0), |
Based on the snippet: <|code_start|>
@schedulable
def inc():
CronTest.run_count += 1
schedule_once_soon(inc)
print("nmow")
schedule_once_soon(inc)
run_scheduled_tasks()
self.assertEqual(CronTest.run_count, 1)
def test_update_scheduled_task(self):
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
@schedulable
def tester(arg):
CronTest.has_run = True
CronTest.received_arg = arg
schedule_task(datetime(2016, 1, 1, 1, 0, 0),
tester,
"arg")
run_scheduled_tasks()
self.assertFalse(CronTest.has_run)
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime, timedelta
from freezegun import freeze_time
from amivapi import cron
from amivapi.cron import (
NotSchedulable,
periodic,
run_scheduled_tasks,
schedulable,
schedule_once_soon,
schedule_task,
update_scheduled_task
)
from amivapi.tests.utils import WebTestNoAuth
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# def schedulable(func):
# def periodic(period, *args):
# def wrap(func):
# def wrapped():
# def schedule_task(time, func, *args):
# def update_scheduled_task(time, func, *args):
# def schedule_once_soon(func, *args):
# def func_str(func):
# def run_scheduled_tasks():
# def init_app(app):
#
# Path: amivapi/cron.py
# class NotSchedulable(Exception):
# pass
#
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# def schedulable(func):
# """ Registers a function to be in the table of schedulable functions.
# This is necessary, as we can not save references to python functions in the
# database.
# """
# schedulable_functions[func_str(func)] = func
# return func
#
# def schedule_once_soon(func, *args):
# """ Schedules a function to be run as soon as the scheduler is run the next
# time. Also check, that it is not already scheduled to be run first.
# """
# if current_app.data.driver.db['scheduled_tasks'].count_documents(
# {'function': func_str(func)}) != 0:
# return
# schedule_task(datetime.utcnow(), func, *args)
#
# def schedule_task(time, func, *args):
# """ Schedule a task at some point in the future. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].insert_one({
# 'time': time,
# 'function': func_s,
# 'args': pickle.dumps(args)
# })
#
# def update_scheduled_task(time, func, *args):
# """ Update a scheduled task that was previously registered. """
# func_s = func_str(func)
#
# if func_s not in schedulable_functions:
# raise NotSchedulable("%s is not schedulable. Did you forget the "
# "@schedulable decorator?" % func.__name__)
#
# current_app.data.driver.db['scheduled_tasks'].update_one({
# 'function': func_s
# },
# {'$set': {
# 'time': time,
# 'args': pickle.dumps(args)
# }})
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | update_scheduled_task(datetime(2016, 1, 1, 3, 20, 0), |
Next line prediction: <|code_start|>
# Check mail
mail = self.app.test_mails[1]
self.assertEqual(mail['receivers'], 'bla@bla.bl')
expected_text = (
"Congratulations, your blacklist entry with the following reason "
"has been removed:\n\nTest1\n\nBest Regards,\nAMIV"
)
self.assertEqual(mail['text'], expected_text)
def test_receive_scheduled_email_on_create(self):
"""Test if a user receives an email if the end_time is reached"""
with self.app.app_context(), freeze_time(
"2017-01-01 00:00:00") as frozen_time:
user_id = 24 * '0'
blacklist_id = 24 * '1'
# Create user and blacklist entry
self.load_fixture({
'users': [{'_id': user_id, 'email': "bla@bla.bl"}]
})
self.load_fixture({
'blacklist': [{
'_id': blacklist_id,
'user': user_id,
'reason': "Test1",
'end_time': datetime(2017, 1, 2)
}]
})
<|code_end|>
. Use current file imports:
(from amivapi.tests.utils import WebTest
from amivapi.cron import (
run_scheduled_tasks
)
from datetime import datetime
from datetime import timedelta
from freezegun import freeze_time)
and context including class names, function names, or small code snippets from other files:
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
. Output only the next line. | run_scheduled_tasks() |
Given the code snippet: <|code_start|> database. We accept all registered users instantly, others need to click the
confirmation link first"""
for item in items:
if item.get('user', None) is None:
item['confirmed'] = False
else:
item['confirmed'] = True
@email_blueprint.route('/confirm_email/<token>')
def on_confirm_email(token):
"""Email confirmation endpoint.
We try to confirm the specified signup and redirect to a webpage.
"""
try:
s = URLSafeSerializer(get_token_secret())
signup_id = ObjectId(s.loads(token))
except BadSignature:
return "Unknown token"
patch_internal('eventsignups', {'confirmed': True},
skip_validation=True, concurrency_check=False,
**{current_app.config['ID_FIELD']: signup_id})
# Now the user may be able to get accepted, so update the events waiting
# list
lookup = {current_app.config['ID_FIELD']: signup_id}
signup = current_app.data.find_one('eventsignups', None, **lookup)
<|code_end|>
, generate the next line using the imports in this file:
from bson import ObjectId
from eve.methods.delete import deleteitem_internal
from eve.methods.patch import patch_internal
from flask import Blueprint, current_app, redirect
from itsdangerous import BadSignature, URLSafeSerializer
from amivapi.events.queue import update_waiting_list
from amivapi.events.utils import get_token_secret
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/events/queue.py
# def update_waiting_list(event_id):
# """Fill up missing people in an event with people from the waiting list.
# This gets triggered by different hooks, whenever the list needs to be
# updated.
#
# 1. After a new signup is created.
# 2. After a signup was deleted.
# 3. After an external signup was confirmed.
#
# Returns:
# list: ids of all singups which are newly accepted.
# """
# id_field = current_app.config['ID_FIELD']
# lookup = {id_field: event_id}
# event = current_app.data.find_one('events', None, **lookup)
#
# accepted_ids = []
#
# if event['selection_strategy'] == 'fcfs':
# lookup = {'event': event_id, 'accepted': True}
# signup_count = (
# current_app.data.driver.db['eventsignups'].count_documents(lookup))
#
# # 0 spots == infinite spots
# if event['spots'] == 0 or signup_count < event['spots']:
# lookup = {'event': event_id, 'accepted': False, 'confirmed': True}
# new_list = current_app.data.driver.db['eventsignups'].find(
# lookup).sort('_created', ASCENDING)
#
# if event['spots'] > 0:
# to_accept = new_list.limit(event['spots'] - signup_count)
# else:
# # infinite spots, so just accept everyone
# to_accept = new_list
#
# for new_accepted in to_accept:
# accepted_ids.append(new_accepted['_id'])
# # Set accepted flag
# current_app.data.update('eventsignups', new_accepted[id_field],
# {'accepted': True}, new_accepted)
#
# # Notify user
# notify_signup_accepted(event, new_accepted)
#
# return accepted_ids
#
# Path: amivapi/events/utils.py
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
. Output only the next line. | update_waiting_list(signup['event']) |
Using the snippet: <|code_start|># license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Email confirmation logic.
Needed when external users want to sign up for public events or users want to
sign off via links.
"""
email_blueprint = Blueprint('emails', __name__)
def add_confirmed_before_insert(items):
"""Add the confirmed field to a event signup before it is inserted to the
database. We accept all registered users instantly, others need to click the
confirmation link first"""
for item in items:
if item.get('user', None) is None:
item['confirmed'] = False
else:
item['confirmed'] = True
@email_blueprint.route('/confirm_email/<token>')
def on_confirm_email(token):
"""Email confirmation endpoint.
We try to confirm the specified signup and redirect to a webpage.
"""
try:
<|code_end|>
, determine the next line of code. You have imports:
from bson import ObjectId
from eve.methods.delete import deleteitem_internal
from eve.methods.patch import patch_internal
from flask import Blueprint, current_app, redirect
from itsdangerous import BadSignature, URLSafeSerializer
from amivapi.events.queue import update_waiting_list
from amivapi.events.utils import get_token_secret
and context (class names, function names, or code) available:
# Path: amivapi/events/queue.py
# def update_waiting_list(event_id):
# """Fill up missing people in an event with people from the waiting list.
# This gets triggered by different hooks, whenever the list needs to be
# updated.
#
# 1. After a new signup is created.
# 2. After a signup was deleted.
# 3. After an external signup was confirmed.
#
# Returns:
# list: ids of all singups which are newly accepted.
# """
# id_field = current_app.config['ID_FIELD']
# lookup = {id_field: event_id}
# event = current_app.data.find_one('events', None, **lookup)
#
# accepted_ids = []
#
# if event['selection_strategy'] == 'fcfs':
# lookup = {'event': event_id, 'accepted': True}
# signup_count = (
# current_app.data.driver.db['eventsignups'].count_documents(lookup))
#
# # 0 spots == infinite spots
# if event['spots'] == 0 or signup_count < event['spots']:
# lookup = {'event': event_id, 'accepted': False, 'confirmed': True}
# new_list = current_app.data.driver.db['eventsignups'].find(
# lookup).sort('_created', ASCENDING)
#
# if event['spots'] > 0:
# to_accept = new_list.limit(event['spots'] - signup_count)
# else:
# # infinite spots, so just accept everyone
# to_accept = new_list
#
# for new_accepted in to_accept:
# accepted_ids.append(new_accepted['_id'])
# # Set accepted flag
# current_app.data.update('eventsignups', new_accepted[id_field],
# {'accepted': True}, new_accepted)
#
# # Notify user
# notify_signup_accepted(event, new_accepted)
#
# return accepted_ids
#
# Path: amivapi/events/utils.py
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
. Output only the next line. | s = URLSafeSerializer(get_token_secret()) |
Next line prediction: <|code_start|>"""A command line interface for AMIVApi."""
try:
except ImportError:
bjoern = False
@group()
def cli():
"""Manage amivapi."""
config_option = option("--config",
type=Path(exists=True, dir_okay=False, readable=True),
help="use specified config file")
@cli.command()
@config_option
def recreate_mailing_lists(config):
"""(Re-)create mailing lists for all groups.
1. Delete all mailing list files.
2. Create new mailing list files.
For every group, we call the update_group function for this
"""
<|code_end|>
. Use current file imports:
(from os import listdir, remove
from os.path import join, isdir
from datetime import datetime as dt
from time import sleep
from click import argument, echo, group, option, Path, Choice, ClickException
from amivapi.bootstrap import create_app
from amivapi.cron import run_scheduled_tasks
from amivapi import ldap
from amivapi.groups.mailing_lists import updated_group
import bjoern)
and context including class names, function names, or small code snippets from other files:
# Path: amivapi/bootstrap.py
# def create_app(config_file=None, **kwargs):
# """
# Create a new eve app object and initialize everything.
#
# User configuration can be loaded in the following order:
#
# 1. Use the `config_file` arg to specify a file
# 2. If `config_file` is `None`, you set the environment variable
# `AMIVAPI_CONFIG` to the path of your config file
# 3. If no environment variable is set either, `config.py` in the current
# working directory is used
#
# Args:
# config (path): Specify config file to use.
# kwargs: All other key-value arguments will be used to update the config
# Returns:
# (Eve): The Eve application
# """
# # Load config
# config = Config(getcwd())
# config.from_object("amivapi.settings")
#
# # Specified path > environment var > default path; abspath for better log
# user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py'))
# try:
# config.from_pyfile(user_config)
# config_status = "Config loaded: %s" % user_config
# except IOError:
# config_status = "No config found."
#
# config.update(kwargs)
#
# # Initialize empty domain to create Eve object, register resources later
# config['DOMAIN'] = {}
#
# app = Eve("amivapi", # Flask needs this name to find the static folder
# settings=config,
# validator=ValidatorAMIV)
# app.logger.info(config_status)
#
# # Set up error logging with sentry
# init_sentry(app)
#
# # Create LDAP connector
# ldap.init_app(app)
#
# # Initialize modules to register resources, validation, hooks, auth, etc.
# users.init_app(app)
# auth.init_app(app)
# events.init_app(app)
# groups.init_app(app)
# blacklist.init_app(app)
# joboffers.init_app(app)
# studydocs.init_app(app)
# cascade.init_app(app)
# cron.init_app(app)
# documentation.init_app(app)
#
# # Fix that eve doesn't run hooks on embedded documents
# app.on_fetched_item += utils.run_embedded_hooks_fetched_item
# app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource
#
# return app
#
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/groups/mailing_lists.py
# def updated_group(updates, original):
# """Update group mailing lists if any address changes."""
# # Remove no longer needed forwards
# if 'receive_from' in updates:
# original_addresses = original.get('receive_from') or []
# remove_files(address for address in original_addresses
# if address not in updates['receive_from'])
# # Update remaining forwards
# if ('receive_from' in updates) or ('forward_to' in updates):
# make_files(original['_id'])
. Output only the next line. | app = create_app(config_file=config) |
Predict the next line after this snippet: <|code_start|>
2. Create new mailing list files.
For every group, we call the update_group function for this
"""
app = create_app(config_file=config)
directory = app.config.get('MAILING_LIST_DIR')
prefix = app.config['MAILING_LIST_FILE_PREFIX']
if not directory:
echo('No directory for mailing lists specified in config.')
return
# Delete existing files
if isdir(directory):
for filename in listdir(directory):
if filename.startswith(prefix):
remove(join(directory, filename))
# Create new files
with app.app_context():
groups = app.data.driver.db['groups'].find({})
for g in groups:
updated_group(g, g) # Use group as update and original
def run_cron(app):
"""Run scheduled tasks with the given app."""
echo("Executing scheduled tasks...")
with app.app_context():
<|code_end|>
using the current file's imports:
from os import listdir, remove
from os.path import join, isdir
from datetime import datetime as dt
from time import sleep
from click import argument, echo, group, option, Path, Choice, ClickException
from amivapi.bootstrap import create_app
from amivapi.cron import run_scheduled_tasks
from amivapi import ldap
from amivapi.groups.mailing_lists import updated_group
import bjoern
and any relevant context from other files:
# Path: amivapi/bootstrap.py
# def create_app(config_file=None, **kwargs):
# """
# Create a new eve app object and initialize everything.
#
# User configuration can be loaded in the following order:
#
# 1. Use the `config_file` arg to specify a file
# 2. If `config_file` is `None`, you set the environment variable
# `AMIVAPI_CONFIG` to the path of your config file
# 3. If no environment variable is set either, `config.py` in the current
# working directory is used
#
# Args:
# config (path): Specify config file to use.
# kwargs: All other key-value arguments will be used to update the config
# Returns:
# (Eve): The Eve application
# """
# # Load config
# config = Config(getcwd())
# config.from_object("amivapi.settings")
#
# # Specified path > environment var > default path; abspath for better log
# user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py'))
# try:
# config.from_pyfile(user_config)
# config_status = "Config loaded: %s" % user_config
# except IOError:
# config_status = "No config found."
#
# config.update(kwargs)
#
# # Initialize empty domain to create Eve object, register resources later
# config['DOMAIN'] = {}
#
# app = Eve("amivapi", # Flask needs this name to find the static folder
# settings=config,
# validator=ValidatorAMIV)
# app.logger.info(config_status)
#
# # Set up error logging with sentry
# init_sentry(app)
#
# # Create LDAP connector
# ldap.init_app(app)
#
# # Initialize modules to register resources, validation, hooks, auth, etc.
# users.init_app(app)
# auth.init_app(app)
# events.init_app(app)
# groups.init_app(app)
# blacklist.init_app(app)
# joboffers.init_app(app)
# studydocs.init_app(app)
# cascade.init_app(app)
# cron.init_app(app)
# documentation.init_app(app)
#
# # Fix that eve doesn't run hooks on embedded documents
# app.on_fetched_item += utils.run_embedded_hooks_fetched_item
# app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource
#
# return app
#
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/groups/mailing_lists.py
# def updated_group(updates, original):
# """Update group mailing lists if any address changes."""
# # Remove no longer needed forwards
# if 'receive_from' in updates:
# original_addresses = original.get('receive_from') or []
# remove_files(address for address in original_addresses
# if address not in updates['receive_from'])
# # Update remaining forwards
# if ('receive_from' in updates) or ('forward_to' in updates):
# make_files(original['_id'])
. Output only the next line. | run_scheduled_tasks() |
Given the following code snippet before the placeholder: <|code_start|> run_cron(app)
execution_time = dt.utcnow() - checkpoint
echo('Tasks executed, total execution time: %.3f seconds.'
% execution_time.total_seconds())
if execution_time > interval:
echo('Warning: Execution time exceeds interval length.')
sleep((interval - execution_time).total_seconds())
@cli.command()
@config_option
@option('--all', 'sync_all', is_flag=True, help="Sync all users.")
@argument('nethz', nargs=-1)
def ldap_sync(config, sync_all, nethz):
"""Synchronize users with eth ldap.
Examples:
amivapi ldap_sync --all
amivapi ldap_sync adietmue bconrad blumh
"""
app = create_app(config_file=config)
if not app.config['ldap_connector']:
echo("LDAP is not enabled, can't proceed!")
else:
with app.test_request_context():
if sync_all:
<|code_end|>
, predict the next line using imports from the current file:
from os import listdir, remove
from os.path import join, isdir
from datetime import datetime as dt
from time import sleep
from click import argument, echo, group, option, Path, Choice, ClickException
from amivapi.bootstrap import create_app
from amivapi.cron import run_scheduled_tasks
from amivapi import ldap
from amivapi.groups.mailing_lists import updated_group
import bjoern
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/bootstrap.py
# def create_app(config_file=None, **kwargs):
# """
# Create a new eve app object and initialize everything.
#
# User configuration can be loaded in the following order:
#
# 1. Use the `config_file` arg to specify a file
# 2. If `config_file` is `None`, you set the environment variable
# `AMIVAPI_CONFIG` to the path of your config file
# 3. If no environment variable is set either, `config.py` in the current
# working directory is used
#
# Args:
# config (path): Specify config file to use.
# kwargs: All other key-value arguments will be used to update the config
# Returns:
# (Eve): The Eve application
# """
# # Load config
# config = Config(getcwd())
# config.from_object("amivapi.settings")
#
# # Specified path > environment var > default path; abspath for better log
# user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py'))
# try:
# config.from_pyfile(user_config)
# config_status = "Config loaded: %s" % user_config
# except IOError:
# config_status = "No config found."
#
# config.update(kwargs)
#
# # Initialize empty domain to create Eve object, register resources later
# config['DOMAIN'] = {}
#
# app = Eve("amivapi", # Flask needs this name to find the static folder
# settings=config,
# validator=ValidatorAMIV)
# app.logger.info(config_status)
#
# # Set up error logging with sentry
# init_sentry(app)
#
# # Create LDAP connector
# ldap.init_app(app)
#
# # Initialize modules to register resources, validation, hooks, auth, etc.
# users.init_app(app)
# auth.init_app(app)
# events.init_app(app)
# groups.init_app(app)
# blacklist.init_app(app)
# joboffers.init_app(app)
# studydocs.init_app(app)
# cascade.init_app(app)
# cron.init_app(app)
# documentation.init_app(app)
#
# # Fix that eve doesn't run hooks on embedded documents
# app.on_fetched_item += utils.run_embedded_hooks_fetched_item
# app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource
#
# return app
#
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/groups/mailing_lists.py
# def updated_group(updates, original):
# """Update group mailing lists if any address changes."""
# # Remove no longer needed forwards
# if 'receive_from' in updates:
# original_addresses = original.get('receive_from') or []
# remove_files(address for address in original_addresses
# if address not in updates['receive_from'])
# # Update remaining forwards
# if ('receive_from' in updates) or ('forward_to' in updates):
# make_files(original['_id'])
. Output only the next line. | res = ldap.sync_all() |
Given snippet: <|code_start|>
@cli.command()
@config_option
def recreate_mailing_lists(config):
"""(Re-)create mailing lists for all groups.
1. Delete all mailing list files.
2. Create new mailing list files.
For every group, we call the update_group function for this
"""
app = create_app(config_file=config)
directory = app.config.get('MAILING_LIST_DIR')
prefix = app.config['MAILING_LIST_FILE_PREFIX']
if not directory:
echo('No directory for mailing lists specified in config.')
return
# Delete existing files
if isdir(directory):
for filename in listdir(directory):
if filename.startswith(prefix):
remove(join(directory, filename))
# Create new files
with app.app_context():
groups = app.data.driver.db['groups'].find({})
for g in groups:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from os import listdir, remove
from os.path import join, isdir
from datetime import datetime as dt
from time import sleep
from click import argument, echo, group, option, Path, Choice, ClickException
from amivapi.bootstrap import create_app
from amivapi.cron import run_scheduled_tasks
from amivapi import ldap
from amivapi.groups.mailing_lists import updated_group
import bjoern
and context:
# Path: amivapi/bootstrap.py
# def create_app(config_file=None, **kwargs):
# """
# Create a new eve app object and initialize everything.
#
# User configuration can be loaded in the following order:
#
# 1. Use the `config_file` arg to specify a file
# 2. If `config_file` is `None`, you set the environment variable
# `AMIVAPI_CONFIG` to the path of your config file
# 3. If no environment variable is set either, `config.py` in the current
# working directory is used
#
# Args:
# config (path): Specify config file to use.
# kwargs: All other key-value arguments will be used to update the config
# Returns:
# (Eve): The Eve application
# """
# # Load config
# config = Config(getcwd())
# config.from_object("amivapi.settings")
#
# # Specified path > environment var > default path; abspath for better log
# user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py'))
# try:
# config.from_pyfile(user_config)
# config_status = "Config loaded: %s" % user_config
# except IOError:
# config_status = "No config found."
#
# config.update(kwargs)
#
# # Initialize empty domain to create Eve object, register resources later
# config['DOMAIN'] = {}
#
# app = Eve("amivapi", # Flask needs this name to find the static folder
# settings=config,
# validator=ValidatorAMIV)
# app.logger.info(config_status)
#
# # Set up error logging with sentry
# init_sentry(app)
#
# # Create LDAP connector
# ldap.init_app(app)
#
# # Initialize modules to register resources, validation, hooks, auth, etc.
# users.init_app(app)
# auth.init_app(app)
# events.init_app(app)
# groups.init_app(app)
# blacklist.init_app(app)
# joboffers.init_app(app)
# studydocs.init_app(app)
# cascade.init_app(app)
# cron.init_app(app)
# documentation.init_app(app)
#
# # Fix that eve doesn't run hooks on embedded documents
# app.on_fetched_item += utils.run_embedded_hooks_fetched_item
# app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource
#
# return app
#
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/groups/mailing_lists.py
# def updated_group(updates, original):
# """Update group mailing lists if any address changes."""
# # Remove no longer needed forwards
# if 'receive_from' in updates:
# original_addresses = original.get('receive_from') or []
# remove_files(address for address in original_addresses
# if address not in updates['receive_from'])
# # Update remaining forwards
# if ('receive_from' in updates) or ('forward_to' in updates):
# make_files(original['_id'])
which might include code, classes, or functions. Output only the next line. | updated_group(g, g) # Use group as update and original |
Given the code snippet: <|code_start|> SECRET_KEY: {'$exists': True}
})
self.assertIsNone(db_item)
def test_create_secret(self):
"""Test that init_secret creates a secret token & adds it to the db."""
super().setUp()
with self.app.app_context():
db_item = self.db['config'].find_one({
SECRET_KEY: {'$exists': True, '$nin': [None, '']}
})
self.assertIsNotNone(db_item)
def test_existing_secret(self):
"""Test that a secret from the database is not overwritten."""
# We need to run the setup to be able to use an app context
super().setUp()
old_secret = 'Trololololo'
# Set the secret in the database
with self.app.app_context():
self.db['config'].update_one(
{SECRET_KEY: {'$exists': True}},
{'$set': {SECRET_KEY: old_secret}}
)
# This should now not change the token
<|code_end|>
, generate the next line using the imports in this file:
from unittest.mock import patch
from amivapi.tests.utils import WebTestNoAuth
from amivapi.events.utils import (
create_token_secret_on_startup,
get_token_secret
)
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# Path: amivapi/events/utils.py
# def create_token_secret_on_startup(app):
# """Create a token secret in the database if it doesn't exist.
#
# The secret key is stored in the database to ensure consistency.
# The database collection holding this key is called `config`.
# """
# with app.app_context(): # Context for db connection
# config = app.data.driver.db['config']
# result = config.find_one(
# {'TOKEN_SECRET': {'$exists': True, '$nin': [None, '']}})
#
# if result is None:
# config.insert_one({'TOKEN_SECRET': token_urlsafe()})
#
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
. Output only the next line. | create_token_secret_on_startup(self.app) |
Predict the next line after this snippet: <|code_start|> self.assertIsNone(db_item)
def test_create_secret(self):
"""Test that init_secret creates a secret token & adds it to the db."""
super().setUp()
with self.app.app_context():
db_item = self.db['config'].find_one({
SECRET_KEY: {'$exists': True, '$nin': [None, '']}
})
self.assertIsNotNone(db_item)
def test_existing_secret(self):
"""Test that a secret from the database is not overwritten."""
# We need to run the setup to be able to use an app context
super().setUp()
old_secret = 'Trololololo'
# Set the secret in the database
with self.app.app_context():
self.db['config'].update_one(
{SECRET_KEY: {'$exists': True}},
{'$set': {SECRET_KEY: old_secret}}
)
# This should now not change the token
create_token_secret_on_startup(self.app)
with self.app.app_context():
<|code_end|>
using the current file's imports:
from unittest.mock import patch
from amivapi.tests.utils import WebTestNoAuth
from amivapi.events.utils import (
create_token_secret_on_startup,
get_token_secret
)
and any relevant context from other files:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# Path: amivapi/events/utils.py
# def create_token_secret_on_startup(app):
# """Create a token secret in the database if it doesn't exist.
#
# The secret key is stored in the database to ensure consistency.
# The database collection holding this key is called `config`.
# """
# with app.app_context(): # Context for db connection
# config = app.data.driver.db['config']
# result = config.find_one(
# {'TOKEN_SECRET': {'$exists': True, '$nin': [None, '']}})
#
# if result is None:
# config.insert_one({'TOKEN_SECRET': token_urlsafe()})
#
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
. Output only the next line. | self.assertEqual(get_token_secret(), old_secret) |
Predict the next line for this snippet: <|code_start|> return
raise BadFixtureException("Requested eventsignup creation, but no "
"unique user/event combination is "
"available anymore. Parsed object: %s"
% obj)
def preprocess_joboffers(self, schema, obj, fixture):
"""Add title to JobOffers to make them valid. """
obj.setdefault(
'title_de',
self.create_random_value(schema['title_de']))
obj.setdefault(
'description_de',
self.create_random_value(schema['description_de']))
def create_random_value(self, definition):
"""Create a random value for the given cerberus field description."""
# If there is a list of allowed values, just pick one
if 'allowed' in definition:
return random.choice(definition['allowed'])
t = definition['type']
if t == 'string':
minimum_length = 0 if definition.get('empty', True) else 1
length = random.randint(minimum_length,
definition.get('maxlength', 100))
if 'regex' in definition:
letters_and_digits = string.ascii_letters + string.digits
<|code_end|>
with the help of current file imports:
from datetime import date, datetime, timedelta
from os.path import dirname, join
from contextlib import contextmanager
from bson import ObjectId
from eve.methods.post import post_internal
from werkzeug.datastructures import FileStorage
from amivapi.settings import EMAIL_REGEX, REDIRECT_URI_REGEX
from amivapi.utils import admin_permissions
import random
import string
and context from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
, which may contain function names, class names, or code. Output only the next line. | if definition['regex'] == EMAIL_REGEX: |
Given snippet: <|code_start|> """Add title to JobOffers to make them valid. """
obj.setdefault(
'title_de',
self.create_random_value(schema['title_de']))
obj.setdefault(
'description_de',
self.create_random_value(schema['description_de']))
def create_random_value(self, definition):
"""Create a random value for the given cerberus field description."""
# If there is a list of allowed values, just pick one
if 'allowed' in definition:
return random.choice(definition['allowed'])
t = definition['type']
if t == 'string':
minimum_length = 0 if definition.get('empty', True) else 1
length = random.randint(minimum_length,
definition.get('maxlength', 100))
if 'regex' in definition:
letters_and_digits = string.ascii_letters + string.digits
if definition['regex'] == EMAIL_REGEX:
return "%s@%s.%s" % (
''.join(random.choice(letters_and_digits)
for _ in range(max(1, length - 27))),
''.join(random.choice(letters_and_digits)
for _ in range(20)),
''.join(random.choice(letters_and_digits)
for _ in range(5)))
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from datetime import date, datetime, timedelta
from os.path import dirname, join
from contextlib import contextmanager
from bson import ObjectId
from eve.methods.post import post_internal
from werkzeug.datastructures import FileStorage
from amivapi.settings import EMAIL_REGEX, REDIRECT_URI_REGEX
from amivapi.utils import admin_permissions
import random
import string
and context:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
which might include code, classes, or functions. Output only the next line. | elif definition['regex'] == REDIRECT_URI_REGEX: |
Given the code snippet: <|code_start|> }
],
'events': [
{
'title': 'mytestevent'
}
]
})
"""
added_objects = []
# Check that all resources are valid
fixture_resources = set(fixture.keys())
all_resources = set(self.app.config['DOMAIN'].keys())
if not set(fixture_resources).issubset(all_resources):
raise BadFixtureException("Unknown resources: %s"
% (fixture_resources - all_resources))
# We need to sort in the order of dependencies. It is for example
# not possible to add sessions before we have users, as we need valid
# object IDs for the relations.
for resource, obj in self.sorted_by_dependencies(fixture):
schema = self.app.config['DOMAIN'][resource]['schema']
# Note that we pass the current state of the fixture to resolve
# fields, which depend on already inserted content
self.preprocess_fixture_object(resource, schema, obj, fixture)
# Add it to the database
with self.app.test_request_context("/" + resource, method='POST'):
<|code_end|>
, generate the next line using the imports in this file:
from datetime import date, datetime, timedelta
from os.path import dirname, join
from contextlib import contextmanager
from bson import ObjectId
from eve.methods.post import post_internal
from werkzeug.datastructures import FileStorage
from amivapi.settings import EMAIL_REGEX, REDIRECT_URI_REGEX
from amivapi.utils import admin_permissions
import random
import string
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
#
# REDIRECT_URI_REGEX = '^((http://)?localhost[^#]*|https://[^#]+)$'
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
. Output only the next line. | with admin_permissions(), self.writeable_id(schema): |