Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Continue the code snippet: <|code_start|> 'nethz': 'user%i' % next(counter),
'password': 'pass',
'gender': random.choice(['male', 'female']),
'firstname': 'Pablo%i' % next(counter),
'lastname': 'AMIV%i' % next(counter),
'membership': random.choice(['none', 'regular',
'extraordinary', 'honorary']),
'email': 'pablo%i@example.com' % next(counter)
}
return post(BASE_URL + '/users', data=data,
auth=(ROOT_PW, '')).json()['_id']
def get_token(username, password):
data = {
'username': username,
'password': password
}
return post(BASE_URL + '/sessions', data=data).json()['token']
def create_event():
data = {
'title_en': 'event%i' % next(counter),
'description_en': 'party%i' % next(counter),
'catchphrase_en': 'dance%i' % next(counter),
'show_announce': random.choice([True, False]),
'show_infoscreen': random.choice([True, False]),
'show_website': random.choice([True, False]),
'spots': 0,
<|code_end|>
. Use current file imports:
from amivapi.settings import DATE_FORMAT
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from io import BytesIO
from itertools import count
from time import sleep, time
from sys import argv, stdout
import random
import requests
import statistics
import traceback
and context (classes, functions, or code) from other files:
# Path: amivapi/settings.py
# DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
. Output only the next line. | 'time_register_start': datetime(1970, 1, 1).strftime(DATE_FORMAT), |
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Group settings.
Contains models for groups and group mmeberships.
"""
class GroupAuth(AmivTokenAuth):
"""Auth for groups."""
def has_item_write_permission(self, user_id, item):
"""The group moderator is allowed to change things."""
# Return true if a moderator exists and it is equal to the current user
return item.get('moderator') and (
<|code_end|>
using the current file's imports:
from bson import ObjectId
from flask import current_app
from amivapi.utils import get_id
from amivapi.auth import AmivTokenAuth
from amivapi.settings import EMAIL_REGEX
and any relevant context from other files:
# Path: amivapi/utils.py
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
. Output only the next line. | user_id == str(get_id(item['moderator']))) |
Given snippet: <|code_start|> },
'receive_from': {
'description': 'Email addresses from which the group will '
'receive mails. Must only contain a local '
'address (everything before the @), as the '
'rest is determined by the (external) mail '
'server.',
'example': ['kulturteam', 'events', 'kultur'],
'type': 'list',
'unique_elements': True,
'unique_elements_for_resource': True,
'schema': {
'type': 'string',
'maxlength': 100,
'regex': r'[a-z0-9_\.-]+'
},
'nullable': True,
'default': None,
},
'forward_to': {
'description': 'Additional addresses to which this group will '
'forward emails.',
'example': ['external@backup.ch'],
'type': 'list',
'unique_elements': True,
'schema': {
'type': 'string',
'maxlength': 100,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from bson import ObjectId
from flask import current_app
from amivapi.utils import get_id
from amivapi.auth import AmivTokenAuth
from amivapi.settings import EMAIL_REGEX
and context:
# Path: amivapi/utils.py
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/settings.py
# EMAIL_REGEX = '^.+@.+$'
which might include code, classes, or functions. Output only the next line. | 'regex': EMAIL_REGEX |
Given snippet: <|code_start|> 'type': 'string',
'readonly': True,
}
},
}
}
# Login Hook
def process_login(items):
"""Hook to add token on POST to /sessions.
Attempts to first login via LDAP (if enabled), then login via database.
If the login is successful, the fields "username" and "password" are
removed and the fields "user" and "token" are added, which will be stored
in the db.
If the login is unsuccessful, abort(401)
Args:
items (list): List of items as passed by EVE to post hooks.
"""
for item in items:
username = item['username']
password = item['password']
# LDAP
if (app.config.get('ldap_connector') and
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import datetime
from amivapi import ldap
from amivapi.auth import AmivTokenAuth
from amivapi.cron import periodic
from amivapi.utils import admin_permissions, get_id
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.utils import debug_error_message
from flask import abort
from flask import current_app as app
from ldap3.core.exceptions import LDAPException
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and context:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/cron.py
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
#
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
which might include code, classes, or functions. Output only the next line. | ldap.authenticate_user(username, password)): |
Here is a snippet: <|code_start|>def verify_password(user, plaintext):
"""Check password of user, rehash if necessary.
It is possible that the password is None, e.g. if the user is authenticated
via LDAP. In this case default to "not verified".
Args:
user (dict): the user in question.
plaintext (string): password to check
Returns:
bool: True if password matches. False if it doesn't or if there is no
password set and/or provided.
"""
password_context = app.config['PASSWORD_CONTEXT']
if (plaintext is None) or (user['password'] is None):
return False
is_valid = password_context.verify(plaintext, user['password'])
if is_valid and password_context.needs_update(user['password']):
# update password - hook will handle hashing
update = {'password': plaintext}
with admin_permissions():
patch_internal("users", payload=update, _id=user['_id'])
return is_valid
# Regular task to clean up expired sessions
<|code_end|>
. Write the next line using the current file imports:
import datetime
from amivapi import ldap
from amivapi.auth import AmivTokenAuth
from amivapi.cron import periodic
from amivapi.utils import admin_permissions, get_id
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.utils import debug_error_message
from flask import abort
from flask import current_app as app
from ldap3.core.exceptions import LDAPException
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and context from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/cron.py
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
#
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
, which may include functions, classes, or code. Output only the next line. | @periodic(datetime.timedelta(days=1)) |
Here is a snippet: <|code_start|>
# Add token (str) and user_id (ObejctId)
item['user'] = user_id
item['token'] = token
def verify_password(user, plaintext):
"""Check password of user, rehash if necessary.
It is possible that the password is None, e.g. if the user is authenticated
via LDAP. In this case default to "not verified".
Args:
user (dict): the user in question.
plaintext (string): password to check
Returns:
bool: True if password matches. False if it doesn't or if there is no
password set and/or provided.
"""
password_context = app.config['PASSWORD_CONTEXT']
if (plaintext is None) or (user['password'] is None):
return False
is_valid = password_context.verify(plaintext, user['password'])
if is_valid and password_context.needs_update(user['password']):
# update password - hook will handle hashing
update = {'password': plaintext}
<|code_end|>
. Write the next line using the current file imports:
import datetime
from amivapi import ldap
from amivapi.auth import AmivTokenAuth
from amivapi.cron import periodic
from amivapi.utils import admin_permissions, get_id
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.utils import debug_error_message
from flask import abort
from flask import current_app as app
from ldap3.core.exceptions import LDAPException
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and context from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/cron.py
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
#
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
, which may include functions, classes, or code. Output only the next line. | with admin_permissions(): |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Sessions endpoint."""
# Change when we drop python3.5 support
try:
except ImportError:
class SessionAuth(AmivTokenAuth):
"""Simple auth for session.
No resource write check needed since POST is public.
"""
def has_item_write_permission(self, user_id, item):
"""Allow users to modify only their own sessions."""
# item['user'] is Objectid, convert to str
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import datetime
from amivapi import ldap
from amivapi.auth import AmivTokenAuth
from amivapi.cron import periodic
from amivapi.utils import admin_permissions, get_id
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.utils import debug_error_message
from flask import abort
from flask import current_app as app
from ldap3.core.exceptions import LDAPException
from secrets import token_urlsafe
from amivapi.utils import token_urlsafe
and context:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/cron.py
# def periodic(period, *args):
# """ Decorator to mark a function to be executed periodically.
# Args:
# period: timedelta object describing the time between two calls
# args: arguments to be passed every time
# """
# def wrap(func):
# @wraps(func)
# def wrapped():
# schedule_task(datetime.utcnow() + period, wrapped)
# func(*args)
#
# schedulable(wrapped)
#
# # if init_app has already run, schedule the first execution
# if current_app:
# schedule_once_soon(wrapped)
# # As this decorator is run very early, there might not be an app yet.
# # Therefore we save the functions to a list to be scheduled on app init.
# periodic_functions.append(wrapped)
#
# return wrapped
# return wrap
#
# Path: amivapi/utils.py
# @contextmanager
# def admin_permissions():
# """Switch to a context with admin rights and restore state afterwards.
#
# Use as context:
# >> with admin_rights():
# >> do_something()
# """
# old_admin = g.get('resource_admin')
# g.resource_admin = True
#
# yield
#
# if old_admin is not None: # None means it wasn't set before..
# g.resource_admin = old_admin
#
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
which might include code, classes, or functions. Output only the next line. | return user_id == str(get_id(item['user'])) |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Test that additional fields are projected for events. These are:
- events.signup_count
- eventsignups.email
- eventsignups.confirmed
- eventsignups.position
"""
<|code_end|>
, determine the next line of code. You have imports:
from datetime import timedelta
from freezegun import freeze_time
from amivapi.tests.utils import WebTestNoAuth
import re
and context (class names, function names, or code) available:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | class EventProjectionTest(WebTestNoAuth): |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Authorization for events and eventsignups resources"""
class EventAuth(AmivTokenAuth):
"""Auth for events."""
def has_item_write_permission(self, user_id, item):
"""The group moderator is allowed to change things."""
# Return true if a moderator exists and it is equal to the current user
return item.get('moderator') and (
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from bson import ObjectId
from flask import g, current_app
from datetime import datetime as dt
from amivapi.auth import AmivTokenAuth
from amivapi.utils import get_id
and context:
# Path: amivapi/auth/auth.py
# class AmivTokenAuth(BasicAuth):
# """Amiv authentication and authorization base class.
#
# Subclass and overwrite functions if you don't want default behaviour.
# """
#
# def authorized(self, allowed_roles, resource, method):
# """Authorize Request.
#
# This is the method Eve will call if the endpoint is not public.
#
# We use this by setting `g.auth_required` to inform auth hook to abort
# later if user can't be identified.
#
# Do NOT overwrite this when subclassing `AmivTokenAuth`.
# """
# g.auth_required = True
# return True
#
# def has_resource_write_permission(self, user_id):
# """Check if the user is alllowed to write to the resource.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user_id (str): The if of the user
#
# Returns:
# bool: True if user has permission to write, False otherwise.
# """
# return False
#
# def has_item_write_permission(self, user_id, item):
# """Check if the user is allowed to modify the item.
#
# Implement this function for your resource.
# Default behaviour: No user has write permission.
#
# Args:
# user (str): The id of the user that wants to access the item
# item (dict): The item the user wants to change or delete.
# Attention! If they are any ObjectIds in here, Eve will not have
# converted them yet, so be sure to cast them to str if you want
# to compare them to e.g. g.current_user
#
# Returns:
# bool: True if user has permission to change the item, False if not.
# """
# return False
#
# def create_user_lookup_filter(self, user_id):
# """Create a filter for item lookup in GET, PATCH and DELETE.
#
# Implement this function for your resource.
# Default behaviour: No lookup filter.
#
# Args:
# user_id (str): The id of the user
#
# Returns:
# dict: The filter, will be combined with other filters in the hook.
# Return empty dict if no filters should be applied.
# Return None if no lookup should be possible for the user.
# """
# return {}
#
# Path: amivapi/utils.py
# def get_id(item):
# """Get the id of a field in a relation. Depending on the embedding clause
# a field returned by a mongo query may be an ID or an object. This function
# will get the ID in both cases.
#
# Args:
# item: Either an object from the database as a dict or an object id as
# str or objectid.
#
# Returns:
# ObjectId with the user ID
# """
# try:
# return ObjectId(item)
# except TypeError:
# return ObjectId(item['_id'])
which might include code, classes, or functions. Output only the next line. | user_id == str(get_id(item['moderator']))) |
Predict the next line after this snippet: <|code_start|>
Includes item and field permissions as well as password hashing.
"""
class PasswordHashing(utils.WebTestNoAuth):
"""Tests password hashing."""
def assertVerify(self, plaintext, hashed_password):
"""Assert the hash matches the password."""
self.assertTrue(pbkdf2_sha256.verify(plaintext, hashed_password))
def test_hash_on_insert(self):
"""Test Hash insert function.
Because of how Eve handles hooks, they all modify the input arguments.
Need app context to find config.
"""
with self.app.app_context():
# First test hash on insert
items = [
{'password': "some_pw"},
{'password': "other_pw"}
]
# Hash passwords in list items
<|code_end|>
using the current file's imports:
import json
from bson import ObjectId
from passlib.hash import pbkdf2_sha256
from amivapi.tests import utils
from amivapi.users.security import (
hash_on_insert, hash_on_update)
and any relevant context from other files:
# Path: amivapi/tests/utils.py
# class TestClient(FlaskClient):
# class TestResponse(Response):
# class WebTest(unittest.TestCase, FixtureMixin):
# class WebTestNoAuth(WebTest):
# def open(self, *args, **kwargs):
# def json(self):
# def setUp(self, **extra_config):
# def tearDown(self):
# def get_user_token(self, user_id, created=None):
# def get_root_token(self):
# def setUp(self, **extra_config):
# def authenticate_root(resource):
# def skip_if_false(condition, reason):
# def _skip(func):
#
# Path: amivapi/users/security.py
# def hash_on_insert(items):
# """Hook for user insert.
#
# Hash the password if it is not None.
# (When logging in via LDAP the password should not be stored and therefore
# it can be none.)
#
# Args:
# items (list): List of new items as passed by the on_insert event.
# """
# for user in items:
# _hash_password(user)
#
# def hash_on_update(updates, original):
# """Hook for user update or replace.
#
# Hash the password if it is not None.
# (When logging in via LDAP the password should not be stored and therefore
# it can be none.)
#
# Args:
# updates (dict): dict of changed user data
# original (dict): dict of user data before the update
# """
# _hash_password(updates)
. Output only the next line. | hash_on_insert(items) |
Based on the snippet: <|code_start|> self.assertTrue(pbkdf2_sha256.verify(plaintext, hashed_password))
def test_hash_on_insert(self):
"""Test Hash insert function.
Because of how Eve handles hooks, they all modify the input arguments.
Need app context to find config.
"""
with self.app.app_context():
# First test hash on insert
items = [
{'password': "some_pw"},
{'password': "other_pw"}
]
# Hash passwords in list items
hash_on_insert(items)
# Check hashed
self.assertVerify("some_pw", items[0]['password'])
self.assertVerify("other_pw", items[1]['password'])
def test_hash_on_update(self):
"""Test hash on update. Works like test for hash on insert."""
with self.app.app_context():
data = {'password': "some_pw"}
# Second param is original data, but the hash function ignores
# it so we can just set it to None
<|code_end|>
, predict the immediate next line with the help of imports:
import json
from bson import ObjectId
from passlib.hash import pbkdf2_sha256
from amivapi.tests import utils
from amivapi.users.security import (
hash_on_insert, hash_on_update)
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/tests/utils.py
# class TestClient(FlaskClient):
# class TestResponse(Response):
# class WebTest(unittest.TestCase, FixtureMixin):
# class WebTestNoAuth(WebTest):
# def open(self, *args, **kwargs):
# def json(self):
# def setUp(self, **extra_config):
# def tearDown(self):
# def get_user_token(self, user_id, created=None):
# def get_root_token(self):
# def setUp(self, **extra_config):
# def authenticate_root(resource):
# def skip_if_false(condition, reason):
# def _skip(func):
#
# Path: amivapi/users/security.py
# def hash_on_insert(items):
# """Hook for user insert.
#
# Hash the password if it is not None.
# (When logging in via LDAP the password should not be stored and therefore
# it can be none.)
#
# Args:
# items (list): List of new items as passed by the on_insert event.
# """
# for user in items:
# _hash_password(user)
#
# def hash_on_update(updates, original):
# """Hook for user update or replace.
#
# Hash the password if it is not None.
# (When logging in via LDAP the password should not be stored and therefore
# it can be none.)
#
# Args:
# updates (dict): dict of changed user data
# original (dict): dict of user data before the update
# """
# _hash_password(updates)
. Output only the next line. | hash_on_update(data, None) |
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Test that expired group memberships are deleted."""
class GroupMembershipExpiry(WebTestNoAuth):
"""Test that members are removed from groups, when the membership has
expired."""
def test_expired_groupmembership_gets_removed(self):
user = self.new_object('users')
group = self.new_object('groups')
with self.app.app_context(), freeze_time(
"2016-01-01 00:00:00") as frozen_time:
self.new_object('groupmemberships',
user=str(user['_id']),
group=str(group['_id']),
expiry='2016-01-03T00:00:00Z')
frozen_time.tick(delta=timedelta(days=1))
<|code_end|>
using the current file's imports:
from datetime import timedelta
from freezegun import freeze_time
from amivapi.cron import run_scheduled_tasks
from amivapi.tests.utils import WebTestNoAuth
and any relevant context from other files:
# Path: amivapi/cron.py
# def run_scheduled_tasks():
# """ Check for scheduled task, which have passed the deadline and run them.
# This needs an app context.
# """
# while True:
# task = (current_app.data.driver.db['scheduled_tasks']
# .find_one_and_delete(
# {'time': {'$lte': datetime.utcnow()}}))
#
# if task is None:
# return
#
# args = pickle.loads(task['args'])
# func = schedulable_functions[task['function']]
# func(*args)
#
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | run_scheduled_tasks() |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Test Media handling."""
lenaname = "lena.png"
lenapath = join(dirname(__file__), "fixtures", lenaname)
with open(lenapath, 'rb') as f:
lenadata = f.read()
test_resource = {
'resource_methods': ['POST', 'GET'],
'item_methods': ['GET', 'DELETE'],
'schema': {
'test_file': {
'type': 'media'
}
}
}
<|code_end|>
, predict the immediate next line with the help of imports:
from io import BytesIO
from os.path import dirname, join
from werkzeug.datastructures import FileStorage
from amivapi.tests.utils import WebTestNoAuth
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/tests/utils.py
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
. Output only the next line. | class MediaTest(WebTestNoAuth): |
Based on the snippet: <|code_start|>
There is another file, "ldap_integration.py", which can be used to test
integration with the real ldap. More info there.
"""
class LdapTest(WebTestNoAuth):
"""Tests for LDAP with a mock connection."""
def setUp(self, *args, **kwargs):
"""Extended setUp, enable LDAP and replace it with a Mock object."""
super(LdapTest, self).setUp(*args, **kwargs)
self.app.config['ENABLE_LDAP'] = True
self.mock_ldap = self.app.config['ldap_connector'] = MagicMock()
def test_init_app(self):
"""Test that init app uses correct credentials stored connector."""
ldap_user = 'test'
ldap_pass = 'T3ST'
initialized_ldap = 'I totally am an ldap instance.'
self.app.config['LDAP_USERNAME'] = ldap_user
self.app.config['LDAP_PASSWORD'] = ldap_pass
to_patch = 'amivapi.ldap.AuthenticatedLdap'
with patch(to_patch, return_value=initialized_ldap) as init:
<|code_end|>
, predict the immediate next line with the help of imports:
from unittest.mock import MagicMock, patch, call
from os import getenv
from pprint import pformat
from amivapi import ldap
from amivapi.tests.utils import WebTest, WebTestNoAuth, skip_if_false
import warnings
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# def skip_if_false(condition, reason):
# """Decorator to mark tests to be skipped if condition is false."""
# def _skip(func):
# return func if condition else pytest.mark.skip(reason=reason)(func)
# return _skip
. Output only the next line. | ldap.init_app(self.app) |
Given the code snippet: <|code_start|> self.app.config['LDAP_DEPARTMENT_MAP'] = {'a': 'itet'}
expected_query = '(& (ou=VSETH Mitglied) (| (departmentNumber=*a*)) )'
search_results = (i for i in [1, 2])
search = 'amivapi.ldap._search'
create = 'amivapi.ldap._create_or_update_user'
with patch(search, return_value=search_results) as mock_search:
with patch(create, return_value=3) as mock_create:
with self.app.app_context():
result = ldap.sync_all()
mock_search.assert_called_with(expected_query)
mock_create.assert_has_calls([call(1), call(2)])
self.assertEqual(result, [3, 3])
# Integration Tests
# Get data from environment
LDAP_USERNAME = getenv('LDAP_TEST_USERNAME')
LDAP_PASSWORD = getenv('LDAP_TEST_PASSWORD')
LDAP_USER_NETHZ = getenv('LDAP_TEST_USER_NETHZ')
LDAP_USER_PASSWORD = getenv('LDAP_TEST_USER_PASSWORD')
requires_credentials = skip_if_false(LDAP_USERNAME and LDAP_PASSWORD,
"LDAP test requires environment "
"variables 'LDAP_TEST_USERNAME' and "
"'LDAP_TEST_PASSWORD")
<|code_end|>
, generate the next line using the imports in this file:
from unittest.mock import MagicMock, patch, call
from os import getenv
from pprint import pformat
from amivapi import ldap
from amivapi.tests.utils import WebTest, WebTestNoAuth, skip_if_false
import warnings
and context (functions, classes, or occasionally code) from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# def skip_if_false(condition, reason):
# """Decorator to mark tests to be skipped if condition is false."""
# def _skip(func):
# return func if condition else pytest.mark.skip(reason=reason)(func)
# return _skip
. Output only the next line. | class LdapIntegrationTest(WebTest): |
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""LDAP Tests.
Mock the actual ldap responses, since we can only access ldap in the ETH
network which is not usually possible for testing, e.g. on travis.
There is another file, "ldap_integration.py", which can be used to test
integration with the real ldap. More info there.
"""
<|code_end|>
, predict the next line using imports from the current file:
from unittest.mock import MagicMock, patch, call
from os import getenv
from pprint import pformat
from amivapi import ldap
from amivapi.tests.utils import WebTest, WebTestNoAuth, skip_if_false
import warnings
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# def skip_if_false(condition, reason):
# """Decorator to mark tests to be skipped if condition is false."""
# def _skip(func):
# return func if condition else pytest.mark.skip(reason=reason)(func)
# return _skip
. Output only the next line. | class LdapTest(WebTestNoAuth): |
Predict the next line for this snippet: <|code_start|> self.assertEqual(result, None)
mock_create.assert_not_called()
def test_sync_all(self):
"""Test if sync_all builds the query correctly and creates users."""
# Shorten ou list
self.app.config['LDAP_DEPARTMENT_MAP'] = {'a': 'itet'}
expected_query = '(& (ou=VSETH Mitglied) (| (departmentNumber=*a*)) )'
search_results = (i for i in [1, 2])
search = 'amivapi.ldap._search'
create = 'amivapi.ldap._create_or_update_user'
with patch(search, return_value=search_results) as mock_search:
with patch(create, return_value=3) as mock_create:
with self.app.app_context():
result = ldap.sync_all()
mock_search.assert_called_with(expected_query)
mock_create.assert_has_calls([call(1), call(2)])
self.assertEqual(result, [3, 3])
# Integration Tests
# Get data from environment
LDAP_USERNAME = getenv('LDAP_TEST_USERNAME')
LDAP_PASSWORD = getenv('LDAP_TEST_PASSWORD')
LDAP_USER_NETHZ = getenv('LDAP_TEST_USER_NETHZ')
LDAP_USER_PASSWORD = getenv('LDAP_TEST_USER_PASSWORD')
<|code_end|>
with the help of current file imports:
from unittest.mock import MagicMock, patch, call
from os import getenv
from pprint import pformat
from amivapi import ldap
from amivapi.tests.utils import WebTest, WebTestNoAuth, skip_if_false
import warnings
and context from other files:
# Path: amivapi/ldap.py
# def init_app(app):
# def authenticate_user(cn, password):
# def sync_one(cn):
# def sync_all():
# def _search(query):
# def _escape(query):
# def _process_data(data):
# def _create_or_update_user(ldap_data):
#
# Path: amivapi/tests/utils.py
# class WebTest(unittest.TestCase, FixtureMixin):
# """Base test class for tests against the full WSGI stack.
#
# Inspired by eve standard testing class.
# """
#
# # Test Config overwrites
# test_config = {
# 'MONGO_DBNAME': 'test_amivapi',
# 'MONGO_USERNAME': 'test_user',
# 'MONGO_PASSWORD': 'test_pw',
# 'API_MAIL': 'api@test.ch',
# 'SMTP_SERVER': '',
# 'TESTING': True,
# 'DEBUG': True, # This makes eve's error messages more helpful
# 'LDAP_USERNAME': None, # LDAP test require special treatment
# 'LDAP_PASSWORD': None, # LDAP test require special treatment
# 'SENTRY_DSN': None,
# 'SENTRY_ENVIRONMENT': None,
# 'PASSWORD_CONTEXT': CryptContext(
# schemes=["pbkdf2_sha256"],
# pbkdf2_sha256__default_rounds=10,
# # min_rounds is used to determine if a hash needs to be upgraded
# pbkdf2_sha256__min_rounds=8,
# )
# }
#
# def setUp(self, **extra_config):
# """Set up the testing client and database connection.
#
# self.api will be a flask TestClient to make requests
# self.db will be a MongoDB database
# """
# super().setUp()
#
# # In 3.2, assertItemsEqual was replaced by assertCountEqual
# # Make assertItemsEqual work in tests for py3 as well
# if sys.version_info >= (3, 2):
# self.assertItemsEqual = self.assertCountEqual
#
# # create eve app and test client
# config = {}
# config.update(self.test_config)
# config.update(extra_config)
# self.app = bootstrap.create_app(**config)
# self.app.response_class = TestResponse
# self.app.test_client_class = TestClient
# self.app.test_mails = []
# self.api = self.app.test_client()
#
# # Create a separate mongo connection and db reference for tests
# self.connection = MongoClient(host=self.app.config['MONGO_HOST'],
# port=self.app.config['MONGO_PORT'])
# self.db = self.connection[self.app.config['MONGO_DBNAME']]
# self.db.authenticate(name=self.app.config['MONGO_USERNAME'],
# password=self.app.config['MONGO_PASSWORD'],
# source=self.app.config['MONGO_DBNAME'])
#
# def tearDown(self):
# """Tear down after testing."""
# # delete testing database
# self.connection.drop_database(self.test_config['MONGO_DBNAME'])
# # close database connection
# self.connection.close()
#
# # Shortcuts to get a token
# counter = count()
#
# def get_user_token(self, user_id, created=None):
# """Create session for a user and return a token.
#
# Args:
# user_id (str): user_id as string.
#
# Returns:
# str: Token that can be used to authenticate user.
# """
# if created is None:
# created = datetime.now(timezone.utc)
#
# token = "test_token_" + str(next(self.counter))
# self.db['sessions'].insert_one({u'user': ObjectId(user_id),
# u'token': token,
# u'_created': created})
# return token
#
# def get_root_token(self):
# """The root password is the root token.
#
# Returns:
# str: Token for the root user
# """
# return ROOT_PASSWORD
#
# class WebTestNoAuth(WebTest):
# """WebTest without authentification."""
#
# def setUp(self, **extra_config):
# """Use auth hook to always authenticate as root for every request."""
# super().setUp(**extra_config)
#
# def authenticate_root(resource):
# g.resource_admin = True
#
# self.app.after_auth += authenticate_root
#
# def skip_if_false(condition, reason):
# """Decorator to mark tests to be skipped if condition is false."""
# def _skip(func):
# return func if condition else pytest.mark.skip(reason=reason)(func)
# return _skip
, which may contain function names, class names, or code. Output only the next line. | requires_credentials = skip_if_false(LDAP_USERNAME and LDAP_PASSWORD, |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
#
# license: AGPLv3, see LICENSE for details. In addition we strongly encourage
# you to buy us beer if we meet and you like the software.
"""Email formatting.
Needed when users are notified about their event signups.
"""
def notify_signup_accepted(event, signup):
"""Send an email to a user that his signup was accepted"""
id_field = current_app.config['ID_FIELD']
if signup.get('user'):
lookup = {id_field: signup['user']}
user = current_app.data.find_one('users', None, **lookup)
name = user['firstname']
email = user['email']
else:
name = 'Guest of AMIV'
email = signup['email']
<|code_end|>
with the help of current file imports:
from flask import current_app, url_for
from itsdangerous import URLSafeSerializer
from amivapi.events.utils import get_token_secret
from amivapi.utils import mail
and context from other files:
# Path: amivapi/events/utils.py
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
#
# Path: amivapi/utils.py
# def mail(to, subject, text):
# """Send a mail to a list of recipients.
#
# The mail is sent from the address specified by `API_MAIL` in the config,
# and the subject formatted according to `API_MAIL_SUBJECT`.
#
#
# Args:
# to(list of strings): List of recipient addresses
# subject(string): Subject string
# text(string): Mail content
# """
# sender = app.config['API_MAIL']
# subject = app.config['API_MAIL_SUBJECT'].format(subject=subject)
#
# if app.config.get('TESTING', False):
# app.test_mails.append({
# 'subject': subject,
# 'from': sender,
# 'receivers': to,
# 'text': text
# })
# elif config.SMTP_SERVER and config.SMTP_PORT:
# msg = MIMEText(text)
# msg['Subject'] = subject
# msg['From'] = sender
# msg['To'] = ';'.join([to] if isinstance(to, str) else to)
#
# try:
# with smtplib.SMTP(config.SMTP_SERVER,
# port=config.SMTP_PORT,
# timeout=config.SMTP_TIMEOUT) as smtp:
# status_code, _ = smtp.starttls()
# if status_code != 220:
# app.logger.error("Failed to create secure "
# "SMTP connection!")
# return
#
# if config.SMTP_USERNAME and config.SMTP_PASSWORD:
# smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
# else:
# smtp.ehlo()
#
# try:
# smtp.sendmail(msg['From'], to, msg.as_string())
# except smtplib.SMTPRecipientsRefused:
# error = ("Failed to send mail:\n"
# "From: %s\nTo: %s\n"
# "Subject: %s\n\n%s")
# app.logger.error(error % (sender, str(to), subject, text))
# except smtplib.SMTPException as e:
# app.logger.error("SMTP error trying to send mails: %s" % e)
, which may contain function names, class names, or code. Output only the next line. | s = URLSafeSerializer(get_token_secret()) |
Based on the snippet: <|code_start|>"""Email formatting.
Needed when users are notified about their event signups.
"""
def notify_signup_accepted(event, signup):
"""Send an email to a user that his signup was accepted"""
id_field = current_app.config['ID_FIELD']
if signup.get('user'):
lookup = {id_field: signup['user']}
user = current_app.data.find_one('users', None, **lookup)
name = user['firstname']
email = user['email']
else:
name = 'Guest of AMIV'
email = signup['email']
s = URLSafeSerializer(get_token_secret())
token = s.dumps(str(signup[id_field]))
if current_app.config.get('SERVER_NAME') is None:
current_app.logger.warning("SERVER_NAME is not set. E-Mail links "
"will not work!")
deletion_link = url_for('emails.on_delete_signup', token=token,
_external=True)
<|code_end|>
, predict the immediate next line with the help of imports:
from flask import current_app, url_for
from itsdangerous import URLSafeSerializer
from amivapi.events.utils import get_token_secret
from amivapi.utils import mail
and context (classes, functions, sometimes code) from other files:
# Path: amivapi/events/utils.py
# def get_token_secret():
# db = current_app.data.driver.db['config']
# result = db.find_one({'TOKEN_SECRET': {'$exists': True}})
# return result['TOKEN_SECRET']
#
# Path: amivapi/utils.py
# def mail(to, subject, text):
# """Send a mail to a list of recipients.
#
# The mail is sent from the address specified by `API_MAIL` in the config,
# and the subject formatted according to `API_MAIL_SUBJECT`.
#
#
# Args:
# to(list of strings): List of recipient addresses
# subject(string): Subject string
# text(string): Mail content
# """
# sender = app.config['API_MAIL']
# subject = app.config['API_MAIL_SUBJECT'].format(subject=subject)
#
# if app.config.get('TESTING', False):
# app.test_mails.append({
# 'subject': subject,
# 'from': sender,
# 'receivers': to,
# 'text': text
# })
# elif config.SMTP_SERVER and config.SMTP_PORT:
# msg = MIMEText(text)
# msg['Subject'] = subject
# msg['From'] = sender
# msg['To'] = ';'.join([to] if isinstance(to, str) else to)
#
# try:
# with smtplib.SMTP(config.SMTP_SERVER,
# port=config.SMTP_PORT,
# timeout=config.SMTP_TIMEOUT) as smtp:
# status_code, _ = smtp.starttls()
# if status_code != 220:
# app.logger.error("Failed to create secure "
# "SMTP connection!")
# return
#
# if config.SMTP_USERNAME and config.SMTP_PASSWORD:
# smtp.login(config.SMTP_USERNAME, config.SMTP_PASSWORD)
# else:
# smtp.ehlo()
#
# try:
# smtp.sendmail(msg['From'], to, msg.as_string())
# except smtplib.SMTPRecipientsRefused:
# error = ("Failed to send mail:\n"
# "From: %s\nTo: %s\n"
# "Subject: %s\n\n%s")
# app.logger.error(error % (sender, str(to), subject, text))
# except smtplib.SMTPException as e:
# app.logger.error("SMTP error trying to send mails: %s" % e)
. Output only the next line. | mail([email], |
Given the following code snippet before the placeholder: <|code_start|> id_field = current_app.config['ID_FIELD']
lookup = {id_field: event_id}
event = current_app.data.find_one('events', None, **lookup)
accepted_ids = []
if event['selection_strategy'] == 'fcfs':
lookup = {'event': event_id, 'accepted': True}
signup_count = (
current_app.data.driver.db['eventsignups'].count_documents(lookup))
# 0 spots == infinite spots
if event['spots'] == 0 or signup_count < event['spots']:
lookup = {'event': event_id, 'accepted': False, 'confirmed': True}
new_list = current_app.data.driver.db['eventsignups'].find(
lookup).sort('_created', ASCENDING)
if event['spots'] > 0:
to_accept = new_list.limit(event['spots'] - signup_count)
else:
# infinite spots, so just accept everyone
to_accept = new_list
for new_accepted in to_accept:
accepted_ids.append(new_accepted['_id'])
# Set accepted flag
current_app.data.update('eventsignups', new_accepted[id_field],
{'accepted': True}, new_accepted)
# Notify user
<|code_end|>
, predict the next line using imports from the current file:
from flask import current_app, g
from pymongo import ASCENDING
from amivapi.events.emails import notify_signup_accepted
and context including class names, function names, and sometimes code from other files:
# Path: amivapi/events/emails.py
# def notify_signup_accepted(event, signup):
# """Send an email to a user that his signup was accepted"""
# id_field = current_app.config['ID_FIELD']
#
# if signup.get('user'):
# lookup = {id_field: signup['user']}
# user = current_app.data.find_one('users', None, **lookup)
# name = user['firstname']
# email = user['email']
# else:
# name = 'Guest of AMIV'
# email = signup['email']
#
# s = URLSafeSerializer(get_token_secret())
# token = s.dumps(str(signup[id_field]))
#
# if current_app.config.get('SERVER_NAME') is None:
# current_app.logger.warning("SERVER_NAME is not set. E-Mail links "
# "will not work!")
#
# deletion_link = url_for('emails.on_delete_signup', token=token,
# _external=True)
#
# mail([email],
# 'Eventsignup accepted',
# current_app.config['ACCEPT_EMAIL_TEXT'].format(
# name=name,
# title=event.get('title_en') or event.get('title_de'),
# link=deletion_link,
# deadline=event['time_register_end'].strftime('%H.%M %d.%m.%Y')))
. Output only the next line. | notify_signup_accepted(event, new_accepted) |
Using the snippet: <|code_start|> else:
std.SetProp("Standardized", "True")
except:
errors += 1
utils.log("Error standardizing", sys.exc_info()[0])
std = mol
std.SetProp("Standardized", "Error")
count = write_out([std],count,writer,args.mol_format,args.outformat)
else:
# we want a new UUID generating as we are generating new molecules
if mol.HasProp('uuid'):
parentUuid = mol.GetProp("uuid")
else:
parentUuid = None
results = []
if args.enumerate_tauts:
utils.log("Enumerating tautomers")
results = enumerateTautomers(mol)
else:
results.append(mol)
if args.enumerate_stereo:
utils.log("Enumerating steroisomers")
mols = results
results = []
for m in mols:
<|code_end|>
, determine the next line of code. You have imports:
import sys, argparse
from pipelines_utils import parameter_utils, utils
from pipelines_utils_rdkit import rdkit_utils
from rdkit import Chem
from rdkit.Chem import AllChem
from .sanify_utils import enumerateStereoIsomers,enumerateTautomers,STANDARD_MOL_METHODS
and context (class names, function names, or code) available:
# Path: src/python/pipelines/rdkit/sanify_utils.py
# def enumerateStereoIsomers(mol):
# out = []
# chiralCentres = Chem.FindMolChiralCenters(mol, includeUnassigned=True)
# #return the molecule object when no chiral centres where identified
# if chiralCentres == []:
# return [mol]
#
# #All bit permutations with number of bits equals number of chiralCentres
# elements = _spam(len(chiralCentres))
#
# for isoId,element in enumerate(elements):
# for centreId,i in enumerate(element):
# atomId = chiralCentres[centreId][0]
# if i == 0:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CW)
# elif i == 1:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CCW)
# outmol = copy(mol)
# utils.log("Enumerated ", Chem.MolToSmiles(mol, isomericSmiles=True))
# out.append(outmol)
# return out
#
# def enumerateTautomers(mol):
# """
# Get all of the Tautomers of a given molecule
# :param mol: the input molecule
# :return: a list of Tautomers
# """
# smiles = Chem.MolToSmiles(mol,isomericSmiles=True)
# tauts = enumerate_tautomers_smiles(smiles)
# ##TODO Append Parent molecule name
# return [Chem.MolFromSmiles(x) for x in tauts]
#
# STANDARD_MOL_METHODS = {"molvs": molVsStandardizer, "flatkinson": flatkinsonStandardizer}
. Output only the next line. | enumerated = enumerateStereoIsomers(m) |
Given the following code snippet before the placeholder: <|code_start|> inputCanSmiles = Chem.MolToSmiles(mol, isomericSmiles=True, canonical=True)
try:
std = getStandardMolecule(mol)
outputCanSmiles = Chem.MolToSmiles(std, isomericSmiles=True, canonical=True)
if oldUUID:
std.SetProp("uuid", oldUUID)
#utils.log("Standardized", i, inputCanSmiles, ">>", outputCanSmiles)
if inputCanSmiles == outputCanSmiles:
std.SetProp("Standardized", "False")
else:
std.SetProp("Standardized", "True")
except:
errors += 1
utils.log("Error standardizing", sys.exc_info()[0])
std = mol
std.SetProp("Standardized", "Error")
count = write_out([std],count,writer,args.mol_format,args.outformat)
else:
# we want a new UUID generating as we are generating new molecules
if mol.HasProp('uuid'):
parentUuid = mol.GetProp("uuid")
else:
parentUuid = None
results = []
if args.enumerate_tauts:
utils.log("Enumerating tautomers")
<|code_end|>
, predict the next line using imports from the current file:
import sys, argparse
from pipelines_utils import parameter_utils, utils
from pipelines_utils_rdkit import rdkit_utils
from rdkit import Chem
from rdkit.Chem import AllChem
from .sanify_utils import enumerateStereoIsomers,enumerateTautomers,STANDARD_MOL_METHODS
and context including class names, function names, and sometimes code from other files:
# Path: src/python/pipelines/rdkit/sanify_utils.py
# def enumerateStereoIsomers(mol):
# out = []
# chiralCentres = Chem.FindMolChiralCenters(mol, includeUnassigned=True)
# #return the molecule object when no chiral centres where identified
# if chiralCentres == []:
# return [mol]
#
# #All bit permutations with number of bits equals number of chiralCentres
# elements = _spam(len(chiralCentres))
#
# for isoId,element in enumerate(elements):
# for centreId,i in enumerate(element):
# atomId = chiralCentres[centreId][0]
# if i == 0:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CW)
# elif i == 1:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CCW)
# outmol = copy(mol)
# utils.log("Enumerated ", Chem.MolToSmiles(mol, isomericSmiles=True))
# out.append(outmol)
# return out
#
# def enumerateTautomers(mol):
# """
# Get all of the Tautomers of a given molecule
# :param mol: the input molecule
# :return: a list of Tautomers
# """
# smiles = Chem.MolToSmiles(mol,isomericSmiles=True)
# tauts = enumerate_tautomers_smiles(smiles)
# ##TODO Append Parent molecule name
# return [Chem.MolFromSmiles(x) for x in tauts]
#
# STANDARD_MOL_METHODS = {"molvs": molVsStandardizer, "flatkinson": flatkinsonStandardizer}
. Output only the next line. | results = enumerateTautomers(mol) |
Given snippet: <|code_start|>
def write_out(mols,count,writer,mol_format,file_format):
for mol in mols:
count += 1
if mol is None: continue
if mol_format == 'mol_3d':
AllChem.EmbedMolecule(mol,AllChem.ETKDG())
fmt = 'mol'
elif mol_format == 'mol_2d':
AllChem.Compute2DCoords(mol)
fmt = 'mol'
else:
fmt = 'smiles'
if file_format == 'sdf':
writer.write(mol)
elif file_format == 'json':
writer.write(mol, format=fmt)
return count
def main():
### command line args defintions #########################################
parser = argparse.ArgumentParser(description='RDKit molecule standardizer / enumerator')
parameter_utils.add_default_io_args(parser)
parser.add_argument('-et', '--enumerate_tauts', action='store_true', help='Enumerate all tautomers')
parser.add_argument('-es', '--enumerate_stereo', action='store_true', help='Enumerate all stereoisomers')
parser.add_argument('-st', '--standardize', action='store_true', help='Standardize molecules. Cannot be true if enumerate is on.')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import sys, argparse
from pipelines_utils import parameter_utils, utils
from pipelines_utils_rdkit import rdkit_utils
from rdkit import Chem
from rdkit.Chem import AllChem
from .sanify_utils import enumerateStereoIsomers,enumerateTautomers,STANDARD_MOL_METHODS
and context:
# Path: src/python/pipelines/rdkit/sanify_utils.py
# def enumerateStereoIsomers(mol):
# out = []
# chiralCentres = Chem.FindMolChiralCenters(mol, includeUnassigned=True)
# #return the molecule object when no chiral centres where identified
# if chiralCentres == []:
# return [mol]
#
# #All bit permutations with number of bits equals number of chiralCentres
# elements = _spam(len(chiralCentres))
#
# for isoId,element in enumerate(elements):
# for centreId,i in enumerate(element):
# atomId = chiralCentres[centreId][0]
# if i == 0:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CW)
# elif i == 1:
# mol.GetAtomWithIdx(atomId).SetChiralTag(Chem.rdchem.ChiralType.CHI_TETRAHEDRAL_CCW)
# outmol = copy(mol)
# utils.log("Enumerated ", Chem.MolToSmiles(mol, isomericSmiles=True))
# out.append(outmol)
# return out
#
# def enumerateTautomers(mol):
# """
# Get all of the Tautomers of a given molecule
# :param mol: the input molecule
# :return: a list of Tautomers
# """
# smiles = Chem.MolToSmiles(mol,isomericSmiles=True)
# tauts = enumerate_tautomers_smiles(smiles)
# ##TODO Append Parent molecule name
# return [Chem.MolFromSmiles(x) for x in tauts]
#
# STANDARD_MOL_METHODS = {"molvs": molVsStandardizer, "flatkinson": flatkinsonStandardizer}
which might include code, classes, or functions. Output only the next line. | parser.add_argument('-stm','--standardize_method', default="molvs",choices=STANDARD_MOL_METHODS.keys(),help="Choose the method to standardize.") |
Using the snippet: <|code_start|>
def enumerateMol(mol, fragment):
"""
Enumerate a single molecule
:param mol:
:param fragment The fragmentation method, 'hac' or 'mw'. If not specified the whole molecules is passed to Dimorphite
:return:
"""
if fragment:
mol = mol_utils.fragment(mol, fragment)
inputmol = []
inputmol.append(mol)
<|code_end|>
, determine the next line of code. You have imports:
import argparse, sys
from pipelines_utils import parameter_utils, utils
from pipelines_utils_rdkit import rdkit_utils, mol_utils
from .dimorphite_dl import run_with_mol_list
and context (class names, function names, or code) available:
# Path: src/python/pipelines/dimorphite/dimorphite_dl.py
# def run_with_mol_list(mol_lst, **kwargs):
# """A helpful, importable function for those who want to call Dimorphite-DL
# from another Python script rather than the command line. Note that this
# function is for passing Dimorphite-DL a list of RDKit Mol objects, together
# with command-line parameters. If you want to use only the same parameters
# that you would use from the command line, import run() instead.
#
# :param mol_lst: A list of rdkit.Chem.rdchem.Mol objects.
# :type mol_lst: list
# :raises Exception: If the **kwargs includes "smiles", "smiles_file",
# "output_file", or "test" parameters.
# :return: A list of properly protonated rdkit.Chem.rdchem.Mol objects.
# :rtype: list
# """
#
# # Do a quick check to make sure the user input makes sense.
# for bad_arg in ["smiles", "smiles_file", "output_file", "test"]:
# if bad_arg in kwargs:
# msg = "You're using Dimorphite-DL's run_with_mol_list(mol_lst, " + \
# "**kwargs) function, but you also passed the \"" + \
# bad_arg + "\" argument. Did you mean to use the " + \
# "run(**kwargs) function instead?"
# print(msg)
# raise Exception(msg)
#
# # Set the return_as_list flag so main() will return the protonated smiles
# # as a list.
# kwargs["return_as_list"] = True
#
# # Having reviewed the code, it will be very difficult to rewrite it so
# # that a list of Mol objects can be used directly. Intead, convert this
# # list of mols to smiles and pass that. Not efficient, but it will work.
# protonated_smiles_and_props = []
# for m in mol_lst:
# props = m.GetPropsAsDict()
# kwargs["smiles"] = Chem.MolToSmiles(m, isomericSmiles=True)
# protonated_smiles_and_props.extend(
# [(s.split("\t")[0], props) for s in main(kwargs)]
# )
#
# # Now convert the list of protonated smiles strings back to RDKit Mol
# # objects. Also, add back in the properties from the original mol objects.
# mols = []
# for s, props in protonated_smiles_and_props:
# m = Chem.MolFromSmiles(s)
# if m:
# for prop, val in props.items():
# if type(val) is int:
# m.SetIntProp(prop, val)
# elif type(val) is float:
# m.SetDoubleProp(prop, val)
# elif type(val) is bool:
# m.SetBoolProp(prop, val)
# else:
# m.SetProp(prop, str(val))
# mols.append(m)
# else:
# UtilFuncs.eprint("WARNING: Could not process molecule with SMILES string " + s + " and properties " + str(props))
#
# return mols
. Output only the next line. | protonated_mols = run_with_mol_list(inputmol) |
Given the code snippet: <|code_start|> # If subtag=HmacKeyTag:
# props = keyLengthBytes:uint32_t, hashId:uint32_t
# If subtag=RsaHashedKeyTag:
# props = algorithmId:uint32_t, type:uint32_t,
# modulusLengthBits:uint32_t,
# publicExponentLength:uint32_t,
# publicExponent:byte[publicExponentLength],
# hashId:uint32_t
# If subtag=EcKeyTag:
# props = algorithmId:uint32_t, type:uint32_t,
# namedCurve:uint32_t
tag_kRTCCertificateTag = b"k" # length:uint32_t, pemPrivateKey:WebCoreString,
# pemCertificate:WebCoreString
tag_kRTCEncodedAudioFrameTag = b"A" # uint32_t -> transferred audio frame ID
tag_kRTCEncodedVideoFrameTag = b"V" # uint32_t -> transferred video frame ID
tag_kVideoFrameTag = b"v" # uint32_t -> transferred video frame ID
# The following tags were used by the Shape Detection API implementation
# between M71 and M81. During these milestones, the API was always behind
# a flag. Usage was removed in https:#crrev.com/c/2040378.
tag_kDeprecatedDetectedBarcodeTag = b"B"
tag_kDeprecatedDetectedFaceTag = b"F"
tag_kDeprecatedDetectedTextTag = b"t"
tag_kDOMExceptionTag = b"x" # name:String,message:String,stack:String
tag_kVersionTag = b"\xff" # version:uint32_t -> Uses this as the file version.
class BlinkV8Deserializer:
def _read_varint(self, stream) -> int:
<|code_end|>
, generate the next line using the imports in this file:
import sys
import enum
import typing
from dataclasses import dataclass
from pyhindsight.lib.ccl_chrome_indexeddb import ccl_v8_value_deserializer
and context (functions, classes, or occasionally code) from other files:
# Path: pyhindsight/lib/ccl_chrome_indexeddb/ccl_v8_value_deserializer.py
# __DEBUG = False
# STRUCT_LOOKUP = types.MappingProxyType({
# tag_kInt8Array: "b",
# tag_kUint8Array: "B",
# tag_kUint8ClampedArray: "B",
# tag_kInt16Array: "h",
# tag_kUint16Array: "H",
# tag_kInt32Array: "i",
# tag_kUint32Array: "I",
# tag_kFloat32Array: "f",
# tag_kFloat64Array: "d",
# tag_kBigInt64Array: "q",
# tag_kBigUint64Array: "Q",
# tag_kDataView: "c"
# })
# __ODDBALLS = {
# Constants.token_kUndefined: Undefined,
# Constants.token_kTheHole: Undefined,
# Constants.token_kNull: None,
# Constants.token_kTrue: True,
# Constants.token_kFalse: False,
# }
# __WRAPPED_PRIMITIVES = {
# Constants.token_kTrueObject,
# Constants.token_kFalseObject,
# Constants.token_kNumberObject,
# Constants.token_kBigIntObject,
# Constants.token_kStringObject
# }
# def log(msg, debug_only=True):
# def read_le_varint(stream: typing.BinaryIO) -> typing.Optional[typing.Tuple[int, bytes]]:
# def __bool__(self):
# def __eq__(self, other):
# def __repr__(self):
# def __str__(self):
# def __init__(self, stream: typing.BinaryIO, host_object_delegate: typing.Callable,
# *, is_little_endian=True, is_64bit=True):
# def _read_raw(self, length: int) -> bytes:
# def _read_le_varint(self) -> typing.Optional[typing.Tuple[int, bytes]]:
# def _read_zigzag(self) -> int:
# def _read_double(self) -> float:
# def _read_bigint(self) -> int:
# def _read_utf8_string(self) -> str:
# def _read_one_byte_string(self) -> typing.AnyStr:
# def _read_two_byte_string(self) -> str:
# def _read_string(self) -> str:
# def _read_object_by_reference(self) -> typing.Any:
# def _read_tag(self) -> bytes:
# def _peek_tag(self) -> bytes:
# def _read_date(self) -> datetime.datetime:
# def _read_js_regex(self) -> typing.Pattern:
# def _read_js_object_properties(self, end_tag) -> typing.Iterable[typing.Tuple[typing.Any, typing.Any]]:
# def _read_js_object(self) -> dict:
# def _read_js_sparse_array(self) -> list:
# def _read_js_dense_array(self) -> list:
# def _read_js_map(self) -> dict:
# def _read_js_set(self) -> set:
# def _read_js_arraybuffer(self) -> bytes:
# def _wrap_js_array_buffer_view(self, raw: bytes) -> tuple:
# def _read_host_object(self) -> typing.Any:
# def _not_implemented(self):
# def _read_object_internal(self) -> typing.Tuple[bytes, typing.Any]:
# def _read_object(self) -> typing.Any:
# def _read_header(self) -> int:
# def read(self) -> typing.Any:
# class _Undefined:
# class Constants:
# class ArrayBufferViewTag:
# class Deserializer:
. Output only the next line. | return ccl_v8_value_deserializer.read_le_varint(stream)[0] |
Given the following code snippet before the placeholder: <|code_start|> self.artifacts_display = {}
if self.preferences is None:
self.preferences = []
if self.origin_hashes is None:
self.origin_hashes = {}
@staticmethod
def format_processing_output(name, items):
width = 80
left_side = width*0.55
count = '{:>6}'.format(str(items))
pretty_name = "{name:>{left_width}}:{count:^{right_width}}" \
.format(name=name, left_width=int(left_side), count=' '.join(['[', count, ']']),
right_width=(width - int(left_side)-2))
return pretty_name
@staticmethod
def format_profile_path(profile_path):
if len(profile_path) > 68:
profile_path = "...{}".format(profile_path[-65:])
return "\n Profile: {}".format(profile_path)
def build_structure(self, path, database):
if database not in list(self.structure.keys()):
self.structure[database] = {}
# Copy and connect to copy of SQLite DB
<|code_end|>
, predict the next line using imports from the current file:
import hashlib
import logging
import sqlite3
import sys
import urllib.parse
from pyhindsight import utils
and context including class names, function names, and sometimes code from other files:
# Path: pyhindsight/utils.py
# def dict_factory(cursor, row):
# def text_factory(row_data):
# def open_sqlite_db(chrome, database_path, database_name):
# def format_plugin_output(name, version, items):
# def format_meta_output(name, content):
# def default(self, obj):
# def to_datetime(timestamp, timezone=None):
# def friendly_date(timestamp):
# def get_ldb_records(ldb_path, prefix=''):
# def read_varint(source):
# def read_string(input_bytes, ptr):
# def read_int32(input_bytes, ptr):
# def read_int64(input_bytes, ptr):
# class MyEncoder(json.JSONEncoder):
. Output only the next line. | conn = utils.open_sqlite_db(self, path, database) |
Given the code snippet: <|code_start|>
self._f = file.open("rb")
self._f.seek(-LdbFile.FOOTER_SIZE, os.SEEK_END)
self._meta_index_handle = BlockHandle.from_stream(self._f)
self._index_handle = BlockHandle.from_stream(self._f)
self._f.seek(-8, os.SEEK_END)
magic, = struct.unpack("<Q", self._f.read(8))
if magic != LdbFile.MAGIC:
raise ValueError(f"Invalid magic number in {file}")
self._index = self._read_index()
def _read_block(self, handle: BlockHandle):
# block is the size in the blockhandle plus the trailer
# the trailer is 5 bytes long.
# idx size meaning
# 0 1 CompressionType (0 = none, 1 = snappy)
# 1 4 CRC32
self._f.seek(handle.offset)
raw_block = self._f.read(handle.length)
trailer = self._f.read(LdbFile.BLOCK_TRAILER_SIZE)
if len(raw_block) != handle.length or len(trailer) != LdbFile.BLOCK_TRAILER_SIZE:
raise ValueError(f"Could not read all of the block at offset {handle.offset} in file {self.path}")
is_compressed = trailer[0] != 0
if is_compressed:
with io.BytesIO(raw_block) as buff:
<|code_end|>
, generate the next line using the imports in this file:
import typing
import struct
import re
import os
import io
import pathlib
import dataclasses
import enum
from collections import namedtuple
from types import MappingProxyType
from pyhindsight.lib.ccl_chrome_indexeddb import ccl_simplesnappy
and context (functions, classes, or occasionally code) from other files:
# Path: pyhindsight/lib/ccl_chrome_indexeddb/ccl_simplesnappy.py
# DEBUG = False
# def log(msg):
# def _read_le_varint(stream: typing.BinaryIO) -> typing.Optional[typing.Tuple[int, bytes]]:
# def read_le_varint(stream: typing.BinaryIO) -> typing.Optional[int]:
# def read_uint16(stream: typing.BinaryIO) -> int:
# def read_uint24(stream: typing.BinaryIO) -> int:
# def read_uint32(stream: typing.BinaryIO) -> int:
# def read_byte(stream: typing.BinaryIO) -> typing.Optional[int]:
# def decompress(data: typing.BinaryIO) -> bytes:
# def main(path):
# class ElementType(enum.IntEnum):
. Output only the next line. | raw_block = ccl_simplesnappy.decompress(buff) |
Predict the next line after this snippet: <|code_start|> cat = SourceCatalog3FGL(filename='input_data/3fgl.fits.gz')
sources = get_selected_sources(cat, sources)
for idx in sources:
source = cat[idx]
data = source._data_python_dict
data['source_id'] = data['catalog_row_index']
del data['catalog_row_index']
out_dir = DATA_DIR / 'cat/3fgl/sources/{:04d}'.format((data['source_id']))
out_dir.mkdir(parents=True, exist_ok=True)
filename = out_dir / 'data.json'
click.secho('Writing: {}'.format(filename), fg='green')
dump_to_json(data, filename)
def make_snrcat_catalog_data():
click.secho('Making SNRcat catalog data...', fg='green')
out_dir = DATA_DIR / 'cat/snrcat'
out_dir.mkdir(parents=True, exist_ok=True)
url = 'https://github.com/gammapy/gammapy-extra/blob/master/datasets/catalogs/snrcat.fits.gz?raw=true'
cols = ['Source_Name', 'RAJ2000', 'DEJ2000', 'GLON', 'GLAT',
'id_alt', 'size_radio_mean']
table = Table.read(url)
table = table[cols]
table['snrcat_id'] = [
_.replace('+', 'p').replace('-', 'm')
for _ in table['Source_Name']
]
<|code_end|>
using the current file's imports:
from pathlib import Path
from astropy.table import Table
from gammapy.catalog import SourceCatalog3FHL, SourceCatalogGammaCat, SourceCatalog3FGL
from .utils import table_to_list_of_dict, dump_to_json, get_selected_sources
from .config import DATA_DIR
import click
import numpy as np
import json
and any relevant context from other files:
# Path: gammasky/utils.py
# def table_to_list_of_dict(table):
# """Convert table to list of dict."""
# rows = []
# for row in table:
# data = OrderedDict()
# for name in table.colnames:
# val = row[name]
# # TODO: The below is not working, find a fix
# # if val in {np.nan}:
# # val = None
# if isinstance(val, np.int64):
# val = int(val)
# elif isinstance(val, np.int32):
# val = int(val)
# elif isinstance(val, np.bool_):
# val = bool(val)
# elif isinstance(val, np.float):
# val = float(val)
# elif isinstance(val, np.float32):
# val = float(val)
# elif isinstance(val, np.str):
# val = str(val)
# elif isinstance(val, np.ndarray):
# vals = [float(_) for _ in val]
# val = list(vals)
# else:
# raise ValueError('Unknown type: {} {}'.format(val, type(val)))
# data[name] = val
#
# rows.append(data)
#
# return rows
#
# def dump_to_json(data, path):
# # Replace all instances of NaN and Inf values (which are not valid JSON) with null
# text = json.dumps(data)
# text = text.replace('NaN', 'null').replace('-Infinity', 'null').replace('Infinity', 'null')
# data = json.loads(text)
#
# with path.open('w') as fh:
# json.dump(data, fh)
#
# def get_selected_sources(cat, sources):
# if sources == 'all':
# return list(range(len(cat.table)))
# else:
# return [int(_) for _ in sources.split(',')]
#
# Path: gammasky/config.py
# DATA_DIR = Path('src/data/')
. Output only the next line. | list_of_dict = table_to_list_of_dict(table.filled()) |
Given snippet: <|code_start|> 'make_tev_catalog_data',
'make_tev_source_data',
'make_3fhl_catalog_data',
'make_3fhl_source_data',
'make_3fgl_catalog_data',
'make_3fgl_source_data',
'make_snrcat_catalog_data'
]
TO_JSON_KWARGS = dict(orient='split', double_precision=5)
def make_3fhl_catalog_data():
click.secho('Making 3FHL catalog data...', fg='green')
out_dir = DATA_DIR / 'cat/3fhl'
out_dir.mkdir(parents=True, exist_ok=True)
cat = SourceCatalog3FHL(filename='input_data/3fhl.fits.gz')
# Indexing table - this is all the data we need for MapView.
cols = ['Source_Name', 'RAJ2000', 'DEJ2000',
'GLON', 'GLAT', 'ASSOC1', 'CLASS']
cat.table = cat.table[cols]
data = cat._data_python_list
for i in range(len(data)):
data[i]['source_id'] = data[i]['catalog_row_index']
del data[i]['catalog_row_index']
filename = out_dir / 'cat.json'
click.secho('Writing 3fhl {}'.format(filename), fg='green')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from pathlib import Path
from astropy.table import Table
from gammapy.catalog import SourceCatalog3FHL, SourceCatalogGammaCat, SourceCatalog3FGL
from .utils import table_to_list_of_dict, dump_to_json, get_selected_sources
from .config import DATA_DIR
import click
import numpy as np
import json
and context:
# Path: gammasky/utils.py
# def table_to_list_of_dict(table):
# """Convert table to list of dict."""
# rows = []
# for row in table:
# data = OrderedDict()
# for name in table.colnames:
# val = row[name]
# # TODO: The below is not working, find a fix
# # if val in {np.nan}:
# # val = None
# if isinstance(val, np.int64):
# val = int(val)
# elif isinstance(val, np.int32):
# val = int(val)
# elif isinstance(val, np.bool_):
# val = bool(val)
# elif isinstance(val, np.float):
# val = float(val)
# elif isinstance(val, np.float32):
# val = float(val)
# elif isinstance(val, np.str):
# val = str(val)
# elif isinstance(val, np.ndarray):
# vals = [float(_) for _ in val]
# val = list(vals)
# else:
# raise ValueError('Unknown type: {} {}'.format(val, type(val)))
# data[name] = val
#
# rows.append(data)
#
# return rows
#
# def dump_to_json(data, path):
# # Replace all instances of NaN and Inf values (which are not valid JSON) with null
# text = json.dumps(data)
# text = text.replace('NaN', 'null').replace('-Infinity', 'null').replace('Infinity', 'null')
# data = json.loads(text)
#
# with path.open('w') as fh:
# json.dump(data, fh)
#
# def get_selected_sources(cat, sources):
# if sources == 'all':
# return list(range(len(cat.table)))
# else:
# return [int(_) for _ in sources.split(',')]
#
# Path: gammasky/config.py
# DATA_DIR = Path('src/data/')
which might include code, classes, or functions. Output only the next line. | dump_to_json(data, filename) |
Using the snippet: <|code_start|>]
TO_JSON_KWARGS = dict(orient='split', double_precision=5)
def make_3fhl_catalog_data():
click.secho('Making 3FHL catalog data...', fg='green')
out_dir = DATA_DIR / 'cat/3fhl'
out_dir.mkdir(parents=True, exist_ok=True)
cat = SourceCatalog3FHL(filename='input_data/3fhl.fits.gz')
# Indexing table - this is all the data we need for MapView.
cols = ['Source_Name', 'RAJ2000', 'DEJ2000',
'GLON', 'GLAT', 'ASSOC1', 'CLASS']
cat.table = cat.table[cols]
data = cat._data_python_list
for i in range(len(data)):
data[i]['source_id'] = data[i]['catalog_row_index']
del data[i]['catalog_row_index']
filename = out_dir / 'cat.json'
click.secho('Writing 3fhl {}'.format(filename), fg='green')
dump_to_json(data, filename)
def make_3fhl_source_data(sources):
click.secho('Making source data from 3FHL...', fg='green')
cat = SourceCatalog3FHL(filename='input_data/3fhl.fits.gz')
<|code_end|>
, determine the next line of code. You have imports:
from pathlib import Path
from astropy.table import Table
from gammapy.catalog import SourceCatalog3FHL, SourceCatalogGammaCat, SourceCatalog3FGL
from .utils import table_to_list_of_dict, dump_to_json, get_selected_sources
from .config import DATA_DIR
import click
import numpy as np
import json
and context (class names, function names, or code) available:
# Path: gammasky/utils.py
# def table_to_list_of_dict(table):
# """Convert table to list of dict."""
# rows = []
# for row in table:
# data = OrderedDict()
# for name in table.colnames:
# val = row[name]
# # TODO: The below is not working, find a fix
# # if val in {np.nan}:
# # val = None
# if isinstance(val, np.int64):
# val = int(val)
# elif isinstance(val, np.int32):
# val = int(val)
# elif isinstance(val, np.bool_):
# val = bool(val)
# elif isinstance(val, np.float):
# val = float(val)
# elif isinstance(val, np.float32):
# val = float(val)
# elif isinstance(val, np.str):
# val = str(val)
# elif isinstance(val, np.ndarray):
# vals = [float(_) for _ in val]
# val = list(vals)
# else:
# raise ValueError('Unknown type: {} {}'.format(val, type(val)))
# data[name] = val
#
# rows.append(data)
#
# return rows
#
# def dump_to_json(data, path):
# # Replace all instances of NaN and Inf values (which are not valid JSON) with null
# text = json.dumps(data)
# text = text.replace('NaN', 'null').replace('-Infinity', 'null').replace('Infinity', 'null')
# data = json.loads(text)
#
# with path.open('w') as fh:
# json.dump(data, fh)
#
# def get_selected_sources(cat, sources):
# if sources == 'all':
# return list(range(len(cat.table)))
# else:
# return [int(_) for _ in sources.split(',')]
#
# Path: gammasky/config.py
# DATA_DIR = Path('src/data/')
. Output only the next line. | sources = get_selected_sources(cat, sources) |
Based on the snippet: <|code_start|>"""
Prepare catalog data for the website.
"""
__all__ = [
'make_tev_catalog_data',
'make_tev_source_data',
'make_3fhl_catalog_data',
'make_3fhl_source_data',
'make_3fgl_catalog_data',
'make_3fgl_source_data',
'make_snrcat_catalog_data'
]
TO_JSON_KWARGS = dict(orient='split', double_precision=5)
def make_3fhl_catalog_data():
click.secho('Making 3FHL catalog data...', fg='green')
<|code_end|>
, predict the immediate next line with the help of imports:
from pathlib import Path
from astropy.table import Table
from gammapy.catalog import SourceCatalog3FHL, SourceCatalogGammaCat, SourceCatalog3FGL
from .utils import table_to_list_of_dict, dump_to_json, get_selected_sources
from .config import DATA_DIR
import click
import numpy as np
import json
and context (classes, functions, sometimes code) from other files:
# Path: gammasky/utils.py
# def table_to_list_of_dict(table):
# """Convert table to list of dict."""
# rows = []
# for row in table:
# data = OrderedDict()
# for name in table.colnames:
# val = row[name]
# # TODO: The below is not working, find a fix
# # if val in {np.nan}:
# # val = None
# if isinstance(val, np.int64):
# val = int(val)
# elif isinstance(val, np.int32):
# val = int(val)
# elif isinstance(val, np.bool_):
# val = bool(val)
# elif isinstance(val, np.float):
# val = float(val)
# elif isinstance(val, np.float32):
# val = float(val)
# elif isinstance(val, np.str):
# val = str(val)
# elif isinstance(val, np.ndarray):
# vals = [float(_) for _ in val]
# val = list(vals)
# else:
# raise ValueError('Unknown type: {} {}'.format(val, type(val)))
# data[name] = val
#
# rows.append(data)
#
# return rows
#
# def dump_to_json(data, path):
# # Replace all instances of NaN and Inf values (which are not valid JSON) with null
# text = json.dumps(data)
# text = text.replace('NaN', 'null').replace('-Infinity', 'null').replace('Infinity', 'null')
# data = json.loads(text)
#
# with path.open('w') as fh:
# json.dump(data, fh)
#
# def get_selected_sources(cat, sources):
# if sources == 'all':
# return list(range(len(cat.table)))
# else:
# return [int(_) for _ in sources.split(',')]
#
# Path: gammasky/config.py
# DATA_DIR = Path('src/data/')
. Output only the next line. | out_dir = DATA_DIR / 'cat/3fhl' |
Predict the next line for this snippet: <|code_start|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
handlers = []
handlers.extend(account.handlers)
handlers.extend(topic.handlers)
<|code_end|>
with the help of the current file's imports:
from handlers import account, topic, node, member, tool
and context from other files:
# Path: handlers/account.py
# class SignupHandler(BaseHandler):
# class SigninHandler(BaseHandler):
# class SignoutHandler(BaseHandler):
# class SettingsHandler(BaseHandler):
# class ChangeAvatarHandler(BaseHandler):
# class RemoveAvatarHandler(BaseHandler):
# class ChangePasswordHandler(BaseHandler):
# class NotificationsHandler(BaseHandler):
# class NotificationsClearHandler(BaseHandler):
# class NotificationsRemoveHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def post(self):
# def get(self):
# def get(self):
# def post(self):
# def post(self):
# def post(self):
# def post(self):
# def get(self):
# def get(self):
# def get(self, id):
#
# Path: handlers/topic.py
# class HomeHandler(BaseHandler):
# class TopicListHandler(BaseHandler):
# class TopicHandler(BaseHandler):
# class CreateHandler(BaseHandler):
# class ReplyHandler(BaseHandler):
# class LikeHandler(BaseHandler):
# class DisikeHandler(BaseHandler):
# class AppendHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# class MoveHandler(BaseHandler):
# def get(self):
# def get(self):
# def get(self, topic_id):
# def get(self):
# def post(self):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
#
# Path: handlers/node.py
# class NodeListHandler(BaseHandler):
# class NodeHandler(BaseHandler):
# class AddHandler(BaseHandler):
# class EditHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# def get(self):
# def get(self, node_name):
# def get(self):
# def post(self):
# def get(self, node_name):
# def post(self, node_name):
# def get(self, node_name):
# def post(self, node_name):
#
# Path: handlers/member.py
# class MemberListHandler(BaseHandler):
# class MemberPageHandler(BaseHandler):
# class FavoriteHandler(BaseHandler):
# class MemberTopicsHandler(BaseHandler):
# class ChangeRoleHandler(BaseHandler):
# def get(self):
# def get(self, name):
# def get(self, name):
# def get(self, name):
# def post(self, name):
#
# Path: handlers/tool.py
# class UploadHandler(BaseHandler):
# class NoteListHandler(BaseHandler):
# class NoteHandler(BaseHandler):
# class NewNoteHandler(BaseHandler):
# class EditNoteHandler(BaseHandler):
# class DelNoteHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def get(self, note_id):
# def get(self):
# def post(self):
# def get(self, note_id):
# def post(self, note_id):
# def get(self, note_id):
, which may contain function names, class names, or code. Output only the next line. | handlers.extend(node.handlers) |
Given snippet: <|code_start|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
handlers = []
handlers.extend(account.handlers)
handlers.extend(topic.handlers)
handlers.extend(node.handlers)
<|code_end|>
, continue by predicting the next line. Consider the current file's imports:
from handlers import account, topic, node, member, tool
and context:
# Path: handlers/account.py
# class SignupHandler(BaseHandler):
# class SigninHandler(BaseHandler):
# class SignoutHandler(BaseHandler):
# class SettingsHandler(BaseHandler):
# class ChangeAvatarHandler(BaseHandler):
# class RemoveAvatarHandler(BaseHandler):
# class ChangePasswordHandler(BaseHandler):
# class NotificationsHandler(BaseHandler):
# class NotificationsClearHandler(BaseHandler):
# class NotificationsRemoveHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def post(self):
# def get(self):
# def get(self):
# def post(self):
# def post(self):
# def post(self):
# def post(self):
# def get(self):
# def get(self):
# def get(self, id):
#
# Path: handlers/topic.py
# class HomeHandler(BaseHandler):
# class TopicListHandler(BaseHandler):
# class TopicHandler(BaseHandler):
# class CreateHandler(BaseHandler):
# class ReplyHandler(BaseHandler):
# class LikeHandler(BaseHandler):
# class DisikeHandler(BaseHandler):
# class AppendHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# class MoveHandler(BaseHandler):
# def get(self):
# def get(self):
# def get(self, topic_id):
# def get(self):
# def post(self):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
#
# Path: handlers/node.py
# class NodeListHandler(BaseHandler):
# class NodeHandler(BaseHandler):
# class AddHandler(BaseHandler):
# class EditHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# def get(self):
# def get(self, node_name):
# def get(self):
# def post(self):
# def get(self, node_name):
# def post(self, node_name):
# def get(self, node_name):
# def post(self, node_name):
#
# Path: handlers/member.py
# class MemberListHandler(BaseHandler):
# class MemberPageHandler(BaseHandler):
# class FavoriteHandler(BaseHandler):
# class MemberTopicsHandler(BaseHandler):
# class ChangeRoleHandler(BaseHandler):
# def get(self):
# def get(self, name):
# def get(self, name):
# def get(self, name):
# def post(self, name):
#
# Path: handlers/tool.py
# class UploadHandler(BaseHandler):
# class NoteListHandler(BaseHandler):
# class NoteHandler(BaseHandler):
# class NewNoteHandler(BaseHandler):
# class EditNoteHandler(BaseHandler):
# class DelNoteHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def get(self, note_id):
# def get(self):
# def post(self):
# def get(self, note_id):
# def post(self, note_id):
# def get(self, note_id):
which might include code, classes, or functions. Output only the next line. | handlers.extend(member.handlers) |
Given snippet: <|code_start|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
handlers = []
handlers.extend(account.handlers)
handlers.extend(topic.handlers)
handlers.extend(node.handlers)
handlers.extend(member.handlers)
<|code_end|>
, continue by predicting the next line. Consider the current file's imports:
from handlers import account, topic, node, member, tool
and context:
# Path: handlers/account.py
# class SignupHandler(BaseHandler):
# class SigninHandler(BaseHandler):
# class SignoutHandler(BaseHandler):
# class SettingsHandler(BaseHandler):
# class ChangeAvatarHandler(BaseHandler):
# class RemoveAvatarHandler(BaseHandler):
# class ChangePasswordHandler(BaseHandler):
# class NotificationsHandler(BaseHandler):
# class NotificationsClearHandler(BaseHandler):
# class NotificationsRemoveHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def post(self):
# def get(self):
# def get(self):
# def post(self):
# def post(self):
# def post(self):
# def post(self):
# def get(self):
# def get(self):
# def get(self, id):
#
# Path: handlers/topic.py
# class HomeHandler(BaseHandler):
# class TopicListHandler(BaseHandler):
# class TopicHandler(BaseHandler):
# class CreateHandler(BaseHandler):
# class ReplyHandler(BaseHandler):
# class LikeHandler(BaseHandler):
# class DisikeHandler(BaseHandler):
# class AppendHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# class MoveHandler(BaseHandler):
# def get(self):
# def get(self):
# def get(self, topic_id):
# def get(self):
# def post(self):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
# def get(self, topic_id):
# def get(self, topic_id):
# def post(self, topic_id):
#
# Path: handlers/node.py
# class NodeListHandler(BaseHandler):
# class NodeHandler(BaseHandler):
# class AddHandler(BaseHandler):
# class EditHandler(BaseHandler):
# class RemoveHandler(BaseHandler):
# def get(self):
# def get(self, node_name):
# def get(self):
# def post(self):
# def get(self, node_name):
# def post(self, node_name):
# def get(self, node_name):
# def post(self, node_name):
#
# Path: handlers/member.py
# class MemberListHandler(BaseHandler):
# class MemberPageHandler(BaseHandler):
# class FavoriteHandler(BaseHandler):
# class MemberTopicsHandler(BaseHandler):
# class ChangeRoleHandler(BaseHandler):
# def get(self):
# def get(self, name):
# def get(self, name):
# def get(self, name):
# def post(self, name):
#
# Path: handlers/tool.py
# class UploadHandler(BaseHandler):
# class NoteListHandler(BaseHandler):
# class NoteHandler(BaseHandler):
# class NewNoteHandler(BaseHandler):
# class EditNoteHandler(BaseHandler):
# class DelNoteHandler(BaseHandler):
# def get(self):
# def post(self):
# def get(self):
# def get(self, note_id):
# def get(self):
# def post(self):
# def get(self, note_id):
# def post(self, note_id):
# def get(self, note_id):
which might include code, classes, or functions. Output only the next line. | handlers.extend(tool.handlers) |
Predict the next line after this snippet: <|code_start|>major = sys.version_info[0]
if major < 3:
reload(sys)
sys.setdefaultencoding('utf-8')
define('port', default=8888, help='run on the given port', type=int)
class Application(tornado.web.Application):
def __init__(self):
#Use Qiniu to store avatars
qiniu.conf.ACCESS_KEY = qiniu_access_key
qiniu.conf.SECRET_KEY = qiniu_secret_key
settings = dict(
static_path = os.path.join(os.path.dirname(__file__), "static"),
template_path = os.path.join(os.path.dirname(__file__), "templates"),
autoescape = None,
site_name = site_name,
site_url = site_url,
google_analytics = google_analytics.lstrip(),
role = {1: 'Member',2: 'Admin',3: 'Lord'},
cookie_secret = cookie_secret,
xsrf_cookies = True,
login_url = "/account/signin",
debug = Debug,
)
tornado.web.Application.__init__(self, urls.handlers, **settings)
<|code_end|>
using the current file's imports:
import sys
import os.path
import tornado.web
import tornado.ioloop
import tornado.httpserver
import urls
import qiniu.conf
from init_db import db, async_db
from settings import *
from tornado.options import define, options
and any relevant context from other files:
# Path: init_db.py
. Output only the next line. | self.db = db |
Given the following code snippet before the placeholder: <|code_start|>if major < 3:
reload(sys)
sys.setdefaultencoding('utf-8')
define('port', default=8888, help='run on the given port', type=int)
class Application(tornado.web.Application):
def __init__(self):
#Use Qiniu to store avatars
qiniu.conf.ACCESS_KEY = qiniu_access_key
qiniu.conf.SECRET_KEY = qiniu_secret_key
settings = dict(
static_path = os.path.join(os.path.dirname(__file__), "static"),
template_path = os.path.join(os.path.dirname(__file__), "templates"),
autoescape = None,
site_name = site_name,
site_url = site_url,
google_analytics = google_analytics.lstrip(),
role = {1: 'Member',2: 'Admin',3: 'Lord'},
cookie_secret = cookie_secret,
xsrf_cookies = True,
login_url = "/account/signin",
debug = Debug,
)
tornado.web.Application.__init__(self, urls.handlers, **settings)
self.db = db
<|code_end|>
, predict the next line using imports from the current file:
import sys
import os.path
import tornado.web
import tornado.ioloop
import tornado.httpserver
import urls
import qiniu.conf
from init_db import db, async_db
from settings import *
from tornado.options import define, options
and context including class names, function names, and sometimes code from other files:
# Path: init_db.py
. Output only the next line. | self.async_db = async_db |
Given the code snippet: <|code_start|>"""
Provides a easy way for accessing all needed database functions
"""
__author__ = 'Jesse'
class server_stats(object):
@staticmethod
def failures_in_x_minutes_ago(last_x_minutes_of_failures):
""" Returns list of failures from variable"""
<|code_end|>
, generate the next line using the imports in this file:
import logging
from datetime import datetime
from Database import db_controller
and context (functions, classes, or occasionally code) from other files:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
. Output only the next line. | conn, cur = db_controller.db_access().open_connection() |
Here is a snippet: <|code_start|>
__author__ = 'Jesse Laptop'
class email_actions():
def __init__(self):
pass
@staticmethod
def send_alert(server_info_object):
logging.debug(server_info_object)
subj = server_info_object.sl_service_type + " @ " + server_info_object.sl_host + ' is DOWN'
# noinspection PyListCreation
msg = [] # Email Message Body
msg.append('Report: ')
msg.append('\nHost: ' + server_info_object.sl_host)
msg.append('\nPort: ' + str(server_info_object.sl_port))
msg.append('\nService: ' + server_info_object.sl_service_type)
msg.append('\nNote: ' + str(server_info_object.sl_note))
msg.append('\n\nReport Generated @ ' + str(datetime.now()))
logging.debug('Subject:' + subj)
logging.debug(''.join(msg))
email_controller.send_gmail().send(subject=subj, text=''.join(msg))
<|code_end|>
. Write the next line using the current file's imports:
from datetime import datetime
from Database import db_helpers
from Alerters import email_controller
import logging
and context from other files:
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
, which may include functions, classes, or code. Output only the next line. | db_helpers.email_log.log_email_sent(''.join(msg)) |
Predict the next line after this snippet: <|code_start|>
__author__ = 'Jesse Laptop'
class email_actions():
def __init__(self):
pass
@staticmethod
def send_alert(server_info_object):
logging.debug(server_info_object)
subj = server_info_object.sl_service_type + " @ " + server_info_object.sl_host + ' is DOWN'
# noinspection PyListCreation
msg = [] # Email Message Body
msg.append('Report: ')
msg.append('\nHost: ' + server_info_object.sl_host)
msg.append('\nPort: ' + str(server_info_object.sl_port))
msg.append('\nService: ' + server_info_object.sl_service_type)
msg.append('\nNote: ' + str(server_info_object.sl_note))
msg.append('\n\nReport Generated @ ' + str(datetime.now()))
logging.debug('Subject:' + subj)
logging.debug(''.join(msg))
<|code_end|>
using the current file's imports:
from datetime import datetime
from Database import db_helpers
from Alerters import email_controller
import logging
and any relevant context from other files:
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
. Output only the next line. | email_controller.send_gmail().send(subject=subj, text=''.join(msg)) |
Given snippet: <|code_start|> action="store_true",
help="Debug Mode Logging")
args = parser.parse_args()
if args.debug:
logging.basicConfig(format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.DEBUG)
logging.debug(sys.path)
logging.debug(args)
logging.debug('Debug Mode Enabled')
else:
logging.basicConfig(filename=LOG_FILENAME,
format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.WARNING)
mode = modes(sleep_delay=args.delay, alert_timeout=args.alert_timeout, host_timeout=args.host_timeout)
# Create new mode object for flow, I'll buy that :)
if len(sys.argv) == 1: # Displays help and lists servers (to help first time users)
parser.print_help()
sys.exit(1)
# Arg Logic here
if args.list:
db_monitor_list.get_print_server_list()
if args.config_monitors:
db_monitor_list.config_monitor_list()
if args.config_db:
<|code_end|>
, continue by predicting the next line. Consider the current file's imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and context:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
which might include code, classes, or functions. Output only the next line. | db_controller.db_helper().configure() |
Based on the snippet: <|code_start|> generate_report()
print('*NOTE: If report is empty, that just means nothing has failed since we sent an email, '
'run -m to "fix" it*')
if args.monitor:
db_controller.db_helper().test_db_setup()
email_controller.send_gmail().test_login()
mode.multi_server()
class modes(object): # Uses new style classes
def __init__(self, sleep_delay, alert_timeout, host_timeout):
self.sleep_delay = sleep_delay
self.alert_timeout = alert_timeout
self.host_timeout = host_timeout
self.server_list = []
def sleep(self):
try:
sleep(self.sleep_delay)
except KeyboardInterrupt:
print("Bye Bye.")
sys.exit(0)
def multi_server(self):
print("Multi Server mode")
print("Press Ctrl-C to quit")
while True:
<|code_end|>
, predict the immediate next line with the help of imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and context (classes, functions, sometimes code) from other files:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
. Output only the next line. | self.server_list = db_helpers.monitor_list.get_server_list() |
Given snippet: <|code_start|> > db_helpers.monitor_list.get_time_from_last_failure():
# Are we spamming alerts?
# Check if any servers have gone down in the the last X minutes
# If any have gone down, send report
if email_controller.send_gmail().test_login():
logging.info('SENDING REPORT')
email_alerts.email_actions.generate_report()
else:
logging.info('E-mail timeout still cooling off')
self.sleep()
class server_logger(modes):
""" self.variable same as monitor_list columns"""
def __init__(self, monitor_row, sleep_delay, alert_timeout, host_timeout):
super(server_logger, self).__init__(sleep_delay, alert_timeout, host_timeout)
self.sl_host = monitor_row[1]
self.sl_port = monitor_row[2]
self.sl_service_type = monitor_row[3]
self.sl_note = monitor_row[4]
def check_server_status(self):
""" Picks either TCP, Ping host, or check web, depending on args """
up_down_flag = False
if self.sl_service_type == 'url':
logging.debug("Checking URL: " + str(self.sl_host))
<|code_end|>
, continue by predicting the next line. Consider the current file's imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and context:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
which might include code, classes, or functions. Output only the next line. | up_down_flag = network.MonitorHTTP(url=self.sl_host, timeout=self.host_timeout).run_test() |
Predict the next line for this snippet: <|code_start|> action="store",
type=int,
default=10,
help="Wait x seconds for failure (10)")
parser.add_argument("--debug",
action="store_true",
help="Debug Mode Logging")
args = parser.parse_args()
if args.debug:
logging.basicConfig(format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.DEBUG)
logging.debug(sys.path)
logging.debug(args)
logging.debug('Debug Mode Enabled')
else:
logging.basicConfig(filename=LOG_FILENAME,
format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.WARNING)
mode = modes(sleep_delay=args.delay, alert_timeout=args.alert_timeout, host_timeout=args.host_timeout)
# Create new mode object for flow, I'll buy that :)
if len(sys.argv) == 1: # Displays help and lists servers (to help first time users)
parser.print_help()
sys.exit(1)
# Arg Logic here
if args.list:
<|code_end|>
with the help of the current file's imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and context from other files:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
, which may contain function names, class names, or code. Output only the next line. | db_monitor_list.get_print_server_list() |
Predict the next line after this snippet: <|code_start|> level=logging.DEBUG)
logging.debug(sys.path)
logging.debug(args)
logging.debug('Debug Mode Enabled')
else:
logging.basicConfig(filename=LOG_FILENAME,
format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
level=logging.WARNING)
mode = modes(sleep_delay=args.delay, alert_timeout=args.alert_timeout, host_timeout=args.host_timeout)
# Create new mode object for flow, I'll buy that :)
if len(sys.argv) == 1: # Displays help and lists servers (to help first time users)
parser.print_help()
sys.exit(1)
# Arg Logic here
if args.list:
db_monitor_list.get_print_server_list()
if args.config_monitors:
db_monitor_list.config_monitor_list()
if args.config_db:
db_controller.db_helper().configure()
if args.rm_db_pass_store:
db_controller.db_helper().clear_password_store()
if args.config_email:
<|code_end|>
using the current file's imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and any relevant context from other files:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
. Output only the next line. | email_controller.send_gmail().configure() |
Predict the next line after this snippet: <|code_start|> except KeyboardInterrupt:
print("Bye Bye.")
sys.exit(0)
def multi_server(self):
print("Multi Server mode")
print("Press Ctrl-C to quit")
while True:
self.server_list = db_helpers.monitor_list.get_server_list()
# Gets server list on each refresh, in-case of updates
logging.debug(self.server_list)
# Send each row of monitor_list to logic gate
for i in self.server_list:
server_logger(i, sleep_delay=self.sleep_delay, alert_timeout=self.alert_timeout,
host_timeout=self.host_timeout).check_server_status()
last_email = db_helpers.email_log.email_sent_x_minutes_ago()
last_fail = db_helpers.monitor_list.get_time_from_last_failure()
logging.debug(
'Last e-mail sent: ' + str(last_email) + ' Timeout: ' + str(self.alert_timeout) +
' Last Failure: ' + str(last_fail))
if db_helpers.email_log.email_sent_x_minutes_ago() > self.alert_timeout \
> db_helpers.monitor_list.get_time_from_last_failure():
# Are we spamming alerts?
# Check if any servers have gone down in the the last X minutes
# If any have gone down, send report
if email_controller.send_gmail().test_login():
logging.info('SENDING REPORT')
<|code_end|>
using the current file's imports:
import sys
import os
import logging
import argparse
from time import sleep
from Database import db_controller
from Database import db_helpers
from Monitors import network
from Database import db_monitor_list
from Alerters import email_controller
from Alerters import email_alerts
and any relevant context from other files:
# Path: Database/db_controller.py
# class db_settings():
# class SettingsHelper(db_settings):
# class db_access(SettingsHelper, object):
# class db_helper(db_access):
# USERNAME = 'postgres'
# DB_HOST = '127.0.0.1'
# PORT = 5432
# DATABASE = 'network_monitor'
# SETTINGS_FILE_PATH = "database_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_db'
# DB_HOST = raw_input('({0})>'.format(self.DB_HOST))
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def open_connection(self):
# def close_connection(connection, cursor):
# def __init__(self):
# def __create_database(self):
# def __create_tables(self):
# def test_db_setup(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
#
# Path: Monitors/network.py
# class MonitorHTTP():
# class MonitorTCP():
# class MonitorHost():
# def __init__(self, url, timeout):
# def run_test(self):
# def describe(self):
# def __init__(self, host, port, timeout):
# def run_test(self):
# def describe(self):
# def get_params(self):
# def __init__(self, host, timeout):
# def run_test(self):
# def describe(self):
#
# Path: Database/db_monitor_list.py
# def check_for_valid_service_type():
# def get_print_server_list():
# def config_monitor_list():
#
# Path: Alerters/email_controller.py
# class gmailSettings():
# class SettingsHelper(gmailSettings):
# class send_gmail(object, SettingsHelper):
# USERNAME = ""
# SEND_ALERT_TO = [] # Must be a list
# SETTINGS_FILE_PATH = "email_settings.json"
# KEYRING_APP_ID = 'NetworkMonitor_email'
# def loadSettings(cls):
# def saveSettings(cls):
# def __init__(self):
# def test_login(self):
# def send(self, subject, text):
# def convert_to_list_to_csv(self):
# def configure(self):
# def clear_password_store(self):
#
# Path: Alerters/email_alerts.py
# class email_actions():
# def __init__(self):
# def send_alert(server_info_object):
# def parse_url_info(url_in):
# def generate_fail_table(fail_list_in, monitor_list_in):
# def generate_report():
. Output only the next line. | email_alerts.email_actions.generate_report() |
Predict the next line after this snippet: <|code_start|> except urllib2.URLError:
pass
if http_response_code == 200:
response_flag = True
else:
response_flag = False
logging.error('Cannot reach gmail.com')
logging.debug('Testing login')
if response_flag:
try:
logging.debug(str(self.USERNAME) + str(self.PASSWORD))
gmail.GMail(username=self.USERNAME, password=self.PASSWORD)
login_flag = True
except smtplib.SMTPAuthenticationError:
logging.critical('Bad gmail login info, cannot send messages, exiting')
sys.exit(1)
else:
login_flag = False
if login_flag and response_flag:
return True
else:
return False
def send(self, subject, text):
logging.info("Sending email")
gmail.GMail(username=self.USERNAME, password=self.PASSWORD).send(
gmail.Message(subject=subject, to=self.convert_to_list_to_csv(), text=text))
<|code_end|>
using the current file's imports:
import getpass
import json
import logging
import os
import smtplib # For Authentication Error
import sys
import urllib2
import gmail
import keyring
import distutils.util
from Database import db_helpers
from keyring.errors import PasswordDeleteError
and any relevant context from other files:
# Path: Database/db_helpers.py
# class server_stats(object):
# class email_log(object):
# class monitor_list(object):
# class tcp():
# class host():
# class url():
# def failures_in_x_minutes_ago(last_x_minutes_of_failures):
# def log_email_sent(to_list):
# def email_sent_x_minutes_ago():
# def get_server_list():
# def get_time_from_last_failure():
# def log_service_down(server_logger_obj):
# def remove_server_from_monitor_list(index_to_remove):
# def __init__(self):
# def create_server(ip_address, port, note=None):
# def __init__(self):
# def create_server(ip_address, note=None):
# def __init__(self):
# def create_server(web_url, note=None):
. Output only the next line. | db_helpers.email_log.log_email_sent(self.SEND_ALERT_TO) |
Given snippet: <|code_start|>
class SecurityCommand(unicode_str):
"""
A string suitable for passing as the 'command' parameter to the
OS X 'security' command.
"""
def __new__(cls, cmd, store='generic'):
cmd = '%(cmd)s-%(store)s-password' % vars()
return super(SecurityCommand, cls).__new__(cls, cmd)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import platform
import subprocess
import re
import binascii
from ..backend import KeyringBackend
from ..errors import PasswordSetError
from ..errors import PasswordDeleteError
from ..util import properties
from ..py27compat import unicode_str
and context:
# Path: keyring/keyring/backend.py
# class KeyringBackend(object):
# """The abstract base class of the keyring, every backend must implement
# this interface.
# """
#
# #@abc.abstractproperty
# def priority(cls):
# """
# Each backend class must supply a priority, a number (float or integer)
# indicating the priority of the backend relative to all other backends.
# The priority need not be static -- it may (and should) vary based
# attributes of the environment in which is runs (platform, available
# packages, etc.).
#
# A higher number indicates a higher priority. The priority should raise
# a RuntimeError with a message indicating the underlying cause if the
# backend is not suitable for the current environment.
#
# As a rule of thumb, a priority between zero but less than one is
# suitable, but a priority of one or greater is recommended.
# """
#
# @properties.ClassProperty
# @classmethod
# def viable(cls):
# with errors.ExceptionRaisedContext() as exc:
# cls.priority
# return not bool(exc)
#
# @abc.abstractmethod
# def get_password(self, service, username):
# """Get password of the username for the service
# """
# return None
#
# @abc.abstractmethod
# def set_password(self, service, username, password):
# """Set password for the username of the service
# """
# raise errors.PasswordSetError("reason")
#
# # for backward-compatibility, don't require a backend to implement
# # delete_password
# #@abc.abstractmethod
# def delete_password(self, service, username):
# """Delete the password for the username of the service.
# """
# raise errors.PasswordDeleteError("reason")
which might include code, classes, or functions. Output only the next line. | class Keyring(KeyringBackend): |
Given snippet: <|code_start|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# ===========================================================================
# NEWS CHANNEL GENERATION SCRIPT
# AUTHORS: LARSEN VALLECILLO
# ****************************************************************************
# Copyright (c) 2015-2022 RiiConnect24, and its (Lead) Developers
# ===========================================================================
with open("./Channels/News_Channel/config.json", "rb") as f:
config = json.load(f)
if config["production"]:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
which might include code, classes, or functions. Output only the next line. | setup_log(config["sentry_url"], True) |
Here is a snippet: <|code_start|> read = gmaps.geocode(name, language=languages[language_code])
loc_name = read[0]["address_components"][0]["long_name"]
for loc in read[0]["address_components"]:
if "locality" in loc["types"]:
loc_name = loc["long_name"]
loc_name = enc(loc_name)
"""Not doing anything with these."""
country = u8(0)
region = u8(0)
location = u16(0)
zoom_factor = u32_littleendian(
6
) # Nintendo used the value of 3 for states and countries but we probably don't have any articles that are just states or countries
coordinates = (
s16(int(read[0]["geometry"]["location"]["lat"] / (360 / 65536)))
+ s16(int(read[0]["geometry"]["location"]["lng"] / (360 / 65536)))
+ country
+ region
+ location
+ zoom_factor
) # latitude and longitude is divided by the value of 360 (degrees of a full circle) divided by the max int for a 16-bit int
except Exception as e:
ex = "There was a error downloading the location data - line {}: {}".format(
sys.exc_info()[-1].tb_lineno, str(e)
)
print(ex)
<|code_end|>
. Write the next line using the current file imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
, which may include functions, classes, or code. Output only the next line. | log(ex, "INFO") |
Continue the code snippet: <|code_start|>
for keys, values in list(data.items()):
location = values[7]
if location and location != "":
if location not in locations:
locations[location] = [None, None, []]
locations[location][2].append(keys)
for name in list(locations.keys()):
if name == "":
continue
print(name)
coordinates = None
if name not in cities:
try:
read = gmaps.geocode(name, language=languages[language_code])
loc_name = read[0]["address_components"][0]["long_name"]
for loc in read[0]["address_components"]:
if "locality" in loc["types"]:
loc_name = loc["long_name"]
loc_name = enc(loc_name)
"""Not doing anything with these."""
<|code_end|>
. Use current file imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context (classes, functions, or code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
. Output only the next line. | country = u8(0) |
Using the snippet: <|code_start|> location = values[7]
if location and location != "":
if location not in locations:
locations[location] = [None, None, []]
locations[location][2].append(keys)
for name in list(locations.keys()):
if name == "":
continue
print(name)
coordinates = None
if name not in cities:
try:
read = gmaps.geocode(name, language=languages[language_code])
loc_name = read[0]["address_components"][0]["long_name"]
for loc in read[0]["address_components"]:
if "locality" in loc["types"]:
loc_name = loc["long_name"]
loc_name = enc(loc_name)
"""Not doing anything with these."""
country = u8(0)
region = u8(0)
<|code_end|>
, determine the next line of code. You have imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context (class names, function names, or code) available:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
. Output only the next line. | location = u16(0) |
Given the following code snippet before the placeholder: <|code_start|> self.soup = soup
self.session = requests.Session()
if self.source != "AP" and self.source != "Reuters":
init = self.newspaper_init()
if init == []:
return None
{
"AP": self.parse_ap,
"Reuters": self.parse_reuters,
"AFP_French": self.parse_afp_french,
"dpa": self.parse_dpa_german,
"ANSA": self.parse_ansa,
"ANP": self.parse_anp,
}[self.source]()
self.get_news()
def get_news(self):
if self.headline == "" or self.headline is None:
return []
elif self.article == "" or self.article is None:
return []
try:
_ = enc(self.headline).replace(b"\n", b"").decode("utf-16be")
_ = enc(self.article).replace(b"\n", b"").decode("utf-16be")
except:
return []
return [
<|code_end|>
, predict the next line using imports from the current file:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context including class names, function names, and sometimes code from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
. Output only the next line. | u32(self.updated_time), |
Here is a snippet: <|code_start|>
if location and location != "":
if location not in locations:
locations[location] = [None, None, []]
locations[location][2].append(keys)
for name in list(locations.keys()):
if name == "":
continue
print(name)
coordinates = None
if name not in cities:
try:
read = gmaps.geocode(name, language=languages[language_code])
loc_name = read[0]["address_components"][0]["long_name"]
for loc in read[0]["address_components"]:
if "locality" in loc["types"]:
loc_name = loc["long_name"]
loc_name = enc(loc_name)
"""Not doing anything with these."""
country = u8(0)
region = u8(0)
location = u16(0)
<|code_end|>
. Write the next line using the current file imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
, which may include functions, classes, or code. Output only the next line. | zoom_factor = u32_littleendian( |
Given snippet: <|code_start|> locations[location][2].append(keys)
for name in list(locations.keys()):
if name == "":
continue
print(name)
coordinates = None
if name not in cities:
try:
read = gmaps.geocode(name, language=languages[language_code])
loc_name = read[0]["address_components"][0]["long_name"]
for loc in read[0]["address_components"]:
if "locality" in loc["types"]:
loc_name = loc["long_name"]
loc_name = enc(loc_name)
"""Not doing anything with these."""
country = u8(0)
region = u8(0)
location = u16(0)
zoom_factor = u32_littleendian(
6
) # Nintendo used the value of 3 for states and countries but we probably don't have any articles that are just states or countries
coordinates = (
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import collections
import json
import random
import sys
import textwrap
import time
import feedparser
import ftfy
import googlemaps
import newspaper
import requests
import zlib
import importlib
from html.parser import unescape
from io import BytesIO, StringIO
from datetime import datetime
from PIL import Image
from bs4 import BeautifulSoup
from unidecode import unidecode
from utils import setup_log, log, u8, u16, u32, u32_littleendian, s16
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
#
# def s16(data):
# if not -32768 <= data <= 32767:
# log("s16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">h", data)
which might include code, classes, or functions. Output only the next line. | s16(int(read[0]["geometry"]["location"]["lat"] / (360 / 65536))) |
Based on the snippet: <|code_start|> keyIndex = list(forecastlists.laundry).index(k)
laundry[keyIndex] = v.encode("utf-16be") + pad(2)
return laundry
def make_pollen_text_table():
pollen = {}
for k, v in forecastlists.pollen.items():
keyIndex = list(forecastlists.pollen).index(k)
pollen[keyIndex] = v.encode("utf-16be") + pad(2)
return pollen
def get_wind_direction(degrees):
return forecastlists.winddirection[degrees]
def dump_db():
# db = {"update_time": time.time(), "location_keys": weatherloc, "local_times": times, "laundry_indexes": laundry,
# "pollen_indexes": pollen, "globe_data": globe, "wind_speed": wind, "uvindexes": uvindex,
# "current_forecast": current, "precipitation": precipitation, "hourly_forecast": hourly,
# "tomorrow_forecast": tomorrow, "week_forecast": week, "today_forecast": today, "key_cache": cache}
db = {"location_keys": location_keys}
with open("weather.db", "wb") as f:
pickle.dump(db, f)
with open("./Channels/Forecast_Channel/config.json", "rb") as f:
config = json.load(f)
if config["production"] and config["send_logs"]:
<|code_end|>
, predict the immediate next line with the help of imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and context (classes, functions, sometimes code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
. Output only the next line. | setup_log(config["sentry_url"], False) |
Given snippet: <|code_start|>
def get_lat(forecast_list, key):
return forecast_list[key][3][:4]
def get_lng(forecast_list, key):
return forecast_list[key][3][:8][4:]
def isJapan(forecast_list, key):
return forecast_list[key][2][1] == "Japan"
def matches_country_code(forecast_list, key):
v = forecast_list[key]
if v[2][1] in forecastlists.bincountries:
return hex(int(str(forecastlists.bincountries[v[2][1]])))[2:].zfill(2) == hex(
country_code
)[2:].zfill(2)
return False
def check_coords(forecast_list, key, lat, lng):
global errors
""" Verify Location Coordinates """
if (
abs(lat - coord_decode(binascii.hexlify(globe[key]["lat"]))) >= 2
or abs(lng - coord_decode(binascii.hexlify(globe[key]["lng"]))) >= 2
):
if config["check_coordinates"]:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
which might include code, classes, or functions. Output only the next line. | log("Coordinate Inaccuracy Detected: %s" % key, "WARNING") |
Based on the snippet: <|code_start|> lon = coord_decode(get_lng(forecast_list, key))
if config["download_locations"]:
location_key = request_data(
"https://api.accuweather.com/locations/v1/cities/geoposition/search.json?q={},{}&apikey={}".format(
lat, lon, api_key
)
)["Key"]
location_keys["{},{}".format(lat, lon)] = location_key
else:
location_key = location_keys["{},{}".format(lat, lon)]
weather_data[key] = {}
weather_data[key]["current"] = request_data(
"https://api.accuweather.com/currentconditions/v1/{}?apikey={}&details=true".format(
location_key, api_key
)
)
weather_data[key]["quarters"] = request_data(
"https://api.accuweather.com/forecasts/v1/daily/5day/quarters/{}?apikey={}".format(
location_key, api_key
)
)
weather_data[key]["10day"] = request_data(
"https://api.accuweather.com/forecasts/v1/daily/10day/{}?apikey={}&details=true".format(
location_key, api_key
)
)
def make_header_short(forecast_list):
header = {}
<|code_end|>
, predict the immediate next line with the help of imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and context (classes, functions, sometimes code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
. Output only the next line. | header["country_code"] = u8(country_code) # Wii Country Code. |
Predict the next line after this snippet: <|code_start|> for key in forecast_list.keys():
keyIndex = list(forecast_list).index(key)
short_forecast_table["location_code_%s" % keyIndex] = binascii.unhexlify(
get_locationkey(forecast_list, key)
) # Wii location code for city
short_forecast_table["timestamp_1_%s" % keyIndex] = u32(
timestamps(1, key)
) # Timestamp 1
short_forecast_table["timestamp_2_%s" % keyIndex] = u32(
timestamps(0, key)
) # Timestamp 2
short_forecast_table["current_forecast_%s" % keyIndex] = binascii.unhexlify(
current[key][5]
) # Current forecast
short_forecast_table["unknown_%s" % keyIndex] = u8(0) # 0xE unknown
short_forecast_table["current_tempc_%s" % keyIndex] = s8(
current[key][4]
) # Current temperature in Celsius
short_forecast_table["current_tempf_%s" % keyIndex] = s8(
current[key][3]
) # Current temperature in Fahrenheit
short_forecast_table["current_winddirection_%s" % keyIndex] = u8(
int(get_wind_direction(current[key][0]))
) # Current wind direction
short_forecast_table["current_windkm_%s" % keyIndex] = u8(
current[key][1]
) # Current wind in km/hr
short_forecast_table["current_windmph_%s" % keyIndex] = u8(
current[key][2]
) # Current wind in mph
<|code_end|>
using the current file's imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and any relevant context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
. Output only the next line. | short_forecast_table["unknown_2_%s" % keyIndex] = u16(0) # 00? |
Predict the next line after this snippet: <|code_start|> except:
blank_data(forecast_list, key)
return
j = 0
for i in range(hourly_start, 8):
hourly[key][i] = get_icon(
int(data_quarters[quarter_offset + j]["Icon"]), forecast_list, key
)
j += 1
"""if check_coords(forecast_list,key,lat,lng):
globe[key]['lat'] = s16(int(lat / GLOBE_CONSTANT))
globe[key]['lng'] = s16(int(lng / GLOBE_CONSTANT))"""
def parse_data(forecast_list):
global weather_data
for k, v in weather_data.items():
if weather_data[k]:
get_accuweather_api(forecast_list, k)
else:
log(
"Unable to retrieve forecast data for %s - using blank data" % k, "INFO"
)
def hex_write(loc, data):
global file
file.seek(loc)
<|code_end|>
using the current file's imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and any relevant context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
. Output only the next line. | file.write(u32(data)) |
Given snippet: <|code_start|>
def make_long_forecast_table(forecast_list):
long_forecast_table = {}
for key in forecast_list.keys():
if (
matches_country_code(forecast_list, key)
and get_region(forecast_list, key) != ""
):
keyIndex = list(forecast_list).index(key)
long_forecast_table["location_code_%s" % keyIndex] = binascii.unhexlify(
get_locationkey(forecast_list, key)
) # Wii Location Code.
long_forecast_table["timestamp_1_%s" % keyIndex] = u32(
timestamps(1, key)
) # 1st timestamp.
long_forecast_table["timestamp_2_%s" % keyIndex] = u32(
timestamps(0, key)
) # 2nd timestamp.
long_forecast_table["unknown_1_%s" % keyIndex] = u32(
0
) # Unknown. (0xC-0xF)
long_forecast_table["today_forecast_%s" % keyIndex] = binascii.unhexlify(
today[key][0]
) # Today's forecast.
for p in range(0, 4):
long_forecast_table[
"today_hourly_forecast_%s_%s" % (p, keyIndex)
] = binascii.unhexlify(
hourly[key][p]
) # Tomorrow's hourly forecast.
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import calendar
import CloudFlare
import io
import json
import math
import os
import pickle
import queue
import shutil
import subprocess
import sys
import threading
import time
import nlzss
import requests
import rsa
from datetime import datetime
from Channels.Forecast_Channel import forecastlists
from datadog import statsd
from utils import setup_log, log, u8, u16, u32, s8
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def s8(data):
# if not -128 <= data <= 127:
# log("s8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">b", data)
which might include code, classes, or functions. Output only the next line. | long_forecast_table["today_tempc_high_%s" % keyIndex] = s8( |
Given snippet: <|code_start|> if file_type == "q" or file_type == "r":
question_file = get_name() + "_" + file_type
else:
question_file = "voting"
print("Writing to %s.bin ..." % question_file)
with open(question_file, "wb") as f:
for dictionary in dictionaries:
# print("Writing to %s ..." % hex(f.tell()).rstrip("L"))
for name, values in dictionary.items():
f.write(values)
f.write(pad(16))
f.write('RIICONNECT24'.encode("ASCII"))
f.flush()
if config["production"]:
sign_file(question_file)
print("Writing Completed")
for dictionary in dictionaries:
dictionary.clear()
def make_header():
header = collections.OrderedDict()
dictionaries.append(header)
header["timestamp"] = u32(get_timestamp(0, None, None))
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import calendar
import CloudFlare
import datetime
import json
import logging
import nlzss
import os
import struct
import subprocess
import sys
import textwrap
import time
import MySQLdb
import requests
import rsa
from utils import setup_log, log, u8, u16, u32
from Channels.Everybody_Votes_Channel.voteslists import *
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
which might include code, classes, or functions. Output only the next line. | header["country_code"] = u8(country_code) |
Given snippet: <|code_start|> f.write(values)
f.write(pad(16))
f.write('RIICONNECT24'.encode("ASCII"))
f.flush()
if config["production"]:
sign_file(question_file)
print("Writing Completed")
for dictionary in dictionaries:
dictionary.clear()
def make_header():
header = collections.OrderedDict()
dictionaries.append(header)
header["timestamp"] = u32(get_timestamp(0, None, None))
header["country_code"] = u8(country_code)
header["publicity_flag"] = u8(0)
header["question_version"] = u8(0 if file_type == "r" else 1)
header["result_version"] = u8(1 if file_type == "r" else 0)
header["national_question_number"] = u8(national)
header["national_question_offset"] = u32(0)
header["worldwide_question_number"] = u8(worldwide)
header["worldwide_question_offset"] = u32(0)
header["question_number"] = u8(questions * len(country_language[country_code]))
header["question_offset"] = u32(0)
header["national_result_entry"] = u8(national_results)
header["national_result_offset"] = u32(0)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import binascii
import calendar
import CloudFlare
import datetime
import json
import logging
import nlzss
import os
import struct
import subprocess
import sys
import textwrap
import time
import MySQLdb
import requests
import rsa
from utils import setup_log, log, u8, u16, u32
from Channels.Everybody_Votes_Channel.voteslists import *
and context:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
which might include code, classes, or functions. Output only the next line. | header["national_result_detailed_number"] = u16(national_results * region_number[country_code]) |
Based on the snippet: <|code_start|> "author_icon": "https://rc24.xyz/images/webhooks/votes/profile.png", "text": webhook_text,
"title": "Update!",
"fields": [{"title": "Script", "value": "Everybody Votes Channel", "short": "false"}],
"thumb_url": "https://rc24.xyz/images/webhooks/votes/vote_%s.png" % webhook_type,
"footer": "RiiConnect24 Script", "footer_icon": "https://rc24.xyz/images/logo-small.png",
"ts": int(calendar.timegm(datetime.datetime.utcnow().timetuple()))}]}
for url in config["webhook_urls"]:
post_webhook = requests.post(url, json=data, allow_redirects=True)
def purge_cache():
if config["production"]:
if config["cloudflare_cache_purge"]:
print("Purging cache...")
for country_code in country_codes:
url = "http://{}/{}/".format(
config["cloudflare_hostname"],
str(country_code).zfill(3),
)
cf = CloudFlare.CloudFlare(token=config["cloudflare_token"])
cf.zones.purge_cache.post(
config["cloudflare_zone_name"],
data={"files": [url + "voting.bin"]},
)
dictionaries = []
<|code_end|>
, predict the immediate next line with the help of imports:
import binascii
import calendar
import CloudFlare
import datetime
import json
import logging
import nlzss
import os
import struct
import subprocess
import sys
import textwrap
import time
import MySQLdb
import requests
import rsa
from utils import setup_log, log, u8, u16, u32
from Channels.Everybody_Votes_Channel.voteslists import *
and context (classes, functions, sometimes code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
. Output only the next line. | def offset_count(): return u32(12 + sum(len(values) for dictionary in dictionaries for values in list(dictionary.values()) if values)) |
Given the following code snippet before the placeholder: <|code_start|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# ===========================================================================
# NEWS CHANNEL GENERATION SCRIPT
# AUTHORS: LARSEN VALLECILLO
# ****************************************************************************
# Copyright (c) 2015-2022 RiiConnect24, and its (Lead) Developers
# ===========================================================================
with open("./Channels/News_Channel/config.json", "rb") as f:
config = json.load(f) # load config
if config["production"]:
<|code_end|>
, predict the next line using imports from the current file:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context including class names, function names, and sometimes code from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
. Output only the next line. | setup_log(config["sentry_url"], True) # error logging |
Given the following code snippet before the placeholder: <|code_start|> ]: # brilliant way to keep the news flowing when it's close to or over the file size limit, surprisingly seems to work?
path = "{}/v2/{}_{}".format(config["file_path"], language_code, region)
try:
size = round(
float(
subprocess.check_output(["du", "-sh", path])
.split()[0]
.decode("utf-8")
.replace("M", "")
)
- 3.7,
1,
)
if size >= 3.8:
limit -= 20
elif size == 3.7:
limit -= 15
elif size == 3.6:
limit -= 4
elif size == 3.5:
limit -= 3
filesize = sum(
os.path.getsize(f) - 320
for f in glob.glob(
path + "/news.bin.*"
)
) # let's do one more check to see if the filesize is ok
if filesize > 3712000:
<|code_end|>
, predict the next line using imports from the current file:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context including class names, function names, and sometimes code from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
. Output only the next line. | log("News files exceed the maximum file size amount.", "error") |
Predict the next line after this snippet: <|code_start|> "title": "Script",
"value": "News Channel (" + name + ")",
"short": "false",
}
],
"thumb_url": "https://rc24.xyz/images/webhooks/news/%s.png" % mode,
"footer": "RiiConnect24 Script",
"footer_icon": "https://rc24.xyz/images/logo-small.png",
"ts": int(calendar.timegm(datetime.utcnow().timetuple())),
}
],
}
for url in config["webhook_urls"]:
requests.post(url, json=webhook, allow_redirects=True)
copy_file(region)
if config["packVFF"]:
packVFF(region)
os.remove(newsfilename)
# copy the temp files to the correct path that the Wii will request from the server
def copy(region, hour):
newsfilename2 = "news.bin.{}".format(str(datetime.utcnow().hour).zfill(2))
path = "{}/v2/{}_{}".format(config["file_path"], language_code, region)
<|code_end|>
using the current file's imports:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and any relevant context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
. Output only the next line. | mkdir_p(path) |
Based on the snippet: <|code_start|> for k, v in list(data.items()):
if v[3] not in headlines:
headlines.append(v[3])
elif v[3] in headlines:
del data[k]
return data
# Make the news.bin
# First part of the header
def make_header(data):
header = {}
dictionaries.append(header)
header["updated_timestamp_1"] = get_timestamp(1) # Updated time.
header["term_timestamp"] = get_timestamp(2) # Timestamp for the term.
header["country_code"] = u32_littleendian(country_code) # Wii Country Code.
header["updated_timestamp_2"] = get_timestamp(1) # 3rd timestamp.
# List of languages that appear on the language select screen
numbers = 0
for language in languages:
numbers += 1
<|code_end|>
, predict the immediate next line with the help of imports:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context (classes, functions, sometimes code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
. Output only the next line. | header["language_select_%s" % numbers] = u8(language) |
Here is a snippet: <|code_start|> numbers += 1
header["language_select_%s" % numbers] = u8(language)
# Fills the rest of the languages as null
while numbers < 16:
numbers += 1
header["language_select_%s" % numbers] = u8(255)
header["language_code"] = u8(language_code) # Wii language code.
header["goo_flag"] = u8(0) # Flag to make the Globe display "Powered by Goo".
header["language_select_screen_flag"] = u8(
0
) # Flag to bring up the language select screen.
header["download_interval"] = u8(
30
) # Interval in minutes to check for new articles to display on the Wii Menu.
header["message_offset"] = u32(0) # Offset for a message.
header["topics_number"] = u32(0) # Number of entries for the topics table.
header["topics_offset"] = u32(0) # Offset for the topics table.
header["articles_number"] = u32(0) # Number of entries for the articles table.
header["articles_offset"] = u32(0) # Offset for the articles table.
header["source_number"] = u32(0) # Number of entries for the source table.
header["source_offset"] = u32(0) # Offset for the source table.
header["locations_number"] = u32(0) # Number of entries for the locations.
header["locations_offset"] = u32(0) # Offset for the locations table.
header["pictures_number"] = u32(0) # Number of entries for the pictures table.
header["pictures_offset"] = u32(0) # Offset for the pictures table.
<|code_end|>
. Write the next line using the current file imports:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
, which may include functions, classes, or code. Output only the next line. | header["count"] = u16(480) # Count value. |
Predict the next line for this snippet: <|code_start|>
# Run the functions to make the news
def make_news_bin(mode, data, locations_data, region):
global dictionaries, languages, country_code, language_code
source = sources[mode]
if source is None:
print("Could not find %s in sources.")
topics_news = source["topics_news"]
languages = source["languages"]
language_code = source["language_code"]
country_code = source["country_code"]
numbers = 0
if not os.path.exists("newstime"):
os.mkdir("newstime")
for topics in list(topics_news.values()):
newstime = {}
for keys in list(data.keys()):
if topics in keys:
numbers += 1
<|code_end|>
with the help of current file imports:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
, which may contain function names, class names, or code. Output only the next line. | newstime[data[keys][3]] = get_timestamp(1) + u32(numbers) |
Given the code snippet: <|code_start|> + 1500
)
# Remove duplicate articles
def remove_duplicates(data):
headlines = []
for k, v in list(data.items()):
if v[3] not in headlines:
headlines.append(v[3])
elif v[3] in headlines:
del data[k]
return data
# Make the news.bin
# First part of the header
def make_header(data):
header = {}
dictionaries.append(header)
header["updated_timestamp_1"] = get_timestamp(1) # Updated time.
header["term_timestamp"] = get_timestamp(2) # Timestamp for the term.
<|code_end|>
, generate the next line using the imports in this file:
import binascii
import calendar
import CloudFlare
import difflib
import glob
import json
import nlzss
import os
import pickle
import requests
import subprocess
import sys
import time
import utils
import rsa
from datetime import timedelta, datetime, date
from . import newsdownload
from datadog import statsd
from utils import setup_log, log, mkdir_p, u8, u16, u32, u32_littleendian
and context (functions, classes, or occasionally code) from other files:
# Path: utils.py
# def setup_log(sentry_url, print_errors):
# global logger, production
# sentry_logging = LoggingIntegration(
# level=logging.INFO,
# event_level=logging.INFO
# )
# sentry_sdk.init(dsn=sentry_url, integrations=[sentry_logging])
# logger = logging.getLogger(__name__)
# p_errors = print_errors
# production = True
#
# def log(msg, level): # TODO: Use number levels, strings are annoying
# if p_errors:
# print(msg)
#
# if production:
# if level == "VERBOSE":
# logger.debug(msg)
# elif level == "INFO":
# logger.info(msg)
# elif level == "WARNING":
# logger.warning(msg)
# elif level == "CRITICAL":
# logger.critical(msg)
#
# def mkdir_p(path):
# try:
# os.makedirs(path)
# except:
# pass
#
# def u8(data):
# if not 0 <= data <= 255:
# log("u8 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">B", data)
#
# def u16(data):
# if not 0 <= data <= 65535:
# log("u16 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">H", data)
#
# def u32(data):
# if not 0 <= data <= 4294967295:
# log("u32 out of range: %s" % data, "INFO")
# data = 0
# return struct.pack(">I", data)
#
# def u32_littleendian(data):
# if not 0 <= data <= 4294967295:
# log("u32 little endian out of range: %s" % data, "INFO")
# data = 0
# return struct.pack("<I", data)
. Output only the next line. | header["country_code"] = u32_littleendian(country_code) # Wii Country Code. |
Continue the code snippet: <|code_start|> verify_hashes=True, retry_exceptions=(socket.error,)):
"""Download an archive to a file.
:type filename: str
:param filename: The name of the file where the archive
contents will be saved.
:type chunk_size: int
:param chunk_size: The chunk size to use when downloading
the archive.
:type verify_hashes: bool
:param verify_hashes: Indicates whether or not to verify
the tree hashes for each downloaded chunk.
"""
num_chunks = int(math.ceil(self.archive_size / float(chunk_size)))
with open(filename, 'wb') as output_file:
self._download_to_fileob(output_file, num_chunks, chunk_size,
verify_hashes, retry_exceptions)
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
for i in xrange(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
if verify_hashes:
actual_tree_hash = bytes_to_hex(tree_hash(chunk_hashes(data)))
if expected_tree_hash != actual_tree_hash:
<|code_end|>
. Use current file imports:
import math
import socket
from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
from .writer import bytes_to_hex, chunk_hashes, tree_hash
and context (classes, functions, or code) from other files:
# Path: python/boto/glacier/exceptions.py
# class TreeHashDoesNotMatchError(DownloadArchiveError):
# pass
#
# class DownloadArchiveError(Exception):
# pass
#
# Path: python/boto/glacier/writer.py
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
. Output only the next line. | raise TreeHashDoesNotMatchError( |
Given the code snippet: <|code_start|> verify_hashes, retry_exceptions)
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
for i in xrange(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
if verify_hashes:
actual_tree_hash = bytes_to_hex(tree_hash(chunk_hashes(data)))
if expected_tree_hash != actual_tree_hash:
raise TreeHashDoesNotMatchError(
"The calculated tree hash %s does not match the "
"expected tree hash %s for the byte range %s" % (
actual_tree_hash, expected_tree_hash, byte_range))
fileobj.write(data)
def _download_byte_range(self, byte_range, retry_exceptions):
# You can occasionally get socket.errors when downloading
# chunks from Glacier, so each chunk can be retried up
# to 5 times.
for _ in xrange(5):
try:
response = self.get_output(byte_range)
data = response.read()
expected_tree_hash = response['TreeHash']
return data, expected_tree_hash
except retry_exceptions, e:
continue
else:
<|code_end|>
, generate the next line using the imports in this file:
import math
import socket
from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
from .writer import bytes_to_hex, chunk_hashes, tree_hash
and context (functions, classes, or occasionally code) from other files:
# Path: python/boto/glacier/exceptions.py
# class TreeHashDoesNotMatchError(DownloadArchiveError):
# pass
#
# class DownloadArchiveError(Exception):
# pass
#
# Path: python/boto/glacier/writer.py
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
. Output only the next line. | raise DownloadArchiveError("There was an error downloading" |
Given the snippet: <|code_start|>
def download_to_file(self, filename, chunk_size=DefaultPartSize,
verify_hashes=True, retry_exceptions=(socket.error,)):
"""Download an archive to a file.
:type filename: str
:param filename: The name of the file where the archive
contents will be saved.
:type chunk_size: int
:param chunk_size: The chunk size to use when downloading
the archive.
:type verify_hashes: bool
:param verify_hashes: Indicates whether or not to verify
the tree hashes for each downloaded chunk.
"""
num_chunks = int(math.ceil(self.archive_size / float(chunk_size)))
with open(filename, 'wb') as output_file:
self._download_to_fileob(output_file, num_chunks, chunk_size,
verify_hashes, retry_exceptions)
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
for i in xrange(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
if verify_hashes:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import math
import socket
from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
from .writer import bytes_to_hex, chunk_hashes, tree_hash
and context:
# Path: python/boto/glacier/exceptions.py
# class TreeHashDoesNotMatchError(DownloadArchiveError):
# pass
#
# class DownloadArchiveError(Exception):
# pass
#
# Path: python/boto/glacier/writer.py
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
which might include code, classes, or functions. Output only the next line. | actual_tree_hash = bytes_to_hex(tree_hash(chunk_hashes(data))) |
Given the following code snippet before the placeholder: <|code_start|>
def download_to_file(self, filename, chunk_size=DefaultPartSize,
verify_hashes=True, retry_exceptions=(socket.error,)):
"""Download an archive to a file.
:type filename: str
:param filename: The name of the file where the archive
contents will be saved.
:type chunk_size: int
:param chunk_size: The chunk size to use when downloading
the archive.
:type verify_hashes: bool
:param verify_hashes: Indicates whether or not to verify
the tree hashes for each downloaded chunk.
"""
num_chunks = int(math.ceil(self.archive_size / float(chunk_size)))
with open(filename, 'wb') as output_file:
self._download_to_fileob(output_file, num_chunks, chunk_size,
verify_hashes, retry_exceptions)
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
for i in xrange(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
if verify_hashes:
<|code_end|>
, predict the next line using imports from the current file:
import math
import socket
from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
from .writer import bytes_to_hex, chunk_hashes, tree_hash
and context including class names, function names, and sometimes code from other files:
# Path: python/boto/glacier/exceptions.py
# class TreeHashDoesNotMatchError(DownloadArchiveError):
# pass
#
# class DownloadArchiveError(Exception):
# pass
#
# Path: python/boto/glacier/writer.py
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
. Output only the next line. | actual_tree_hash = bytes_to_hex(tree_hash(chunk_hashes(data))) |
Given the code snippet: <|code_start|>
def download_to_file(self, filename, chunk_size=DefaultPartSize,
verify_hashes=True, retry_exceptions=(socket.error,)):
"""Download an archive to a file.
:type filename: str
:param filename: The name of the file where the archive
contents will be saved.
:type chunk_size: int
:param chunk_size: The chunk size to use when downloading
the archive.
:type verify_hashes: bool
:param verify_hashes: Indicates whether or not to verify
the tree hashes for each downloaded chunk.
"""
num_chunks = int(math.ceil(self.archive_size / float(chunk_size)))
with open(filename, 'wb') as output_file:
self._download_to_fileob(output_file, num_chunks, chunk_size,
verify_hashes, retry_exceptions)
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
for i in xrange(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
if verify_hashes:
<|code_end|>
, generate the next line using the imports in this file:
import math
import socket
from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
from .writer import bytes_to_hex, chunk_hashes, tree_hash
and context (functions, classes, or occasionally code) from other files:
# Path: python/boto/glacier/exceptions.py
# class TreeHashDoesNotMatchError(DownloadArchiveError):
# pass
#
# class DownloadArchiveError(Exception):
# pass
#
# Path: python/boto/glacier/writer.py
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
. Output only the next line. | actual_tree_hash = bytes_to_hex(tree_hash(chunk_hashes(data))) |
Continue the code snippet: <|code_start|> while self.should_continue:
try:
work = self._worker_queue.get(timeout=1)
except Empty:
continue
if work is _END_SENTINEL:
return
result = self._process_chunk(work)
self._result_queue.put(result)
def _process_chunk(self, work):
result = None
for _ in xrange(self._num_retries):
try:
result = self._upload_chunk(work)
break
except self._retry_exceptions, e:
log.error("Exception caught uploading part number %s for "
"vault %s, filename: %s", work[0], self._vault_name,
self._filename)
time.sleep(self._time_between_retries)
result = e
return result
def _upload_chunk(self, work):
part_number, part_size = work
start_byte = part_number * part_size
self._fileobj.seek(start_byte)
contents = self._fileobj.read(part_size)
linear_hash = hashlib.sha256(contents).hexdigest()
<|code_end|>
. Use current file imports:
import os
import math
import threading
import hashlib
import time
import logging
from Queue import Queue, Empty
from .writer import chunk_hashes, tree_hash, bytes_to_hex
from .exceptions import UploadArchiveError
and context (classes, functions, or code) from other files:
# Path: python/boto/glacier/writer.py
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
#
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# Path: python/boto/glacier/exceptions.py
# class UploadArchiveError(Exception):
# pass
. Output only the next line. | tree_hash_bytes = tree_hash(chunk_hashes(contents)) |
Predict the next line after this snippet: <|code_start|> :rtype: str
:return: The archive id of the newly created archive.
"""
fileobj = open(filename, 'rb')
total_size = os.fstat(fileobj.fileno()).st_size
total_parts = int(math.ceil(total_size / float(self._part_size)))
hash_chunks = [None] * total_parts
worker_queue = Queue()
result_queue = Queue()
response = self._api.initiate_multipart_upload(self._vault_name,
self._part_size,
description)
upload_id = response['UploadId']
# The basic idea is to add the chunks (the offsets not the actual
# contents) to a work queue, start up a thread pool, let the crank
# through the items in the work queue, and then place their results
# in a result queue which we use to complete the multipart upload.
self._add_work_items_to_queue(total_parts, worker_queue)
self._start_upload_threads(result_queue, upload_id,
worker_queue, filename)
try:
self._wait_for_upload_threads(hash_chunks, result_queue, total_parts)
except UploadArchiveError, e:
log.debug("An error occurred while uploading an archive, aborting "
"multipart upload.")
self._api.abort_multipart_upload(self._vault_name, upload_id)
raise e
log.debug("Completing upload.")
response = self._api.complete_multipart_upload(
<|code_end|>
using the current file's imports:
import os
import math
import threading
import hashlib
import time
import logging
from Queue import Queue, Empty
from .writer import chunk_hashes, tree_hash, bytes_to_hex
from .exceptions import UploadArchiveError
and any relevant context from other files:
# Path: python/boto/glacier/writer.py
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
#
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# Path: python/boto/glacier/exceptions.py
# class UploadArchiveError(Exception):
# pass
. Output only the next line. | self._vault_name, upload_id, bytes_to_hex(tree_hash(hash_chunks)), |
Continue the code snippet: <|code_start|> :rtype: str
:return: The archive id of the newly created archive.
"""
fileobj = open(filename, 'rb')
total_size = os.fstat(fileobj.fileno()).st_size
total_parts = int(math.ceil(total_size / float(self._part_size)))
hash_chunks = [None] * total_parts
worker_queue = Queue()
result_queue = Queue()
response = self._api.initiate_multipart_upload(self._vault_name,
self._part_size,
description)
upload_id = response['UploadId']
# The basic idea is to add the chunks (the offsets not the actual
# contents) to a work queue, start up a thread pool, let the crank
# through the items in the work queue, and then place their results
# in a result queue which we use to complete the multipart upload.
self._add_work_items_to_queue(total_parts, worker_queue)
self._start_upload_threads(result_queue, upload_id,
worker_queue, filename)
try:
self._wait_for_upload_threads(hash_chunks, result_queue, total_parts)
except UploadArchiveError, e:
log.debug("An error occurred while uploading an archive, aborting "
"multipart upload.")
self._api.abort_multipart_upload(self._vault_name, upload_id)
raise e
log.debug("Completing upload.")
response = self._api.complete_multipart_upload(
<|code_end|>
. Use current file imports:
import os
import math
import threading
import hashlib
import time
import logging
from Queue import Queue, Empty
from .writer import chunk_hashes, tree_hash, bytes_to_hex
from .exceptions import UploadArchiveError
and context (classes, functions, or code) from other files:
# Path: python/boto/glacier/writer.py
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
#
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# Path: python/boto/glacier/exceptions.py
# class UploadArchiveError(Exception):
# pass
. Output only the next line. | self._vault_name, upload_id, bytes_to_hex(tree_hash(hash_chunks)), |
Given the code snippet: <|code_start|>
:type file: str
:param file: The filename to upload
:type description: str
:param description: The description of the archive.
:rtype: str
:return: The archive id of the newly created archive.
"""
fileobj = open(filename, 'rb')
total_size = os.fstat(fileobj.fileno()).st_size
total_parts = int(math.ceil(total_size / float(self._part_size)))
hash_chunks = [None] * total_parts
worker_queue = Queue()
result_queue = Queue()
response = self._api.initiate_multipart_upload(self._vault_name,
self._part_size,
description)
upload_id = response['UploadId']
# The basic idea is to add the chunks (the offsets not the actual
# contents) to a work queue, start up a thread pool, let the crank
# through the items in the work queue, and then place their results
# in a result queue which we use to complete the multipart upload.
self._add_work_items_to_queue(total_parts, worker_queue)
self._start_upload_threads(result_queue, upload_id,
worker_queue, filename)
try:
self._wait_for_upload_threads(hash_chunks, result_queue, total_parts)
<|code_end|>
, generate the next line using the imports in this file:
import os
import math
import threading
import hashlib
import time
import logging
from Queue import Queue, Empty
from .writer import chunk_hashes, tree_hash, bytes_to_hex
from .exceptions import UploadArchiveError
and context (functions, classes, or occasionally code) from other files:
# Path: python/boto/glacier/writer.py
# def chunk_hashes(bytestring, chunk_size=_ONE_MEGABYTE):
# chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
# hashes = []
# for i in xrange(chunk_count):
# start = i * chunk_size
# end = (i + 1) * chunk_size
# hashes.append(hashlib.sha256(bytestring[start:end]).digest())
# return hashes
#
# def tree_hash(fo):
# """
# Given a hash of each 1MB chunk (from chunk_hashes) this will hash
# together adjacent hashes until it ends up with one big one. So a
# tree of hashes.
# """
# hashes = []
# hashes.extend(fo)
# while len(hashes) > 1:
# new_hashes = []
# while True:
# if len(hashes) > 1:
# first = hashes.pop(0)
# second = hashes.pop(0)
# new_hashes.append(hashlib.sha256(first + second).digest())
# elif len(hashes) == 1:
# only = hashes.pop(0)
# new_hashes.append(only)
# else:
# break
# hashes.extend(new_hashes)
# return hashes[0]
#
# def bytes_to_hex(str):
# return ''.join(["%02x" % ord(x) for x in str]).strip()
#
# Path: python/boto/glacier/exceptions.py
# class UploadArchiveError(Exception):
# pass
. Output only the next line. | except UploadArchiveError, e: |
Given snippet: <|code_start|> break
self.region = region
self.account_id = account_id
AWSAuthConnection.__init__(self, region.endpoint,
aws_access_key_id, aws_secret_access_key,
True, port, proxy, proxy_port,
proxy_user, proxy_pass, debug,
https_connection_factory,
path, provider, security_token,
suppress_consec_slashes)
def _required_auth_capability(self):
return ['hmac-v4']
def make_request(self, verb, resource, headers=None,
data='', ok_responses=(200,), params=None,
response_headers=None):
if headers is None:
headers = {}
headers['x-amz-glacier-version'] = self.Version
uri = '/%s/%s' % (self.account_id, resource)
response = AWSAuthConnection.make_request(self, verb, uri,
params=params,
headers=headers,
data=data)
if response.status in ok_responses:
return GlacierResponse(response, response_headers)
else:
# create glacier-specific exceptions
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import json
import urllib
import boto.glacier
from boto.connection import AWSAuthConnection
from .exceptions import UnexpectedHTTPResponseError
from .response import GlacierResponse
and context:
# Path: python/boto/glacier/exceptions.py
# class UnexpectedHTTPResponseError(Exception):
# def __init__(self, expected_responses, response):
# self.status = response.status
# self.body = response.read()
# self.code = None
# try:
# body = json.loads(self.body)
# self.code = body["code"]
# msg = 'Expected %s, got ' % expected_responses
# msg += '(%d, code=%s, message=%s)' % (response.status,
# self.code,
# body["message"])
# except Exception:
# msg = 'Expected %s, got (%d, %s)' % (expected_responses,
# response.status,
# self.body)
# super(UnexpectedHTTPResponseError, self).__init__(msg)
#
# Path: python/boto/glacier/response.py
# class GlacierResponse(dict):
# """
# Represents a response from Glacier layer1. It acts as a dictionary
# containing the combined keys received via JSON in the body (if
# supplied) and headers.
# """
# def __init__(self, http_response, response_headers):
# self.http_response = http_response
# self.status = http_response.status
# self[u'RequestId'] = http_response.getheader('x-amzn-requestid')
# if response_headers:
# for header_name, item_name in response_headers:
# self[item_name] = http_response.getheader(header_name)
# if http_response.getheader('Content-Type') == 'application/json':
# body = json.loads(http_response.read())
# self.update(body)
# size = http_response.getheader('Content-Length', None)
# if size is not None:
# self.size = size
#
# def read(self, amt=None):
# "Reads and returns the response body, or up to the next amt bytes."
# return self.http_response.read(amt)
which might include code, classes, or functions. Output only the next line. | raise UnexpectedHTTPResponseError(ok_responses, response) |
Based on the snippet: <|code_start|> for reg in boto.glacier.regions():
if reg.name == region_name:
region = reg
break
self.region = region
self.account_id = account_id
AWSAuthConnection.__init__(self, region.endpoint,
aws_access_key_id, aws_secret_access_key,
True, port, proxy, proxy_port,
proxy_user, proxy_pass, debug,
https_connection_factory,
path, provider, security_token,
suppress_consec_slashes)
def _required_auth_capability(self):
return ['hmac-v4']
def make_request(self, verb, resource, headers=None,
data='', ok_responses=(200,), params=None,
response_headers=None):
if headers is None:
headers = {}
headers['x-amz-glacier-version'] = self.Version
uri = '/%s/%s' % (self.account_id, resource)
response = AWSAuthConnection.make_request(self, verb, uri,
params=params,
headers=headers,
data=data)
if response.status in ok_responses:
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import json
import urllib
import boto.glacier
from boto.connection import AWSAuthConnection
from .exceptions import UnexpectedHTTPResponseError
from .response import GlacierResponse
and context (classes, functions, sometimes code) from other files:
# Path: python/boto/glacier/exceptions.py
# class UnexpectedHTTPResponseError(Exception):
# def __init__(self, expected_responses, response):
# self.status = response.status
# self.body = response.read()
# self.code = None
# try:
# body = json.loads(self.body)
# self.code = body["code"]
# msg = 'Expected %s, got ' % expected_responses
# msg += '(%d, code=%s, message=%s)' % (response.status,
# self.code,
# body["message"])
# except Exception:
# msg = 'Expected %s, got (%d, %s)' % (expected_responses,
# response.status,
# self.body)
# super(UnexpectedHTTPResponseError, self).__init__(msg)
#
# Path: python/boto/glacier/response.py
# class GlacierResponse(dict):
# """
# Represents a response from Glacier layer1. It acts as a dictionary
# containing the combined keys received via JSON in the body (if
# supplied) and headers.
# """
# def __init__(self, http_response, response_headers):
# self.http_response = http_response
# self.status = http_response.status
# self[u'RequestId'] = http_response.getheader('x-amzn-requestid')
# if response_headers:
# for header_name, item_name in response_headers:
# self[item_name] = http_response.getheader(header_name)
# if http_response.getheader('Content-Type') == 'application/json':
# body = json.loads(http_response.read())
# self.update(body)
# size = http_response.getheader('Content-Length', None)
# if size is not None:
# self.size = size
#
# def read(self, amt=None):
# "Reads and returns the response body, or up to the next amt bytes."
# return self.http_response.read(amt)
. Output only the next line. | return GlacierResponse(response, response_headers) |
Predict the next line after this snippet: <|code_start|>
return Github(**credentials)
#: Server-wide authenticated GitHub state
github_setup = _github_setup()
def get_repo_from_url(url, gh_setup=github_setup):
"""
Given an URL like (ssh://)git@github.com/user/repo.git or any other url
that defines the root of a repository, this function returns the PyGithub
resource describing that object.
One can use :func:`get_cookie_data_from_repo` or
:func:`get_mapping_file_from_repo` to get further information about that
repository such as the content of the ``cookiecutter.json`` file.
:param str url: The root URL to a github repository
:param gh_setup: If not the server-wide authentiaction :data:`github_setup`
should be used, this parameter can be set to another, e.g. user
authenticated PyGithub object
:return: Returns an instance of a ``PyGithub.Repository``.
:raises: ``InvalidRepositoryError`` if the given URL does not match a known
GitHub URL.
"""
if 'git@github.com' in url:
identifier = 'git@github.com'
elif 'https://github.com/' in url:
identifier = 'https://github.com'
else:
<|code_end|>
using the current file's imports:
import json
import pytz
import uuid
import http.client
from base64 import standard_b64decode
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.timezone import make_aware
from github import Github
from bakery.cookies.exceptions import (InvalidRepositoryError,
InvalidContentFileEncoding)
and any relevant context from other files:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# class InvalidContentFileEncoding(ValueError):
# pass
. Output only the next line. | raise InvalidRepositoryError('{0} is not a valid GitHub URL'.format(url)) |
Based on the snippet: <|code_start|> :raises: ``InvalidRepositoryError`` if there was no way to
deterministically find the mapping file.
"""
candidates = filter_repo(repo, {'name': lambda val: val.endswith('.json')})
if not candidates:
raise InvalidRepositoryError('No JSON mapping file found!')
if len(candidates) > 1:
mapping_file = candidates.get('cookiecutter.json', None)
if mapping_file is None:
raise InvalidRepositoryError('Cannot decide for a mapping file! '
'Multiple files found: {0}'.format(', '.join(candidates.keys)))
else:
mapping_file = list(candidates.values())[0]
return repo.get_contents('/' + mapping_file['name'])
def decode_file(content_file):
"""
Given a ``PyGithub.ContentFile`` this function will decode the file's data.
:return dict: Returns a raw decoded string.
:raises: ``InvalidContentFileEncoding`` raised if not suitable decoding
is defined.
"""
decoded = None
if content_file.encoding == 'base64':
decoded = standard_b64decode(content_file.content).decode('utf-8')
if decoded is None:
<|code_end|>
, predict the immediate next line with the help of imports:
import json
import pytz
import uuid
import http.client
from base64 import standard_b64decode
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.timezone import make_aware
from github import Github
from bakery.cookies.exceptions import (InvalidRepositoryError,
InvalidContentFileEncoding)
and context (classes, functions, sometimes code) from other files:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# class InvalidContentFileEncoding(ValueError):
# pass
. Output only the next line. | raise InvalidContentFileEncoding( |
Next line prediction: <|code_start|>#-*- coding: utf-8 -*-
class ImportForm(forms.Form):
url = forms.CharField(max_length=255)
def import_cookie(self):
url = self.cleaned_data['url']
<|code_end|>
. Use current file imports:
(from django import forms
from bakery.cookies.models import Cookie)
and context including class names, function names, or small code snippets from other files:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
. Output only the next line. | cookie = Cookie.objects.import_from_url(url) |
Given the following code snippet before the placeholder: <|code_start|>#-*- coding: utf-8 -*-
repos = [
'https://github.com/audreyr/cookiecutter-pypackage',
'https://github.com/sloria/cookiecutter-flask',
'https://github.com/lucuma/cookiecutter-flask-env',
'https://github.com/marcofucci/cookiecutter-simple-django',
'https://github.com/pydanny/cookiecutter-django',
'https://github.com/pydanny/cookiecutter-djangopackage',
'https://github.com/openstack-dev/cookiecutter',
'https://github.com/sloria/cookiecutter-docopt',
'https://github.com/vincentbernat/bootstrap.c',
'https://github.com/audreyr/cookiecutter-jquery',
'https://github.com/audreyr/cookiecutter-component',
'https://github.com/larsyencken/pandoc-talk',
'https://github.com/audreyr/cookiecutter-complexity',
'https://github.com/NigelCleland/cookiecutter-latex-journal',
]
for repo in repos:
print('Importing {0}'.format(repo))
<|code_end|>
, predict the next line using imports from the current file:
from bakery.cookies.models import Cookie
and context including class names, function names, and sometimes code from other files:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
. Output only the next line. | Cookie.objects.import_from_url(repo) |
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*-
class Command(BaseCommand):
username = '<username>'
help = 'Change the user <username> to be a superuser.'
def handle(self, username, **options):
traceback = options.get('traceback', False)
try:
<|code_end|>
, predict the next line using imports from the current file:
import sys
from django.core.management.base import BaseCommand, CommandError
from bakery.auth.models import BakeryUser
and context including class names, function names, and sometimes code from other files:
# Path: bakery/auth/models.py
# class BakeryUser(AbstractBaseUser):
# username = models.CharField(_('Username'), max_length=50, unique=True)
# email = models.EmailField(_('Email'), max_length=254, unique=True)
# name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
# is_superuser = models.BooleanField(_('Superuser'), default=False)
# is_staff = models.BooleanField(_('Staff'), default=False)
# is_active = models.BooleanField(_('Active'), default=True)
# is_organization = models.BooleanField(_('Organization'))
# profile_url = models.URLField(_('Profile'), blank=True, null=True)
# date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
#
# objects = BakeryUserManager()
#
# USERNAME_FIELD = 'username'
# REQUIRED_FIELDS = ['password']
#
# class Meta:
# verbose_name = _('User')
# verbose_name_plural = _('Users')
#
# def __str__(self):
# return self.username
#
# def get_absolute_url(self):
# return reverse_lazy('auth:profile', kwargs={'username': self.username})
#
# def get_full_name(self):
# return self.name
#
# def get_short_name(self):
# "Returns the short name for the user."
# return self.name
#
# def get_display_name(self):
# return self.name or self.username
#
# def get_gravatar(self):
# return get_gravatar(self.email)
#
# def vote_for_cookie(self, cookie):
# do_vote(self, cookie)
#
# @property
# def candies_list(self):
# candies_list = getattr(self, '_candies_list', [])
# if not candies_list:
# candies = self.candies.order_by('candy_type')
# for candy in CANDIES:
# candy_type = candy[0]
# count = candies.filter(candy_type=candy_type).count()
# candies_list.append((candy, count))
# setattr(self, '_candies_list', candies_list)
# return candies_list
. Output only the next line. | user = BakeryUser.objects.get(username=username) |
Here is a snippet: <|code_start|> def get_query_set(self):
return CookieQuerySet(self.model)
def update_or_create(self, *args, **kwargs):
return self.get_query_set().update_or_create(**kwargs)
def import_from_url(self, url):
"""Imports or updates from ``url``"""
if 'git@github.com' in url or 'https://github.com/' in url:
repo = gh.get_repo_from_url(url)
return self.import_from_repo(repo)
raise ValueError('{0} is not a recognized URL'.format(url))
def import_from_repo(self, repo):
cookie_data = gh.get_cookie_data_from_repo(repo)
owner_data = cookie_data.pop('_owner', None)
return self.import_from_cookie_dict(cookie_data, owner_data, repo)
def import_from_cookie_dict(self, cookie_dict, owner_dict, repo=None):
username = owner_dict['username']
email = owner_dict.get('email', None)
if email is not None and not email.endswith('localhost.invalid'):
filter_args = {'username': username, 'email': email}
else:
filter_args = {'username': username}
filter_args['defaults'] = owner_dict
<|code_end|>
. Write the next line using the current file imports:
from django.db.models import Manager
from django.db.models.query import QuerySet
from bakery.auth.models import BakeryUser
from bakery.utils.vcs import gh
and context from other files:
# Path: bakery/auth/models.py
# class BakeryUser(AbstractBaseUser):
# username = models.CharField(_('Username'), max_length=50, unique=True)
# email = models.EmailField(_('Email'), max_length=254, unique=True)
# name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
# is_superuser = models.BooleanField(_('Superuser'), default=False)
# is_staff = models.BooleanField(_('Staff'), default=False)
# is_active = models.BooleanField(_('Active'), default=True)
# is_organization = models.BooleanField(_('Organization'))
# profile_url = models.URLField(_('Profile'), blank=True, null=True)
# date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
#
# objects = BakeryUserManager()
#
# USERNAME_FIELD = 'username'
# REQUIRED_FIELDS = ['password']
#
# class Meta:
# verbose_name = _('User')
# verbose_name_plural = _('Users')
#
# def __str__(self):
# return self.username
#
# def get_absolute_url(self):
# return reverse_lazy('auth:profile', kwargs={'username': self.username})
#
# def get_full_name(self):
# return self.name
#
# def get_short_name(self):
# "Returns the short name for the user."
# return self.name
#
# def get_display_name(self):
# return self.name or self.username
#
# def get_gravatar(self):
# return get_gravatar(self.email)
#
# def vote_for_cookie(self, cookie):
# do_vote(self, cookie)
#
# @property
# def candies_list(self):
# candies_list = getattr(self, '_candies_list', [])
# if not candies_list:
# candies = self.candies.order_by('candy_type')
# for candy in CANDIES:
# candy_type = candy[0]
# count = candies.filter(candy_type=candy_type).count()
# candies_list.append((candy, count))
# setattr(self, '_candies_list', candies_list)
# return candies_list
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# def get_repo_from_url(url, gh_setup=github_setup):
# def get_repo_from_full_name(full_name, gh_setup=github_setup):
# def get_cookie_data_from_repo(repo):
# def filter_repo(repo, filters):
# def get_mapping_file_from_repo(repo):
# def decode_file(content_file):
# def get_content_from_content_file(content_file):
# def fork_repository(user, repo):
, which may include functions, classes, or code. Output only the next line. | owner, created = BakeryUser.objects.get_or_create(**filter_args) |
Based on the snippet: <|code_start|>
class CookieQuerySet(QuerySet):
def update_or_create(self, *args, **kwargs):
obj, created = self.get_or_create(*args, **kwargs)
if not created:
fields = dict(kwargs.pop("defaults", {}))
fields.update(kwargs)
for key, value in fields.items():
setattr(obj, key, value)
obj.save()
return obj
class CookieManager(Manager):
use_for_related_fields = True
def get_query_set(self):
return CookieQuerySet(self.model)
def update_or_create(self, *args, **kwargs):
return self.get_query_set().update_or_create(**kwargs)
def import_from_url(self, url):
"""Imports or updates from ``url``"""
if 'git@github.com' in url or 'https://github.com/' in url:
<|code_end|>
, predict the immediate next line with the help of imports:
from django.db.models import Manager
from django.db.models.query import QuerySet
from bakery.auth.models import BakeryUser
from bakery.utils.vcs import gh
and context (classes, functions, sometimes code) from other files:
# Path: bakery/auth/models.py
# class BakeryUser(AbstractBaseUser):
# username = models.CharField(_('Username'), max_length=50, unique=True)
# email = models.EmailField(_('Email'), max_length=254, unique=True)
# name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
# is_superuser = models.BooleanField(_('Superuser'), default=False)
# is_staff = models.BooleanField(_('Staff'), default=False)
# is_active = models.BooleanField(_('Active'), default=True)
# is_organization = models.BooleanField(_('Organization'))
# profile_url = models.URLField(_('Profile'), blank=True, null=True)
# date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
#
# objects = BakeryUserManager()
#
# USERNAME_FIELD = 'username'
# REQUIRED_FIELDS = ['password']
#
# class Meta:
# verbose_name = _('User')
# verbose_name_plural = _('Users')
#
# def __str__(self):
# return self.username
#
# def get_absolute_url(self):
# return reverse_lazy('auth:profile', kwargs={'username': self.username})
#
# def get_full_name(self):
# return self.name
#
# def get_short_name(self):
# "Returns the short name for the user."
# return self.name
#
# def get_display_name(self):
# return self.name or self.username
#
# def get_gravatar(self):
# return get_gravatar(self.email)
#
# def vote_for_cookie(self, cookie):
# do_vote(self, cookie)
#
# @property
# def candies_list(self):
# candies_list = getattr(self, '_candies_list', [])
# if not candies_list:
# candies = self.candies.order_by('candy_type')
# for candy in CANDIES:
# candy_type = candy[0]
# count = candies.filter(candy_type=candy_type).count()
# candies_list.append((candy, count))
# setattr(self, '_candies_list', candies_list)
# return candies_list
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# def get_repo_from_url(url, gh_setup=github_setup):
# def get_repo_from_full_name(full_name, gh_setup=github_setup):
# def get_cookie_data_from_repo(repo):
# def filter_repo(repo, filters):
# def get_mapping_file_from_repo(repo):
# def decode_file(content_file):
# def get_content_from_content_file(content_file):
# def fork_repository(user, repo):
. Output only the next line. | repo = gh.get_repo_from_url(url) |
Here is a snippet: <|code_start|># -*- coding: utf-8 -*-
class Command(BaseCommand):
url = '<url>'
args = 'url'
help = 'Add the cookie defined by the VCS URL to the database.'
def handle(self, url, *args, **options):
verbosity = int(options.get('verbosity', 1))
traceback = options.get('traceback', False)
if verbosity > 1:
self.stdout.write('Importing {0}'.format(url))
try:
<|code_end|>
. Write the next line using the current file imports:
import sys
from django.core.management.base import BaseCommand, CommandError
from bakery.cookies.models import Cookie
and context from other files:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
, which may include functions, classes, or code. Output only the next line. | Cookie.objects.import_from_url(url) |
Given the following code snippet before the placeholder: <|code_start|>
class LoginErrorView(TemplateView):
template_name = 'error.html'
login_error = LoginErrorView.as_view()
class LogoutView(RedirectView):
permanent = False
def get_redirect_url(self, **kwargs):
auth.logout(self.request)
return reverse('home')
logout = LogoutView.as_view()
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
def get_context_data(self, **kwargs):
context = super(ProfileView, self).get_context_data(**kwargs)
<|code_end|>
, predict the next line using imports from the current file:
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView, RedirectView
from django.contrib import auth
from bakery.auth.models import BakeryUser
and context including class names, function names, and sometimes code from other files:
# Path: bakery/auth/models.py
# class BakeryUser(AbstractBaseUser):
# username = models.CharField(_('Username'), max_length=50, unique=True)
# email = models.EmailField(_('Email'), max_length=254, unique=True)
# name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
# is_superuser = models.BooleanField(_('Superuser'), default=False)
# is_staff = models.BooleanField(_('Staff'), default=False)
# is_active = models.BooleanField(_('Active'), default=True)
# is_organization = models.BooleanField(_('Organization'))
# profile_url = models.URLField(_('Profile'), blank=True, null=True)
# date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
#
# objects = BakeryUserManager()
#
# USERNAME_FIELD = 'username'
# REQUIRED_FIELDS = ['password']
#
# class Meta:
# verbose_name = _('User')
# verbose_name_plural = _('Users')
#
# def __str__(self):
# return self.username
#
# def get_absolute_url(self):
# return reverse_lazy('auth:profile', kwargs={'username': self.username})
#
# def get_full_name(self):
# return self.name
#
# def get_short_name(self):
# "Returns the short name for the user."
# return self.name
#
# def get_display_name(self):
# return self.name or self.username
#
# def get_gravatar(self):
# return get_gravatar(self.email)
#
# def vote_for_cookie(self, cookie):
# do_vote(self, cookie)
#
# @property
# def candies_list(self):
# candies_list = getattr(self, '_candies_list', [])
# if not candies_list:
# candies = self.candies.order_by('candy_type')
# for candy in CANDIES:
# candy_type = candy[0]
# count = candies.filter(candy_type=candy_type).count()
# candies_list.append((candy, count))
# setattr(self, '_candies_list', candies_list)
# return candies_list
. Output only the next line. | user = BakeryUser.objects.get(username=kwargs['username']) |
Given the following code snippet before the placeholder: <|code_start|>
class TestCommands(TestCase):
def test_makesuperuser(self):
BakeryUser.objects.create_user('SocialUser')
user = BakeryUser.objects.get(username='SocialUser')
self.assertFalse(user.is_staff)
self.assertFalse(user.is_superuser)
out = StringIO()
management.call_command('makesuperuser', 'SocialUser', stdout=out)
self.assertIn('Updated SocialUser to superuser status', out.getvalue())
user = BakeryUser.objects.get(username='SocialUser')
self.assertTrue(user.is_staff)
self.assertTrue(user.is_superuser)
def test_makesuperuser_not_found(self):
BakeryUser.objects.create_user('SocialUser')
user = BakeryUser.objects.get(username='SocialUser')
self.assertFalse(user.is_staff)
self.assertFalse(user.is_superuser)
self.assertRaises(CommandError, management.call_command, ('makesuperuser',), 'SocialUser2')
@httpretty.activate
def test_importcookie(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/audreyr/cookiecutter-pypackage',
<|code_end|>
, predict the next line using imports from the current file:
import httpretty
from django.core import management
from django.core.management.base import CommandError
from django.test import TestCase
from django.utils.six import StringIO
from bakery.auth.models import BakeryUser
from bakery.utils.test import read
and context including class names, function names, and sometimes code from other files:
# Path: bakery/auth/models.py
# class BakeryUser(AbstractBaseUser):
# username = models.CharField(_('Username'), max_length=50, unique=True)
# email = models.EmailField(_('Email'), max_length=254, unique=True)
# name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
# is_superuser = models.BooleanField(_('Superuser'), default=False)
# is_staff = models.BooleanField(_('Staff'), default=False)
# is_active = models.BooleanField(_('Active'), default=True)
# is_organization = models.BooleanField(_('Organization'))
# profile_url = models.URLField(_('Profile'), blank=True, null=True)
# date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
#
# objects = BakeryUserManager()
#
# USERNAME_FIELD = 'username'
# REQUIRED_FIELDS = ['password']
#
# class Meta:
# verbose_name = _('User')
# verbose_name_plural = _('Users')
#
# def __str__(self):
# return self.username
#
# def get_absolute_url(self):
# return reverse_lazy('auth:profile', kwargs={'username': self.username})
#
# def get_full_name(self):
# return self.name
#
# def get_short_name(self):
# "Returns the short name for the user."
# return self.name
#
# def get_display_name(self):
# return self.name or self.username
#
# def get_gravatar(self):
# return get_gravatar(self.email)
#
# def vote_for_cookie(self, cookie):
# do_vote(self, cookie)
#
# @property
# def candies_list(self):
# candies_list = getattr(self, '_candies_list', [])
# if not candies_list:
# candies = self.candies.order_by('candy_type')
# for candy in CANDIES:
# candy_type = candy[0]
# count = candies.filter(candy_type=candy_type).count()
# candies_list.append((candy, count))
# setattr(self, '_candies_list', candies_list)
# return candies_list
#
# Path: bakery/utils/test.py
# def read(base, *paths):
# fp = join(dirname(base), *paths)
# with open(fp, 'r') as f:
# return f.read()
. Output only the next line. | body=read(__file__, '..', '_replay_data', 'cookiecutter-pypacker-repository'), |
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*-
class CookieDetailView(DetailView):
model = Cookie
def get_object(self):
owner_name = self.kwargs['owner_name']
name = self.kwargs['name']
self.object = get_object_or_404(Cookie, owner_name=owner_name, name=name)
return self.object
def get_context_data(self, **kwargs):
context = super(CookieDetailView, self).get_context_data(**kwargs)
context['has_voted'] = Vote.objects.has_voted(self.request.user.id, self.object)
return context
detail = CookieDetailView.as_view()
class ImportView(FormView):
template_name = 'cookies/import.html'
<|code_end|>
using the current file's imports:
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_POST
from django.views.generic import DetailView, FormView
from github import Github, GithubException
from bakery.cookies.models import Cookie
from bakery.cookies.forms import ImportForm
from bakery.socialize.models import Vote
from bakery.utils.vcs.gh import fork_repository
and any relevant context from other files:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
#
# Path: bakery/cookies/forms.py
# class ImportForm(forms.Form):
# url = forms.CharField(max_length=255)
#
# def import_cookie(self):
# url = self.cleaned_data['url']
# cookie = Cookie.objects.import_from_url(url)
# return cookie
#
# Path: bakery/socialize/models.py
# class Vote(models.Model):
# cookie = models.ForeignKey('cookies.Cookie')
# user = models.ForeignKey('auth.BakeryUser')
#
# datetime = models.DateTimeField(auto_now_add=True)
#
# objects = VoteManager()
#
# class Meta:
# unique_together = ('user', 'cookie')
#
# def __repr__(self):
# return '<Vote: cookie: {0}, user: {1}>'.format(self.cookie, self.user)
#
# Path: bakery/utils/vcs/gh.py
# def fork_repository(user, repo):
# """
# Forks the repository ``repo`` to the user ``user``.
#
# :return: Returns an instance of the newly forked ``PyGithub.Repository``.
# """
# return user.create_fork(repo)
. Output only the next line. | form_class = ImportForm |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
class CookieDetailView(DetailView):
model = Cookie
def get_object(self):
owner_name = self.kwargs['owner_name']
name = self.kwargs['name']
self.object = get_object_or_404(Cookie, owner_name=owner_name, name=name)
return self.object
def get_context_data(self, **kwargs):
context = super(CookieDetailView, self).get_context_data(**kwargs)
<|code_end|>
, determine the next line of code. You have imports:
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_POST
from django.views.generic import DetailView, FormView
from github import Github, GithubException
from bakery.cookies.models import Cookie
from bakery.cookies.forms import ImportForm
from bakery.socialize.models import Vote
from bakery.utils.vcs.gh import fork_repository
and context (class names, function names, or code) available:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
#
# Path: bakery/cookies/forms.py
# class ImportForm(forms.Form):
# url = forms.CharField(max_length=255)
#
# def import_cookie(self):
# url = self.cleaned_data['url']
# cookie = Cookie.objects.import_from_url(url)
# return cookie
#
# Path: bakery/socialize/models.py
# class Vote(models.Model):
# cookie = models.ForeignKey('cookies.Cookie')
# user = models.ForeignKey('auth.BakeryUser')
#
# datetime = models.DateTimeField(auto_now_add=True)
#
# objects = VoteManager()
#
# class Meta:
# unique_together = ('user', 'cookie')
#
# def __repr__(self):
# return '<Vote: cookie: {0}, user: {1}>'.format(self.cookie, self.user)
#
# Path: bakery/utils/vcs/gh.py
# def fork_repository(user, repo):
# """
# Forks the repository ``repo`` to the user ``user``.
#
# :return: Returns an instance of the newly forked ``PyGithub.Repository``.
# """
# return user.create_fork(repo)
. Output only the next line. | context['has_voted'] = Vote.objects.has_voted(self.request.user.id, self.object) |
Next line prediction: <|code_start|> def test_get_repo_ssh_url(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/muffins-on-dope/bakery',
body=read(__file__, '..', '_replay_data', 'bakery-repository'),
content_type='application/json; charset=utf-8'
)
repo = get_repo_from_url('git@github.com:muffins-on-dope/bakery')
self.assertEqual(repo.full_name, 'muffins-on-dope/bakery')
@httpretty.activate
def test_get_repo_ssh_url_git(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/muffins-on-dope/bakery',
body=read(__file__, '..', '_replay_data', 'bakery-repository'),
content_type='application/json; charset=utf-8'
)
repo = get_repo_from_url('git@github.com:muffins-on-dope/bakery.git')
self.assertEqual(repo.full_name, 'muffins-on-dope/bakery')
@httpretty.activate
def test_get_repo_https_url(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/muffins-on-dope/bakery',
body=read(__file__, '..', '_replay_data', 'bakery-repository'),
content_type='application/json; charset=utf-8'
)
repo = get_repo_from_url('https://github.com/muffins-on-dope/bakery')
self.assertEqual(repo.full_name, 'muffins-on-dope/bakery')
def test_get_repo_invalid_url(self):
<|code_end|>
. Use current file imports:
(import httpretty
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from django.utils.unittest import TestCase
from bakery.cookies.exceptions import InvalidRepositoryError
from bakery.utils.test import read
from bakery.utils.vcs.gh import _github_setup, get_repo_from_url)
and context including class names, function names, or small code snippets from other files:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# Path: bakery/utils/test.py
# def read(base, *paths):
# fp = join(dirname(base), *paths)
# with open(fp, 'r') as f:
# return f.read()
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# """
# Sets up the server-wide Github authentication for the project and returns
# an authorized `Github object from PyGithub
# <http://jacquev6.github.io/PyGithub/github.html>`_ which can be used to
# list users, repos, etc.
# """
#
# credentials = getattr(settings, 'GITHUB_CREDENTIALS', None)
#
# if credentials is None:
# raise ImproperlyConfigured('No GITHUB_CREDENTIALS set')
#
# # Verify that only allowed keys are passed
# allowed_keys = set(['login_or_token', 'password', 'client_id', 'client_secret'])
# given_keys = set(credentials.keys())
# forbidden_keys = given_keys - allowed_keys
# if given_keys - allowed_keys:
# raise ImproperlyConfigured('Invalid keys in GITHUB_CREDENTIALS: '
# '{0}'.format(','.join(forbidden_keys)))
#
# if 'password' in credentials and not 'login_or_token' in credentials:
# raise ImproperlyConfigured('You need to define the login_or_token to '
# 'use password authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_secret' in credentials and not 'client_id' in credentials:
# raise ImproperlyConfigured('You need to define the client_id to '
# 'use client_secret authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_id' in credentials and not 'client_secret' in credentials:
# raise ImproperlyConfigured('You need to define the client_secret to '
# 'use client_id authentiaction in GITHUB_CREDENTIALS')
#
# return Github(**credentials)
#
# def get_repo_from_url(url, gh_setup=github_setup):
# """
# Given an URL like (ssh://)git@github.com/user/repo.git or any other url
# that defines the root of a repository, this function returns the PyGithub
# resource describing that object.
#
# One can use :func:`get_cookie_data_from_repo` or
# :func:`get_mapping_file_from_repo` to get further information about that
# repository such as the content of the ``cookiecutter.json`` file.
#
# :param str url: The root URL to a github repository
# :param gh_setup: If not the server-wide authentiaction :data:`github_setup`
# should be used, this parameter can be set to another, e.g. user
# authenticated PyGithub object
# :return: Returns an instance of a ``PyGithub.Repository``.
# :raises: ``InvalidRepositoryError`` if the given URL does not match a known
# GitHub URL.
# """
# if 'git@github.com' in url:
# identifier = 'git@github.com'
# elif 'https://github.com/' in url:
# identifier = 'https://github.com'
# else:
# raise InvalidRepositoryError('{0} is not a valid GitHub URL'.format(url))
# index = url.index(identifier)
# length = len(identifier)
# start = length + index + 1 # +1 for separator after identifier
# full_name = url[start:]
# if full_name.endswith('.git'):
# full_name = full_name[:-4] # strip .git
# return get_repo_from_full_name(full_name, gh_setup)
. Output only the next line. | self.assertRaises(InvalidRepositoryError, |
Continue the code snippet: <|code_start|>class TestGithub(TestCase):
@override_settings(GITHUB_CREDENTIALS=None)
def test_github_credentials_none(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'something': 'value'})
def test_github_credentials_invalid_key(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'something': 'value', 'password': 'foo'})
def test_github_credentials_invalid_key2(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'password': 'foo'})
def test_github_credentials_missing_login(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'client_id': 'foo'})
def test_github_credentials_missing_client_secret(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'client_secret': 'foo'})
def test_github_credentials_missing_client_id(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@httpretty.activate
def test_get_repo_ssh_url(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/muffins-on-dope/bakery',
<|code_end|>
. Use current file imports:
import httpretty
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from django.utils.unittest import TestCase
from bakery.cookies.exceptions import InvalidRepositoryError
from bakery.utils.test import read
from bakery.utils.vcs.gh import _github_setup, get_repo_from_url
and context (classes, functions, or code) from other files:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# Path: bakery/utils/test.py
# def read(base, *paths):
# fp = join(dirname(base), *paths)
# with open(fp, 'r') as f:
# return f.read()
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# """
# Sets up the server-wide Github authentication for the project and returns
# an authorized `Github object from PyGithub
# <http://jacquev6.github.io/PyGithub/github.html>`_ which can be used to
# list users, repos, etc.
# """
#
# credentials = getattr(settings, 'GITHUB_CREDENTIALS', None)
#
# if credentials is None:
# raise ImproperlyConfigured('No GITHUB_CREDENTIALS set')
#
# # Verify that only allowed keys are passed
# allowed_keys = set(['login_or_token', 'password', 'client_id', 'client_secret'])
# given_keys = set(credentials.keys())
# forbidden_keys = given_keys - allowed_keys
# if given_keys - allowed_keys:
# raise ImproperlyConfigured('Invalid keys in GITHUB_CREDENTIALS: '
# '{0}'.format(','.join(forbidden_keys)))
#
# if 'password' in credentials and not 'login_or_token' in credentials:
# raise ImproperlyConfigured('You need to define the login_or_token to '
# 'use password authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_secret' in credentials and not 'client_id' in credentials:
# raise ImproperlyConfigured('You need to define the client_id to '
# 'use client_secret authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_id' in credentials and not 'client_secret' in credentials:
# raise ImproperlyConfigured('You need to define the client_secret to '
# 'use client_id authentiaction in GITHUB_CREDENTIALS')
#
# return Github(**credentials)
#
# def get_repo_from_url(url, gh_setup=github_setup):
# """
# Given an URL like (ssh://)git@github.com/user/repo.git or any other url
# that defines the root of a repository, this function returns the PyGithub
# resource describing that object.
#
# One can use :func:`get_cookie_data_from_repo` or
# :func:`get_mapping_file_from_repo` to get further information about that
# repository such as the content of the ``cookiecutter.json`` file.
#
# :param str url: The root URL to a github repository
# :param gh_setup: If not the server-wide authentiaction :data:`github_setup`
# should be used, this parameter can be set to another, e.g. user
# authenticated PyGithub object
# :return: Returns an instance of a ``PyGithub.Repository``.
# :raises: ``InvalidRepositoryError`` if the given URL does not match a known
# GitHub URL.
# """
# if 'git@github.com' in url:
# identifier = 'git@github.com'
# elif 'https://github.com/' in url:
# identifier = 'https://github.com'
# else:
# raise InvalidRepositoryError('{0} is not a valid GitHub URL'.format(url))
# index = url.index(identifier)
# length = len(identifier)
# start = length + index + 1 # +1 for separator after identifier
# full_name = url[start:]
# if full_name.endswith('.git'):
# full_name = full_name[:-4] # strip .git
# return get_repo_from_full_name(full_name, gh_setup)
. Output only the next line. | body=read(__file__, '..', '_replay_data', 'bakery-repository'), |
Here is a snippet: <|code_start|># -*- coding: utf-8 -*-
class TestGithub(TestCase):
@override_settings(GITHUB_CREDENTIALS=None)
def test_github_credentials_none(self):
<|code_end|>
. Write the next line using the current file imports:
import httpretty
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from django.utils.unittest import TestCase
from bakery.cookies.exceptions import InvalidRepositoryError
from bakery.utils.test import read
from bakery.utils.vcs.gh import _github_setup, get_repo_from_url
and context from other files:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# Path: bakery/utils/test.py
# def read(base, *paths):
# fp = join(dirname(base), *paths)
# with open(fp, 'r') as f:
# return f.read()
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# """
# Sets up the server-wide Github authentication for the project and returns
# an authorized `Github object from PyGithub
# <http://jacquev6.github.io/PyGithub/github.html>`_ which can be used to
# list users, repos, etc.
# """
#
# credentials = getattr(settings, 'GITHUB_CREDENTIALS', None)
#
# if credentials is None:
# raise ImproperlyConfigured('No GITHUB_CREDENTIALS set')
#
# # Verify that only allowed keys are passed
# allowed_keys = set(['login_or_token', 'password', 'client_id', 'client_secret'])
# given_keys = set(credentials.keys())
# forbidden_keys = given_keys - allowed_keys
# if given_keys - allowed_keys:
# raise ImproperlyConfigured('Invalid keys in GITHUB_CREDENTIALS: '
# '{0}'.format(','.join(forbidden_keys)))
#
# if 'password' in credentials and not 'login_or_token' in credentials:
# raise ImproperlyConfigured('You need to define the login_or_token to '
# 'use password authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_secret' in credentials and not 'client_id' in credentials:
# raise ImproperlyConfigured('You need to define the client_id to '
# 'use client_secret authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_id' in credentials and not 'client_secret' in credentials:
# raise ImproperlyConfigured('You need to define the client_secret to '
# 'use client_id authentiaction in GITHUB_CREDENTIALS')
#
# return Github(**credentials)
#
# def get_repo_from_url(url, gh_setup=github_setup):
# """
# Given an URL like (ssh://)git@github.com/user/repo.git or any other url
# that defines the root of a repository, this function returns the PyGithub
# resource describing that object.
#
# One can use :func:`get_cookie_data_from_repo` or
# :func:`get_mapping_file_from_repo` to get further information about that
# repository such as the content of the ``cookiecutter.json`` file.
#
# :param str url: The root URL to a github repository
# :param gh_setup: If not the server-wide authentiaction :data:`github_setup`
# should be used, this parameter can be set to another, e.g. user
# authenticated PyGithub object
# :return: Returns an instance of a ``PyGithub.Repository``.
# :raises: ``InvalidRepositoryError`` if the given URL does not match a known
# GitHub URL.
# """
# if 'git@github.com' in url:
# identifier = 'git@github.com'
# elif 'https://github.com/' in url:
# identifier = 'https://github.com'
# else:
# raise InvalidRepositoryError('{0} is not a valid GitHub URL'.format(url))
# index = url.index(identifier)
# length = len(identifier)
# start = length + index + 1 # +1 for separator after identifier
# full_name = url[start:]
# if full_name.endswith('.git'):
# full_name = full_name[:-4] # strip .git
# return get_repo_from_full_name(full_name, gh_setup)
, which may include functions, classes, or code. Output only the next line. | self.assertRaises(ImproperlyConfigured, _github_setup) |
Using the snippet: <|code_start|> def test_github_credentials_none(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'something': 'value'})
def test_github_credentials_invalid_key(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'something': 'value', 'password': 'foo'})
def test_github_credentials_invalid_key2(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'password': 'foo'})
def test_github_credentials_missing_login(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'client_id': 'foo'})
def test_github_credentials_missing_client_secret(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@override_settings(GITHUB_CREDENTIALS={'client_secret': 'foo'})
def test_github_credentials_missing_client_id(self):
self.assertRaises(ImproperlyConfigured, _github_setup)
@httpretty.activate
def test_get_repo_ssh_url(self):
httpretty.register_uri(httpretty.GET,
'https://api.github.com/repos/muffins-on-dope/bakery',
body=read(__file__, '..', '_replay_data', 'bakery-repository'),
content_type='application/json; charset=utf-8'
)
<|code_end|>
, determine the next line of code. You have imports:
import httpretty
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from django.utils.unittest import TestCase
from bakery.cookies.exceptions import InvalidRepositoryError
from bakery.utils.test import read
from bakery.utils.vcs.gh import _github_setup, get_repo_from_url
and context (class names, function names, or code) available:
# Path: bakery/cookies/exceptions.py
# class InvalidRepositoryError(ValueError):
# pass
#
# Path: bakery/utils/test.py
# def read(base, *paths):
# fp = join(dirname(base), *paths)
# with open(fp, 'r') as f:
# return f.read()
#
# Path: bakery/utils/vcs/gh.py
# def _github_setup():
# """
# Sets up the server-wide Github authentication for the project and returns
# an authorized `Github object from PyGithub
# <http://jacquev6.github.io/PyGithub/github.html>`_ which can be used to
# list users, repos, etc.
# """
#
# credentials = getattr(settings, 'GITHUB_CREDENTIALS', None)
#
# if credentials is None:
# raise ImproperlyConfigured('No GITHUB_CREDENTIALS set')
#
# # Verify that only allowed keys are passed
# allowed_keys = set(['login_or_token', 'password', 'client_id', 'client_secret'])
# given_keys = set(credentials.keys())
# forbidden_keys = given_keys - allowed_keys
# if given_keys - allowed_keys:
# raise ImproperlyConfigured('Invalid keys in GITHUB_CREDENTIALS: '
# '{0}'.format(','.join(forbidden_keys)))
#
# if 'password' in credentials and not 'login_or_token' in credentials:
# raise ImproperlyConfigured('You need to define the login_or_token to '
# 'use password authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_secret' in credentials and not 'client_id' in credentials:
# raise ImproperlyConfigured('You need to define the client_id to '
# 'use client_secret authentiaction in GITHUB_CREDENTIALS')
#
# if 'client_id' in credentials and not 'client_secret' in credentials:
# raise ImproperlyConfigured('You need to define the client_secret to '
# 'use client_id authentiaction in GITHUB_CREDENTIALS')
#
# return Github(**credentials)
#
# def get_repo_from_url(url, gh_setup=github_setup):
# """
# Given an URL like (ssh://)git@github.com/user/repo.git or any other url
# that defines the root of a repository, this function returns the PyGithub
# resource describing that object.
#
# One can use :func:`get_cookie_data_from_repo` or
# :func:`get_mapping_file_from_repo` to get further information about that
# repository such as the content of the ``cookiecutter.json`` file.
#
# :param str url: The root URL to a github repository
# :param gh_setup: If not the server-wide authentiaction :data:`github_setup`
# should be used, this parameter can be set to another, e.g. user
# authenticated PyGithub object
# :return: Returns an instance of a ``PyGithub.Repository``.
# :raises: ``InvalidRepositoryError`` if the given URL does not match a known
# GitHub URL.
# """
# if 'git@github.com' in url:
# identifier = 'git@github.com'
# elif 'https://github.com/' in url:
# identifier = 'https://github.com'
# else:
# raise InvalidRepositoryError('{0} is not a valid GitHub URL'.format(url))
# index = url.index(identifier)
# length = len(identifier)
# start = length + index + 1 # +1 for separator after identifier
# full_name = url[start:]
# if full_name.endswith('.git'):
# full_name = full_name[:-4] # strip .git
# return get_repo_from_full_name(full_name, gh_setup)
. Output only the next line. | repo = get_repo_from_url('git@github.com:muffins-on-dope/bakery') |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
DUMPS_KWARGS = {
'cls': DjangoJSONEncoder,
'indent': True if settings.DEBUG else None
}
class JSONResponse(HttpResponse):
def __init__(self, data):
super(JSONResponse, self).__init__(
json.dumps(data, **DUMPS_KWARGS),
content_type='application/json'
)
def cookies_list(request, page=1):
page = int(page)
if page < 1:
page = 1
per_page = settings.BAKERY_API_COOKIES_PER_PAGE
start = (page - 1) * per_page
end = page * per_page
<|code_end|>
, determine the next line of code. You have imports:
import json
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder, Serializer
from django.db.models import Q
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from django.utils.encoding import smart_str
from bakery.cookies.models import Cookie
and context (class names, function names, or code) available:
# Path: bakery/cookies/models.py
# class Cookie(models.Model):
# name = models.CharField(_('Name'), max_length=50)
# owner_name = models.CharField(_('Owner name'), max_length=50)
# url = models.URLField(_('URL'), unique=True)
# owner = models.ForeignKey(get_user_model(), verbose_name=_('User'),
# on_delete=models.CASCADE)
# description = models.TextField(_('Description'), blank=True)
# last_change = models.DateTimeField(_('Last change'), null=True)
# last_poll = models.DateTimeField(_('Last poll'), null=True)
# backend = models.CharField(_('Backend'), max_length=25)
# mapping = JSONField(default={})
#
# # Hosting Statistics
# license = models.CharField(_('License'), max_length=50, blank=True)
# repo_watchers = models.IntegerField(_("repo watchers"), default=0)
# repo_forks = models.IntegerField(_("repo forks"), default=0)
# participants = models.TextField(_("Participants"),
# help_text="List of collaborats/participants on the project", null=True)
# language = models.CharField(_('Language'), max_length=50, null=True)
# homepage = models.CharField(_('Homepage'), max_length=255, null=True)
# clone_urls = JSONField(default={})
#
# # Social aspect, such as votes etc
# votes = models.ManyToManyField(BakeryUser, through='socialize.Vote',
# related_name='votes')
#
# objects = CookieManager()
#
# class Meta:
# ordering = ['-last_change', 'name']
# unique_together = ('name', 'owner_name')
# verbose_name = _('Cookie')
# verbose_name_plural = _('Cookies')
#
# def __str__(self):
# return self.name
#
# def get_absolute_url(self):
# return reverse_lazy('cookies:detail', args=(self.owner_name, self.name))
#
# @property
# def full_name(self):
# return "{0}/{1}".format(self.owner_name, self.name)
#
# @property
# def short_description(self):
# descr = self.mapping.get('project_short_description', None)
# if descr is None:
# descr = self.description
# return _punctuation.split(descr)[0]
#
# @property
# def activity(self):
# if self.last_change >= (datetime.utcnow() - timedelta(days=365)):
# return ACTIVITY['ancient']
# elif self.last_change >= (datetime.utcnow() - timedelta(days=10)):
# return ACTIVITY['moderate']
# else:
# return ACTIVITY['active']
#
# def fork(self, user):
# """
# :raises: ``UnknownObjectException`` is raised if the repository cannot
# be located.
# """
# fork = fork_repository(user, self.repository)
# cookie_dict = get_cookie_data_from_repo(fork)
# owner_dict = cookie_dict.pop('_owner', None)
# Cookie.objects.import_from_cookie_dict(
# cookie_dict,
# owner_dict,
# self.repository
# )
#
# @property
# def repository(self):
# repository = getattr(self, '_repository', None)
# if not repository:
# repository = get_repo_from_full_name(self.full_name)
# setattr(self, '_repository', repository)
# return repository
#
# @property
# def mapping_pretty(self):
# mapping_pretty = getattr(self, '_mapping_pretty', None)
# if not mapping_pretty:
# mapping_pretty = json.dumps(self.mapping, ensure_ascii=False,
# indent=4, sort_keys=True)
# setattr(self, '_mapping_pretty', mapping_pretty)
# return mapping_pretty
#
# @property
# def clone_urls_tuple(self):
# return sorted(tuple(self.clone_urls.items()))
. Output only the next line. | cookies = list(Cookie.objects.values('name', 'url', 'description', 'last_change').all()[start:end]) |
Continue the code snippet: <|code_start|># -*- coding: utf-8 -*-
def do_vote(user, cookie):
assert user and cookie
if not Vote.objects.has_voted(user, cookie):
with transaction.commit_on_success():
vote = Vote.objects.create(cookie=cookie, user=user)
candy_type = random.choice(CANDIES)
Candy.objects.create(candy_type=candy_type[0], user=user, vote=vote)
def do_unvote(user, cookie):
assert user and cookie
Vote.objects.get_for_user_and_cookie(user, cookie).delete()
class Vote(models.Model):
cookie = models.ForeignKey('cookies.Cookie')
user = models.ForeignKey('auth.BakeryUser')
datetime = models.DateTimeField(auto_now_add=True)
<|code_end|>
. Use current file imports:
import random
from django.db import models, transaction
from django.utils.translation import ugettext_lazy as _
from bakery.socialize.managers import VoteManager
and context (classes, functions, or code) from other files:
# Path: bakery/socialize/managers.py
# class VoteManager(Manager):
#
# use_for_related_fields = True
#
# def get_query_set(self):
# return VoteQuerySet(self.model)
#
# def get_for_cookie(self, cookie):
# return self.get_query_set().get_for_cookie(cookie)
#
# def get_for_user(self, user):
# return self.get_query_set().get_for_user(user)
#
# def get_for_user_and_cookie(self, user, cookie):
# return self.get_query_set().get_for_user_and_cookie(user, cookie)
#
# def has_voted(self, user, cookie):
# return self.get_query_set().has_voted(user, cookie)
. Output only the next line. | objects = VoteManager() |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
class BakeryUser(AbstractBaseUser):
username = models.CharField(_('Username'), max_length=50, unique=True)
email = models.EmailField(_('Email'), max_length=254, unique=True)
name = models.CharField(_('Name'), max_length=100, blank=True, null=True)
is_superuser = models.BooleanField(_('Superuser'), default=False)
is_staff = models.BooleanField(_('Staff'), default=False)
is_active = models.BooleanField(_('Active'), default=True)
is_organization = models.BooleanField(_('Organization'))
profile_url = models.URLField(_('Profile'), blank=True, null=True)
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.core.urlresolvers import reverse_lazy
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.auth.models import AbstractBaseUser
from bakery.auth.managers import BakeryUserManager
from bakery.socialize.models import do_vote, CANDIES
from bakery.utils.gravatar import get_gravatar
and context:
# Path: bakery/auth/managers.py
# class BakeryUserManager(UserManager):
# pass
#
# Path: bakery/socialize/models.py
# def do_vote(user, cookie):
# assert user and cookie
# if not Vote.objects.has_voted(user, cookie):
# with transaction.commit_on_success():
# vote = Vote.objects.create(cookie=cookie, user=user)
# candy_type = random.choice(CANDIES)
# Candy.objects.create(candy_type=candy_type[0], user=user, vote=vote)
#
# CANDIES = (
# ('rice-cracker', _('Rice Cracker'), '🍘'),
# ('candy', _('Candy'), '🍬'),
# ('lollipop', _('Lollipop'), '🍭'),
# ('chocolate-bar', _('Chocolate Bar'), '🍫'),
# ('doughnut', _('Doughnut'), '🍩'),
# ('cookie', _('Cookie'), '🍪'),
# )
#
# Path: bakery/utils/gravatar.py
# def get_gravatar(email, secure=False, rating='g', size=80, default='mm'):
# """Generate a link to the users' Gravatar."""
# assert rating.lower() in RATINGS
# assert MIN_SIZE <= size <= MAX_SIZE
#
# url = SECURE_BASE_URL if secure else BASE_URL
#
# options = {'s': size, 'r': rating, 'd': default}
# url += email_hash(email) + '?' + urlencode(options)
# return url
which might include code, classes, or functions. Output only the next line. | objects = BakeryUserManager() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.