| Instruction (string, lengths 362–7.83k) | output_code (string, lengths 1–945) |
|---|---|
Using the snippet: <|code_start|> 'version': 1,
'disable_existing_loggers': True,
'handlers': {
'queue': {
'class': 'logutils.queue.QueueHandler',
'queue': queue,
},
},
'loggers': {
'boto': {
'handlers': ['queue']
},
'lacli': {
'level': 'DEBUG',
'handlers': ['queue']
},
'lacore': {
'level': 'DEBUG',
'handlers': ['queue']
},
},
'root': {
'level': 'DEBUG',
},
})
def initworker(logq, progq, ctrlq, stdin=None):
"""initializer that sets up logging and progress from sub procs """
logToQueue(logq)
<|code_end|>
, determine the next line of code. You have imports:
import logging
import sys
import os
import signal
from lacli.log import getLogger
from lacli.progress import progressToQueue
from lacli.control import controlByQueue
from multiprocessing import cpu_count, pool, current_process, Process
from setproctitle import setproctitle
and context (class names, function names, or code) available:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/progress.py
# def progressToQueue(queue):
# global progress
# progress = queue
#
# Path: lacli/control.py
# def controlByQueue(queue):
# global controlq
# controlq = queue
. Output only the next line. | progressToQueue(progq) |
Based on the snippet: <|code_start|> 'disable_existing_loggers': True,
'handlers': {
'queue': {
'class': 'logutils.queue.QueueHandler',
'queue': queue,
},
},
'loggers': {
'boto': {
'handlers': ['queue']
},
'lacli': {
'level': 'DEBUG',
'handlers': ['queue']
},
'lacore': {
'level': 'DEBUG',
'handlers': ['queue']
},
},
'root': {
'level': 'DEBUG',
},
})
def initworker(logq, progq, ctrlq, stdin=None):
"""initializer that sets up logging and progress from sub procs """
logToQueue(logq)
progressToQueue(progq)
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
import sys
import os
import signal
from lacli.log import getLogger
from lacli.progress import progressToQueue
from lacli.control import controlByQueue
from multiprocessing import cpu_count, pool, current_process, Process
from setproctitle import setproctitle
and context (classes, functions, sometimes code) from other files:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/progress.py
# def progressToQueue(queue):
# global progress
# progress = queue
#
# Path: lacli/control.py
# def controlByQueue(queue):
# global controlq
# controlq = queue
. Output only the next line. | controlByQueue(ctrlq) |
Given snippet: <|code_start|>
class ControlHandler(object):
def __init__(self):
self.q = None
def __enter__(self):
self.q = Queue()
controlByQueue(self.q)
return self.q
def __exit__(self, type, value, traceback):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from lacli.log import getLogger
from lacli.exceptions import PauseEvent
from multiprocessing import Queue, active_children
from Queue import Empty
and context:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
which might include code, classes, or functions. Output only the next line. | getLogger().debug("control handler exiting.") |
Given the following code snippet before the placeholder: <|code_start|>
def pause(self):
if self.q is not None:
children = len(active_children())
getLogger().debug(
"ControlHandler pausing {} children".format(children))
map(self.q.put, [{'pause': True}] * children)
else:
getLogger().debug(
"ControlHandler.pause() called " +
"when no control context in effect")
controlq = None
def controlByQueue(queue):
global controlq
controlq = queue
def stopControlByQueue():
global controlq
controlq = None
def readControl():
global controlq
try:
msg = controlq.get(False)
if 'pause' in msg:
<|code_end|>
, predict the next line using imports from the current file:
from lacli.log import getLogger
from lacli.exceptions import PauseEvent
from multiprocessing import Queue, active_children
from Queue import Empty
and context including class names, function names, and sometimes code from other files:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
. Output only the next line. | raise PauseEvent() |
Next line prediction: <|code_start|> Usage: lacli login [<username> <password>]
"""
prompt = 'lacli:login> '
email = None
def makecmd(self, options):
cmd = ["login"]
if options['<username>']:
cmd.append(options['<username>'])
if options['<password>']:
cmd.append(options['<password>'])
return " ".join(cmd)
@property
def username(self):
return self.prefs['api']['user']
@username.setter
def username(self, newuser):
self.prefs['api']['user'] = newuser
@property
def password(self):
return self.prefs['api']['pass']
@password.setter
def password(self, newpassword):
self.prefs['api']['pass'] = newpassword
<|code_end|>
. Use current file imports:
(from lacli.cmdutil import command
from lacore.async import block
from lacli.command import LaBaseCommand
from lacli.log import getLogger
from lacli.exceptions import ApiNoSessionError
from lacore.exceptions import (ApiAuthException, ApiErrorException,
ApiUnavailableException)
from twisted.internet import defer
from re import match, IGNORECASE
from getpass import getpass)
and context including class names, function names, or small code snippets from other files:
# Path: lacli/cmdutil.py
# def command(**types):
# """ Decorator to parse command options with docopt and
# validate the types.
# """
# def decorate(func):
# @wraps(func)
# def wrap(self, line):
# kwargs = {}
# try:
# opts = docopt(func.__doc__, shlex.split(line))
# for opt, val in opts.iteritems():
# kw = opt.strip('<>')
# if val and kw in types:
# if types[kw] == unicode:
# kwargs[kw] = get_unicode(val)
# else:
# kwargs[kw] = types[kw](val)
# except ValueError as e:
# print "error: invalid value:", e
# print func.__doc__
# return
# except DocoptExit as e:
# print e
# return
# try:
# func(self, **kwargs)
# except CacheInitException as e:
# getLogger().debug("Cache not initialized", exc_info=True)
# print "Could not initialize cache"
# return
# return wrap
# return decorate
#
# Path: lacli/command.py
# class LaCertsCommand(LaBaseCommand):
# class LaCapsuleCommand(LaBaseCommand):
# class LaArchiveCommand(LaBaseCommand):
# class LaFetchCommand(LaBaseCommand):
# def makecmd(self, options):
# def do_list(self):
# def do_delete(self, cert_id=None, srm=None):
# def _countdown():
# def do_export(self, cert_id=None):
# def do_print(self, cert_id=None):
# def do_import(self, filename=None):
# def makecmd(self, options):
# def do_list(self):
# def do_archives(self, capsule=None):
# def __init__(self, *args, **kwargs):
# def setopt(self, options):
# def makecmd(self, options):
# def do_upload(self, index=1, capsule=None):
# def _poll_status_async(self, link):
# def _poll_status(self, link):
# def upload_async(self, docs, fname, progq, state):
# def upload(self, docs, fname, progq, state):
# def do_create(self, directory=None, title="my archive", description=None):
# def mycb(path, rel):
# def do_list(self):
# def do_status(self, index=1):
# def do_extract(self, path=None, dest=None, cert_id=None, cert_file=None):
# def extract(cert, archive, dest=dest):
# def _print(f):
# def complete_put(self, text, line, begidx, endidx): # pragma: no cover
# def do_reset(self, index):
# def do_delete(self, index=None, srm=None):
# def makecmd(self, options):
# def do_fetch(self, archiveid, key=None):
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class ApiNoSessionError(BaseAppException):
# msg = "no session credentials provided."
. Output only the next line. | @command(username=str, password=str) |
Given the code snippet: <|code_start|> @command(username=str, password=str)
def do_login(self, username=None, password=None):
"""
Usage: login [<username>] [<password>]
"""
save = (self.username, self.password)
if not username:
if self.batch:
username = self.username
else:
username = self.input("Username/email: ")
if not password:
if self.batch:
password = self.password
else:
password = getpass("Password: ")
try:
self.login_batch(username, password)
print "authentication succesfull as", self.email
if not self.batch:
if self.username != save[0] or self.password != save[1]:
if match('y(es)?$', self.input("Save credentials? "),
IGNORECASE):
self.registry.save_session(
self.username, self.password)
except Exception:
<|code_end|>
, generate the next line using the imports in this file:
from lacli.cmdutil import command
from lacore.async import block
from lacli.command import LaBaseCommand
from lacli.log import getLogger
from lacli.exceptions import ApiNoSessionError
from lacore.exceptions import (ApiAuthException, ApiErrorException,
ApiUnavailableException)
from twisted.internet import defer
from re import match, IGNORECASE
from getpass import getpass
and context (functions, classes, or occasionally code) from other files:
# Path: lacli/cmdutil.py
# def command(**types):
# """ Decorator to parse command options with docopt and
# validate the types.
# """
# def decorate(func):
# @wraps(func)
# def wrap(self, line):
# kwargs = {}
# try:
# opts = docopt(func.__doc__, shlex.split(line))
# for opt, val in opts.iteritems():
# kw = opt.strip('<>')
# if val and kw in types:
# if types[kw] == unicode:
# kwargs[kw] = get_unicode(val)
# else:
# kwargs[kw] = types[kw](val)
# except ValueError as e:
# print "error: invalid value:", e
# print func.__doc__
# return
# except DocoptExit as e:
# print e
# return
# try:
# func(self, **kwargs)
# except CacheInitException as e:
# getLogger().debug("Cache not initialized", exc_info=True)
# print "Could not initialize cache"
# return
# return wrap
# return decorate
#
# Path: lacli/command.py
# class LaCertsCommand(LaBaseCommand):
# class LaCapsuleCommand(LaBaseCommand):
# class LaArchiveCommand(LaBaseCommand):
# class LaFetchCommand(LaBaseCommand):
# def makecmd(self, options):
# def do_list(self):
# def do_delete(self, cert_id=None, srm=None):
# def _countdown():
# def do_export(self, cert_id=None):
# def do_print(self, cert_id=None):
# def do_import(self, filename=None):
# def makecmd(self, options):
# def do_list(self):
# def do_archives(self, capsule=None):
# def __init__(self, *args, **kwargs):
# def setopt(self, options):
# def makecmd(self, options):
# def do_upload(self, index=1, capsule=None):
# def _poll_status_async(self, link):
# def _poll_status(self, link):
# def upload_async(self, docs, fname, progq, state):
# def upload(self, docs, fname, progq, state):
# def do_create(self, directory=None, title="my archive", description=None):
# def mycb(path, rel):
# def do_list(self):
# def do_status(self, index=1):
# def do_extract(self, path=None, dest=None, cert_id=None, cert_file=None):
# def extract(cert, archive, dest=dest):
# def _print(f):
# def complete_put(self, text, line, begidx, endidx): # pragma: no cover
# def do_reset(self, index):
# def do_delete(self, index=None, srm=None):
# def makecmd(self, options):
# def do_fetch(self, archiveid, key=None):
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class ApiNoSessionError(BaseAppException):
# msg = "no session credentials provided."
. Output only the next line. | getLogger().debug("auth failure", exc_info=True) |
Using the snippet: <|code_start|> try:
self.login_batch(username, password)
print "authentication succesfull as", self.email
if not self.batch:
if self.username != save[0] or self.password != save[1]:
if match('y(es)?$', self.input("Save credentials? "),
IGNORECASE):
self.registry.save_session(
self.username, self.password)
except Exception:
getLogger().debug("auth failure", exc_info=True)
print "authentication failed"
def login_batch(self, username, password):
block(self.login_async)(username, password)
@defer.inlineCallbacks
def login_async(self, username, password):
self.username = username
self.password = password
session = self.registry.new_session()
try:
account = yield session.async_account
self.email = account['email']
self.session = session
getLogger().debug("logged in {}".format(self.email))
except ApiErrorException:
raise
except ApiUnavailableException:
raise
<|code_end|>
, determine the next line of code. You have imports:
from lacli.cmdutil import command
from lacore.async import block
from lacli.command import LaBaseCommand
from lacli.log import getLogger
from lacli.exceptions import ApiNoSessionError
from lacore.exceptions import (ApiAuthException, ApiErrorException,
ApiUnavailableException)
from twisted.internet import defer
from re import match, IGNORECASE
from getpass import getpass
and context (class names, function names, or code) available:
# Path: lacli/cmdutil.py
# def command(**types):
# """ Decorator to parse command options with docopt and
# validate the types.
# """
# def decorate(func):
# @wraps(func)
# def wrap(self, line):
# kwargs = {}
# try:
# opts = docopt(func.__doc__, shlex.split(line))
# for opt, val in opts.iteritems():
# kw = opt.strip('<>')
# if val and kw in types:
# if types[kw] == unicode:
# kwargs[kw] = get_unicode(val)
# else:
# kwargs[kw] = types[kw](val)
# except ValueError as e:
# print "error: invalid value:", e
# print func.__doc__
# return
# except DocoptExit as e:
# print e
# return
# try:
# func(self, **kwargs)
# except CacheInitException as e:
# getLogger().debug("Cache not initialized", exc_info=True)
# print "Could not initialize cache"
# return
# return wrap
# return decorate
#
# Path: lacli/command.py
# class LaCertsCommand(LaBaseCommand):
# class LaCapsuleCommand(LaBaseCommand):
# class LaArchiveCommand(LaBaseCommand):
# class LaFetchCommand(LaBaseCommand):
# def makecmd(self, options):
# def do_list(self):
# def do_delete(self, cert_id=None, srm=None):
# def _countdown():
# def do_export(self, cert_id=None):
# def do_print(self, cert_id=None):
# def do_import(self, filename=None):
# def makecmd(self, options):
# def do_list(self):
# def do_archives(self, capsule=None):
# def __init__(self, *args, **kwargs):
# def setopt(self, options):
# def makecmd(self, options):
# def do_upload(self, index=1, capsule=None):
# def _poll_status_async(self, link):
# def _poll_status(self, link):
# def upload_async(self, docs, fname, progq, state):
# def upload(self, docs, fname, progq, state):
# def do_create(self, directory=None, title="my archive", description=None):
# def mycb(path, rel):
# def do_list(self):
# def do_status(self, index=1):
# def do_extract(self, path=None, dest=None, cert_id=None, cert_file=None):
# def extract(cert, archive, dest=dest):
# def _print(f):
# def complete_put(self, text, line, begidx, endidx): # pragma: no cover
# def do_reset(self, index):
# def do_delete(self, index=None, srm=None):
# def makecmd(self, options):
# def do_fetch(self, archiveid, key=None):
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class ApiNoSessionError(BaseAppException):
# msg = "no session credentials provided."
. Output only the next line. | except ApiNoSessionError: |
Predict the next line for this snippet: <|code_start|>
class login_async(object):
def __init__(self, f, obj=None):
self.f = f
self.obj = obj
update_wrapper(self, self.f)
def __get__(self, obj, cls):
return wraps(self.f)(login(self.f, obj))
def dologin(self, prefs):
return self.obj.registry.cmd.login.login_async(
prefs.get('user'), prefs.get('pass'))
@defer.inlineCallbacks
def loginfirst(self, prefs, *args, **kwargs):
try:
yield self.dologin(prefs)
r = yield self.f(self.obj, *args, **kwargs)
defer.returnValue(r)
except Exception:
<|code_end|>
with the help of current file imports:
from functools import update_wrapper, wraps
from twisted.internet import defer
from lacli.log import getLogger
and context from other files:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
, which may contain function names, class names, or code. Output only the next line. | getLogger().debug("unhandled error in login decorator", |
Based on the snippet: <|code_start|>
def command(**types):
""" Decorator to parse command options with docopt and
validate the types.
"""
def decorate(func):
@wraps(func)
def wrap(self, line):
kwargs = {}
try:
opts = docopt(func.__doc__, shlex.split(line))
for opt, val in opts.iteritems():
kw = opt.strip('<>')
if val and kw in types:
if types[kw] == unicode:
kwargs[kw] = get_unicode(val)
else:
kwargs[kw] = types[kw](val)
except ValueError as e:
print "error: invalid value:", e
print func.__doc__
return
except DocoptExit as e:
print e
return
try:
func(self, **kwargs)
except CacheInitException as e:
<|code_end|>
, predict the immediate next line with the help of imports:
import shlex
from lacli.log import getLogger
from lacore.enc import get_unicode
from lacli.exceptions import CacheInitException
from functools import wraps
from docopt import docopt, DocoptExit
and context (classes, functions, sometimes code) from other files:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class CacheInitException(BaseAppException):
# msg = "Application cache not initialized correctly"
. Output only the next line. | getLogger().debug("Cache not initialized", exc_info=True) |
Using the snippet: <|code_start|>
def command(**types):
""" Decorator to parse command options with docopt and
validate the types.
"""
def decorate(func):
@wraps(func)
def wrap(self, line):
kwargs = {}
try:
opts = docopt(func.__doc__, shlex.split(line))
for opt, val in opts.iteritems():
kw = opt.strip('<>')
if val and kw in types:
if types[kw] == unicode:
kwargs[kw] = get_unicode(val)
else:
kwargs[kw] = types[kw](val)
except ValueError as e:
print "error: invalid value:", e
print func.__doc__
return
except DocoptExit as e:
print e
return
try:
func(self, **kwargs)
<|code_end|>
, determine the next line of code. You have imports:
import shlex
from lacli.log import getLogger
from lacore.enc import get_unicode
from lacli.exceptions import CacheInitException
from functools import wraps
from docopt import docopt, DocoptExit
and context (class names, function names, or code) available:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/exceptions.py
# class CacheInitException(BaseAppException):
# msg = "Application cache not initialized correctly"
. Output only the next line. | except CacheInitException as e: |
Continue the code snippet: <|code_start|>
def init_prefs(self):
prefs = self.prefs['api']
if prefs.get('url') is None:
prefs['url'] = API_URL
if not prefs['user']:
prefs['user'] = self._saved_session[0]
prefs['pass'] = self._saved_session[1]
return prefs
def new_session(self, prefs=None):
if not prefs:
prefs = self.init_prefs()
return self.prefs['api']['factory'](prefs)
def netarsee(self):
ours = os.path.join(self.cache.home, ".netrc")
users = os.path.expanduser('~/.netrc')
if not os.path.exists(ours) and os.path.exists(users):
return users
return ours
@cached_property
def _saved_session(self):
hostname = urlparse(self.prefs['api']['url']).hostname
try:
for host, creds in netrc(self.netarsee()).hosts.iteritems():
if host == hostname:
return (creds[0], creds[2])
except:
<|code_end|>
. Use current file imports:
import os
from urlparse import urlparse
from netrc import netrc
from lacli.log import getLogger
from lacore.decorators import cached_property
and context (classes, functions, or code) from other files:
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
. Output only the next line. | getLogger().debug("Couldn't read from netrc", exc_info=True) |
Predict the next line after this snippet: <|code_start|> while True:
key = rs.next(self.conn.timeout())
getLogger().debug("got key {} with etag: {}".format(
key.name, key.etag))
etags.append(key.etag)
except StopIteration:
pass
except WorkerFailureError:
getLogger().debug("error getting result.", exc_info=True)
if not etags:
raise UploadEmptyError()
if hasattr(self.upload, 'complete_upload'):
try:
key = self.conn.complete_multipart(self.upload, etags)
except Exception as e:
getLogger().debug("error completing multipart", exc_info=True)
raise CloudProviderUploadError(e)
name = key.key_name
else:
name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
<|code_end|>
using the current file's imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and any relevant context from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | make_progress({'part': seq, 'tx': 0}) |
Continue the code snippet: <|code_start|> pass
except WorkerFailureError:
getLogger().debug("error getting result.", exc_info=True)
if not etags:
raise UploadEmptyError()
if hasattr(self.upload, 'complete_upload'):
try:
key = self.conn.complete_multipart(self.upload, etags)
except Exception as e:
getLogger().debug("error completing multipart", exc_info=True)
raise CloudProviderUploadError(e)
name = key.key_name
else:
name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
make_progress({'part': seq, 'tx': 0})
skip = self.source.chunkstart(uploaded)
newsource = ChunkedFile(
self.source.path, skip=skip, chunk=self.source.chunk)
size = size - newsource.size
getLogger().debug("saving progress for {}".format(key))
<|code_end|>
. Use current file imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and context (classes, functions, or code) from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | save_progress(name, size) |
Based on the snippet: <|code_start|> self.key = key
self.upload_id = None
self.complete = None
self.tempdir = None
self.step = step
def __str__(self):
return "<MPUpload key={} id={} source={}>".format(
self.key, self.upload_id, self.source)
def iterargs(self, chunks):
partinfo = repeat(None)
if not self.source.isfile:
partinfo = self.source._savedchunks(self.tempdir)
for seq, info in izip(xrange(chunks), partinfo):
yield {'uploader': self, 'seq': seq, 'fname': info}
def _getupload(self):
if self.source.chunks > 1 and self.source.isfile:
if self.upload_id is None:
return self.conn.newupload(self.key)
else:
return self.conn.getupload(self.upload_id)
else:
return self.conn.newkey(self.key)
def __enter__(self):
try:
self.upload = self._getupload()
except Exception as e:
<|code_end|>
, predict the immediate next line with the help of imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and context (classes, functions, sometimes code) from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | getLogger().debug("error getting upload", exc_info=True) |
Predict the next line after this snippet: <|code_start|>
if not etags:
raise UploadEmptyError()
if hasattr(self.upload, 'complete_upload'):
try:
key = self.conn.complete_multipart(self.upload, etags)
except Exception as e:
getLogger().debug("error completing multipart", exc_info=True)
raise CloudProviderUploadError(e)
name = key.key_name
else:
name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
make_progress({'part': seq, 'tx': 0})
skip = self.source.chunkstart(uploaded)
newsource = ChunkedFile(
self.source.path, skip=skip, chunk=self.source.chunk)
size = size - newsource.size
getLogger().debug("saving progress for {}".format(key))
save_progress(name, size)
return (key.etag, newsource)
<|code_end|>
using the current file's imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and any relevant context from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | @with_low_priority |
Predict the next line after this snippet: <|code_start|> getLogger().debug("got key {} with etag: {}".format(
key.name, key.etag))
etags.append(key.etag)
except StopIteration:
pass
except WorkerFailureError:
getLogger().debug("error getting result.", exc_info=True)
if not etags:
raise UploadEmptyError()
if hasattr(self.upload, 'complete_upload'):
try:
key = self.conn.complete_multipart(self.upload, etags)
except Exception as e:
getLogger().debug("error completing multipart", exc_info=True)
raise CloudProviderUploadError(e)
name = key.key_name
else:
name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
make_progress({'part': seq, 'tx': 0})
skip = self.source.chunkstart(uploaded)
<|code_end|>
using the current file's imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and any relevant context from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | newsource = ChunkedFile( |
Given the code snippet: <|code_start|> if hasattr(self.upload, 'cancel_upload'):
self.upload.cancel_upload()
if self.tempdir is not None:
os.rmdir(self.tempdir)
return type is None
def submit_job(self, pool):
getLogger().debug("total of %d upload jobs for workers..",
self.source.chunks)
chunks = self.source.chunks
step = self.step
if step is None:
step = len(active_children()) or 5
if chunks > step:
chunks = step
return pool.imap(upload_part, self.iterargs(chunks))
def get_result(self, rs):
etags = []
key = name = None
newsource = None
if rs is not None:
try:
while True:
key = rs.next(self.conn.timeout())
getLogger().debug("got key {} with etag: {}".format(
key.name, key.etag))
etags.append(key.etag)
except StopIteration:
pass
<|code_end|>
, generate the next line using the imports in this file:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and context (functions, classes, or occasionally code) from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | except WorkerFailureError: |
Using the snippet: <|code_start|> with self.source.chunkfile(seq, **kwargs) as part:
try:
def cb(tx, total):
readControl()
make_progress({'part': seq,
'tx': tx,
'total': total})
if hasattr(self.upload, 'upload_part_from_file'):
key = self.upload.upload_part_from_file(
fp=part,
part_num=seq+1,
cb=cb,
num_cb=100,
size=self.source.chunksize(seq),
# although not necessary, boto does it,
# good to know how:
md5=part.hash
)
getLogger().debug("uploaded multi part key %s/%d",
key, seq+1)
else:
self.upload.set_contents_from_file(
fp=part,
cb=cb,
num_cb=100,
md5=part.hash
)
key = self.upload
getLogger().debug("uploaded single part key %s", key)
<|code_end|>
, determine the next line of code. You have imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and context (class names, function names, or code) available:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | except PauseEvent: |
Predict the next line for this snippet: <|code_start|> self.upload_id = None
self.complete = None
self.tempdir = None
self.step = step
def __str__(self):
return "<MPUpload key={} id={} source={}>".format(
self.key, self.upload_id, self.source)
def iterargs(self, chunks):
partinfo = repeat(None)
if not self.source.isfile:
partinfo = self.source._savedchunks(self.tempdir)
for seq, info in izip(xrange(chunks), partinfo):
yield {'uploader': self, 'seq': seq, 'fname': info}
def _getupload(self):
if self.source.chunks > 1 and self.source.isfile:
if self.upload_id is None:
return self.conn.newupload(self.key)
else:
return self.conn.getupload(self.upload_id)
else:
return self.conn.newkey(self.key)
def __enter__(self):
try:
self.upload = self._getupload()
except Exception as e:
getLogger().debug("error getting upload", exc_info=True)
<|code_end|>
with the help of current file imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and context from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
, which may contain function names, class names, or code. Output only the next line. | raise CloudProviderUploadError(e) |
Predict the next line after this snippet: <|code_start|> name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
make_progress({'part': seq, 'tx': 0})
skip = self.source.chunkstart(uploaded)
newsource = ChunkedFile(
self.source.path, skip=skip, chunk=self.source.chunk)
size = size - newsource.size
getLogger().debug("saving progress for {}".format(key))
save_progress(name, size)
return (key.etag, newsource)
@with_low_priority
def do_part(self, seq, **kwargs):
""" transfer a part. runs in a separate process. """
key = None
for attempt in range(self.retries):
getLogger().debug("attempt %d/%d to transfer part %d",
attempt, self.retries, seq)
with self.source.chunkfile(seq, **kwargs) as part:
try:
def cb(tx, total):
<|code_end|>
using the current file's imports:
import os
from lacli.progress import make_progress, save_progress
from itertools import repeat, izip
from lacli.log import getLogger
from lacli.nice import with_low_priority
from lacli.source.chunked import ChunkedFile
from lacore.exceptions import UploadEmptyError
from lacli.exceptions import (WorkerFailureError, PauseEvent,
CloudProviderUploadError)
from lacli.control import readControl
from tempfile import mkdtemp
from multiprocessing import active_children
and any relevant context from other files:
# Path: lacli/progress.py
# def make_progress(msg):
# global progress
# progress.put(msg)
#
# def save_progress(key, size):
# global progress
# progress.put({'save': True, 'key': key, 'size': size})
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
#
# Path: lacli/nice.py
# def with_low_priority(f):
# @wraps(f)
# def wrapper(*args, **kwargs):
#
# p = psutil.Process()
# op = p.nice()
# try:
# p.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
# except:
# p.nice(20)
# r = f(*args, **kwargs)
# try:
# p.nice(op) # This will fail in POSIX systems
# except psutil.AccessDenied:
# pass
# return r
# return wrapper
#
# Path: lacli/source/chunked.py
# class ChunkedFile(BaseChunkedFile):
# maxchunk = 20971520
#
# Path: lacli/exceptions.py
# class WorkerFailureError(BaseAppException):
# def __init__(self, *args, **kwargs):
# super(BaseAppException, self).__init__(self.msg, *args, **kwargs)
# self.msg = "worker '{}' failed".format(current_process())
#
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# class CloudProviderUploadError(BaseAppException):
# msg = "cloud provider indicated an error while uploading"
#
# Path: lacli/control.py
# def readControl():
# global controlq
# try:
# msg = controlq.get(False)
# if 'pause' in msg:
# raise PauseEvent()
# except Empty:
# pass
. Output only the next line. | readControl() |
Using the snippet: <|code_start|> raise InvalidOperation(ErrorType.NotImplemented, str(e))
except OSError as e:
getLogger().debug("{} threw exception".format(f), exc_info=True)
if e.errno == errno.ENOENT:
raise InvalidOperation(ErrorType.FileNotFound,
"File not found",
filename=e.filename)
if e.errno == errno.EACCES:
raise InvalidOperation(
ErrorType.Other, "Access denied or file in use")
getLogger().debug("unknown exception", exc_info=True)
raise InvalidOperation(ErrorType.Other, "Unknown error")
except IOError as e:
getLogger().debug("{} threw exception".format(f), exc_info=True)
if e.errno == errno.ENOENT:
raise InvalidOperation(ErrorType.FileNotFound,
"File not found",
filename=e.filename)
if e.errno == errno.ENOSPC:
raise InvalidOperation(ErrorType.Other,
"No space left on device")
getLogger().debug("unknown exception", exc_info=True)
raise InvalidOperation(ErrorType.Other, "")
except ValueError as e:
getLogger().debug("{} threw exception".format(f), exc_info=True)
raise InvalidOperation(ErrorType.Validation, str(e))
except BaseAppException as e:
getLogger().debug("application exception", exc_info=True)
twisted_log.err(e)
raise InvalidOperation(ErrorType.Other, str(e))
<|code_end|>
, determine the next line of code. You have imports:
from lacore.exceptions import ApiAuthException, BaseAppException
from lacli.exceptions import PauseEvent
from lacli.server.interface.ClientInterface.ttypes import InvalidOperation
from lacli.server.interface.ClientInterface.ttypes import ErrorType
from lacli.log import getLogger
from twisted.python import log as twisted_log
from twisted.internet import defer
from functools import wraps
import errno
and context (class names, function names, or code) available:
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class InvalidOperation(TException):
# """
# Attributes:
# - what
# - why
# - filename
# """
#
# thrift_spec = (
# None, # 0
# (1, TType.I32, 'what', None, None, ), # 1
# (2, TType.STRING, 'why', None, None, ), # 2
# (3, TType.STRING, 'filename', None, None, ), # 3
# )
#
# def __init__(self, what=None, why=None, filename=None,):
# self.what = what
# self.why = why
# self.filename = filename
#
# def read(self, iprot):
# if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
# fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
# return
# iprot.readStructBegin()
# while True:
# (fname, ftype, fid) = iprot.readFieldBegin()
# if ftype == TType.STOP:
# break
# if fid == 1:
# if ftype == TType.I32:
# self.what = iprot.readI32();
# else:
# iprot.skip(ftype)
# elif fid == 2:
# if ftype == TType.STRING:
# self.why = iprot.readString();
# else:
# iprot.skip(ftype)
# elif fid == 3:
# if ftype == TType.STRING:
# self.filename = iprot.readString();
# else:
# iprot.skip(ftype)
# else:
# iprot.skip(ftype)
# iprot.readFieldEnd()
# iprot.readStructEnd()
#
# def write(self, oprot):
# if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
# oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
# return
# oprot.writeStructBegin('InvalidOperation')
# if self.what is not None:
# oprot.writeFieldBegin('what', TType.I32, 1)
# oprot.writeI32(self.what)
# oprot.writeFieldEnd()
# if self.why is not None:
# oprot.writeFieldBegin('why', TType.STRING, 2)
# oprot.writeString(self.why)
# oprot.writeFieldEnd()
# if self.filename is not None:
# oprot.writeFieldBegin('filename', TType.STRING, 3)
# oprot.writeString(self.filename)
# oprot.writeFieldEnd()
# oprot.writeFieldStop()
# oprot.writeStructEnd()
#
# def validate(self):
# return
#
#
# def __str__(self):
# return repr(self)
#
# def __repr__(self):
# L = ['%s=%r' % (key, value)
# for key, value in self.__dict__.iteritems()]
# return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# def __ne__(self, other):
# return not (self == other)
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class ErrorType(object):
# NoError = 0
# Server = 1
# Network = 2
# Authentication = 3
# Validation = 4
# Other = 5
# NotImplemented = 6
# FileNotFound = 7
#
# _VALUES_TO_NAMES = {
# 0: "NoError",
# 1: "Server",
# 2: "Network",
# 3: "Authentication",
# 4: "Validation",
# 5: "Other",
# 6: "NotImplemented",
# 7: "FileNotFound",
# }
#
# _NAMES_TO_VALUES = {
# "NoError": 0,
# "Server": 1,
# "Network": 2,
# "Authentication": 3,
# "Validation": 4,
# "Other": 5,
# "NotImplemented": 6,
# "FileNotFound": 7,
# }
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
. Output only the next line. | except PauseEvent: |
Predict the next line after this snippet: <|code_start|> try:
if int(args[0].debug) > 4:
hide = {} # don't hide anything
except:
pass
hidden = lambda k: k in hide and hide[k] is True
maybe = lambda k, v: "<...>" if hidden(k) else v
_as = maybe('_args', args[1:])
_ks = {(k, maybe(k, v)) for k, v in kwargs.iteritems()}
getLogger().debug("calling {} with {}, {}".format(f, _as, _ks))
r = yield f(*args, **kwargs)
getLogger().debug(
"return value for {} is {}".format(f, maybe('_ret', r)))
defer.returnValue(r)
return w
def tthrow(f):
""" Decorate a method to raise InvalidOperation instead
of lacli exceptions
"""
@wraps(f)
@defer.inlineCallbacks
def w(*args, **kwargs):
r = None
try:
r = yield f(*args, **kwargs)
defer.returnValue(r)
except ApiAuthException as e:
twisted_log.err(e)
<|code_end|>
using the current file's imports:
from lacore.exceptions import ApiAuthException, BaseAppException
from lacli.exceptions import PauseEvent
from lacli.server.interface.ClientInterface.ttypes import InvalidOperation
from lacli.server.interface.ClientInterface.ttypes import ErrorType
from lacli.log import getLogger
from twisted.python import log as twisted_log
from twisted.internet import defer
from functools import wraps
import errno
and any relevant context from other files:
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class InvalidOperation(TException):
# """
# Attributes:
# - what
# - why
# - filename
# """
#
# thrift_spec = (
# None, # 0
# (1, TType.I32, 'what', None, None, ), # 1
# (2, TType.STRING, 'why', None, None, ), # 2
# (3, TType.STRING, 'filename', None, None, ), # 3
# )
#
# def __init__(self, what=None, why=None, filename=None,):
# self.what = what
# self.why = why
# self.filename = filename
#
# def read(self, iprot):
# if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
# fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
# return
# iprot.readStructBegin()
# while True:
# (fname, ftype, fid) = iprot.readFieldBegin()
# if ftype == TType.STOP:
# break
# if fid == 1:
# if ftype == TType.I32:
# self.what = iprot.readI32();
# else:
# iprot.skip(ftype)
# elif fid == 2:
# if ftype == TType.STRING:
# self.why = iprot.readString();
# else:
# iprot.skip(ftype)
# elif fid == 3:
# if ftype == TType.STRING:
# self.filename = iprot.readString();
# else:
# iprot.skip(ftype)
# else:
# iprot.skip(ftype)
# iprot.readFieldEnd()
# iprot.readStructEnd()
#
# def write(self, oprot):
# if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
# oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
# return
# oprot.writeStructBegin('InvalidOperation')
# if self.what is not None:
# oprot.writeFieldBegin('what', TType.I32, 1)
# oprot.writeI32(self.what)
# oprot.writeFieldEnd()
# if self.why is not None:
# oprot.writeFieldBegin('why', TType.STRING, 2)
# oprot.writeString(self.why)
# oprot.writeFieldEnd()
# if self.filename is not None:
# oprot.writeFieldBegin('filename', TType.STRING, 3)
# oprot.writeString(self.filename)
# oprot.writeFieldEnd()
# oprot.writeFieldStop()
# oprot.writeStructEnd()
#
# def validate(self):
# return
#
#
# def __str__(self):
# return repr(self)
#
# def __repr__(self):
# L = ['%s=%r' % (key, value)
# for key, value in self.__dict__.iteritems()]
# return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# def __ne__(self, other):
# return not (self == other)
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class ErrorType(object):
# NoError = 0
# Server = 1
# Network = 2
# Authentication = 3
# Validation = 4
# Other = 5
# NotImplemented = 6
# FileNotFound = 7
#
# _VALUES_TO_NAMES = {
# 0: "NoError",
# 1: "Server",
# 2: "Network",
# 3: "Authentication",
# 4: "Validation",
# 5: "Other",
# 6: "NotImplemented",
# 7: "FileNotFound",
# }
#
# _NAMES_TO_VALUES = {
# "NoError": 0,
# "Server": 1,
# "Network": 2,
# "Authentication": 3,
# "Validation": 4,
# "Other": 5,
# "NotImplemented": 6,
# "FileNotFound": 7,
# }
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
. Output only the next line. | raise InvalidOperation(ErrorType.Authentication, e.msg) |
Using the snippet: <|code_start|> try:
if int(args[0].debug) > 4:
hide = {} # don't hide anything
except:
pass
hidden = lambda k: k in hide and hide[k] is True
maybe = lambda k, v: "<...>" if hidden(k) else v
_as = maybe('_args', args[1:])
_ks = {(k, maybe(k, v)) for k, v in kwargs.iteritems()}
getLogger().debug("calling {} with {}, {}".format(f, _as, _ks))
r = yield f(*args, **kwargs)
getLogger().debug(
"return value for {} is {}".format(f, maybe('_ret', r)))
defer.returnValue(r)
return w
def tthrow(f):
""" Decorate a method to raise InvalidOperation instead
of lacli exceptions
"""
@wraps(f)
@defer.inlineCallbacks
def w(*args, **kwargs):
r = None
try:
r = yield f(*args, **kwargs)
defer.returnValue(r)
except ApiAuthException as e:
twisted_log.err(e)
<|code_end|>
, determine the next line of code. You have imports:
from lacore.exceptions import ApiAuthException, BaseAppException
from lacli.exceptions import PauseEvent
from lacli.server.interface.ClientInterface.ttypes import InvalidOperation
from lacli.server.interface.ClientInterface.ttypes import ErrorType
from lacli.log import getLogger
from twisted.python import log as twisted_log
from twisted.internet import defer
from functools import wraps
import errno
and context (class names, function names, or code) available:
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class InvalidOperation(TException):
# """
# Attributes:
# - what
# - why
# - filename
# """
#
# thrift_spec = (
# None, # 0
# (1, TType.I32, 'what', None, None, ), # 1
# (2, TType.STRING, 'why', None, None, ), # 2
# (3, TType.STRING, 'filename', None, None, ), # 3
# )
#
# def __init__(self, what=None, why=None, filename=None,):
# self.what = what
# self.why = why
# self.filename = filename
#
# def read(self, iprot):
# if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
# fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
# return
# iprot.readStructBegin()
# while True:
# (fname, ftype, fid) = iprot.readFieldBegin()
# if ftype == TType.STOP:
# break
# if fid == 1:
# if ftype == TType.I32:
# self.what = iprot.readI32();
# else:
# iprot.skip(ftype)
# elif fid == 2:
# if ftype == TType.STRING:
# self.why = iprot.readString();
# else:
# iprot.skip(ftype)
# elif fid == 3:
# if ftype == TType.STRING:
# self.filename = iprot.readString();
# else:
# iprot.skip(ftype)
# else:
# iprot.skip(ftype)
# iprot.readFieldEnd()
# iprot.readStructEnd()
#
# def write(self, oprot):
# if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
# oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
# return
# oprot.writeStructBegin('InvalidOperation')
# if self.what is not None:
# oprot.writeFieldBegin('what', TType.I32, 1)
# oprot.writeI32(self.what)
# oprot.writeFieldEnd()
# if self.why is not None:
# oprot.writeFieldBegin('why', TType.STRING, 2)
# oprot.writeString(self.why)
# oprot.writeFieldEnd()
# if self.filename is not None:
# oprot.writeFieldBegin('filename', TType.STRING, 3)
# oprot.writeString(self.filename)
# oprot.writeFieldEnd()
# oprot.writeFieldStop()
# oprot.writeStructEnd()
#
# def validate(self):
# return
#
#
# def __str__(self):
# return repr(self)
#
# def __repr__(self):
# L = ['%s=%r' % (key, value)
# for key, value in self.__dict__.iteritems()]
# return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# def __ne__(self, other):
# return not (self == other)
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class ErrorType(object):
# NoError = 0
# Server = 1
# Network = 2
# Authentication = 3
# Validation = 4
# Other = 5
# NotImplemented = 6
# FileNotFound = 7
#
# _VALUES_TO_NAMES = {
# 0: "NoError",
# 1: "Server",
# 2: "Network",
# 3: "Authentication",
# 4: "Validation",
# 5: "Other",
# 6: "NotImplemented",
# 7: "FileNotFound",
# }
#
# _NAMES_TO_VALUES = {
# "NoError": 0,
# "Server": 1,
# "Network": 2,
# "Authentication": 3,
# "Validation": 4,
# "Other": 5,
# "NotImplemented": 6,
# "FileNotFound": 7,
# }
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
. Output only the next line. | raise InvalidOperation(ErrorType.Authentication, e.msg) |
Next line prediction: <|code_start|>
def log_hide(**kwargs):
def decorator(f):
setattr(f, '__hidden', kwargs)
return f
return decorator
def log_call(f):
@wraps(f)
@defer.inlineCallbacks
def w(*args, **kwargs):
hide = getattr(f, '__hidden', {})
if len(args) > 0 and hasattr(args[0], "debug"):
try:
if int(args[0].debug) > 4:
hide = {} # don't hide anything
except:
pass
hidden = lambda k: k in hide and hide[k] is True
maybe = lambda k, v: "<...>" if hidden(k) else v
_as = maybe('_args', args[1:])
_ks = {(k, maybe(k, v)) for k, v in kwargs.iteritems()}
<|code_end|>
. Use current file imports:
(from lacore.exceptions import ApiAuthException, BaseAppException
from lacli.exceptions import PauseEvent
from lacli.server.interface.ClientInterface.ttypes import InvalidOperation
from lacli.server.interface.ClientInterface.ttypes import ErrorType
from lacli.log import getLogger
from twisted.python import log as twisted_log
from twisted.internet import defer
from functools import wraps
import errno)
and context including class names, function names, or small code snippets from other files:
# Path: lacli/exceptions.py
# class PauseEvent(BaseAppException):
# msg = "Paused"
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class InvalidOperation(TException):
# """
# Attributes:
# - what
# - why
# - filename
# """
#
# thrift_spec = (
# None, # 0
# (1, TType.I32, 'what', None, None, ), # 1
# (2, TType.STRING, 'why', None, None, ), # 2
# (3, TType.STRING, 'filename', None, None, ), # 3
# )
#
# def __init__(self, what=None, why=None, filename=None,):
# self.what = what
# self.why = why
# self.filename = filename
#
# def read(self, iprot):
# if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
# fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
# return
# iprot.readStructBegin()
# while True:
# (fname, ftype, fid) = iprot.readFieldBegin()
# if ftype == TType.STOP:
# break
# if fid == 1:
# if ftype == TType.I32:
# self.what = iprot.readI32();
# else:
# iprot.skip(ftype)
# elif fid == 2:
# if ftype == TType.STRING:
# self.why = iprot.readString();
# else:
# iprot.skip(ftype)
# elif fid == 3:
# if ftype == TType.STRING:
# self.filename = iprot.readString();
# else:
# iprot.skip(ftype)
# else:
# iprot.skip(ftype)
# iprot.readFieldEnd()
# iprot.readStructEnd()
#
# def write(self, oprot):
# if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
# oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
# return
# oprot.writeStructBegin('InvalidOperation')
# if self.what is not None:
# oprot.writeFieldBegin('what', TType.I32, 1)
# oprot.writeI32(self.what)
# oprot.writeFieldEnd()
# if self.why is not None:
# oprot.writeFieldBegin('why', TType.STRING, 2)
# oprot.writeString(self.why)
# oprot.writeFieldEnd()
# if self.filename is not None:
# oprot.writeFieldBegin('filename', TType.STRING, 3)
# oprot.writeString(self.filename)
# oprot.writeFieldEnd()
# oprot.writeFieldStop()
# oprot.writeStructEnd()
#
# def validate(self):
# return
#
#
# def __str__(self):
# return repr(self)
#
# def __repr__(self):
# L = ['%s=%r' % (key, value)
# for key, value in self.__dict__.iteritems()]
# return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# def __ne__(self, other):
# return not (self == other)
#
# Path: lacli/server/interface/ClientInterface/ttypes.py
# class ErrorType(object):
# NoError = 0
# Server = 1
# Network = 2
# Authentication = 3
# Validation = 4
# Other = 5
# NotImplemented = 6
# FileNotFound = 7
#
# _VALUES_TO_NAMES = {
# 0: "NoError",
# 1: "Server",
# 2: "Network",
# 3: "Authentication",
# 4: "Validation",
# 5: "Other",
# 6: "NotImplemented",
# 7: "FileNotFound",
# }
#
# _NAMES_TO_VALUES = {
# "NoError": 0,
# "Server": 1,
# "Network": 2,
# "Authentication": 3,
# "Validation": 4,
# "Other": 5,
# "NotImplemented": 6,
# "FileNotFound": 7,
# }
#
# Path: lacli/log.py
# def getLogger(logger='lacli'):
# return lacore.log.getLogger(logger)
. Output only the next line. | getLogger().debug("calling {} with {}, {}".format(f, _as, _ks)) |
Continue the code snippet: <|code_start|> os.rename(new_scripts_dir + script, scripts_dir_0)
if self.overwrite == True:
os.system('cp -r "' + new_scripts_dir + '"* "' + scripts_dir_0 + '"')
shutil.rmtree(new_scripts_dir)
os.remove(archive_path)
new_scripts = sorted(new_scripts)
n_new_scripts = len(new_scripts)
if self.lib == 'goglib':
goglib_data = goglib_get_data.games_info(data_dir)
goglib_names = goglib_data[1]
goglib_titles = goglib_data[2]
name_to_title = {}
for i in range(len(goglib_names)):
name_to_title[goglib_names[i]] = goglib_titles[i]
new_own_scripts = []
for game in new_scripts:
if game in goglib_names:
new_own_scripts.append(game)
elif self.lib == 'mylib':
<|code_end|>
. Use current file imports:
import os, sys, subprocess, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk
from urllib2 import Request as urllib_request
from urllib2 import urlopen as urllib_urlopen
from urllib2 import URLError as urllib_urlerror
from urllib2 import HTTPError as urllib_httperror
from urllib.request import Request as urllib_request
from urllib.request import urlopen as urllib_urlopen
from urllib.request import URLError as urllib_urlerror
from urllib.request import HTTPError as urllib_httperror
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import mylib_get_data, goglib_get_data, get_banner
and context (classes, functions, or code) from other files:
# Path: modules/mylib_get_data.py
# def games_info(data_dir):
# def get_info(games_list, mylib_dir):
#
# Path: modules/goglib_get_data.py
# def games_info(data_dir):
#
# Path: modules/get_banner.py
# def get_banner(game_name, url, banner_path, lib):
#
# banner_req = urllib_request(url)
#
# try:
#
# if not os.path.exists(banner_path):
# os.makedirs(banner_path)
#
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path + '/' + game_name + '.jpg', 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path + '/' + game_name + '.jpg')
# pic = pic_src.resize((518, 240), PIL.Image.ANTIALIAS)
# pic.save(banner_path + '/' + game_name + '.jpg')
#
# if lib == 'goglib':
#
# if not os.path.exists(banner_path + '/unavailable/'):
# os.makedirs(banner_path + '/unavailable/')
#
# new_pic = Image.open(banner_path + '/' + game_name + '.jpg')
# pic_grey = new_pic.convert('L')
# pic_grey.save(banner_path + '/unavailable/' + game_name + '.jpg')
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
. Output only the next line. | mylib_data = mylib_get_data.games_info(data_dir) |
Predict the next line after this snippet: <|code_start|> if self.lib == 'mylib':
new_scripts_dir = tmp + '/games_nebula_' + self.lib + '_scripts-master/free/'
else:
new_scripts_dir = tmp + '/games_nebula_' + self.lib + '_scripts-master/'
subprocess.call(['7z', 'x', '-aoa', '-o' + tmp, archive_path])
os.remove(new_scripts_dir + 'LICENSE')
git_sctipts = os.listdir(new_scripts_dir)
existing_scripts_0 = os.listdir(scripts_dir_0)
existing_scripts_1 = os.listdir(scripts_dir_1)
existing_scripts = existing_scripts_0 + existing_scripts_1
new_scripts = []
for script in git_sctipts:
if script not in existing_scripts:
new_scripts.append(script)
os.rename(new_scripts_dir + script, scripts_dir_0)
if self.overwrite == True:
os.system('cp -r "' + new_scripts_dir + '"* "' + scripts_dir_0 + '"')
shutil.rmtree(new_scripts_dir)
os.remove(archive_path)
new_scripts = sorted(new_scripts)
n_new_scripts = len(new_scripts)
if self.lib == 'goglib':
<|code_end|>
using the current file's imports:
import os, sys, subprocess, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk
from urllib2 import Request as urllib_request
from urllib2 import urlopen as urllib_urlopen
from urllib2 import URLError as urllib_urlerror
from urllib2 import HTTPError as urllib_httperror
from urllib.request import Request as urllib_request
from urllib.request import urlopen as urllib_urlopen
from urllib.request import URLError as urllib_urlerror
from urllib.request import HTTPError as urllib_httperror
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import mylib_get_data, goglib_get_data, get_banner
and any relevant context from other files:
# Path: modules/mylib_get_data.py
# def games_info(data_dir):
# def get_info(games_list, mylib_dir):
#
# Path: modules/goglib_get_data.py
# def games_info(data_dir):
#
# Path: modules/get_banner.py
# def get_banner(game_name, url, banner_path, lib):
#
# banner_req = urllib_request(url)
#
# try:
#
# if not os.path.exists(banner_path):
# os.makedirs(banner_path)
#
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path + '/' + game_name + '.jpg', 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path + '/' + game_name + '.jpg')
# pic = pic_src.resize((518, 240), PIL.Image.ANTIALIAS)
# pic.save(banner_path + '/' + game_name + '.jpg')
#
# if lib == 'goglib':
#
# if not os.path.exists(banner_path + '/unavailable/'):
# os.makedirs(banner_path + '/unavailable/')
#
# new_pic = Image.open(banner_path + '/' + game_name + '.jpg')
# pic_grey = new_pic.convert('L')
# pic_grey.save(banner_path + '/unavailable/' + game_name + '.jpg')
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
. Output only the next line. | goglib_data = goglib_get_data.games_info(data_dir) |
Using the snippet: <|code_start|> goglib_data = goglib_get_data.games_info(data_dir)
goglib_names = goglib_data[1]
goglib_titles = goglib_data[2]
name_to_title = {}
for i in range(len(goglib_names)):
name_to_title[goglib_names[i]] = goglib_titles[i]
new_own_scripts = []
for game in new_scripts:
if game in goglib_names:
new_own_scripts.append(game)
elif self.lib == 'mylib':
mylib_data = mylib_get_data.games_info(data_dir)
mylib_names = mylib_data[1]
mylib_titles = mylib_data[2]
name_to_title = {}
for i in range(len(mylib_names)):
name_to_title[mylib_names[i]] = mylib_titles[i]
for game_name in new_scripts:
image_url = images_url + game_name + '.jpg'
banners_dir = data_dir + '/images/mylib/'
<|code_end|>
, determine the next line of code. You have imports:
import os, sys, subprocess, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk
from urllib2 import Request as urllib_request
from urllib2 import urlopen as urllib_urlopen
from urllib2 import URLError as urllib_urlerror
from urllib2 import HTTPError as urllib_httperror
from urllib.request import Request as urllib_request
from urllib.request import urlopen as urllib_urlopen
from urllib.request import URLError as urllib_urlerror
from urllib.request import HTTPError as urllib_httperror
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import mylib_get_data, goglib_get_data, get_banner
and context (class names, function names, or code) available:
# Path: modules/mylib_get_data.py
# def games_info(data_dir):
# def get_info(games_list, mylib_dir):
#
# Path: modules/goglib_get_data.py
# def games_info(data_dir):
#
# Path: modules/get_banner.py
# def get_banner(game_name, url, banner_path, lib):
#
# banner_req = urllib_request(url)
#
# try:
#
# if not os.path.exists(banner_path):
# os.makedirs(banner_path)
#
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path + '/' + game_name + '.jpg', 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path + '/' + game_name + '.jpg')
# pic = pic_src.resize((518, 240), PIL.Image.ANTIALIAS)
# pic.save(banner_path + '/' + game_name + '.jpg')
#
# if lib == 'goglib':
#
# if not os.path.exists(banner_path + '/unavailable/'):
# os.makedirs(banner_path + '/unavailable/')
#
# new_pic = Image.open(banner_path + '/' + game_name + '.jpg')
# pic_grey = new_pic.convert('L')
# pic_grey.save(banner_path + '/unavailable/' + game_name + '.jpg')
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
. Output only the next line. | get_banner.get_banner(game_name, image_url, banners_dir, self.lib) |
Continue the code snippet: <|code_start|> token = Token.from_file(token_path)
if token.expired():
token.refresh()
token.save(token_path)
api = GogApi(token)
prod = api.product(game_id)
prod.update_galaxy(expand=True)
banner_url = 'https:' + ''.join(prod.image_logo.split('_glx_logo'))
banner_req = urllib_request(banner_url)
banner_data = urllib_urlopen(banner_req).read()
banner_file = open(banner_path, 'wb')
banner_file.write(banner_data)
banner_file.close()
pic_src = Image.open(banner_path)
scale_lvl = banner_height/float(pic_src.size[1])
scaled_width = int(float(pic_src.size[0])*scale_lvl)
pic = pic_src.resize((scaled_width, banner_height), PIL.Image.ANTIALIAS)
pic.save(banner_path)
pic = pic.convert('L')
pic.save(unavailable_path)
except urllib_urlerror as e:
print(e.reason)
except urllib_httperror as e:
print(e.code)
print(e.read())
except:
<|code_end|>
. Use current file imports:
import os
import PIL
from PIL import Image
from gogapi import GogApi, Token
from modules import goglib_recreate_banner
from urllib2 import Request as urllib_request
from urllib2 import urlopen as urllib_urlopen
from urllib2 import URLError as urllib_urlerror
from urllib2 import HTTPError as urllib_httperror
from urllib.request import Request as urllib_request
from urllib.request import urlopen as urllib_urlopen
from urllib.request import URLError as urllib_urlerror
from urllib.request import HTTPError as urllib_httperror
and context (classes, functions, or code) from other files:
# Path: modules/goglib_recreate_banner.py
# def goglib_recreate_banner(game_name, banner_path):
#
# nebula_dir = os.getenv('NEBULA_DIR')
# banner = Image.new('1', (518, 240), 0)
# draw = ImageDraw.Draw(banner)
# font = ImageFont.truetype(nebula_dir + '/fonts/DejaVuSans.ttf', 24)
# game_name_width = font.getsize(game_name)[0]
# game_name_height = font.getsize(game_name)[1]
#
# if game_name_width > 518:
#
# game_name_list = []
# for i in range(0, len(game_name), 28):
# game_name_list.append(game_name[i:i+28])
#
# all_lines_height = len(game_name_list) * game_name_height
# full_title_y = 120 - all_lines_height/2
#
# n = 0
# for line in game_name_list:
# line_width = font.getsize(line)[0]
# x = 259 - line_width/2
# y = full_title_y + (game_name_height * n)
# draw.text((x, y), line, fill=1, font=font)
# n += 1
#
# else:
#
# x = 259 - game_name_width/2
# y = 120 - game_name_height/2
# draw.text((x, y), game_name, fill=1, font=font)
#
# banner.save(banner_path, 'JPEG')
. Output only the next line. | goglib_recreate_banner.goglib_recreate_banner(game_name, banner_path) |
Given snippet: <|code_start|>try:
except:
nebula_dir = sys.path[0]
def autosetup(lib, install_dir, game_name):
parser = ConfigParser()
if lib == 'goglib':
banners_dir = os.getenv('HOME') + '/.games_nebula/images/goglib/'
path_0 = os.getenv('HOME') + '/.games_nebula/scripts/goglib/autosetup.ini'
path_1 = nebula_dir + '/scripts/goglib/autosetup.ini'
elif lib == 'mylib':
banners_dir = os.getenv('HOME') + '/.games_nebula/images/mylib/'
path_0 = os.getenv('HOME') + '/.games_nebula/scripts/mylib/autosetup.ini'
path_1 = nebula_dir + '/scripts/mylib/autosetup.ini'
if os.path.exists(path_0):
parser.read(path_0)
else:
parser.read(path_1)
if (os.path.exists(install_dir + '/' + game_name)) and (parser.has_section(game_name)):
if parser.has_option(game_name, 'image'):
image = parser.get(game_name, 'image')
if image != '':
print("Downloading image")
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os, sys
import sys
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import get_banner
and context:
# Path: modules/get_banner.py
# def get_banner(game_name, url, banner_path, lib):
#
# banner_req = urllib_request(url)
#
# try:
#
# if not os.path.exists(banner_path):
# os.makedirs(banner_path)
#
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path + '/' + game_name + '.jpg', 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path + '/' + game_name + '.jpg')
# pic = pic_src.resize((518, 240), PIL.Image.ANTIALIAS)
# pic.save(banner_path + '/' + game_name + '.jpg')
#
# if lib == 'goglib':
#
# if not os.path.exists(banner_path + '/unavailable/'):
# os.makedirs(banner_path + '/unavailable/')
#
# new_pic = Image.open(banner_path + '/' + game_name + '.jpg')
# pic_grey = new_pic.convert('L')
# pic_grey.save(banner_path + '/unavailable/' + game_name + '.jpg')
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
which might include code, classes, or functions. Output only the next line. | get_banner.get_banner(game_name, image, banners_dir, lib) |
Continue the code snippet: <|code_start|>
if not os.path.exists(data_dir + '/images'):
os.makedirs(data_dir + '/images')
if not os.path.exists(data_dir + '/images/mylib'):
os.makedirs(data_dir + '/images/mylib')
if not os.path.exists(data_dir + '/images/goglib'):
os.makedirs(data_dir + '/images/goglib')
if not os.path.exists(data_dir + '/images/goglib/unavailable'):
os.makedirs(data_dir + '/images/goglib/unavailable')
if lib == 'goglib':
color = args[0]
if color == 'normal':
path_0 = data_dir + '/images/goglib/' + game_name + '.jpg'
path_1 = nebula_dir + '/images/goglib/' + game_name + '.jpg'
elif color == 'gray':
path_0 = data_dir + '/images/goglib/unavailable/' + game_name + '.jpg'
path_1 = nebula_dir + '/images/goglib/unavailable/' + game_name + '.jpg'
elif lib == 'mylib':
path_0 = data_dir + '/images/mylib/' + game_name + '.jpg'
path_1 = nebula_dir + '/images/mylib/' + game_name + '.jpg'
if os.path.exists(path_0):
return path_0
elif os.path.exists(path_1):
return path_1
else:
if lib == 'mylib':
# FIX Hack to prevent crash in launchers (trying to create banner for game that not in lib)
if len(args) == 0:
<|code_end|>
. Use current file imports:
import os, sys
from modules import mylib_create_banner, goglib_get_banner
and context (classes, functions, or code) from other files:
# Path: modules/mylib_create_banner.py
# def mylib_create_banner(game_name):
#
# nebula_dir = sys.path[0]
# data_dir = os.getenv('HOME') + '/.games_nebula'
#
# banners_dir = data_dir + '/images/mylib/'
# banner_path = banners_dir + game_name + '.jpg'
#
# if not os.path.exists(banners_dir):
# os.makedirs(banners_dir)
#
# script_path_0 = data_dir + '/scripts/mylib/' + game_name + '/setup'
# script_path_1 = nebula_dir + '/scripts/mylib/' + game_name + '/setup'
#
# if os.path.exists(script_path_0):
# mylib_setup_script_path = script_path_0
# else:
# mylib_setup_script_path = script_path_1
#
# script_file = open(mylib_setup_script_path, 'r')
# file_content = script_file.readlines()
# raw_game_title = file_content[-1].replace('#', '').replace('\n', '')
#
# if sys.version_info[0] == 2:
# game_title = raw_game_title.decode('utf-8')
# elif sys.version_info[0] == 3:
# game_title = raw_game_title
#
# script_file.close()
#
# banner = Image.new('1', (518, 240), 0)
# draw = ImageDraw.Draw(banner)
# font = ImageFont.truetype(nebula_dir + '/fonts/DejaVuSans.ttf', 24)
#
# game_title_width = font.getsize(game_title)[0]
# game_title_height = font.getsize(game_title)[1]
#
# if game_title_width > 518:
#
# game_title_list = []
# for i in range(0, len(game_title), 28):
# game_title_list.append(game_title[i:i+28])
#
# all_lines_height = len(game_title_list) * game_title_height
# full_title_y = 120 - all_lines_height/2
#
# n = 0
# for line in game_title_list:
# line_width = font.getsize(line)[0]
# x = 259 - line_width/2
# y = full_title_y + (game_title_height * n)
# draw.text((x, y), line, fill=1, font=font)
# n += 1
#
# else:
#
# x = 259 - game_title_width/2
# y = 120 - game_title_height/2
# draw.text((x, y), game_title, fill=1, font=font)
#
# banner.save(banner_path, 'JPEG')
#
# Path: modules/goglib_get_banner.py
# def goglib_get_banner(banner_path, unavailable_path, game_id, *args):
#
# banner_height = 240
# game_name = os.path.basename(banner_path).split('.jpg')[0]
# print("Getting picture for: '" + game_name + "'")
#
# try:
# token_path = os.getenv('HOME') + '/.config/lgogdownloader/galaxy_tokens.json'
# token = Token.from_file(token_path)
# if token.expired():
# token.refresh()
# token.save(token_path)
# api = GogApi(token)
#
# prod = api.product(game_id)
# prod.update_galaxy(expand=True)
# banner_url = 'https:' + ''.join(prod.image_logo.split('_glx_logo'))
#
# banner_req = urllib_request(banner_url)
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path, 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path)
# scale_lvl = banner_height/float(pic_src.size[1])
# scaled_width = int(float(pic_src.size[0])*scale_lvl)
# pic = pic_src.resize((scaled_width, banner_height), PIL.Image.ANTIALIAS)
# pic.save(banner_path)
# pic = pic.convert('L')
# pic.save(unavailable_path)
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
# except:
# goglib_recreate_banner.goglib_recreate_banner(game_name, banner_path)
. Output only the next line. | mylib_create_banner.mylib_create_banner(game_name) |
Next line prediction: <|code_start|> config_parser.set('Settings', 'scummvm_path', str(self.scummvm_path))
config_parser.set('Settings', 'scummvm_version', str(self.scummvm_version))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check')
mylib_image_path = paths.get_image_path('mylib', self.game_name, 'check')
if os.path.exists(goglib_image_path) and \
os.path.exists(goglib_install_dir + '/' + self.game_name):
self.banner_path = goglib_image_path
else:
self.banner_path = mylib_image_path
def quit_app(self, window, event):
self.config_save()
Gtk.main_quit()
def create_main_window(self):
self.get_banner()
<|code_end|>
. Use current file imports:
(import sys, os, subprocess, re, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths)
and context including class names, function names, or small code snippets from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | self.monitors_list, self.monitor_primary = monitors.get_monitors() |
Given the code snippet: <|code_start|> self.command_after = ''
config_parser.set('Settings', 'command_after', str(self.command_after))
else:
self.command_after = config_parser.get('Settings', 'command_after')
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def config_save(self):
config_file = self.install_dir + '/' + self.game_name + '/config.ini'
config_parser = ConfigParser()
config_parser.read(config_file)
config_parser.set('Settings', 'scummvm', str(self.scummvm))
config_parser.set('Settings', 'scummvm_path', str(self.scummvm_path))
config_parser.set('Settings', 'scummvm_version', str(self.scummvm_version))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
<|code_end|>
, generate the next line using the imports in this file:
import sys, os, subprocess, re, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context (functions, classes, or occasionally code) from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check') |
Predict the next line for this snippet: <|code_start|> for line in in_file:
if 'gamename: ' in line:
game_name = line.split('gamename: ',1)[1].replace('\n', '')
list_names.append(game_name)
if 'product id: ' in line:
game_id = line.split('product id: ',1)[1].replace('\n', '')
name2id[game_name] = game_id
if 'title: ' in line:
list_titles.append(line.split('title: ',1)[1].replace('\n', ''))
if 'icon: ' in line:
list_icons.append(line.split('icon: ',1)[1].replace('\n', ''))
in_file.close()
number_of_games = len(list_names)
if not os.path.exists(data_dir + '/images'):
os.makedirs(data_dir + '/images')
if not os.path.exists(data_dir + '/images/goglib'):
os.makedirs(data_dir + '/images/goglib')
if not os.path.exists(data_dir + '/images/goglib/unavailable'):
os.makedirs(data_dir + '/images/goglib/unavailable')
for i in range(0, number_of_games):
banner_path_0 = data_dir + '/images/goglib/' + list_names[i] + '.jpg'
banner_path_1 = nebula_dir + '/images/goglib/' + list_names[i] + '.jpg'
unavailable_path = data_dir + '/images/goglib/unavailable/' + list_names[i] + '.jpg'
game_id = name2id.get(list_names[i], None)
if (not os.path.exists(banner_path_0)) and (not os.path.exists(banner_path_1)):
<|code_end|>
with the help of current file imports:
import os, sys
from modules import goglib_get_banner
and context from other files:
# Path: modules/goglib_get_banner.py
# def goglib_get_banner(banner_path, unavailable_path, game_id, *args):
#
# banner_height = 240
# game_name = os.path.basename(banner_path).split('.jpg')[0]
# print("Getting picture for: '" + game_name + "'")
#
# try:
# token_path = os.getenv('HOME') + '/.config/lgogdownloader/galaxy_tokens.json'
# token = Token.from_file(token_path)
# if token.expired():
# token.refresh()
# token.save(token_path)
# api = GogApi(token)
#
# prod = api.product(game_id)
# prod.update_galaxy(expand=True)
# banner_url = 'https:' + ''.join(prod.image_logo.split('_glx_logo'))
#
# banner_req = urllib_request(banner_url)
# banner_data = urllib_urlopen(banner_req).read()
# banner_file = open(banner_path, 'wb')
# banner_file.write(banner_data)
# banner_file.close()
#
# pic_src = Image.open(banner_path)
# scale_lvl = banner_height/float(pic_src.size[1])
# scaled_width = int(float(pic_src.size[0])*scale_lvl)
# pic = pic_src.resize((scaled_width, banner_height), PIL.Image.ANTIALIAS)
# pic.save(banner_path)
# pic = pic.convert('L')
# pic.save(unavailable_path)
#
# except urllib_urlerror as e:
# print(e.reason)
# except urllib_httperror as e:
# print(e.code)
# print(e.read())
# except:
# goglib_recreate_banner.goglib_recreate_banner(game_name, banner_path)
, which may contain function names, class names, or code. Output only the next line. | goglib_get_banner.goglib_get_banner(banner_path_0, unavailable_path, game_id) |
Here is a snippet: <|code_start|> config_parser.set('Settings', 'custom_exe_0_name', str(self.custom_exe_0_name))
config_parser.set('Settings', 'custom_exe_1_path', str(self.custom_exe_1_path))
config_parser.set('Settings', 'custom_exe_1_name', str(self.custom_exe_1_name))
config_parser.set('Settings', 'custom_exe_2_path', str(self.custom_exe_2_path))
config_parser.set('Settings', 'custom_exe_2_name', str(self.custom_exe_2_name))
config_parser.set('Settings', 'custom_exe_3_path', str(self.custom_exe_3_path))
config_parser.set('Settings', 'custom_exe_3_name', str(self.custom_exe_3_name))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check')
mylib_image_path = paths.get_image_path('mylib', self.game_name, 'check')
if os.path.exists(goglib_image_path) and \
os.path.exists(goglib_install_dir + '/' + self.game_name):
self.banner_path = goglib_image_path
else:
self.banner_path = mylib_image_path
def quit_app(self, window, event):
self.config_save()
Gtk.main_quit()
def create_main_window(self):
self.get_banner()
<|code_end|>
. Write the next line using the current file imports:
import sys, os, subprocess, re, logging, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
, which may include functions, classes, or code. Output only the next line. | self.monitors_list, self.monitor_primary = monitors.get_monitors() |
Based on the snippet: <|code_start|> config_parser.set('Settings', 'wine', str(self.wine))
config_parser.set('Settings', 'wine_path', str(self.wine_path))
config_parser.set('Settings', 'wine_version', str(self.wine_version))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'win_ver', str(self.win_ver))
config_parser.set('Settings', 'virtual_desktop', str(self.virtual_desktop))
config_parser.set('Settings', 'virtual_desktop_width', str(self.virtual_desktop_width))
config_parser.set('Settings', 'virtual_desktop_height', str(self.virtual_desktop_height))
config_parser.set('Settings', 'mouse_capture', str(self.mouse_capture))
config_parser.set('Settings', 'own_prefix', str(self.own_prefix))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
config_parser.set('Settings', 'winearch', str(self.winearch))
config_parser.set('Settings', 'custom_exe_0_path', str(self.custom_exe_0_path))
config_parser.set('Settings', 'custom_exe_0_name', str(self.custom_exe_0_name))
config_parser.set('Settings', 'custom_exe_1_path', str(self.custom_exe_1_path))
config_parser.set('Settings', 'custom_exe_1_name', str(self.custom_exe_1_name))
config_parser.set('Settings', 'custom_exe_2_path', str(self.custom_exe_2_path))
config_parser.set('Settings', 'custom_exe_2_name', str(self.custom_exe_2_name))
config_parser.set('Settings', 'custom_exe_3_path', str(self.custom_exe_3_path))
config_parser.set('Settings', 'custom_exe_3_name', str(self.custom_exe_3_name))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
<|code_end|>
, predict the immediate next line with the help of imports:
import sys, os, subprocess, re, logging, shutil
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context (classes, functions, sometimes code) from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check') |
Next line prediction: <|code_start|> config_parser.set('Settings', 'dosbox_version', str(self.dosbox_version))
config_parser.set('Settings', 'own_dosbox_mapperfile', str(self.own_dosbox_mapperfile))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check')
mylib_image_path = paths.get_image_path('mylib', self.game_name, 'check')
if os.path.exists(goglib_image_path) and \
os.path.exists(goglib_install_dir + '/' + self.game_name):
self.banner_path = goglib_image_path
else:
self.banner_path = mylib_image_path
def quit_app(self, window, event):
self.config_save()
Gtk.main_quit()
def create_main_window(self):
self.get_banner()
<|code_end|>
. Use current file imports:
(import sys, os, subprocess, re
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths)
and context including class names, function names, or small code snippets from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | self.monitors_list, self.monitor_primary = monitors.get_monitors() |
Based on the snippet: <|code_start|> config_parser.set('Settings', 'command_after', str(self.command_after))
else:
self.command_after = config_parser.get('Settings', 'command_after')
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def config_save(self):
config_file = self.install_dir + '/' + self.game_name + '/config.ini'
config_parser = ConfigParser()
config_parser.read(config_file)
config_parser.set('Settings', 'dosbox', str(self.dosbox))
config_parser.set('Settings', 'dosbox_path', str(self.dosbox_path))
config_parser.set('Settings', 'dosbox_version', str(self.dosbox_version))
config_parser.set('Settings', 'own_dosbox_mapperfile', str(self.own_dosbox_mapperfile))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
<|code_end|>
, predict the immediate next line with the help of imports:
import sys, os, subprocess, re
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context (classes, functions, sometimes code) from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check') |
Given the code snippet: <|code_start|>
config_parser.set('Settings', 'launcher_type', str(self.launcher_type))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check')
mylib_image_path = paths.get_image_path('mylib', self.game_name, 'check')
if os.path.exists(goglib_image_path) and \
os.path.exists(goglib_install_dir + '/' + self.game_name):
self.banner_path = goglib_image_path
else:
self.banner_path = mylib_image_path
def quit_app(self, window, event):
self.config_save()
Gtk.main_quit()
def create_main_window(self):
self.get_banner()
<|code_end|>
, generate the next line using the imports in this file:
import sys, os, subprocess, re
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context (functions, classes, or occasionally code) from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | self.monitors_list, self.monitor_primary = monitors.get_monitors() |
Given the code snippet: <|code_start|>
if not config_parser.has_option('Settings', 'command_after'):
self.command_after = ''
config_parser.set('Settings', 'command_after', str(self.command_after))
else:
self.command_after = config_parser.get('Settings', 'command_after')
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def config_save(self):
config_file = self.install_dir + '/' + self.game_name + '/config.ini'
config_parser = ConfigParser()
config_parser.read(config_file)
config_parser.set('Settings', 'launcher_type', str(self.launcher_type))
config_parser.set('Settings', 'monitor', str(self.monitor))
config_parser.set('Settings', 'launcher', str(self.launcher))
config_parser.set('Settings', 'show_banner', str(self.show_banner))
config_parser.set('Settings', 'command_before', str(self.command_before))
config_parser.set('Settings', 'command_after', str(self.command_after))
new_config_file = open(config_file, 'w')
config_parser.write(new_config_file)
new_config_file.close()
def get_banner(self):
<|code_end|>
, generate the next line using the imports in this file:
import sys, os, subprocess, re
import gi
import gettext
import sys
from gi.repository import Gtk, Gdk, GdkPixbuf
from ConfigParser import ConfigParser as ConfigParser
from configparser import ConfigParser as ConfigParser
from modules import monitors, paths
and context (functions, classes, or occasionally code) from other files:
# Path: modules/monitors.py
# def get_monitors():
#
# Path: modules/paths.py
# def get_image_path(lib, game_name, *args):
# def get_setup_script_path(lib, game_name):
. Output only the next line. | goglib_image_path = paths.get_image_path('goglib', self.game_name, 'normal', 'check') |
Given the following code snippet before the placeholder: <|code_start|>
def test_highpass_filter():
# instantiate a bayer filter and a wrong filter
bayer_filter = np.tile([[0, 1], [1, 0]], (500, 500))
wrong_filter = np.tile([[1, 1, 1, 0], [1, 0, 0, 1]], (500, 250))
# Source images. imgc is for color image (image with a bayer filter)
img = np.abs(np.random.random([1000, 1000]))
imgc = np.ma.array(img, mask=bayer_filter)
# Image wrong's purpose is to assert that a ValueError is raised
img_wrong = np.ma.array(img, mask=wrong_filter)
# Pass the highpass filter on the source images
<|code_end|>
, predict the next line using imports from the current file:
import pytest
import numpy as np
from emva1288.process import routines
and context including class names, function names, and sometimes code from other files:
# Path: emva1288/process/routines.py
# SIGNIFICANT_DIGITS = 7
# L = len(imgs)
# X = np.asfarray(Xi)
# Y = np.asfarray(Yi)
# X = np.asfarray(Xi)
# Y = np.asfarray(Yi)
# W = 1
# Q = q + 1
# W = int(np.ceil(1. * q / (Qmax - 1)))
# Q = int(np.floor(1. * q / W)) + 1
# B = [ymin + (i * W) for i in range(Q + 1)]
# N = np.ma.count(y)
# def load_image(fname):
# def get_int_imgs(imgs):
# def LinearB0(Xi, Yi):
# def fp(v, x):
# def e(v, x, y):
# def LinearB(Xi, Yi):
# def fp(v, x):
# def e(v, x, y):
# def GetImgShape(img):
# def FFT1288(img_, rotate=False, n=1):
# def GetFrecs(fft):
# def Histogram1288(img, Qmax):
# def cls_1288_info(cls):
# def _sections_first(dct):
# def obj_to_dict(obj):
# def dict_to_xml(d, root='results', filename=None):
# def key_to_xml(d, r):
# def xml_to_dict(xml):
# def element_to_dict(r):
# def round_significant(v, sig=SIGNIFICANT_DIGITS):
# def compare_xml(x1, x2, filename=None):
# def high_pass_filter(img, dim):
. Output only the next line. | res = routines.high_pass_filter(img, 5) |
Predict the next line for this snippet: <|code_start|> def index_sensitivity_min(self):
"""Sensitivity minimum index.
Index for linear fits in sensitivity part of the standard
(70% of saturation)
.. emva1288::
:Section: sensitivity
:Short: Sensitivity fit minimum index
"""
return self._index_sensitivity_min
@property
def R(self):
"""Responsivity.
Slope of the (u_y - u_y_dark) Vs u_p. Fit with offset = 0
Uses the :func:`~emva1288.process.routines.LinearB0` function
to make the fit.
.. emva1288::
:Section: sensitivity
:Short: Responsivity
:Symbol: R
:Unit: DN/p
"""
Y = self.temporal['u_y'] - self.temporal['u_ydark']
X = self.temporal['u_p']
<|code_end|>
with the help of current file imports:
import logging
import numpy as np
from emva1288.process import routines
from scipy.ndimage import convolve
and context from other files:
# Path: emva1288/process/routines.py
# SIGNIFICANT_DIGITS = 7
# L = len(imgs)
# X = np.asfarray(Xi)
# Y = np.asfarray(Yi)
# X = np.asfarray(Xi)
# Y = np.asfarray(Yi)
# W = 1
# Q = q + 1
# W = int(np.ceil(1. * q / (Qmax - 1)))
# Q = int(np.floor(1. * q / W)) + 1
# B = [ymin + (i * W) for i in range(Q + 1)]
# N = np.ma.count(y)
# def load_image(fname):
# def get_int_imgs(imgs):
# def LinearB0(Xi, Yi):
# def fp(v, x):
# def e(v, x, y):
# def LinearB(Xi, Yi):
# def fp(v, x):
# def e(v, x, y):
# def GetImgShape(img):
# def FFT1288(img_, rotate=False, n=1):
# def GetFrecs(fft):
# def Histogram1288(img, Qmax):
# def cls_1288_info(cls):
# def _sections_first(dct):
# def obj_to_dict(obj):
# def dict_to_xml(d, root='results', filename=None):
# def key_to_xml(d, r):
# def xml_to_dict(xml):
# def element_to_dict(r):
# def round_significant(v, sig=SIGNIFICANT_DIGITS):
# def compare_xml(x1, x2, filename=None):
# def high_pass_filter(img, dim):
, which may contain function names, class names, or code. Output only the next line. | val, _error = routines.LinearB0(X[self.index_sensitivity_min: |
Given the following code snippet before the placeholder: <|code_start|> Full well capacity
dsnu : np.array, optional
DSNU image in e^-, array with the same shape of the image
that is added to every image.
prnu : np.array, optional
PRNU image in percentages (1 = 100%), array with the same shape
of the image. Every image is multiplied by it.
seed : int, optional
A seed to initialize the random number generator.
"""
self._pixel_area = pixel_area
self._bit_depth = bit_depth
self._img_max = 2 ** int(bit_depth) - 1
if bit_depth <= 8:
self._img_type = np.uint8
elif bit_depth <= 16:
self._img_type = np.uint16
else:
self._img_type = np.uint64
self._width = width
self._height = height
self._shape = (self.height, self.width)
self._temperature_ref = temperature_ref
self._temperature_doubling = temperature_doubling
self._qe = qe
# When no specific qe is provided we simulate one
if qe is None:
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
from emva1288.camera import routines
and context including class names, function names, and sometimes code from other files:
# Path: emva1288/camera/routines.py
# def nearest_value(value, array):
# def get_photons(exposure, radiance, pixel_area, wavelength, f_number):
# def get_radiance(exposure, photons, pixel_area, f_number, wavelength):
# def get_tile(arr, height, width):
# def get_bayer_filter(t00, t01, t10, t11, width, height, w):
# def array_tile(array, height, width):
# def poisson(x, loc, mu=1000):
# def __init__(self,
# qe=None,
# wavelength=np.linspace(400, 800, 100),
# width=640,
# height=480,
# filter=None):
# def w(self):
# def qe(self):
# def gen_qe(self):
# def gauss(w):
# class Qe(object):
. Output only the next line. | self._qe = routines.Qe(height=self._height, width=self._width) |
Using the snippet: <|code_start|>
logging.basicConfig(level=logging.DEBUG)
class Command(NoArgsCommand):
help = 'Import station polygon JSON data'
def handle_noargs(self, **options):
project_dir = Path(__file__).absolute().ancestor(4)
data_file = project_dir.child('static', 'js', 'station_polys.json')
with open(data_file, 'r') as f:
data = json.loads(f.read())
for station_id, coords in data.items():
<|code_end|>
, determine the next line of code. You have imports:
import json
import logging
from django.core.management.base import NoArgsCommand
from telostats.stations.models import Station
from unipath import FSPath as Path
and context (class names, function names, or code) available:
# Path: telostats/stations/models.py
# class Station(models.Model):
# id = models.IntegerField(unique=True, primary_key=True)
# name = models.CharField(u'name', max_length=100)
# address = models.CharField(u'address', max_length=100, null=True, blank=True)
# longitude = models.FloatField(u'longitude')
# latitude = models.FloatField(u'latitude')
# polygon = JSONField(u'polygon')
# poles = models.IntegerField(u'poles')
# available = models.IntegerField(u'available')
# visible = models.BooleanField(u'visible', default=False)
#
# objects = models.Manager()
# visible_objects = VisibleStationManager()
#
# def __unicode__(self):
# return u'({}) {}'.format(self.id, self.name)
#
# def available_poles(self):
# return self.available
#
# def available_bikes(self):
# return self.poles - self.available
. Output only the next line. | station = Station.objects.get(id=station_id) |
Predict the next line after this snippet: <|code_start|>
class StationMap(TemplateView):
template_name = 'map.html'
def get_context_data(self, **kwargs):
context = super(StationMap, self).get_context_data(**kwargs)
context['tileserver_url'] = settings.TILESERVER_URL
return context
class StationDetail(PJAXResponseMixin, DetailView):
<|code_end|>
using the current file's imports:
from django.conf import settings
from django.core.mail import send_mail
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, TemplateView
from djpjax import PJAXResponseMixin
from .models import Station
and any relevant context from other files:
# Path: telostats/stations/models.py
# class Station(models.Model):
# id = models.IntegerField(unique=True, primary_key=True)
# name = models.CharField(u'name', max_length=100)
# address = models.CharField(u'address', max_length=100, null=True, blank=True)
# longitude = models.FloatField(u'longitude')
# latitude = models.FloatField(u'latitude')
# polygon = JSONField(u'polygon')
# poles = models.IntegerField(u'poles')
# available = models.IntegerField(u'available')
# visible = models.BooleanField(u'visible', default=False)
#
# objects = models.Manager()
# visible_objects = VisibleStationManager()
#
# def __unicode__(self):
# return u'({}) {}'.format(self.id, self.name)
#
# def available_poles(self):
# return self.available
#
# def available_bikes(self):
# return self.poles - self.available
. Output only the next line. | model = Station |
Continue the code snippet: <|code_start|>
logging.basicConfig(level=logging.DEBUG)
class Command(BaseCommand):
help = 'Scrape station data from the Tel-o-fun website.'
option_list = BaseCommand.option_list + (
make_option('-n', '--no-log',
action='store_false',
dest='log_tempodb',
default=True,
help='Do not log measurements to Tempo DB (use this in local env)'),
)
def handle(self, *args, **options):
<|code_end|>
. Use current file imports:
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
from telostats.stations.tasks import measure
and context (classes, functions, or code) from other files:
# Path: telostats/stations/tasks.py
# def measure(log_tempodb=True):
# logging.info('Measuring stations...')
# timestamp = datetime.datetime.utcnow().replace(tzinfo=utc)
# stations = parse_stations(scrape_stations())
# store_stations(stations)
# if log_tempodb:
# log_data(timestamp, stations)
# else:
# logging.debug('Skipping logging measurements to TempoDB...')
# logging.info('Measured {} stations.'.format(len(stations)))
# # TODO: periodically write more metadata about stations to the tempo series?
. Output only the next line. | measure(log_tempodb=options['log_tempodb']) |
Given the following code snippet before the placeholder: <|code_start|>
admin.autodiscover()
v1_api = Api(api_name='v1')
v1_api.register(StationResource())
<|code_end|>
, predict the next line using imports from the current file:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from tastypie.api import Api
from telostats.stations.api import StationResource, RecentResource, AverageResource
and context including class names, function names, and sometimes code from other files:
# Path: telostats/stations/api.py
# class StationResource(ModelResource):
# class Meta:
# queryset = Station.visible_objects.all()
# resource_name = 'station'
# # cache = APICache(timeout=60 * 15)
# serializer = Serializer(formats=['json'])
# limit = 0 # show all stations by default
# allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
# excludes = ['visible']
#
# def dehydrate(self, bundle):
# bundle.data['polygon'] = json.loads(bundle.data['polygon'])
# return bundle
#
# class RecentResource(Resource):
# id = fields.CharField(attribute='id')
# series = fields.ListField(attribute='series')
#
# class Meta:
# object_class = StationSeries
# resource_name = 'recent'
# # cache = APICache(timeout=60 * 15)
# serializer = Serializer(formats=['json'])
# limit = 1
# list_allowed_methods = []
# detail_allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
#
# def _client(self):
# return TempoDbClient()
#
# def _get_series(self, station_id=None, **kwargs):
# return self._client().get_series(station_id, **kwargs)
#
# def get_object_list(self, request):
# series_list = self._get_series().items()
# res = []
# for sta_id, series in series_list:
# obj = StationSeries(initial=series)
# obj.id = sta_id
# res.append(obj)
# return res
#
# def obj_get_list(self, request=None, **kwargs):
# return self.get_object_list(request)
#
# def obj_get(self, request=None, **kwargs):
# station_id = kwargs['pk']
# series = self._get_series(station_id=station_id)[station_id]
# # zip the two lists together on same timestamps
# timestamps = [x['t'] for x in series['available']] # or poles, dm;st
# available = [x['v'] for x in series['available']]
# poles = [x['v'] for x in series['poles']]
# series = [{
# 'timestamp': t,
# 'poles': p,
# 'available': a,
# 'bikes': p - a
# } for t, p, a in zip(timestamps, poles, available)]
# initial_series = {'series': series}
# station_series = StationSeries(initial=initial_series)
# station_series.id = station_id
# return station_series
#
# class AverageResource(Resource):
# id = fields.CharField(attribute='id')
# series = fields.ListField(attribute='series')
#
# class Meta:
# object_class = StationSeries
# resource_name = 'average'
# cache = APICache(timeout=60 * 60 * 24 * 7)
# serializer = Serializer(formats=['json'])
# limit = 1
# list_allowed_methods = []
# detail_allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
#
# def _client(self):
# return TempoDbClient()
#
# def _get_series(self, station_id=None, **kwargs):
# return self._client().get_series(station_id, start=timedelta(days=7), **kwargs)
#
# def obj_get(self, request=None, **kwargs):
# station_id = kwargs['pk']
# series = self._get_series(station_id=station_id)[station_id]
#
# # initial result structure
# res = dict([
# (i, {'available': [], 'poles': []})
# for i in range(24)])
#
# # data collection in buckets
# for s in ['available', 'poles']:
# for datum in series[s]:
# hour = parse_date(datum['t']).hour
# res[hour][s].append(datum['v'])
#
# # reduce lists by average
# res = [{
# 'timestamp': datetime(2013, 1, 1, k).isoformat(),
# 'available': sum(v['available']) / len(v['available']),
# 'poles': sum(v['poles']) / len(v['poles']),
# } for k, v in res.items()]
#
# # add final bike count
# for hour in res:
# hour['bikes'] = hour['poles'] - hour['available']
#
# initial_series = {'series': res}
# station_series = StationSeries(initial=initial_series)
# station_series.id = station_id
# return station_series
. Output only the next line. | v1_api.register(RecentResource()) |
Predict the next line for this snippet: <|code_start|>
admin.autodiscover()
v1_api = Api(api_name='v1')
v1_api.register(StationResource())
v1_api.register(RecentResource())
<|code_end|>
with the help of current file imports:
from django.conf.urls import patterns, include, url
from django.contrib import admin
from tastypie.api import Api
from telostats.stations.api import StationResource, RecentResource, AverageResource
and context from other files:
# Path: telostats/stations/api.py
# class StationResource(ModelResource):
# class Meta:
# queryset = Station.visible_objects.all()
# resource_name = 'station'
# # cache = APICache(timeout=60 * 15)
# serializer = Serializer(formats=['json'])
# limit = 0 # show all stations by default
# allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
# excludes = ['visible']
#
# def dehydrate(self, bundle):
# bundle.data['polygon'] = json.loads(bundle.data['polygon'])
# return bundle
#
# class RecentResource(Resource):
# id = fields.CharField(attribute='id')
# series = fields.ListField(attribute='series')
#
# class Meta:
# object_class = StationSeries
# resource_name = 'recent'
# # cache = APICache(timeout=60 * 15)
# serializer = Serializer(formats=['json'])
# limit = 1
# list_allowed_methods = []
# detail_allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
#
# def _client(self):
# return TempoDbClient()
#
# def _get_series(self, station_id=None, **kwargs):
# return self._client().get_series(station_id, **kwargs)
#
# def get_object_list(self, request):
# series_list = self._get_series().items()
# res = []
# for sta_id, series in series_list:
# obj = StationSeries(initial=series)
# obj.id = sta_id
# res.append(obj)
# return res
#
# def obj_get_list(self, request=None, **kwargs):
# return self.get_object_list(request)
#
# def obj_get(self, request=None, **kwargs):
# station_id = kwargs['pk']
# series = self._get_series(station_id=station_id)[station_id]
# # zip the two lists together on same timestamps
# timestamps = [x['t'] for x in series['available']] # or poles, dm;st
# available = [x['v'] for x in series['available']]
# poles = [x['v'] for x in series['poles']]
# series = [{
# 'timestamp': t,
# 'poles': p,
# 'available': a,
# 'bikes': p - a
# } for t, p, a in zip(timestamps, poles, available)]
# initial_series = {'series': series}
# station_series = StationSeries(initial=initial_series)
# station_series.id = station_id
# return station_series
#
# class AverageResource(Resource):
# id = fields.CharField(attribute='id')
# series = fields.ListField(attribute='series')
#
# class Meta:
# object_class = StationSeries
# resource_name = 'average'
# cache = APICache(timeout=60 * 60 * 24 * 7)
# serializer = Serializer(formats=['json'])
# limit = 1
# list_allowed_methods = []
# detail_allowed_methods = ['get']
# filtering = {
# 'id': ('exact', ),
# }
#
# def _client(self):
# return TempoDbClient()
#
# def _get_series(self, station_id=None, **kwargs):
# return self._client().get_series(station_id, start=timedelta(days=7), **kwargs)
#
# def obj_get(self, request=None, **kwargs):
# station_id = kwargs['pk']
# series = self._get_series(station_id=station_id)[station_id]
#
# # initial result structure
# res = dict([
# (i, {'available': [], 'poles': []})
# for i in range(24)])
#
# # data collection in buckets
# for s in ['available', 'poles']:
# for datum in series[s]:
# hour = parse_date(datum['t']).hour
# res[hour][s].append(datum['v'])
#
# # reduce lists by average
# res = [{
# 'timestamp': datetime(2013, 1, 1, k).isoformat(),
# 'available': sum(v['available']) / len(v['available']),
# 'poles': sum(v['poles']) / len(v['poles']),
# } for k, v in res.items()]
#
# # add final bike count
# for hour in res:
# hour['bikes'] = hour['poles'] - hour['available']
#
# initial_series = {'series': res}
# station_series = StationSeries(initial=initial_series)
# station_series.id = station_id
# return station_series
, which may contain function names, class names, or code. Output only the next line. | v1_api.register(AverageResource()) |
Predict the next line after this snippet: <|code_start|>
urlpatterns = patterns('',
url('^$', StationMap.as_view(), name='home'),
url('^about$', About.as_view(), name='about'),
<|code_end|>
using the current file's imports:
from django.conf.urls import patterns, url
from .views import About, AboutApi, Contact, StationMap, StationDetail
and any relevant context from other files:
# Path: telostats/stations/views.py
# class About(StationMap):
# template_name = 'about.html'
#
# class AboutApi(StationMap):
# template_name = 'about_api.html'
#
# class Contact(StationMap):
# template_name = 'contact.html'
#
# def post(self, request):
# msg = request.POST.get('message')
# send_mail('Tel-O-Stats Feedback Received', msg,
# 'admin@telostats.com', ['idan@gazit.me', 'yuv.adm@gmail.com'])
# return HttpResponseRedirect('/')
#
# class StationMap(TemplateView):
# template_name = 'map.html'
#
# def get_context_data(self, **kwargs):
# context = super(StationMap, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
#
# class StationDetail(PJAXResponseMixin, DetailView):
# model = Station
# context_object_name = 'station'
#
# def get_context_data(self, **kwargs):
# context = super(StationDetail, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
. Output only the next line. | url('^about/api$', AboutApi.as_view(), name='about_api'), |
Predict the next line after this snippet: <|code_start|>
urlpatterns = patterns('',
url('^$', StationMap.as_view(), name='home'),
url('^about$', About.as_view(), name='about'),
url('^about/api$', AboutApi.as_view(), name='about_api'),
<|code_end|>
using the current file's imports:
from django.conf.urls import patterns, url
from .views import About, AboutApi, Contact, StationMap, StationDetail
and any relevant context from other files:
# Path: telostats/stations/views.py
# class About(StationMap):
# template_name = 'about.html'
#
# class AboutApi(StationMap):
# template_name = 'about_api.html'
#
# class Contact(StationMap):
# template_name = 'contact.html'
#
# def post(self, request):
# msg = request.POST.get('message')
# send_mail('Tel-O-Stats Feedback Received', msg,
# 'admin@telostats.com', ['idan@gazit.me', 'yuv.adm@gmail.com'])
# return HttpResponseRedirect('/')
#
# class StationMap(TemplateView):
# template_name = 'map.html'
#
# def get_context_data(self, **kwargs):
# context = super(StationMap, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
#
# class StationDetail(PJAXResponseMixin, DetailView):
# model = Station
# context_object_name = 'station'
#
# def get_context_data(self, **kwargs):
# context = super(StationDetail, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
. Output only the next line. | url('^contact$', Contact.as_view(), name='contact'), |
Predict the next line for this snippet: <|code_start|>
urlpatterns = patterns('',
url('^$', StationMap.as_view(), name='home'),
url('^about$', About.as_view(), name='about'),
url('^about/api$', AboutApi.as_view(), name='about_api'),
url('^contact$', Contact.as_view(), name='contact'),
<|code_end|>
with the help of current file imports:
from django.conf.urls import patterns, url
from .views import About, AboutApi, Contact, StationMap, StationDetail
and context from other files:
# Path: telostats/stations/views.py
# class About(StationMap):
# template_name = 'about.html'
#
# class AboutApi(StationMap):
# template_name = 'about_api.html'
#
# class Contact(StationMap):
# template_name = 'contact.html'
#
# def post(self, request):
# msg = request.POST.get('message')
# send_mail('Tel-O-Stats Feedback Received', msg,
# 'admin@telostats.com', ['idan@gazit.me', 'yuv.adm@gmail.com'])
# return HttpResponseRedirect('/')
#
# class StationMap(TemplateView):
# template_name = 'map.html'
#
# def get_context_data(self, **kwargs):
# context = super(StationMap, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
#
# class StationDetail(PJAXResponseMixin, DetailView):
# model = Station
# context_object_name = 'station'
#
# def get_context_data(self, **kwargs):
# context = super(StationDetail, self).get_context_data(**kwargs)
# context['tileserver_url'] = settings.TILESERVER_URL
# return context
, which may contain function names, class names, or code. Output only the next line. | url('^station/(?P<pk>\d+)', StationDetail.as_view(), name='station_detail'), |
Next line prediction: <|code_start|>
class APICache(NoCache):
def __init__(self, timeout=60, *args, **kwargs):
super(APICache, self).__init__(*args, **kwargs)
self.timeout = timeout
def cache_control(self):
return {
'max_age': self.timeout,
's_maxage': self.timeout,
'public': True
}
class StationResource(ModelResource):
class Meta:
<|code_end|>
. Use current file imports:
(import json
from datetime import datetime, timedelta
from dateutil.parser import parse as parse_date
from tastypie.cache import NoCache
from tastypie.resources import ModelResource, Resource, fields
from tastypie.serializers import Serializer
from .models import Station
from ..utils.tempodb import TempoDbClient)
and context including class names, function names, or small code snippets from other files:
# Path: telostats/stations/models.py
# class Station(models.Model):
# id = models.IntegerField(unique=True, primary_key=True)
# name = models.CharField(u'name', max_length=100)
# address = models.CharField(u'address', max_length=100, null=True, blank=True)
# longitude = models.FloatField(u'longitude')
# latitude = models.FloatField(u'latitude')
# polygon = JSONField(u'polygon')
# poles = models.IntegerField(u'poles')
# available = models.IntegerField(u'available')
# visible = models.BooleanField(u'visible', default=False)
#
# objects = models.Manager()
# visible_objects = VisibleStationManager()
#
# def __unicode__(self):
# return u'({}) {}'.format(self.id, self.name)
#
# def available_poles(self):
# return self.available
#
# def available_bikes(self):
# return self.poles - self.available
#
# Path: telostats/utils/tempodb.py
# class TempoDbClient():
# def __init__(self, key=TEMPODB_KEY, secret=TEMPODB_SECRET):
# self.key = key
# self.secret = secret
# session_auth = requests.auth.HTTPBasicAuth(TEMPODB_KEY, TEMPODB_SECRET)
# self.session = requests.session(auth=session_auth)
#
# def get_data(self, station_id=None, start=None, end=None, interval='1hour',
# function='mean', tz='Asia/Jerusalem'):
# params = {
# 'interval': interval,
# 'function': function,
# 'tz': tz
# }
# if start:
# params['start'] = start
# if end:
# params['end'] = end
# if station_id:
# params['attr[station]'] = station_id
# return self.session.get(API_URL + '/data/', params=params)
#
# def get_series(self, station_id=None, interval='1hour', start=timedelta(hours=24), **kwargs):
# start = (datetime.utcnow() - start).isoformat()
# data = self.get_data(station_id=station_id, start=start, interval=interval)
# content = data.content
# res = defaultdict(dict)
#
# d = json.loads(content)
# for series in d:
# station_id = series['series']['attributes']['station']
# data_type = series['series']['tags'][0]
# data_val = series['data']
# res[station_id][data_type] = data_val
#
# return dict(res)
#
# def get_latest_counts(self, station_id=None):
# '''This method is pending a proper get_latest API from tempo db'''
# start = datetime.utcnow() - timedelta(minutes=15)
# content = self.get_data(station_id=station_id, start=start).content
# res = defaultdict(dict)
#
# d = json.loads(content)
# for series in d:
# station_id = series['series']['attributes']['station']
# data_type = series['series']['tags'][0]
# data_val = series['data'][-1]['v']
# res[station_id][data_type] = data_val
#
# return dict(res)
. Output only the next line. | queryset = Station.visible_objects.all() |
Given snippet: <|code_start|> if hasattr(initial, 'items'):
self.__dict__['_data'] = initial
def __getattr__(self, name):
return self._data.get(name, None)
def __setattr__(self, name, value):
self.__dict__['_data'][name] = value
def to_dict(self):
return self._data
class RecentResource(Resource):
id = fields.CharField(attribute='id')
series = fields.ListField(attribute='series')
class Meta:
object_class = StationSeries
resource_name = 'recent'
# cache = APICache(timeout=60 * 15)
serializer = Serializer(formats=['json'])
limit = 1
list_allowed_methods = []
detail_allowed_methods = ['get']
filtering = {
'id': ('exact', ),
}
def _client(self):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
from datetime import datetime, timedelta
from dateutil.parser import parse as parse_date
from tastypie.cache import NoCache
from tastypie.resources import ModelResource, Resource, fields
from tastypie.serializers import Serializer
from .models import Station
from ..utils.tempodb import TempoDbClient
and context:
# Path: telostats/stations/models.py
# class Station(models.Model):
# id = models.IntegerField(unique=True, primary_key=True)
# name = models.CharField(u'name', max_length=100)
# address = models.CharField(u'address', max_length=100, null=True, blank=True)
# longitude = models.FloatField(u'longitude')
# latitude = models.FloatField(u'latitude')
# polygon = JSONField(u'polygon')
# poles = models.IntegerField(u'poles')
# available = models.IntegerField(u'available')
# visible = models.BooleanField(u'visible', default=False)
#
# objects = models.Manager()
# visible_objects = VisibleStationManager()
#
# def __unicode__(self):
# return u'({}) {}'.format(self.id, self.name)
#
# def available_poles(self):
# return self.available
#
# def available_bikes(self):
# return self.poles - self.available
#
# Path: telostats/utils/tempodb.py
# class TempoDbClient():
# def __init__(self, key=TEMPODB_KEY, secret=TEMPODB_SECRET):
# self.key = key
# self.secret = secret
# session_auth = requests.auth.HTTPBasicAuth(TEMPODB_KEY, TEMPODB_SECRET)
# self.session = requests.session(auth=session_auth)
#
# def get_data(self, station_id=None, start=None, end=None, interval='1hour',
# function='mean', tz='Asia/Jerusalem'):
# params = {
# 'interval': interval,
# 'function': function,
# 'tz': tz
# }
# if start:
# params['start'] = start
# if end:
# params['end'] = end
# if station_id:
# params['attr[station]'] = station_id
# return self.session.get(API_URL + '/data/', params=params)
#
# def get_series(self, station_id=None, interval='1hour', start=timedelta(hours=24), **kwargs):
# start = (datetime.utcnow() - start).isoformat()
# data = self.get_data(station_id=station_id, start=start, interval=interval)
# content = data.content
# res = defaultdict(dict)
#
# d = json.loads(content)
# for series in d:
# station_id = series['series']['attributes']['station']
# data_type = series['series']['tags'][0]
# data_val = series['data']
# res[station_id][data_type] = data_val
#
# return dict(res)
#
# def get_latest_counts(self, station_id=None):
# '''This method is pending a proper get_latest API from tempo db'''
# start = datetime.utcnow() - timedelta(minutes=15)
# content = self.get_data(station_id=station_id, start=start).content
# res = defaultdict(dict)
#
# d = json.loads(content)
# for series in d:
# station_id = series['series']['attributes']['station']
# data_type = series['series']['tags'][0]
# data_val = series['data'][-1]['v']
# res[station_id][data_type] = data_val
#
# return dict(res)
which might include code, classes, or functions. Output only the next line. | return TempoDbClient() |
Predict the next line after this snippet: <|code_start|> conn.execute(
'''
DROP TABLE IF EXISTS sales;
CREATE TABLE sales (
beer_id INT,
sold_at DATE,
quantity INT,
bumpkin VARCHAR(80)
);
DROP TABLE IF EXISTS beers;
CREATE TABLE beers (
id INT,
name VARCHAR(80),
style VARCHAR(80),
abv FLOAT,
price DECIMAL(16,2)
);
'''
)
class DenormalizeBeersPipe(pemi.Pipe):
def __init__(self, **params):
super().__init__(**params)
sa_engine = sa.create_engine(this.params['sa_conn_str'])
self.source(
<|code_end|>
using the current file's imports:
import os
import sys
import logging
import factory
import sqlalchemy as sa
import pemi
import pemi.testing as pt
from pemi.data_subject import SaDataSubject
from pemi.fields import *
and any relevant context from other files:
# Path: pemi/data_subject.py
# class SaDataSubject(DataSubject):
# def __init__(self, engine, table, sql_schema=None, **kwargs):
# super().__init__(**kwargs)
# self.engine = engine
# self.table = table
# self.sql_schema = sql_schema
#
# self.cached_test_df = None
#
# def to_pd(self):
# if self.cached_test_df is not None:
# return self.cached_test_df
#
# with self.engine.connect() as conn:
# df = pd.read_sql_table(
# self.table,
# conn,
# schema=self.sql_schema,
# )
#
# for column in set(df.columns) & set(self.schema.keys()):
# df[column] = df[column].apply(self.schema[column].coerce)
#
# self.cached_test_df = df
# return df
#
# def from_pd(self, df, **to_sql_opts):
# self.cached_test_df = df
# pemi.log.debug('loading SaDataSubject with:\n%s', self.cached_test_df)
#
# to_sql_opts['if_exists'] = to_sql_opts.get('if_exists', 'append')
# to_sql_opts['index'] = to_sql_opts.get('index', False)
# if self.sql_schema:
# to_sql_opts['schema'] = self.sql_schema
#
# df_to_sql = df.copy()
# for field in self.schema.values():
# if isinstance(field, JsonField):
# df_to_sql[field.name] = df_to_sql[field.name].apply(json.dumps)
#
# with self.engine.connect() as conn:
# df_to_sql.to_sql(self.table, conn, **to_sql_opts)
#
# def connect_from(self, _other):
# self.engine.dispose()
# self.validate_schema()
#
# def __getstate__(self):
# return (
# [],
# {
# 'url': self.engine.url,
# 'table': self.table,
# 'sql_schema': self.sql_schema
# }
# )
#
# def __setstate__(self, state):
# _args, kwargs = state
# self.engine = sa.create_engine(kwargs['url'])
# self.table = kwargs['table']
# self.sql_schema = kwargs['sql_schema']
. Output only the next line. | SaDataSubject, |
Here is a snippet: <|code_start|>
pytestmark = pytest.mark.spark
class DenormalizeBeersPipe(pemi.Pipe):
def __init__(self, spark_session, **params):
super().__init__(**params)
self.source(
<|code_end|>
. Write the next line using the current file imports:
import pytest
import factory
import pyspark
import pemi
import pemi.testing as pt
from pemi.data_subject import SparkDataSubject
from pemi.fields import *
and context from other files:
# Path: pemi/data_subject.py
# class SparkDataSubject(DataSubject):
# def __init__(self, spark, df=None, **kwargs):
# super().__init__(**kwargs)
# self.spark = spark
# self.df = df
#
# self.cached_test_df = None
#
# def to_pd(self):
# if self.cached_test_df is not None:
# return self.cached_test_df
#
# converted_df = self.df.toPandas()
# self.cached_test_df = pd.DataFrame()
# for column in list(converted_df):
# self.cached_test_df[column] = converted_df[column].apply(self.schema[column].coerce)
#
# return self.cached_test_df
#
# def from_pd(self, df, **kwargs):
# self.df = self.spark.createDataFrame(df)
#
# def connect_from(self, other):
# self.spark = other.spark.builder.getOrCreate()
# self.df = other.df
# self.validate_schema()
, which may include functions, classes, or code. Output only the next line. | SparkDataSubject, |
Given the code snippet: <|code_start|> bumpkin VARCHAR(80)
);
DROP TABLE IF EXISTS beers;
CREATE TABLE beers (
id INT,
name VARCHAR(80),
style VARCHAR(80),
abv FLOAT,
price DECIMAL(16,2)
);
DROP TABLE IF EXISTS renamed_sales;
CREATE TABLE renamed_sales (LIKE sales);
DROP TABLE IF EXISTS renamed_beers;
CREATE TABLE renamed_beers (LIKE beers);
'''
)
# This pipe just renames a tables and is contrived to show
# how queries can be run in parallel via Dask.
class RenameSaPipe(pemi.Pipe):
def __init__(self, *, schema, table, **params):
super().__init__(**params)
self.schema = schema
self.table = table
self.source(
<|code_end|>
, generate the next line using the imports in this file:
import os
import logging
import factory
import sqlalchemy as sa
import dask.threaded
import pemi
import pemi.testing as pt
from pemi.data_subject import SaDataSubject
from pemi.fields import *
and context (functions, classes, or occasionally code) from other files:
# Path: pemi/data_subject.py
# class SaDataSubject(DataSubject):
# def __init__(self, engine, table, sql_schema=None, **kwargs):
# super().__init__(**kwargs)
# self.engine = engine
# self.table = table
# self.sql_schema = sql_schema
#
# self.cached_test_df = None
#
# def to_pd(self):
# if self.cached_test_df is not None:
# return self.cached_test_df
#
# with self.engine.connect() as conn:
# df = pd.read_sql_table(
# self.table,
# conn,
# schema=self.sql_schema,
# )
#
# for column in set(df.columns) & set(self.schema.keys()):
# df[column] = df[column].apply(self.schema[column].coerce)
#
# self.cached_test_df = df
# return df
#
# def from_pd(self, df, **to_sql_opts):
# self.cached_test_df = df
# pemi.log.debug('loading SaDataSubject with:\n%s', self.cached_test_df)
#
# to_sql_opts['if_exists'] = to_sql_opts.get('if_exists', 'append')
# to_sql_opts['index'] = to_sql_opts.get('index', False)
# if self.sql_schema:
# to_sql_opts['schema'] = self.sql_schema
#
# df_to_sql = df.copy()
# for field in self.schema.values():
# if isinstance(field, JsonField):
# df_to_sql[field.name] = df_to_sql[field.name].apply(json.dumps)
#
# with self.engine.connect() as conn:
# df_to_sql.to_sql(self.table, conn, **to_sql_opts)
#
# def connect_from(self, _other):
# self.engine.dispose()
# self.validate_schema()
#
# def __getstate__(self):
# return (
# [],
# {
# 'url': self.engine.url,
# 'table': self.table,
# 'sql_schema': self.sql_schema
# }
# )
#
# def __setstate__(self, state):
# _args, kwargs = state
# self.engine = sa.create_engine(kwargs['url'])
# self.table = kwargs['table']
# self.sql_schema = kwargs['sql_schema']
. Output only the next line. | SaDataSubject, |
Continue the code snippet: <|code_start|> 'error_bad_lines': True
}
return {**default_opts, **user_csv_opts, **mandatory_opts}
def _parse_one(self, filepath):
pemi.log.debug('Parsing file at %s', filepath)
raw_df = pd.read_csv(filepath, **self.csv_opts)
pemi.log.debug('Found %i raw records', len(raw_df))
raw_df.columns = [self.column_normalizer(col) for col in raw_df.columns]
if self.filename_field:
if self.filename_full_path:
raw_df[self.filename_field] = filepath
else:
raw_df[self.filename_field] = os.path.basename(filepath)
if self.schema:
return raw_df.mapping(
[(name, name, field.coerce) for name, field in self.schema.items()],
on_error='redirect'
)
return raw_df.mapping([], inplace=True)
def flow(self):
self.parse(self.extract())
<|code_end|>
. Use current file imports:
import os
import re
import pandas as pd
import pemi
from pemi.pipes.patterns import TargetPipe
and context (classes, functions, or code) from other files:
# Path: pemi/pipes/patterns.py
# class TargetPipe(pemi.Pipe):
# '''
# A target pipe takes data provided to it, encodes it into a structure that can be
# understand by some external target, and then loads the data into that external target.
# '''
#
# def __init__(self, *, schema, **params):
# super().__init__(**params)
#
# self.schema = schema
#
# self.source(
# pemi.PdDataSubject,
# name='main',
# schema=self.schema
# )
#
# self.target(
# pemi.PdDataSubject,
# name='errors'
# )
#
# self.target(
# pemi.PdDataSubject,
# name='response'
# )
#
# def encode(self):
# # e.g., CsvTargetEncoder.encode()
# # return source_main_data_encoded_for_loader
# raise NotImplementedError
#
# def load(self, encoded_data):
# # e.g., S3Loader.load()
# # return results_from_load_operation
# raise NotImplementedError
#
# def flow(self):
# self.load(self.encode())
. Output only the next line. | class LocalCsvFileTargetPipe(TargetPipe): |
Next line prediction: <|code_start|> def expect_exception(self, exception):
'''
Used to indicate that the test case is expected to fail with exception ``exception``.
If the test case raises this exception, then it will pass. If it does not raise the
exception, then it will fail.
'''
self.expected_exception = exception
def setup(self):
for i_when in self.whens:
i_when(self)
def _assert_then(self, i_then):
if self.expected_exception:
with pytest.raises(self.expected_exception):
i_then(self)
else:
i_then(self)
def assert_case(self):
self.scenario.run()
try:
if len(self.thens) < 1:
raise CaseStructureError
for i_then in self.thens:
self._assert_then(i_then)
except AssertionError:
<|code_end|>
. Use current file imports:
(from collections import namedtuple
from collections import OrderedDict
from pemi.tabular import PemiTabular
from pemi.pipe import mock_pipe #pylint: disable=unused-import
import os
import sys
import inspect
import pytest
import pandas as pd
import pemi
import pemi.data)
and context including class names, function names, or small code snippets from other files:
# Path: pemi/tabular.py
# class PemiTabular:
# __DEFAULT_FILE_DIR = 'pemi-tabular/'
# __DEFAULT_FILE_NAME = 'tabular.html'
# __CSS_FILE_PATH = _DIR_PATH + '/pemi-web/css/pemi_errors.css'
#
# def __init__(self, file_dir=__DEFAULT_FILE_DIR):
# super().__init__()
# self.__file_dir = file_dir
# self.__css_style = self.__load_css_file()
# self.__data = []
# self.__output_file = None
#
# @staticmethod
# def to_html_table(df, title):
# try:
# if not isinstance(df, pd.core.frame.DataFrame):
# raise ValueError
# except ValueError:
# raise ValueError("df argument should have a type of pandas.core.frame.DataFrame")
#
# html_title = "<h3 style=\"text-align: center;\">{}</h3> \n".format(title)
# df = df.applymap(str)
# df.replace("<", "<", regex=True, inplace=True)
# df.replace(">", ">", regex=True, inplace=True)
# html_table = html_title + tbl(df, list(df.columns.values), 'html', stralign='center')
# return html_table
#
# def add(self, df, df_name=None):
# if df_name is None:
# df_name = self.__generate_df_name()
# self.__data.append({
# "name": df_name,
# "df": df
# })
#
# def render(self, file=__DEFAULT_FILE_NAME):
# file_path = self.__file_dir + file
# self.__open(file_path)
# self.__append("<html><head>" + "<style>" + self.__css_style + "</style></head>")
# for _, item in enumerate(self.__data):
# html_table = PemiTabular.to_html_table(df=item['df'], title=item['name'])
# self.__append(html_table)
# self.__append("</html>")
# self.__close()
#
# def reset(self):
# self.__data = []
#
# def __append(self, data):
# self.__output_file.write(data)
#
# def __close(self):
# try:
# if self.__output_file is not None:
# self.__output_file.close()
# except IOError:
# raise IOError('Cannot close output file')
#
# def __open(self, file_path):
# self.__create_dir()
#
# try:
# self.__output_file = open(file_path, "w")
# except IOError:
# raise IOError('Cannot open file with path ' + file_path)
#
# def __generate_df_name(self):
# default_table_name = 'Pemi Errors Table {}'
# return default_table_name.format(len(self.__data) + 1)
#
# def __create_dir(self):
# try:
# if not os.path.exists(self.__file_dir):
# os.mkdir(self.__file_dir)
# except IOError:
# raise IOError('Cannot create directory ' + self.__file_dir)
#
# def __load_css_file(self):
# try:
# css_file = open(self.__CSS_FILE_PATH, "r")
# css_content = css_file.read()
# except IOError:
# raise IOError('Cannot find CSS file with path ' + self.__CSS_FILE_PATH)
#
# return css_content
#
# Path: pemi/pipe.py
# def mock_pipe(parent_pipe, pipe_name):
# pipe = parent_pipe.pipes[pipe_name]
# mocked = MockPipe(name=pipe.name)
# for source in pipe.sources:
# mocked.sources[source] = pipe.sources[source]
# mocked.sources[source].pipe = mocked
#
# for target in pipe.targets:
# mocked.targets[target] = pipe.targets[target]
# mocked.targets[target].pipe = mocked
#
# parent_pipe.pipes[pipe_name] = mocked
. Output only the next line. | errors_tbl = PemiTabular() |
Here is a snippet: <|code_start|>
this = sys.modules[__name__]
this.schemas = {
'beers': pemi.Schema(
id=IntegerField(),
name=StringField(),
abv=DecimalField(precision=3, scale=1),
last_brewed_at=DateField()
),
'beers_w_style': pemi.Schema(
id=IntegerField(),
name=StringField(),
abv=DecimalField(precision=3, scale=1),
style=StringField()
)
}
class RemoteSourcePipe(pemi.Pipe):
def __init__(self, **params):
super().__init__(**params)
self.target(
<|code_end|>
. Write the next line using the current file imports:
import re
import sys
import pandas as pd
import factory
import pemi
import pemi.testing as pt
from collections import OrderedDict
from pemi.data_subject import PdDataSubject
from pemi.fields import *
and context from other files:
# Path: pemi/data_subject.py
# class PdDataSubject(DataSubject):
# def __init__(self, df=None, strict_match_schema=False, **kwargs):
# super().__init__(**kwargs)
#
# if df is None or df.shape == (0, 0):
# df = self._empty_df()
# self.strict_match_schema = strict_match_schema
# self.df = df
#
# def to_pd(self):
# return self.df
#
# def from_pd(self, df, **kwargs):
# self.df = df
#
# def connect_from(self, other):
# if other.df is None or other.df.shape == (0, 0):
# self.df = self._empty_df()
# else:
# self.df = other.df
# self.validate_schema()
#
# def validate_schema(self):
# 'Verify that the dataframe contains all of the columns specified in the schema'
# if self.strict_match_schema:
# return self.validate_data_frame_columns()
# missing = set(self.schema.keys()) - set(self.df.columns)
# if len(missing) == 0:
# return True
# raise MissingFieldsError('DataFrame missing expected fields: {}'.format(missing))
#
# def validate_data_frame_columns(self):
# 'Verify that the schema contains all the columns specefied in the dataframe'
# missing = set(self.df.columns) - set(self.schema.keys())
# if len(missing) > 0:
# raise MissingFieldsError("Schema is missing current columns: {}".format(missing))
# return True
#
# def _empty_df(self):
# return pd.DataFrame(columns=self.schema.keys())
, which may include functions, classes, or code. Output only the next line. | PdDataSubject, |
Next line prediction: <|code_start|>"""
Test for hbridge
"""
class TestHBridge(unittest.TestCase):
"""
    Tests to make sure movements work correctly
"""
def test_initially_stopped(self):
""" Test bot is in stopped state """
<|code_end|>
. Use current file imports:
(import unittest
from minibot.hardware.virtual_gpio.gpio import DigitalOutput, PWM
from minibot.peripherals.hbridge import HBridge)
and context including class names, function names, or small code snippets from other files:
# Path: minibot/hardware/virtual_gpio/gpio.py
# class DigitalOutput():
# """
# Digital output pin.
# """
#
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
# self.state = 0
#
# def set_low(self):
# """
# Set the digital output pin to low.
# """
# self.state = 0
#
# def set_high(self):
# """
# Set the digital output pin to high.
# """
# self.state = 1
#
# class PWM():
# """
# PWM used on a minibot.
# """
# def __init__(self, pin, frequency=1, duty_cycle=0):
# """
# Constructor.
# Args:
# pin (int): Pin that the PWM is connected to on the minibot.
# frequency (int): Frequency of the PWM.
# duty_cycle (int): Duty cycle of the PWM.
# """
# self.pin = pin
# self.frequency = frequency
# self.duty_cycle = duty_cycle
# self.started = False
#
# def set_frequency(self, frequency):
# """
# Sets frequency of the PWM.
# """
# self.frequency = frequency
#
# def set_duty_cycle(self, duty_cycle):
# """
# Sets duty cycle of the PWM.
# """
# self.duty_cycle = duty_cycle
#
# def stop(self):
# """
# Stops the PWM.
# """
# self.started = False
#
# def start(self):
# """
# Starts the PWM.
# """
# self.started = True
#
# Path: minibot/peripherals/hbridge.py
# class HBridge():
# """
# Minibot H-Bridge class.
# """
# def __init__(self, left_pin, left_pwm, right_pin, right_pwm):
# """
# Constructor.
# Args:
# left_pin (:obj:`DigitalOutput`): Left motor direction pin.
# left_pwm (:obj:`PWM`): PWM of the servo.
# right_pin (:obj:`DigitalOutput`): Right motor direction pin.
# right_pwm (:obj:`PWM`): PWM of the servo.
# """
# self.left_pin = left_pin
# self.left_pwm = left_pwm
# self.right_pin = right_pin
# self.right_pwm = right_pwm
#
# self.left_speed = 0
# self.right_speed = 0
#
# left_pwm.set_frequency(100)
# right_pwm.set_frequency(100)
#
# def get_speed(self):
# """
# Returns the (left speed, right speed) tuple
# """
# return (self.left_speed, self.right_speed)
#
# def set_speed(self, left, right):
# """
# Sets the speed of both motors.
# Args:
# left (float): The speed of the left motor (-100 to 100).
# right (float): The speed of the right motor (-100 to 100).
# """
# self.left_speed = max(min(left, 100.0), -100.0)
# self.right_speed = max(min(right, 100.0), -100.0)
# # divide by hundred because PWMs have values between 1 and -1
# # values are negated because of the wiring setup
# left = -self.left_speed/100.0
# right = -self.right_speed/100.0
#
# if left < 0:
# self.left_pin.set_high()
# self.left_pwm.set_duty_cycle(abs(left))
# else:
# self.left_pin.set_low()
# self.left_pwm.set_duty_cycle(1-abs(left))
#
# if right < 0:
# self.right_pin.set_high()
# self.right_pwm.set_duty_cycle(abs(right))
# else:
# self.right_pin.set_low()
# self.right_pwm.set_duty_cycle(1-abs(right))
. Output only the next line. | hbridge = HBridge(DigitalOutput(10), |
Continue the code snippet: <|code_start|>"""
Test for hbridge
"""
class TestHBridge(unittest.TestCase):
"""
    Tests to make sure movements work correctly
"""
def test_initially_stopped(self):
""" Test bot is in stopped state """
hbridge = HBridge(DigitalOutput(10),
<|code_end|>
. Use current file imports:
import unittest
from minibot.hardware.virtual_gpio.gpio import DigitalOutput, PWM
from minibot.peripherals.hbridge import HBridge
and context (classes, functions, or code) from other files:
# Path: minibot/hardware/virtual_gpio/gpio.py
# class DigitalOutput():
# """
# Digital output pin.
# """
#
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
# self.state = 0
#
# def set_low(self):
# """
# Set the digital output pin to low.
# """
# self.state = 0
#
# def set_high(self):
# """
# Set the digital output pin to high.
# """
# self.state = 1
#
# class PWM():
# """
# PWM used on a minibot.
# """
# def __init__(self, pin, frequency=1, duty_cycle=0):
# """
# Constructor.
# Args:
# pin (int): Pin that the PWM is connected to on the minibot.
# frequency (int): Frequency of the PWM.
# duty_cycle (int): Duty cycle of the PWM.
# """
# self.pin = pin
# self.frequency = frequency
# self.duty_cycle = duty_cycle
# self.started = False
#
# def set_frequency(self, frequency):
# """
# Sets frequency of the PWM.
# """
# self.frequency = frequency
#
# def set_duty_cycle(self, duty_cycle):
# """
# Sets duty cycle of the PWM.
# """
# self.duty_cycle = duty_cycle
#
# def stop(self):
# """
# Stops the PWM.
# """
# self.started = False
#
# def start(self):
# """
# Starts the PWM.
# """
# self.started = True
#
# Path: minibot/peripherals/hbridge.py
# class HBridge():
# """
# Minibot H-Bridge class.
# """
# def __init__(self, left_pin, left_pwm, right_pin, right_pwm):
# """
# Constructor.
# Args:
# left_pin (:obj:`DigitalOutput`): Left motor direction pin.
# left_pwm (:obj:`PWM`): PWM of the servo.
# right_pin (:obj:`DigitalOutput`): Right motor direction pin.
# right_pwm (:obj:`PWM`): PWM of the servo.
# """
# self.left_pin = left_pin
# self.left_pwm = left_pwm
# self.right_pin = right_pin
# self.right_pwm = right_pwm
#
# self.left_speed = 0
# self.right_speed = 0
#
# left_pwm.set_frequency(100)
# right_pwm.set_frequency(100)
#
# def get_speed(self):
# """
# Returns the (left speed, right speed) tuple
# """
# return (self.left_speed, self.right_speed)
#
# def set_speed(self, left, right):
# """
# Sets the speed of both motors.
# Args:
# left (float): The speed of the left motor (-100 to 100).
# right (float): The speed of the right motor (-100 to 100).
# """
# self.left_speed = max(min(left, 100.0), -100.0)
# self.right_speed = max(min(right, 100.0), -100.0)
# # divide by hundred because PWMs have values between 1 and -1
# # values are negated because of the wiring setup
# left = -self.left_speed/100.0
# right = -self.right_speed/100.0
#
# if left < 0:
# self.left_pin.set_high()
# self.left_pwm.set_duty_cycle(abs(left))
# else:
# self.left_pin.set_low()
# self.left_pwm.set_duty_cycle(1-abs(left))
#
# if right < 0:
# self.right_pin.set_high()
# self.right_pwm.set_duty_cycle(abs(right))
# else:
# self.right_pin.set_low()
# self.right_pwm.set_duty_cycle(1-abs(right))
. Output only the next line. | PWM(13), |
Here is a snippet: <|code_start|>"""
Test for hbridge
"""
class TestHBridge(unittest.TestCase):
"""
    Tests to make sure movements work correctly
"""
def test_initially_stopped(self):
""" Test bot is in stopped state """
<|code_end|>
. Write the next line using the current file imports:
import unittest
from minibot.hardware.virtual_gpio.gpio import DigitalOutput, PWM
from minibot.peripherals.hbridge import HBridge
and context from other files:
# Path: minibot/hardware/virtual_gpio/gpio.py
# class DigitalOutput():
# """
# Digital output pin.
# """
#
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
# self.state = 0
#
# def set_low(self):
# """
# Set the digital output pin to low.
# """
# self.state = 0
#
# def set_high(self):
# """
# Set the digital output pin to high.
# """
# self.state = 1
#
# class PWM():
# """
# PWM used on a minibot.
# """
# def __init__(self, pin, frequency=1, duty_cycle=0):
# """
# Constructor.
# Args:
# pin (int): Pin that the PWM is connected to on the minibot.
# frequency (int): Frequency of the PWM.
# duty_cycle (int): Duty cycle of the PWM.
# """
# self.pin = pin
# self.frequency = frequency
# self.duty_cycle = duty_cycle
# self.started = False
#
# def set_frequency(self, frequency):
# """
# Sets frequency of the PWM.
# """
# self.frequency = frequency
#
# def set_duty_cycle(self, duty_cycle):
# """
# Sets duty cycle of the PWM.
# """
# self.duty_cycle = duty_cycle
#
# def stop(self):
# """
# Stops the PWM.
# """
# self.started = False
#
# def start(self):
# """
# Starts the PWM.
# """
# self.started = True
#
# Path: minibot/peripherals/hbridge.py
# class HBridge():
# """
# Minibot H-Bridge class.
# """
# def __init__(self, left_pin, left_pwm, right_pin, right_pwm):
# """
# Constructor.
# Args:
# left_pin (:obj:`DigitalOutput`): Left motor direction pin.
# left_pwm (:obj:`PWM`): PWM of the servo.
# right_pin (:obj:`DigitalOutput`): Right motor direction pin.
# right_pwm (:obj:`PWM`): PWM of the servo.
# """
# self.left_pin = left_pin
# self.left_pwm = left_pwm
# self.right_pin = right_pin
# self.right_pwm = right_pwm
#
# self.left_speed = 0
# self.right_speed = 0
#
# left_pwm.set_frequency(100)
# right_pwm.set_frequency(100)
#
# def get_speed(self):
# """
# Returns the (left speed, right speed) tuple
# """
# return (self.left_speed, self.right_speed)
#
# def set_speed(self, left, right):
# """
# Sets the speed of both motors.
# Args:
# left (float): The speed of the left motor (-100 to 100).
# right (float): The speed of the right motor (-100 to 100).
# """
# self.left_speed = max(min(left, 100.0), -100.0)
# self.right_speed = max(min(right, 100.0), -100.0)
# # divide by hundred because PWMs have values between 1 and -1
# # values are negated because of the wiring setup
# left = -self.left_speed/100.0
# right = -self.right_speed/100.0
#
# if left < 0:
# self.left_pin.set_high()
# self.left_pwm.set_duty_cycle(abs(left))
# else:
# self.left_pin.set_low()
# self.left_pwm.set_duty_cycle(1-abs(left))
#
# if right < 0:
# self.right_pin.set_high()
# self.right_pwm.set_duty_cycle(abs(right))
# else:
# self.right_pin.set_low()
# self.right_pwm.set_duty_cycle(1-abs(right))
, which may include functions, classes, or code. Output only the next line. | hbridge = HBridge(DigitalOutput(10), |
Based on the snippet: <|code_start|>"""
Tests for minibot movement.
"""
CONFIG_LOCATION = '/home/pi/cs-minibot/minibot/configs/config.json'
if __name__ == "__main__":
print("Initializing Minibot Software")
config_file = open(CONFIG_LOCATION)
config = json.loads(config_file.read())
<|code_end|>
, predict the immediate next line with the help of imports:
from minibot.bot import Bot
import json
and context (classes, functions, sometimes code) from other files:
# Path: minibot/bot.py
# class Bot():
# """
# Minibot object class.
# Keeps track of the BotState (orientation, location, etc.) of the instance of MiniBot.
# """
# def __init__(self, config):
# """
# Constructor for minibot.
# Args:
# config (dict): Dictionary of config information for bot hardware.
# """
# self.name = config['name']
# self.state = BotState()
# self.sensors = {}
# self.actuators = {}
# self.motors = None
# self._parse_config(config)
#
# # queue for extra unrecognized commands by parser
# self.extraCMD = Queue()
#
# def _parse_config(self, config):
# """
# Parses config dictionary and registers peripherals.
# Args:
# config (dict): Dictionary of config information.
# """
# self.actuators["left"] = config["actuators"][0]
# self.actuators["right"] = config["actuators"][1]
# self.motors = HBridge(DigitalOutput(self.actuators["left"]["pinHighLow"]),
# PWM(self.actuators["left"]["pinPWM"]),
# DigitalOutput(self.actuators["right"]["pinHighLow"]),
# PWM(self.actuators["right"]["pinPWM"]))
# self.stop()
#
# #Starts wheels
# wheelEnabler = DigitalOutput(config["wheelEnablerPin"])
# wheelEnabler.set_high()
#
# for sensor in config["sensors"]:
# name = sensor["name"]
# pin = sensor["pin"]
# self.sensors[name] = ColorSensor(self, name, pin)
#
# def get_state(self):
# """
# Gets the BotState of the minibot.
# Returns:
# BotState of the minibot.
# """
# return self.state
#
# def stop(self):
# """
# Moves the bot forward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(0,0)
#
# def move_forward(self, power):
# """
# Moves the bot forward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(power,power)
#
# def move_backward(self, power):
# """
# Moves the bot backward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(-power,-power)
#
# def turn_clockwise(self, power):
# """
# Moves the bot clockwise at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(power,-power)
#
# def turn_counter_clockwise(self, power):
# """
# Moves the bot counter-clockwise at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(-power,power)
#
# def set_wheel_power(self, left, right):
# """
# Sets the power of the bot's wheels as a percentage from -100 to 100. If a wheel
# specified does not exist, the power for that wheel is ignored.
# :param front_left power to deliver to the front_left wheel
# :param front_right power to deliver to the front_right wheel
# :param back_left power to deliver to the back_left wheel
# :param back_right power to deliver to the back_right wheel
# :return True if the action is supported
# """
# self.motors.set_speed(left,right)
#
# def wait(self, t):
# """
# Waits for a duration in seconds.
# :param t The duration in seconds
# """
# time.sleep(t)
#
# def register_actuator(self,actuator):
# """
# Stores actuator object
# :param actuator object
# """
# self.actuators[actuator.name] = actuator
#
# def register_sensor(self,sensor):
# """
# Stores sensor object
# :param sensor object
# """
# self.sensors[sensor.name] = sensor
#
# def get_actuator_by_name(self, name):
# """
# Returns actuator object
# :param name name of the actuator
# """
# return self.actuators[name]
#
# def get_sensor_by_name(self, name):
# """
# Returns sensor object
# :param name name of the sensor
# """
# return self.sensors[name]
#
# def get_all_actuators(self):
# """
# Returns all actuators in a list
# """
# return self.actuators.values()
#
# def get_all_sensors(self):
# """
# Returns all sensors in a list
# """
# return self.sensors.values()
#
# def get_motors(self):
# """
# Returns motor object
# """
# return self.motors
#
# def poll_sensors(self):
# """
# Reads values from all sensors
# """
# data = {}
# for sensor in self.sensors:
# data[sensor] = self.sensors[sensor].read()
# return data
. Output only the next line. | bot = Bot(config) |
Here is a snippet: <|code_start|># currently not in the required format dictated by cs-minibot/test.py,
# but will do
forward = ("50.0", "50.0", "50.0", "50.0")
stop = ("0.0", "0.0", "0.0", "0.0")
if __name__ == "__main__":
<|code_end|>
. Write the next line using the current file imports:
from basestation.base_station import BaseStation
import time
and context from other files:
# Path: basestation/base_station.py
# class BaseStation(object, metaclass=Singleton):
# """
# Contains logic to manage and unify input and output between bots and vision
# sources. This class is a singleton to prevent accidental BaseStation
# duplication.
# """
# # https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
#
# def __init__(self):
# """
# Creates singleton base station. Contains a bot and vision manager.
# """
# self.bot_manager = BotManager()
# self.vision_manager = VisionManager()
# return
#
# def get_bot_manager(self):
# """
# Gets the bot manager associated with current BaseStation.
# """
# return self.bot_manager
#
# def get_vision_manager(self):
# """
# Gets the vision manager associated with current BaseStation.
# """
# return self.vision_manager
, which may include functions, classes, or code. Output only the next line. | bs = BaseStation() |
Predict the next line after this snippet: <|code_start|>"""
Unit test for the Virtual Figure 8 Bot.
Ensure that the Figure 8 Bot hits certain checkpoints in its loop.
"""
def dist(x1, y1, x2, y2):
"""
Returns:
float: The Euclidean distance between two points.
"""
return math.sqrt((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1))
class TestVirtual8BotPhysics(unittest.TestCase):
"""
Unit test for the Virtual Figure 8 Bot.
"""
def test_simulation(self):
"""
Test simulation
"""
<|code_end|>
using the current file's imports:
import math
import unittest
from tests.virtual8bot import Virtual8Bot
and any relevant context from other files:
# Path: tests/virtual8bot.py
# class Virtual8Bot(Bot):
# """
# Virtual Figure 8 Bot
# """
#
# figure8 = [
# (1.0, 1.0, 1.0),
# (math.pi * 1.5, 0.0, 2.0),
# (2.0, 1.0, 1.0),
# (math.pi * 1.5, 2.0, 0.0),
# (1.0, 1.0, 1.0),
# ]
#
# def __init__(self):
# Bot.__init__(self, "Virtual 8 Bot")
# self.l = 0
# self.r = 0
# self.t = 0
# self.state.radius = 1.0
#
#
# def set_motor(self, l, r):
# """
# Set left and right motor speeds
# """
# self.l = l
# self.r = r
#
# def run(self, dt):
# """
# Set the motors based on the current time to perform a figure 8 loop.
# """
# self.t += dt
# self.set_motor(0, 0)
# tt = 0
# for lt, l, r in self.figure8:
# tt += lt
# if self.t < tt:
# self.set_motor(l, r)
# break
#
# self.simulate(dt)
#
# def simulate(self, dt):
# """
# Simulate the current position.
# """
# v = 0.5 * (self.r + self.l)
# w = 0.5 / self.state.radius * (self.r - self.l)
# self.state.angle += dt * w
# self.state.x += dt * v * math.cos(self.state.angle)
# self.state.y += dt * v * math.sin(self.state.angle)
. Output only the next line. | bot = Virtual8Bot() |
Next line prediction: <|code_start|>"""
Unit tests for minibot.
"""
class TestBotBasics(unittest.TestCase):
"""
Tests basic functionalities of the minibot, such as movement and bot states.
"""
def test_default_state(self):
"""
Tests the default state, i.e. whether upon creating a minibot, the bot is detected to
begin at the origin point with no angle or radius offset.
"""
<|code_end|>
. Use current file imports:
(import unittest
from minibot.botstate import BotState)
and context including class names, function names, or small code snippets from other files:
# Path: minibot/botstate.py
# class BotState():
# """
# BotState class.
# Keeps track of the current state of the minibot.
# """
# x = 0
# y = 0
# angle = 0
# radius = 0
#
# def __init__(self, x=0, y=0, angle=0, radius=0):
# """
# Constructor for BotState. Assumes bot begins at origin with no orientation offset.
# Note:
# All parameters default to 0.
# Args:
# x (int): X coordinate of the bot.
# y (int): Y coordinate of the bot.
# angle (int): Angle orientation of robot from the horizontal (x-axis).
# radius (int): Radius of minibot.
# """
# self.x = x
# self.y = y
# self.angle = angle
# self.radius = radius
. Output only the next line. | state = BotState() |
Using the snippet: <|code_start|>"""
Script which is automatically run on the MiniBot's Pi upon startup.
Must be configured in /etc/init.d/minibotinit.sh on the RPi.
"""
"""
Loads UserScript file.
Reloads file when it is run from GUI to reflect changes.
"""
US = importlib.import_module('minibot.scripts.UserScript')
CONFIG_LOCATION = '/home/pi/cs-minibot/minibot/configs/config.json'
p = None
def main():
print("Initializing Minibot Software")
p = None
config_file = open(CONFIG_LOCATION)
config = json.loads(config_file.read())
<|code_end|>
, determine the next line of code. You have imports:
from minibot.bot import Bot
from minibot.hardware.communication.TCP import TCP
from threading import Thread
import minibot.hardware.communication.UDP
import json
import time
import importlib
import os
and context (class names, function names, or code) available:
# Path: minibot/bot.py
# class Bot():
# """
# Minibot object class.
# Keeps track of the BotState (orientation, location, etc.) of the instance of MiniBot.
# """
# def __init__(self, config):
# """
# Constructor for minibot.
# Args:
# config (dict): Dictionary of config information for bot hardware.
# """
# self.name = config['name']
# self.state = BotState()
# self.sensors = {}
# self.actuators = {}
# self.motors = None
# self._parse_config(config)
#
# # queue for extra unrecognized commands by parser
# self.extraCMD = Queue()
#
# def _parse_config(self, config):
# """
# Parses config dictionary and registers peripherals.
# Args:
# config (dict): Dictionary of config information.
# """
# self.actuators["left"] = config["actuators"][0]
# self.actuators["right"] = config["actuators"][1]
# self.motors = HBridge(DigitalOutput(self.actuators["left"]["pinHighLow"]),
# PWM(self.actuators["left"]["pinPWM"]),
# DigitalOutput(self.actuators["right"]["pinHighLow"]),
# PWM(self.actuators["right"]["pinPWM"]))
# self.stop()
#
# #Starts wheels
# wheelEnabler = DigitalOutput(config["wheelEnablerPin"])
# wheelEnabler.set_high()
#
# for sensor in config["sensors"]:
# name = sensor["name"]
# pin = sensor["pin"]
# self.sensors[name] = ColorSensor(self, name, pin)
#
# def get_state(self):
# """
# Gets the BotState of the minibot.
# Returns:
# BotState of the minibot.
# """
# return self.state
#
# def stop(self):
# """
# Moves the bot forward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(0,0)
#
# def move_forward(self, power):
# """
# Moves the bot forward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(power,power)
#
# def move_backward(self, power):
# """
# Moves the bot backward at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(-power,-power)
#
# def turn_clockwise(self, power):
# """
# Moves the bot clockwise at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(power,-power)
#
# def turn_counter_clockwise(self, power):
# """
# Moves the bot counter-clockwise at a percentage of its full power
# :param power The percentage of the bot's power to use from 0-100
# :return True if the action is supported
# """
# self.motors.set_speed(-power,power)
#
# def set_wheel_power(self, left, right):
# """
# Sets the power of the bot's wheels as a percentage from -100 to 100. If a wheel
# specified does not exist, the power for that wheel is ignored.
# :param front_left power to deliver to the front_left wheel
# :param front_right power to deliver to the front_right wheel
# :param back_left power to deliver to the back_left wheel
# :param back_right power to deliver to the back_right wheel
# :return True if the action is supported
# """
# self.motors.set_speed(left,right)
#
# def wait(self, t):
# """
# Waits for a duration in seconds.
# :param t The duration in seconds
# """
# time.sleep(t)
#
# def register_actuator(self,actuator):
# """
# Stores actuator object
# :param actuator object
# """
# self.actuators[actuator.name] = actuator
#
# def register_sensor(self,sensor):
# """
# Stores sensor object
# :param sensor object
# """
# self.sensors[sensor.name] = sensor
#
# def get_actuator_by_name(self, name):
# """
# Returns actuator object
# :param name name of the actuator
# """
# return self.actuators[name]
#
# def get_sensor_by_name(self, name):
# """
# Returns sensor object
# :param name name of the sensor
# """
# return self.sensors[name]
#
# def get_all_actuators(self):
# """
# Returns all actuators in a list
# """
# return self.actuators.values()
#
# def get_all_sensors(self):
# """
# Returns all sensors in a list
# """
# return self.sensors.values()
#
# def get_motors(self):
# """
# Returns motor object
# """
# return self.motors
#
# def poll_sensors(self):
# """
# Reads values from all sensors
# """
# data = {}
# for sensor in self.sensors:
# data[sensor] = self.sensors[sensor].read()
# return data
. Output only the next line. | bot = Bot(config) |
Given snippet: <|code_start|> RGPIO.setup(pin, RGPIO.IN, pull_up_down=pull_up_down)
def read(self):
"""
Read digital input from the pin.
Return:
int: 0 or 1 for LOW or HIGH voltage.
"""
return RGPIO.input(self.pin)
class DigitalOutput(MDigitalOutput):
"""
Digital output pin.
"""
def __init__(self, pin):
MDigitalOutput.__init__(self, pin)
RGPIO.setup(pin, RGPIO.OUT)
def set_low(self):
"""
Set the digital output pin to low.
"""
RGPIO.output(self.pin, RGPIO.LOW)
def set_high(self):
"""
Set the digital output pin to high.
"""
RGPIO.output(self.pin, RGPIO.HIGH)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from minibot.hardware.gpio import PWM as MPWM
from minibot.hardware.gpio import DigitalInput as MDigitalInput
from minibot.hardware.gpio import DigitalOutput as MDigitalOutput
import RPi.GPIO as RGPIO
and context:
# Path: minibot/hardware/gpio.py
# class PWM():
# """
# PWM used on a minibot.
# """
# def __init__(self, pin, frequency, duty_cycle=0):
# """
# Constructor.
# Args:
# pin (int): Pin that the PWM is connected to on the minibot.
# frequency (int): Frequency of the PWM.
# duty_cycle (int): Duty cycle of the PWM.
# """
# self.pin = pin
# self.frequency = frequency
# self.duty_cycle = duty_cycle
#
# def set_frequency(self, frequency):
# """
# Sets frequency of the PWM.
# """
# self.frequency = frequency
#
# def set_duty_cycle(self, duty_cycle):
# """
# Sets duty cycle of the PWM.
# """
# self.duty_cycle = duty_cycle
#
# def stop(self):
# """
# Stops the PWM.
# """
# raise NotImplementedError("PWM.stop")
#
# Path: minibot/hardware/gpio.py
# class DigitalInput():
# """
# Digital input pin.
# """
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
#
# def read(self):
# """
# Read input from the digital pin.
# Return:
# int: 0 or 1 for LOW or HIGH voltage.
# """
# raise NotImplementedError
#
# Path: minibot/hardware/gpio.py
# class DigitalOutput():
# """
# Digital output pin.
# """
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
#
# def set_low(self):
# """
# Set the digital output pin to low.
# """
# raise NotImplementedError
#
# def set_high(self):
# """
# Set the digital output pin to high.
# """
# raise NotImplementedError
which might include code, classes, or functions. Output only the next line. | class PWM(MPWM): |
Next line prediction: <|code_start|>"""
Minibot GPIO, specific to Raspberry Pi.
"""
# Sets mode of the GPIO to BCM numbering.
RGPIO.setmode(RGPIO.BCM)
class DigitalInput(MDigitalInput):
"""
Digital input pin.
"""
def __init__(self, pin, pull_up_down=None):
"""
Digital input for the RPi.
Args:
pin (int): BCM pin number for the digital input.
pull_up_down (int): Whether to use an internal pull up or pull down resistor.
"""
MDigitalInput.__init__(self, pin)
RGPIO.setup(pin, RGPIO.IN, pull_up_down=pull_up_down)
def read(self):
"""
Read digital input from the pin.
Return:
int: 0 or 1 for LOW or HIGH voltage.
"""
return RGPIO.input(self.pin)
<|code_end|>
. Use current file imports:
(from minibot.hardware.gpio import PWM as MPWM
from minibot.hardware.gpio import DigitalInput as MDigitalInput
from minibot.hardware.gpio import DigitalOutput as MDigitalOutput
import RPi.GPIO as RGPIO)
and context including class names, function names, or small code snippets from other files:
# Path: minibot/hardware/gpio.py
# class PWM():
# """
# PWM used on a minibot.
# """
# def __init__(self, pin, frequency, duty_cycle=0):
# """
# Constructor.
# Args:
# pin (int): Pin that the PWM is connected to on the minibot.
# frequency (int): Frequency of the PWM.
# duty_cycle (int): Duty cycle of the PWM.
# """
# self.pin = pin
# self.frequency = frequency
# self.duty_cycle = duty_cycle
#
# def set_frequency(self, frequency):
# """
# Sets frequency of the PWM.
# """
# self.frequency = frequency
#
# def set_duty_cycle(self, duty_cycle):
# """
# Sets duty cycle of the PWM.
# """
# self.duty_cycle = duty_cycle
#
# def stop(self):
# """
# Stops the PWM.
# """
# raise NotImplementedError("PWM.stop")
#
# Path: minibot/hardware/gpio.py
# class DigitalInput():
# """
# Digital input pin.
# """
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
#
# def read(self):
# """
# Read input from the digital pin.
# Return:
# int: 0 or 1 for LOW or HIGH voltage.
# """
# raise NotImplementedError
#
# Path: minibot/hardware/gpio.py
# class DigitalOutput():
# """
# Digital output pin.
# """
# def __init__(self, pin):
# """
# Constructor.
# Args:
# pin (int): Digital pin number.
# """
# self.pin = pin
#
# def set_low(self):
# """
# Set the digital output pin to low.
# """
# raise NotImplementedError
#
# def set_high(self):
# """
# Set the digital output pin to high.
# """
# raise NotImplementedError
. Output only the next line. | class DigitalOutput(MDigitalOutput): |
Using the snippet: <|code_start|># Minibot imports.
class BaseInterface:
"""
Class which contains the base station and necessary functions for running the
base station GUI.
"""
def __init__(self, port):
"""
Initializes base station
:param port: Port number from which basestation runs.
"""
self.port = port
self.handlers = [
("/", BaseStationHandler),
("/gui", BaseStationHandler),
("/addBot", AddBotHandler),
("/commandBot", CommandBotHandler),
("/discoverBots", DiscoverBotsHandler),
("/getTrackedBots", GetTrackedBotHandler),
("/removeBot", RemoveBotHandler),
("/sendKV", SendKVHandler),
("/vision", VisionHandler),
("/updateloc", VisionHandler),
("/findScripts", FindScriptsHandler),
("/addScenario", AddScenarioHandler)
]
self.settings = {
"static_path": os.path.join(os.path.dirname(__file__), "static")
}
<|code_end|>
, determine the next line of code. You have imports:
import tornado
import tornado.web
import os.path
import json
import logging
import sys
from basestation.base_station import BaseStation
and context (class names, function names, or code) available:
# Path: basestation/base_station.py
# class BaseStation(object, metaclass=Singleton):
# """
# Contains logic to manage and unify input and output between bots and vision
# sources. This class is a singleton to prevent accidental BaseStation
# duplication.
# """
# # https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
#
# def __init__(self):
# """
# Creates singleton base station. Contains a bot and vision manager.
# """
# self.bot_manager = BotManager()
# self.vision_manager = VisionManager()
# return
#
# def get_bot_manager(self):
# """
# Gets the bot manager associated with current BaseStation.
# """
# return self.bot_manager
#
# def get_vision_manager(self):
# """
# Gets the vision manager associated with current BaseStation.
# """
# return self.vision_manager
. Output only the next line. | self.base_station = BaseStation() |
Based on the snippet: <|code_start|>#coding: utf-8
@csrf_exempt
@login_required
def submit(request):
if request.method == "POST" and request.FILES:
<|code_end|>
, predict the immediate next line with the help of imports:
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseRedirect
from utils import is_client, chemistry_logger
from utils.file_operator import file_upload_response
from chemistry.util import (singletask_details, suitetask_details,
get_models_selector)
from chemistry.models import SuiteTask, SingleTask
and context (classes, functions, sometimes code) from other files:
# Path: utils/file_operator.py
# def file_upload_response(request):
# f = file_upload_save_process(request)
# data = [{'name': f.title,
# 'id': f.fid,
# 'type': f.file_type}]
#
# response = JSONResponse(data, {}, response_minetype(request))
# response["Content-Dispostion"] = "inline; filename=files.json"
# return response
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def get_models_selector(models_str):
# """get models name and color flag"""
# colors = ("label-success", "label-warning", "label-primary",
# "label-info", "label-danger", "label-default")
#
# models_list = models_str.split(MODEL_SPLITS)
# result = []
# for i in range(0, len(models_list)):
# e = {}
# e["color"] = colors[i % len(colors)]
# e["value"] = models_list[i]
# result.append(e)
#
# return result
. Output only the next line. | return file_upload_response(request) |
Given the following code snippet before the placeholder: <|code_start|>@login_required
def history(request):
#TODO: Add pagination
results = SuiteTask.objects.filter(user__user=request.user,
is_hide=False).order_by('-start_time')
#show_all = request.META.get('show_all', '0') == '1'
#if not show_all:
# pass
#results = results.filter(is_hide=True)
for r in results:
r.models_str_list = get_models_selector(r.models_str)
r.models_category_str_list = get_models_selector(r.models_category_str)
r.progress_value = "%0.2f" % (float(r.has_finished_tasks) / r.total_tasks * 100)
r.is_finished = bool(r.total_tasks == r.has_finished_tasks)
return render(request, 'history.html',
dict(history_lists=results))
@login_required
def suitetask(request, sid=None):
return render(request, 'suite_details.html',
suitetask_details(sid))
@login_required
def singletask(request, pid=None):
return render(request, 'task_details.html',
<|code_end|>
, predict the next line using imports from the current file:
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseRedirect
from utils import is_client, chemistry_logger
from utils.file_operator import file_upload_response
from chemistry.util import (singletask_details, suitetask_details,
get_models_selector)
from chemistry.models import SuiteTask, SingleTask
and context including class names, function names, and sometimes code from other files:
# Path: utils/file_operator.py
# def file_upload_response(request):
# f = file_upload_save_process(request)
# data = [{'name': f.title,
# 'id': f.fid,
# 'type': f.file_type}]
#
# response = JSONResponse(data, {}, response_minetype(request))
# response["Content-Dispostion"] = "inline; filename=files.json"
# return response
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def get_models_selector(models_str):
# """get models name and color flag"""
# colors = ("label-success", "label-warning", "label-primary",
# "label-info", "label-danger", "label-default")
#
# models_list = models_str.split(MODEL_SPLITS)
# result = []
# for i in range(0, len(models_list)):
# e = {}
# e["color"] = colors[i % len(colors)]
# e["value"] = models_list[i]
# result.append(e)
#
# return result
. Output only the next line. | singletask_details(pid)) |
Given the following code snippet before the placeholder: <|code_start|>def submit(request):
if request.method == "POST" and request.FILES:
return file_upload_response(request)
return render(request, "newtask.html")
@login_required
def history(request):
#TODO: Add pagination
results = SuiteTask.objects.filter(user__user=request.user,
is_hide=False).order_by('-start_time')
#show_all = request.META.get('show_all', '0') == '1'
#if not show_all:
# pass
#results = results.filter(is_hide=True)
for r in results:
r.models_str_list = get_models_selector(r.models_str)
r.models_category_str_list = get_models_selector(r.models_category_str)
r.progress_value = "%0.2f" % (float(r.has_finished_tasks) / r.total_tasks * 100)
r.is_finished = bool(r.total_tasks == r.has_finished_tasks)
return render(request, 'history.html',
dict(history_lists=results))
@login_required
def suitetask(request, sid=None):
return render(request, 'suite_details.html',
<|code_end|>
, predict the next line using imports from the current file:
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseRedirect
from utils import is_client, chemistry_logger
from utils.file_operator import file_upload_response
from chemistry.util import (singletask_details, suitetask_details,
get_models_selector)
from chemistry.models import SuiteTask, SingleTask
and context including class names, function names, and sometimes code from other files:
# Path: utils/file_operator.py
# def file_upload_response(request):
# f = file_upload_save_process(request)
# data = [{'name': f.title,
# 'id': f.fid,
# 'type': f.file_type}]
#
# response = JSONResponse(data, {}, response_minetype(request))
# response["Content-Dispostion"] = "inline; filename=files.json"
# return response
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def get_models_selector(models_str):
# """get models name and color flag"""
# colors = ("label-success", "label-warning", "label-primary",
# "label-info", "label-danger", "label-default")
#
# models_list = models_str.split(MODEL_SPLITS)
# result = []
# for i in range(0, len(models_list)):
# e = {}
# e["color"] = colors[i % len(colors)]
# e["value"] = models_list[i]
# result.append(e)
#
# return result
. Output only the next line. | suitetask_details(sid)) |
Predict the next line for this snippet: <|code_start|>#coding: utf-8
@csrf_exempt
@login_required
def submit(request):
if request.method == "POST" and request.FILES:
return file_upload_response(request)
return render(request, "newtask.html")
@login_required
def history(request):
#TODO: Add pagination
results = SuiteTask.objects.filter(user__user=request.user,
is_hide=False).order_by('-start_time')
#show_all = request.META.get('show_all', '0') == '1'
#if not show_all:
# pass
#results = results.filter(is_hide=True)
for r in results:
<|code_end|>
with the help of current file imports:
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseRedirect
from utils import is_client, chemistry_logger
from utils.file_operator import file_upload_response
from chemistry.util import (singletask_details, suitetask_details,
get_models_selector)
from chemistry.models import SuiteTask, SingleTask
and context from other files:
# Path: utils/file_operator.py
# def file_upload_response(request):
# f = file_upload_save_process(request)
# data = [{'name': f.title,
# 'id': f.fid,
# 'type': f.file_type}]
#
# response = JSONResponse(data, {}, response_minetype(request))
# response["Content-Dispostion"] = "inline; filename=files.json"
# return response
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def get_models_selector(models_str):
# """get models name and color flag"""
# colors = ("label-success", "label-warning", "label-primary",
# "label-info", "label-danger", "label-default")
#
# models_list = models_str.split(MODEL_SPLITS)
# result = []
# for i in range(0, len(models_list)):
# e = {}
# e["color"] = colors[i % len(colors)]
# e["value"] = models_list[i]
# result.append(e)
#
# return result
, which may contain function names, class names, or code. Output only the next line. | r.models_str_list = get_models_selector(r.models_str) |
Next line prediction: <|code_start|># coding: utf-8
class MopacModel():
def __init__(self, mop_fname_list):
self.mop_fname_list_no_ext = []
for fname in mop_fname_list:
name = fname.split('.')[0]
self.mop_fname_list_no_ext.append(name)
def opt4dragon(self, model_name):
for name in self.mop_fname_list_no_ext:
mol_path = join(CALCULATE_DATA_PATH.DRAGON, model_name, name,
'%s.mol' % name)
mop_path = join(CALCULATE_DATA_PATH.MOPAC, model_name, name,
'%s.mop' % name)
out_path = join(CALCULATE_DATA_PATH.MOPAC, model_name, name,
'%s.out' % name)
<|code_end|>
. Use current file imports:
(from os.path import join
from .config import CALCULATE_CMD_TYPE, CALCULATE_DATA_PATH
from utils import chemistry_logger
from chemistry.calcore.utils import CalcoreCmd)
and context including class names, function names, or small code snippets from other files:
# Path: chemistry/calcore/config.py
# CALCULATE_CMD_TYPE = namedtuple("CALCULATE_CMD_TYPE",
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])('dragon6shell -s ', 'mopac ', 'g09 ')
#
# CALCULATE_DATA_PATH = namedtuple('CALCULATE_DATA_PATH',
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])(DRAGON_PATH, MOPAC_PATH, GAUSSIAN_PATH)
. Output only the next line. | cmd = '%s "%s"' % (CALCULATE_CMD_TYPE.MOPAC, mop_path) |
Given the code snippet: <|code_start|># coding: utf-8
class MopacModel():
def __init__(self, mop_fname_list):
self.mop_fname_list_no_ext = []
for fname in mop_fname_list:
name = fname.split('.')[0]
self.mop_fname_list_no_ext.append(name)
def opt4dragon(self, model_name):
for name in self.mop_fname_list_no_ext:
<|code_end|>
, generate the next line using the imports in this file:
from os.path import join
from .config import CALCULATE_CMD_TYPE, CALCULATE_DATA_PATH
from utils import chemistry_logger
from chemistry.calcore.utils import CalcoreCmd
and context (functions, classes, or occasionally code) from other files:
# Path: chemistry/calcore/config.py
# CALCULATE_CMD_TYPE = namedtuple("CALCULATE_CMD_TYPE",
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])('dragon6shell -s ', 'mopac ', 'g09 ')
#
# CALCULATE_DATA_PATH = namedtuple('CALCULATE_DATA_PATH',
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])(DRAGON_PATH, MOPAC_PATH, GAUSSIAN_PATH)
. Output only the next line. | mol_path = join(CALCULATE_DATA_PATH.DRAGON, model_name, name, |
Given the code snippet: <|code_start|> request = Mock()
request_text = base64.encodestring('test123:1234')
request.META = {"HTTP_AUTHORIZATION": "basic %s" % request_text}
ok_(not basic_auth_api(request))
def tearDown(self):
self.user.delete()
class TaskSubmitTest(TestCase):
smile = "COc1ccccc1NC(=O)CC(=O)C"
files_id_list = ['ff2da209-7fd5-4fe4-a398-ee4c3a2a6d68',
'b56be350-17dd-4bf4-846e-fce7a5a1a299']
mol_data = None
models = [{'model': 'koa', 'temperature': '10'},
{'model': 'pl', 'temperature': '11'}]
task_name = "test_task"
task_notes = "test_notes"
def setUp(self):
self.user = User.objects.create_user('test1', 'test1@t.com', '123')
def tearDown(self):
self.user.delete()
def test_submit(self):
pass
def test_calculate_task(self):
<|code_end|>
, generate the next line using the imports in this file:
import base64
from django.test import TestCase
from django.contrib.auth.models import User
from mock import Mock, patch
from nose.tools import eq_, ok_
from utils import basic_auth_api
from chemistry.util import calculate_tasks
and context (functions, classes, or occasionally code) from other files:
# Path: chemistry/util.py
# def calculate_tasks(files_id_list, smile, mol_data, models):
# num = len(files_id_list)
# num += 1 if smile else 0
# num += 1 if mol_data else 0
#
# #TODO: 计算需要优化
# for fid in files_id_list:
# if not fid:
# continue
#
# #根据id,获取前端页面上传的文件model
# f_record = ProcessedFile.objects.get(fid=fid)
# if f_record.file_type.lower() in ('smi', 'cas'):
# num += cnt_batch_file_task(f_record)
#
# return num * len(models)
. Output only the next line. | num = calculate_tasks(self.files_id_list, self.smile, |
Next line prediction: <|code_start|># coding: utf-8
class GaussianOptimizeModel():
'''Optimize .gjf --> .log --> .mol'''
def __init__(self, gjf_fname_list):
self.gjf_fname_list_no_ext = []
for fname in gjf_fname_list:
self.gjf_fname_list_no_ext.append(fname.split('.')[0])
def gjf4dragon(self, model_name):
for name in self.gjf_fname_list_no_ext:
mol_path = join(CALCULATE_DATA_PATH.DRAGON, model_name, name,
'%s.mol' % name)
gjf_path = join(CALCULATE_DATA_PATH.GAUSSIAN, model_name, name,
'%s.gjf' % name)
log_path = join(CALCULATE_DATA_PATH.GAUSSIAN, model_name, name,
'%s.log' % name)
<|code_end|>
. Use current file imports:
(from os.path import join
from .config import CALCULATE_CMD_TYPE, CALCULATE_DATA_PATH
from utils import chemistry_logger
from chemistry.calcore.utils import CalcoreCmd)
and context including class names, function names, or small code snippets from other files:
# Path: chemistry/calcore/config.py
# CALCULATE_CMD_TYPE = namedtuple("CALCULATE_CMD_TYPE",
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])('dragon6shell -s ', 'mopac ', 'g09 ')
#
# CALCULATE_DATA_PATH = namedtuple('CALCULATE_DATA_PATH',
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])(DRAGON_PATH, MOPAC_PATH, GAUSSIAN_PATH)
. Output only the next line. | cmd = '%s "%s"' % (CALCULATE_CMD_TYPE.GAUSSIAN, gjf_path) |
Based on the snippet: <|code_start|># coding: utf-8
class GaussianOptimizeModel():
'''Optimize .gjf --> .log --> .mol'''
def __init__(self, gjf_fname_list):
self.gjf_fname_list_no_ext = []
for fname in gjf_fname_list:
self.gjf_fname_list_no_ext.append(fname.split('.')[0])
def gjf4dragon(self, model_name):
for name in self.gjf_fname_list_no_ext:
<|code_end|>
, predict the immediate next line with the help of imports:
from os.path import join
from .config import CALCULATE_CMD_TYPE, CALCULATE_DATA_PATH
from utils import chemistry_logger
from chemistry.calcore.utils import CalcoreCmd
and context (classes, functions, sometimes code) from other files:
# Path: chemistry/calcore/config.py
# CALCULATE_CMD_TYPE = namedtuple("CALCULATE_CMD_TYPE",
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])('dragon6shell -s ', 'mopac ', 'g09 ')
#
# CALCULATE_DATA_PATH = namedtuple('CALCULATE_DATA_PATH',
# ['DRAGON', 'MOPAC', 'GAUSSIAN'])(DRAGON_PATH, MOPAC_PATH, GAUSSIAN_PATH)
. Output only the next line. | mol_path = join(CALCULATE_DATA_PATH.DRAGON, model_name, name, |
Predict the next line for this snippet: <|code_start|>#coding: utf-8
urlpatterns = patterns('',
url(r'^newtask/$', views.submit),
url(r'^history/$', views.history),
url(r'^hide/(?P<id>.{36})', views.hide),
url(r'^details/suite/(?P<sid>.{36})$', views.suitetask),
url(r'^details/task/(?P<pid>.{36})$', views.singletask),
url(r'^chemwriter/$', TemplateView.as_view(template_name='chemwriter.html')),
<|code_end|>
with the help of current file imports:
from django.conf.urls import patterns, url
from chemistry import views, api_views
from django.views.generic import TemplateView
and context from other files:
# Path: chemistry/views.py
# def submit(request):
# def history(request):
# def suitetask(request, sid=None):
# def singletask(request, pid=None):
# def hide(request, id):
#
# Path: chemistry/api_views.py
# def smile_search(request):
# def mol_upload(request):
# def task_submit(request):
# def suitetask(request):
# def singletask(request):
# def history(request):
, which may contain function names, class names, or code. Output only the next line. | url(r'^api/smile-search/$', api_views.smile_search), |
Given the code snippet: <|code_start|>@require_POST
@csrf_exempt
def smile_search(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
query = dict(cas=request.POST.get('cas'),
smile=request.POST.get('smile'),
common_name_ch=request.POST.get('common_name_ch'),
common_name_en=request.POST.get('common_name_en'))
# TODO: 开启分页
start = int(request.POST.get('start', 0))
limit = int(request.POST.get('limit', 10))
# TODO: 目前只是使用本地搜索,未来重新开启第三方search API
#results_chemspider = search_cheminfo(query, start, limit)
results = search_cheminfo_local(query, start, limit)
return make_json_response(results)
@require_POST
@csrf_exempt
def mol_upload(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
if request.method == "POST" and request.FILES:
try:
<|code_end|>
, generate the next line using the imports in this file:
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from utils import make_json_response, basic_auth_api
from utils.file_operator import file_upload_save_process
from chemistry.util import (singletask_details, suitetask_details,
submit_calculate_task, search_cheminfo_local)
from chemistry.models import SuiteTask
and context (functions, classes, or occasionally code) from other files:
# Path: utils/file_operator.py
# def file_upload_save_process(request):
# f = request.FILES["file"]
# name, filetype = split_file_name(UploadedFile(f).name)
# obj = ProcessedFile()
# obj.title = name
# obj.file_type = filetype
# obj.file_obj = f
# obj.save()
#
# return obj
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def submit_calculate_task(user, smile=None, draw_mol_data=None,
# task_notes=None, task_name=None,
# files_id_list=None, models=None,
# local_search_id=None):
#
# chemistry_logger.info("smile: %s" % smile)
# chemistry_logger.info("draw_mol_data: %s" % draw_mol_data)
# chemistry_logger.info("files_id_list: %s" % files_id_list)
# chemistry_logger.info("models: %s" % models)
#
# tasks_num = calculate_tasks(files_id_list, smile, draw_mol_data, models)
#
# if tasks_num == 0:
# status = False
# info = "请至少选择一种输入方式和计算模型!"
# id = None
# return (status, info, id)
#
# try:
# s = SuiteTask()
# s.sid = id = str(uuid.uuid4())
# s.user = UserProfile.objects.get(user=user)
# s.total_tasks = tasks_num
# s.has_finished_tasks = 0
# s.start_time = utils.get_real_now()
# s.name = task_name
# s.notes = task_notes
# s.models_str, s.models_category_str = parse_models(models)
# s.status = StatusCategory.objects.get(category=STATUS_WORKING)
# s.email = user.email
# s.save()
#
# chemistry_logger.info('~~~~~~~~ s:%s' % s.start_time)
#
# generate_calculate_task.delay(models, smile, draw_mol_data,
# files_id_list, id, local_search_id)
# except:
# chemistry_logger.exception('failed to generate suite_task')
# s.delete()
# status = False
# info = "计算任务添加不成功,将重试或联系网站管理员!"
# id = None
# else:
# status = True
# info = "恭喜,计算任务已经提交!"
#
# return (status, info, id)
#
# @simple_search_output
# def search_cheminfo_local(query, start=0, limit=10):
# #TODO: 加入中文名称搜索
# #Q(common_name_ch__contains=query['common_name_ch']) |
# q = Q(cas=query['cas'].strip())
# if query['smile']:
# q |= Q(smiles=query['smile'].strip())
# if query['common_name_en']:
# q |= Q(einecs_name__contains=query['common_name_en'].strip())
#
# results = ChemInfoLocal.objects.filter(q)[start:(start + limit)]
# return results
. Output only the next line. | f = file_upload_save_process(request) |
Predict the next line after this snippet: <|code_start|>def suitetask(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
id = request.POST.get('id')
content = suitetask_details(id)
details = content.get('suitetask')
ret = dict(start_time=str(details.start_time),
end_time=str(details.end_time),
total_tasks=details.total_tasks,
has_finished_tasks=details.has_finished_tasks,
name=details.name,
notes=details.notes,
email=details.email,
status=str(details.status),
models=details.models_str,
models_category=details.models_category_str,
result=details.result_pdf.url if details.result_pdf else None,
singletask_lists=[t.pid for t in content.get('single_lists')])
return make_json_response(ret)
@require_POST
@csrf_exempt
def singletask(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
id = request.POST.get('id')
<|code_end|>
using the current file's imports:
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from utils import make_json_response, basic_auth_api
from utils.file_operator import file_upload_save_process
from chemistry.util import (singletask_details, suitetask_details,
submit_calculate_task, search_cheminfo_local)
from chemistry.models import SuiteTask
and any relevant context from other files:
# Path: utils/file_operator.py
# def file_upload_save_process(request):
# f = request.FILES["file"]
# name, filetype = split_file_name(UploadedFile(f).name)
# obj = ProcessedFile()
# obj.title = name
# obj.file_type = filetype
# obj.file_obj = f
# obj.save()
#
# return obj
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def submit_calculate_task(user, smile=None, draw_mol_data=None,
# task_notes=None, task_name=None,
# files_id_list=None, models=None,
# local_search_id=None):
#
# chemistry_logger.info("smile: %s" % smile)
# chemistry_logger.info("draw_mol_data: %s" % draw_mol_data)
# chemistry_logger.info("files_id_list: %s" % files_id_list)
# chemistry_logger.info("models: %s" % models)
#
# tasks_num = calculate_tasks(files_id_list, smile, draw_mol_data, models)
#
# if tasks_num == 0:
# status = False
# info = "请至少选择一种输入方式和计算模型!"
# id = None
# return (status, info, id)
#
# try:
# s = SuiteTask()
# s.sid = id = str(uuid.uuid4())
# s.user = UserProfile.objects.get(user=user)
# s.total_tasks = tasks_num
# s.has_finished_tasks = 0
# s.start_time = utils.get_real_now()
# s.name = task_name
# s.notes = task_notes
# s.models_str, s.models_category_str = parse_models(models)
# s.status = StatusCategory.objects.get(category=STATUS_WORKING)
# s.email = user.email
# s.save()
#
# chemistry_logger.info('~~~~~~~~ s:%s' % s.start_time)
#
# generate_calculate_task.delay(models, smile, draw_mol_data,
# files_id_list, id, local_search_id)
# except:
# chemistry_logger.exception('failed to generate suite_task')
# s.delete()
# status = False
# info = "计算任务添加不成功,将重试或联系网站管理员!"
# id = None
# else:
# status = True
# info = "恭喜,计算任务已经提交!"
#
# return (status, info, id)
#
# @simple_search_output
# def search_cheminfo_local(query, start=0, limit=10):
# #TODO: 加入中文名称搜索
# #Q(common_name_ch__contains=query['common_name_ch']) |
# q = Q(cas=query['cas'].strip())
# if query['smile']:
# q |= Q(smiles=query['smile'].strip())
# if query['common_name_en']:
# q |= Q(einecs_name__contains=query['common_name_en'].strip())
#
# results = ChemInfoLocal.objects.filter(q)[start:(start + limit)]
# return results
. Output only the next line. | details = singletask_details(id).get("singletask") |
Given snippet: <|code_start|> models = json.loads(post.get('models', "[]"))
task_notes = post.get('task_notes')
task_name = post.get('task_name')
local_search_id = int(post.get('local_search_id', 0))
try:
status, info, id = submit_calculate_task(
request.user,
smile=smile,
draw_mol_data=draw_mol_data,
files_id_list=files_id_list,
models=models,
task_notes=task_notes,
task_name=task_name,
local_search_id=local_search_id)
except Exception as err:
status, info, id = False, str(err), None
return make_json_response(dict(status=status,
info=info,
id=id))
@require_POST
@csrf_exempt
def suitetask(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
id = request.POST.get('id')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from utils import make_json_response, basic_auth_api
from utils.file_operator import file_upload_save_process
from chemistry.util import (singletask_details, suitetask_details,
submit_calculate_task, search_cheminfo_local)
from chemistry.models import SuiteTask
and context:
# Path: utils/file_operator.py
# def file_upload_save_process(request):
# f = request.FILES["file"]
# name, filetype = split_file_name(UploadedFile(f).name)
# obj = ProcessedFile()
# obj.title = name
# obj.file_type = filetype
# obj.file_obj = f
# obj.save()
#
# return obj
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def submit_calculate_task(user, smile=None, draw_mol_data=None,
# task_notes=None, task_name=None,
# files_id_list=None, models=None,
# local_search_id=None):
#
# chemistry_logger.info("smile: %s" % smile)
# chemistry_logger.info("draw_mol_data: %s" % draw_mol_data)
# chemistry_logger.info("files_id_list: %s" % files_id_list)
# chemistry_logger.info("models: %s" % models)
#
# tasks_num = calculate_tasks(files_id_list, smile, draw_mol_data, models)
#
# if tasks_num == 0:
# status = False
# info = "请至少选择一种输入方式和计算模型!"
# id = None
# return (status, info, id)
#
# try:
# s = SuiteTask()
# s.sid = id = str(uuid.uuid4())
# s.user = UserProfile.objects.get(user=user)
# s.total_tasks = tasks_num
# s.has_finished_tasks = 0
# s.start_time = utils.get_real_now()
# s.name = task_name
# s.notes = task_notes
# s.models_str, s.models_category_str = parse_models(models)
# s.status = StatusCategory.objects.get(category=STATUS_WORKING)
# s.email = user.email
# s.save()
#
# chemistry_logger.info('~~~~~~~~ s:%s' % s.start_time)
#
# generate_calculate_task.delay(models, smile, draw_mol_data,
# files_id_list, id, local_search_id)
# except:
# chemistry_logger.exception('failed to generate suite_task')
# s.delete()
# status = False
# info = "计算任务添加不成功,将重试或联系网站管理员!"
# id = None
# else:
# status = True
# info = "恭喜,计算任务已经提交!"
#
# return (status, info, id)
#
# @simple_search_output
# def search_cheminfo_local(query, start=0, limit=10):
# #TODO: 加入中文名称搜索
# #Q(common_name_ch__contains=query['common_name_ch']) |
# q = Q(cas=query['cas'].strip())
# if query['smile']:
# q |= Q(smiles=query['smile'].strip())
# if query['common_name_en']:
# q |= Q(einecs_name__contains=query['common_name_en'].strip())
#
# results = ChemInfoLocal.objects.filter(q)[start:(start + limit)]
# return results
which might include code, classes, or functions. Output only the next line. | content = suitetask_details(id) |
Predict the next line for this snippet: <|code_start|> info=str(err),
uuid=None)
else:
data = dict(status=True,
info="upload file succeed",
uuid=f.fid,
name=f.title)
else:
data = dict(status=False,
uuid=None,
info='post file field is required')
return make_json_response(data)
@require_POST
@csrf_exempt
def task_submit(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
post = request.POST
smile = post.get('smile')
draw_mol_data = post.get('draw_mol_data')
files_id_list = json.loads(post.get('files_id_list', "[]"))
models = json.loads(post.get('models', "[]"))
task_notes = post.get('task_notes')
task_name = post.get('task_name')
local_search_id = int(post.get('local_search_id', 0))
try:
<|code_end|>
with the help of current file imports:
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from utils import make_json_response, basic_auth_api
from utils.file_operator import file_upload_save_process
from chemistry.util import (singletask_details, suitetask_details,
submit_calculate_task, search_cheminfo_local)
from chemistry.models import SuiteTask
and context from other files:
# Path: utils/file_operator.py
# def file_upload_save_process(request):
# f = request.FILES["file"]
# name, filetype = split_file_name(UploadedFile(f).name)
# obj = ProcessedFile()
# obj.title = name
# obj.file_type = filetype
# obj.file_obj = f
# obj.save()
#
# return obj
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def submit_calculate_task(user, smile=None, draw_mol_data=None,
# task_notes=None, task_name=None,
# files_id_list=None, models=None,
# local_search_id=None):
#
# chemistry_logger.info("smile: %s" % smile)
# chemistry_logger.info("draw_mol_data: %s" % draw_mol_data)
# chemistry_logger.info("files_id_list: %s" % files_id_list)
# chemistry_logger.info("models: %s" % models)
#
# tasks_num = calculate_tasks(files_id_list, smile, draw_mol_data, models)
#
# if tasks_num == 0:
# status = False
# info = "请至少选择一种输入方式和计算模型!"
# id = None
# return (status, info, id)
#
# try:
# s = SuiteTask()
# s.sid = id = str(uuid.uuid4())
# s.user = UserProfile.objects.get(user=user)
# s.total_tasks = tasks_num
# s.has_finished_tasks = 0
# s.start_time = utils.get_real_now()
# s.name = task_name
# s.notes = task_notes
# s.models_str, s.models_category_str = parse_models(models)
# s.status = StatusCategory.objects.get(category=STATUS_WORKING)
# s.email = user.email
# s.save()
#
# chemistry_logger.info('~~~~~~~~ s:%s' % s.start_time)
#
# generate_calculate_task.delay(models, smile, draw_mol_data,
# files_id_list, id, local_search_id)
# except:
# chemistry_logger.exception('failed to generate suite_task')
# s.delete()
# status = False
# info = "计算任务添加不成功,将重试或联系网站管理员!"
# id = None
# else:
# status = True
# info = "恭喜,计算任务已经提交!"
#
# return (status, info, id)
#
# @simple_search_output
# def search_cheminfo_local(query, start=0, limit=10):
# #TODO: 加入中文名称搜索
# #Q(common_name_ch__contains=query['common_name_ch']) |
# q = Q(cas=query['cas'].strip())
# if query['smile']:
# q |= Q(smiles=query['smile'].strip())
# if query['common_name_en']:
# q |= Q(einecs_name__contains=query['common_name_en'].strip())
#
# results = ChemInfoLocal.objects.filter(q)[start:(start + limit)]
# return results
, which may contain function names, class names, or code. Output only the next line. | status, info, id = submit_calculate_task( |
Next line prediction: <|code_start|># coding: utf-8
@require_POST
@csrf_exempt
def smile_search(request):
if not basic_auth_api(request):
return HttpResponseForbidden()
query = dict(cas=request.POST.get('cas'),
smile=request.POST.get('smile'),
common_name_ch=request.POST.get('common_name_ch'),
common_name_en=request.POST.get('common_name_en'))
# TODO: 开启分页
start = int(request.POST.get('start', 0))
limit = int(request.POST.get('limit', 10))
# TODO: 目前只是使用本地搜索,未来重新开启第三方search API
#results_chemspider = search_cheminfo(query, start, limit)
<|code_end|>
. Use current file imports:
(import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from utils import make_json_response, basic_auth_api
from utils.file_operator import file_upload_save_process
from chemistry.util import (singletask_details, suitetask_details,
submit_calculate_task, search_cheminfo_local)
from chemistry.models import SuiteTask)
and context including class names, function names, or small code snippets from other files:
# Path: utils/file_operator.py
# def file_upload_save_process(request):
# f = request.FILES["file"]
# name, filetype = split_file_name(UploadedFile(f).name)
# obj = ProcessedFile()
# obj.title = name
# obj.file_type = filetype
# obj.file_obj = f
# obj.save()
#
# return obj
#
# Path: chemistry/util.py
# def singletask_details(pid):
# from chemistry.tasks import has_temperature
# single_task = get_object_or_404(SingleTask, pid=pid)
# if not has_temperature(single_task.model.desc):
# single_task.temperature = '--'
# single_task.result_value, single_task.hi, single_task.hx, single_task.degrade = get_singletask_area(single_task.results)
# try:
# local_search_id = single_task.file_obj.local_search_id
# if local_search_id:
# local_search = ChemInfoLocal.objects.get(id=local_search_id)
# else:
# local_search = None
# except Exception:
# chemistry_logger.exception('failed to get cheminfo by local_search_id')
# local_search = None
#
# return dict(singletask=single_task,
# search_engine=local_search)
#
# def suitetask_details(sid):
# from chemistry.tasks import has_temperature
# suitetask = get_object_or_404(SuiteTask, sid=sid)
# single_lists = SingleTask.objects.filter(sid=sid, is_hide=False)
#
# for s in single_lists:
# if not has_temperature(s.model.desc):
# s.temperature = '--'
# s.result_value, s.hi, s.hx, s.degrade = get_singletask_area(s.results)
#
# return dict(suitetask=suitetask,
# single_lists=single_lists)
#
# def submit_calculate_task(user, smile=None, draw_mol_data=None,
# task_notes=None, task_name=None,
# files_id_list=None, models=None,
# local_search_id=None):
#
# chemistry_logger.info("smile: %s" % smile)
# chemistry_logger.info("draw_mol_data: %s" % draw_mol_data)
# chemistry_logger.info("files_id_list: %s" % files_id_list)
# chemistry_logger.info("models: %s" % models)
#
# tasks_num = calculate_tasks(files_id_list, smile, draw_mol_data, models)
#
# if tasks_num == 0:
# status = False
# info = "请至少选择一种输入方式和计算模型!"
# id = None
# return (status, info, id)
#
# try:
# s = SuiteTask()
# s.sid = id = str(uuid.uuid4())
# s.user = UserProfile.objects.get(user=user)
# s.total_tasks = tasks_num
# s.has_finished_tasks = 0
# s.start_time = utils.get_real_now()
# s.name = task_name
# s.notes = task_notes
# s.models_str, s.models_category_str = parse_models(models)
# s.status = StatusCategory.objects.get(category=STATUS_WORKING)
# s.email = user.email
# s.save()
#
# chemistry_logger.info('~~~~~~~~ s:%s' % s.start_time)
#
# generate_calculate_task.delay(models, smile, draw_mol_data,
# files_id_list, id, local_search_id)
# except:
# chemistry_logger.exception('failed to generate suite_task')
# s.delete()
# status = False
# info = "计算任务添加不成功,将重试或联系网站管理员!"
# id = None
# else:
# status = True
# info = "恭喜,计算任务已经提交!"
#
# return (status, info, id)
#
# @simple_search_output
# def search_cheminfo_local(query, start=0, limit=10):
# #TODO: 加入中文名称搜索
# #Q(common_name_ch__contains=query['common_name_ch']) |
# q = Q(cas=query['cas'].strip())
# if query['smile']:
# q |= Q(smiles=query['smile'].strip())
# if query['common_name_en']:
# q |= Q(einecs_name__contains=query['common_name_en'].strip())
#
# results = ChemInfoLocal.objects.filter(q)[start:(start + limit)]
# return results
. Output only the next line. | results = search_cheminfo_local(query, start, limit) |
Given the code snippet: <|code_start|>#!/usr/bin/python
if len(sys.argv) == 1:
print("Usage: %s <url>" % sys.argv)
url = sys.argv[1]
<|code_end|>
, generate the next line using the imports in this file:
import cmd
import subvertpy
import sys
from subvertpy.ra import RemoteAccess
and context (functions, classes, or occasionally code) from other files:
# Path: subvertpy/ra.py
# def RemoteAccess(url, *args, **kwargs):
# """Connect to a remote Subversion server
#
# :param url: URL to connect to
# :return: RemoteAccess object
# """
# if isinstance(url, bytes):
# url = url.decode("utf-8")
# (type, opaque) = splittype(url)
# if type not in url_handlers:
# raise SubversionException("Unknown URL type '%s'" % type, ERR_BAD_URL)
# return url_handlers[type](url, *args, **kwargs)
. Output only the next line. | conn = RemoteAccess(url) |
Given snippet: <|code_start|>"""
python -m sol
now i can have fun
"""
if __name__ == '__main__':
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .test_gui import main
and context:
# Path: sol/test_gui.py
# def main():
# root = tk.Tk()
# root.title('sol')
#
# if not os.path.exists(C.config_savefile):
# tkmb.showinfo(
# '', 'no config found!\n you need to run initial setup first')
# SetupGui(root)
#
# else:
# testgui = MainGui(root)
#
# testgui.refresh_after_load()
#
# testgui.start()
#
# root.mainloop()
which might include code, classes, or functions. Output only the next line. | main() |
Predict the next line after this snippet: <|code_start|> blocks[name] = [block]
for block_list in blocks.values():
for block in block_list:
block.initialize()
contentlet_specs.extend(itertools.ifilter(lambda x: x not in contentlet_specs, block.contentlet_specs))
contentreference_specs.update(block.contentreference_specs)
return contentlet_specs, contentreference_specs
class LazyContainerFinder(object):
def __init__(self, nodes, extends=False):
self.nodes = nodes
self.initialized = False
self.contentlet_specs = []
self.contentreference_specs = SortedDict()
self.blocks = {}
self.block_super = False
self.extends = extends
def process(self, nodelist):
for node in nodelist:
if self.extends:
if isinstance(node, BlockNode):
self.blocks[node.name] = block = LazyContainerFinder(node.nodelist)
block.initialize()
self.blocks.update(block.blocks)
continue
<|code_end|>
using the current file's imports:
import itertools
from django.template import TextNode, VariableNode, Context
from django.template.loader_tags import BlockNode, ExtendsNode, BlockContext, ConstantIncludeNode
from django.utils.datastructures import SortedDict
from philo.templatetags.containers import ContainerNode
and any relevant context from other files:
# Path: philo/templatetags/containers.py
# class ContainerNode(template.Node):
# def __init__(self, name, references=None, as_var=None):
# self.name = name
# self.as_var = as_var
# self.references = references
#
# def render(self, context):
# container_content = self.get_container_content(context)
#
# if self.as_var:
# context[self.as_var] = container_content
# return ''
#
# return container_content
#
# def get_container_content(self, context):
# try:
# container_context = context.render_context[CONTAINER_CONTEXT_KEY]
# except KeyError:
# try:
# page = context['page']
# except KeyError:
# return settings.TEMPLATE_STRING_IF_INVALID
#
# container_context = ContainerContext(page)
# context.render_context[CONTAINER_CONTEXT_KEY] = container_context
#
# if self.references:
# # Then it's a content reference.
# try:
# contentreference = container_context.get_references()[(self.name, self.references)]
# except KeyError:
# content = ''
# else:
# content = contentreference.content
# else:
# # Otherwise it's a contentlet.
# try:
# contentlet = container_context.get_contentlets()[self.name]
# except KeyError:
# content = ''
# else:
# content = contentlet.content
# return content
. Output only the next line. | if isinstance(node, ContainerNode): |
Predict the next line after this snippet: <|code_start|> return self.compile_instance(self.object_pk.resolve(context))
def get_template(self, context):
if self.template_name is None:
return None
return self.compile_template(self.template_name.resolve(context))
class InstanceEmbedNode(EmbedNode):
def __init__(self, instance, kwargs=None):
self.instance = instance
self.kwargs = kwargs or {}
def get_template(self, context):
return None
def get_instance(self, context):
return self.instance.resolve(context)
def get_content_type(self, context):
instance = self.get_instance(context)
if not instance:
return None
return ContentType.objects.get_for_model(instance)
def get_embedded(self):
return self.template
<|code_end|>
using the current file's imports:
from django import template
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.template.loader_tags import ExtendsNode, BlockContext, BLOCK_CONTEXT_KEY, TextNode, BlockNode
from philo.utils.templates import LOADED_TEMPLATE_ATTR
and any relevant context from other files:
# Path: philo/utils/templates.py
# LOADED_TEMPLATE_ATTR = '_philo_loaded_template'
. Output only the next line. | setattr(ConstantEmbedNode, LOADED_TEMPLATE_ATTR, property(get_embedded)) |
Given snippet: <|code_start|>
@vary_on_headers('Accept')
def node_view(request, path=None, **kwargs):
"""
:func:`node_view` handles incoming requests by checking to make sure that:
- the request has an attached :class:`.Node`.
- the attached :class:`~philo.models.nodes.Node` handles any remaining path beyond its location.
If these conditions are not met, then :func:`node_view` will either raise :exc:`Http404` or, if it seems like the address was mistyped (for example missing a trailing slash), return an :class:`HttpResponseRedirect` to the correct address.
Otherwise, :func:`node_view` will call the :class:`.Node`'s :meth:`~.Node.render_to_response` method, passing ``kwargs`` in as the ``extra_context``.
"""
if "philo.middleware.RequestNodeMiddleware" not in settings.MIDDLEWARE_CLASSES:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.conf import settings
from django.core.urlresolvers import resolve
from django.http import Http404, HttpResponseRedirect
from django.views.decorators.vary import vary_on_headers
from philo.exceptions import MIDDLEWARE_NOT_CONFIGURED
and context:
# Path: philo/exceptions.py
# MIDDLEWARE_NOT_CONFIGURED = ImproperlyConfigured("""Philo requires the RequestNode middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'philo.middleware.RequestNodeMiddleware'.""")
which might include code, classes, or functions. Output only the next line. | raise MIDDLEWARE_NOT_CONFIGURED |
Next line prediction: <|code_start|>
def keys(self):
"""Returns the keys from the cache, first populating the cache if necessary."""
if not self._cache_filled:
self._fill_cache()
return self._cache.keys()
def items(self):
"""Returns the items from the cache, first populating the cache if necessary."""
if not self._cache_filled:
self._fill_cache()
return self._cache.items()
def values(self):
"""Returns the values from the cache, first populating the cache if necessary."""
if not self._cache_filled:
self._fill_cache()
return self._cache.values()
def _fill_cache(self):
if self._cache_filled:
return
attributes = self.get_attributes()
value_lookups = {}
for a in attributes:
value_lookups.setdefault(a.value_content_type_id, []).append(a.value_object_id)
self._attributes_cache[a.key] = a
<|code_end|>
. Use current file imports:
(from functools import partial
from UserDict import DictMixin
from django.db import models
from django.contrib.contenttypes.models import ContentType
from philo.utils.lazycompat import SimpleLazyObject
from philo.models.base import JSONValue, ForeignKeyValue, ManyToManyValue, Attribute
from philo.models.base import Attribute
from philo.models import Attribute
from philo.models import Attribute
from philo.models import Attribute
from philo.models import Attribute)
and context including class names, function names, or small code snippets from other files:
# Path: philo/utils/lazycompat.py
# class SimpleLazyObject(LazyObject):
# """
# A lazy object initialised from any function.
#
# Designed for compound objects of unknown type. For builtins or objects of
# known type, use django.utils.functional.lazy.
# """
# def __init__(self, func):
# """
# Pass in a callable that returns the object to be wrapped.
#
# If copies are made of the resulting SimpleLazyObject, which can happen
# in various circumstances within Django, then you must ensure that the
# callable can be safely run more than once and will return the same
# value.
# """
# self.__dict__['_setupfunc'] = func
# super(SimpleLazyObject, self).__init__()
#
# def _setup(self):
# self._wrapped = self._setupfunc()
#
# __str__ = new_method_proxy(str)
# __unicode__ = new_method_proxy(unicode)
#
# def __deepcopy__(self, memo):
# if self._wrapped is empty:
# # We have to use SimpleLazyObject, not self.__class__, because the
# # latter is proxied.
# result = SimpleLazyObject(self._setupfunc)
# memo[id(self)] = result
# return result
# else:
# import copy
# return copy.deepcopy(self._wrapped, memo)
#
# # Need to pretend to be the wrapped class, for the sake of objects that care
# # about this (especially in equality tests)
# __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
# __eq__ = new_method_proxy(operator.eq)
# __hash__ = new_method_proxy(hash)
# __nonzero__ = new_method_proxy(bool)
. Output only the next line. | values_bulk = dict(((ct_pk, SimpleLazyObject(partial(ContentType.objects.get_for_id(ct_pk).model_class().objects.in_bulk, pks))) for ct_pk, pks in value_lookups.items())) |
Predict the next line after this snippet: <|code_start|>class EmailInput(forms.TextInput):
"""Displays an HTML5 email input on browsers which support it and a normal text input on other browsers."""
input_type = 'email'
class RegistrationForm(UserCreationForm):
"""
Handles user registration. If :mod:`recaptcha_django` is installed on the system and :class:`recaptcha_django.middleware.ReCaptchaMiddleware` is in :setting:`settings.MIDDLEWARE_CLASSES`, then a recaptcha field will automatically be added to the registration form.
.. seealso:: `recaptcha-django <http://code.google.com/p/recaptcha-django/>`_
"""
#: An :class:`EmailField` using the :class:`EmailInput` widget.
email = forms.EmailField(widget=EmailInput)
try:
except ImportError:
pass
else:
if 'recaptcha_django.middleware.ReCaptchaMiddleware' in settings.MIDDLEWARE_CLASSES:
recaptcha = ReCaptchaField()
def clean_username(self):
username = self.cleaned_data['username']
# Trivial case: if the username doesn't exist, go for it!
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return username
<|code_end|>
using the current file's imports:
from datetime import date
from django import forms
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from philo.contrib.waldo.tokens import REGISTRATION_TIMEOUT_DAYS
from recaptcha_django import ReCaptchaField
and any relevant context from other files:
# Path: philo/contrib/waldo/tokens.py
# REGISTRATION_TIMEOUT_DAYS = getattr(settings, 'WALDO_REGISTRATION_TIMEOUT_DAYS', 1)
. Output only the next line. | if not user.is_active and (date.today() - user.date_joined.date()).days > REGISTRATION_TIMEOUT_DAYS and user.last_login == user.date_joined: |
Next line prediction: <|code_start|>
class NodeURLNode(template.Node):
def __init__(self, node, as_var, with_obj=None, view_name=None, args=None, kwargs=None):
self.as_var = as_var
self.view_name = view_name
# Because the following variables have already been compiled as filters if they exist, they don't need to be re-scanned as template variables.
self.node = node
self.with_obj = with_obj
self.args = args
self.kwargs = kwargs
def render(self, context):
if self.node:
node = self.node.resolve(context)
else:
node = context.get('node', None)
if not node:
return settings.TEMPLATE_STRING_IF_INVALID
if self.with_obj is None and self.view_name is None:
url = node.get_absolute_url()
else:
if not node.accepts_subpath:
return settings.TEMPLATE_STRING_IF_INVALID
if self.with_obj is not None:
try:
view_name, args, kwargs = node.view.get_reverse_params(self.with_obj.resolve(context))
<|code_end|>
. Use current file imports:
(from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaulttags import kwarg_re
from django.utils.encoding import smart_str
from philo.exceptions import ViewCanNotProvideSubpath)
and context including class names, function names, or small code snippets from other files:
# Path: philo/exceptions.py
# class ViewCanNotProvideSubpath(Exception):
# """Raised by :meth:`.View.reverse` when the :class:`.View` can not provide a subpath for the supplied arguments."""
# silent_variable_failure = True
. Output only the next line. | except ViewCanNotProvideSubpath: |
Given the code snippet: <|code_start|>
"""
def __init__(self, allow=None, disallow=None, secure=True):
self.allow = allow
self.disallow = disallow
self.secure = secure
def __call__(self, value):
try:
self.validate_template(value)
except ValidationError:
raise
except Exception, e:
if hasattr(e, 'source') and isinstance(e, TemplateSyntaxError):
origin, (start, end) = e.source
template_source = origin.reload()
upto = 0
for num, next in enumerate(linebreak_iter(template_source)):
if start >= upto and end <= next:
raise ValidationError(mark_safe("Template code invalid: \"%s\" (%s:%d).<br />%s" % (escape(template_source[start:end]), origin.loadname, num, e)))
upto = next
raise ValidationError("Template code invalid. Error was: %s: %s" % (e.__class__.__name__, e))
def validate_template(self, template_string):
# We want to tokenize like normal, then use a custom parser.
lexer = Lexer(template_string, None)
tokens = lexer.tokenize()
parser = TemplateValidationParser(tokens, self.allow, self.disallow, self.secure)
for node in parser.parse():
<|code_end|>
, generate the next line using the imports in this file:
import re
from django.core.exceptions import ValidationError
from django.template import Template, Parser, Lexer, TOKEN_BLOCK, TOKEN_VAR, TemplateSyntaxError
from django.utils import simplejson as json
from django.utils.html import escape, mark_safe
from django.utils.translation import ugettext_lazy as _
from philo.utils.templates import LOADED_TEMPLATE_ATTR
and context (functions, classes, or occasionally code) from other files:
# Path: philo/utils/templates.py
# LOADED_TEMPLATE_ATTR = '_philo_loaded_template'
. Output only the next line. | template = getattr(node, LOADED_TEMPLATE_ATTR, None) |
Given the following code snippet before the placeholder: <|code_start|># This file is part of python-github2, and is made available under the 3-clause
# BSD license. See LICENSE for the full details.
ITEM_FMT = "* %s (%s)"
URL_USER_FMT = "http://github.com/%s"
OPTION_LIST = (
optparse.make_option('-t', '--api-token',
default=None, action="store", dest="api_token", type="str",
help="GitHub API token. Default is to find this from git config"),
optparse.make_option('-u', '--api-user',
default=None, action="store", dest="api_user", type="str",
help="GitHub Username. Default is to find this from git config"),
)
BY_LOWER = lambda value: value.lower()
class FriendOrFollow(object):
# Caching api calls
_followers = None
_following = None
def __init__(self, username=None, api_user=None, api_token=None):
self.api_user = api_user or self.git_config_get("github.user")
self.api_token = api_token or self.git_config_get("github.token")
self.username = username or self.api_user
print("U:(%s) T:(%s) F:(%s)" % (self.api_user, self.api_token,
self.username))
<|code_end|>
, predict the next line using imports from the current file:
import sys
import optparse
from subprocess import Popen, PIPE
from github2.client import Github
and context including class names, function names, and sometimes code from other files:
# Path: github2/client.py
# class Github(object):
#
# """Interface to GitHub's API v2."""
#
# def __init__(self, username=None, api_token=None, requests_per_second=None,
# access_token=None, cache=None, proxy_host=None,
# proxy_port=8080, github_url=None):
# """Setup GitHub API object.
#
# .. versionadded:: 0.2.0
# The ``requests_per_second`` parameter
# .. versionadded:: 0.3.0
# The ``cache`` and ``access_token`` parameters
# .. versionadded:: 0.4.0
# The ``proxy_host`` and ``proxy_port`` parameters
# .. versionadded:: 0.7.0
# The ``github_url`` parameter
#
# :param str username: your own GitHub username.
# :param str api_token: can be found at https://github.com/account
# (while logged in as that user):
# :param str access_token: can be used when no ``username`` and/or
# ``api_token`` is used. The ``access_token`` is the OAuth access
# token that is received after successful OAuth authentication.
# :param float requests_per_second: indicate the API rate limit you're
# operating under (1 per second per GitHub at the moment),
# or None to disable delays. The default is to disable delays (for
# backwards compatibility).
# :param str cache: a directory for caching GitHub responses.
# :param str proxy_host: the hostname for the HTTP proxy, if needed.
# :param str proxy_port: the hostname for the HTTP proxy, if needed (will
# default to 8080 if a proxy_host is set and no port is set).
# :param str github_url: the hostname to connect to, for GitHub
# Enterprise support
#
# """
#
# self.request = GithubRequest(username=username, api_token=api_token,
# requests_per_second=requests_per_second,
# access_token=access_token, cache=cache,
# proxy_host=proxy_host,
# proxy_port=proxy_port,
# github_url=github_url)
# self.issues = Issues(self.request)
# self.users = Users(self.request)
# self.repos = Repositories(self.request)
# self.commits = Commits(self.request)
# self.organizations = Organizations(self.request)
# self.teams = Teams(self.request)
# self.pull_requests = PullRequests(self.request)
#
# def project_for_user_repo(self, user, repo):
# """Return GitHub identifier for a user's repository.
#
# :param str user: repository owner
# :param str repo: repository name
#
# """
# return "/".join([user, repo])
#
# def get_all_blobs(self, project, tree_sha):
# """Get a list of all blobs for a specific tree.
#
# .. versionadded:: 0.3.0
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# blobs = self.request.get("blob/all", project, tree_sha)
# return blobs.get("blobs")
#
# def get_blob_info(self, project, tree_sha, path):
# """Get the blob for a file within a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
# :param str path: path within tree to fetch blob for
#
# """
# blob = self.request.get("blob/show", project, tree_sha, path)
# return blob.get("blob")
#
# def get_tree(self, project, tree_sha):
# """Get tree information for a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# tree = self.request.get("tree/show", project, tree_sha)
# return tree.get("tree", [])
#
# def get_network_meta(self, project):
# """Get GitHub metadata associated with a project.
#
# :param str project: GitHub project
#
# """
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_meta"]), {})
#
# def get_network_data(self, project, nethash, start=None, end=None):
# """Get chunk of GitHub network data.
#
# :param str project: GitHub project
# :param str nethash: identifier provided by :meth:`get_network_meta`
# :param int start: optional start point for data
# :param int stop: optional end point for data
#
# """
# data = {"nethash": nethash}
# if start:
# data["start"] = start
# if end:
# data["end"] = end
#
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_data_chunk"]),
# data)
. Output only the next line. | self.client = Github(self.api_user, self.api_token) |
Next line prediction: <|code_start|> """Http mock side effect that returns saved entries.
Implementation tests should never span network boundaries.
"""
file = os.path.join("tests/data", httplib2.safename(uri))
if os.path.exists(file):
response = message_from_file(open(file))
headers = httplib2.Response(response)
body = bytes(response.get_payload(), charset_from_headers(headers))
return (headers, body)
else:
return (httplib2.Response({"status": "404"}),
"Resource %r unavailable from test data store" % file)
class HttpMockTestCase(unittest.TestCase):
def setUp(self):
"""Prepare test fixtures.
`httplib2.Http` is patched to return cached entries via
:class:`HttpMock`.
:attr:`client` is an unauthenticated :obj:`Github` object for easy use
in tests.
"""
httplib2.Http.request = Mock(spec_set=httplib2.Http.request,
side_effect=request_mock)
<|code_end|>
. Use current file imports:
(import os
import sys
import unittest
import httplib2
from email import message_from_file
from mock import Mock
from github2.client import Github
from github2.request import charset_from_headers)
and context including class names, function names, or small code snippets from other files:
# Path: github2/client.py
# class Github(object):
#
# """Interface to GitHub's API v2."""
#
# def __init__(self, username=None, api_token=None, requests_per_second=None,
# access_token=None, cache=None, proxy_host=None,
# proxy_port=8080, github_url=None):
# """Setup GitHub API object.
#
# .. versionadded:: 0.2.0
# The ``requests_per_second`` parameter
# .. versionadded:: 0.3.0
# The ``cache`` and ``access_token`` parameters
# .. versionadded:: 0.4.0
# The ``proxy_host`` and ``proxy_port`` parameters
# .. versionadded:: 0.7.0
# The ``github_url`` parameter
#
# :param str username: your own GitHub username.
# :param str api_token: can be found at https://github.com/account
# (while logged in as that user):
# :param str access_token: can be used when no ``username`` and/or
# ``api_token`` is used. The ``access_token`` is the OAuth access
# token that is received after successful OAuth authentication.
# :param float requests_per_second: indicate the API rate limit you're
# operating under (1 per second per GitHub at the moment),
# or None to disable delays. The default is to disable delays (for
# backwards compatibility).
# :param str cache: a directory for caching GitHub responses.
# :param str proxy_host: the hostname for the HTTP proxy, if needed.
# :param str proxy_port: the hostname for the HTTP proxy, if needed (will
# default to 8080 if a proxy_host is set and no port is set).
# :param str github_url: the hostname to connect to, for GitHub
# Enterprise support
#
# """
#
# self.request = GithubRequest(username=username, api_token=api_token,
# requests_per_second=requests_per_second,
# access_token=access_token, cache=cache,
# proxy_host=proxy_host,
# proxy_port=proxy_port,
# github_url=github_url)
# self.issues = Issues(self.request)
# self.users = Users(self.request)
# self.repos = Repositories(self.request)
# self.commits = Commits(self.request)
# self.organizations = Organizations(self.request)
# self.teams = Teams(self.request)
# self.pull_requests = PullRequests(self.request)
#
# def project_for_user_repo(self, user, repo):
# """Return GitHub identifier for a user's repository.
#
# :param str user: repository owner
# :param str repo: repository name
#
# """
# return "/".join([user, repo])
#
# def get_all_blobs(self, project, tree_sha):
# """Get a list of all blobs for a specific tree.
#
# .. versionadded:: 0.3.0
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# blobs = self.request.get("blob/all", project, tree_sha)
# return blobs.get("blobs")
#
# def get_blob_info(self, project, tree_sha, path):
# """Get the blob for a file within a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
# :param str path: path within tree to fetch blob for
#
# """
# blob = self.request.get("blob/show", project, tree_sha, path)
# return blob.get("blob")
#
# def get_tree(self, project, tree_sha):
# """Get tree information for a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# tree = self.request.get("tree/show", project, tree_sha)
# return tree.get("tree", [])
#
# def get_network_meta(self, project):
# """Get GitHub metadata associated with a project.
#
# :param str project: GitHub project
#
# """
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_meta"]), {})
#
# def get_network_data(self, project, nethash, start=None, end=None):
# """Get chunk of GitHub network data.
#
# :param str project: GitHub project
# :param str nethash: identifier provided by :meth:`get_network_meta`
# :param int start: optional start point for data
# :param int stop: optional end point for data
#
# """
# data = {"nethash": nethash}
# if start:
# data["start"] = start
# if end:
# data["end"] = end
#
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_data_chunk"]),
# data)
#
# Path: github2/request.py
# def charset_from_headers(headers):
# """Parse charset from headers.
#
# :param httplib2.Response headers: Request headers
# :return: Defined encoding, or default to ASCII
#
# """
# match = re.search("charset=([^ ;]+)", headers.get('content-type', ""))
# if match:
# charset = match.groups()[0]
# else:
# charset = "ascii"
# return charset
. Output only the next line. | self.client = Github() |
Predict the next line for this snippet: <|code_start|># Copyright (C) 2011-2012 James Rowe <jnrowe@gmail.com>
#
# This file is part of python-github2, and is made available under the 3-clause
# BSD license. See LICENSE for the full details.
if sys.version_info[0] == 2:
bytes = lambda x, enc: x
ORIG_REQUEST_METHOD = httplib2.Http.request
def request_mock(uri, method='GET', body=None, headers=None,
redirections=5, connection_type=None):
"""Http mock side effect that returns saved entries.
Implementation tests should never span network boundaries.
"""
file = os.path.join("tests/data", httplib2.safename(uri))
if os.path.exists(file):
response = message_from_file(open(file))
headers = httplib2.Response(response)
<|code_end|>
with the help of current file imports:
import os
import sys
import unittest
import httplib2
from email import message_from_file
from mock import Mock
from github2.client import Github
from github2.request import charset_from_headers
and context from other files:
# Path: github2/client.py
# class Github(object):
#
# """Interface to GitHub's API v2."""
#
# def __init__(self, username=None, api_token=None, requests_per_second=None,
# access_token=None, cache=None, proxy_host=None,
# proxy_port=8080, github_url=None):
# """Setup GitHub API object.
#
# .. versionadded:: 0.2.0
# The ``requests_per_second`` parameter
# .. versionadded:: 0.3.0
# The ``cache`` and ``access_token`` parameters
# .. versionadded:: 0.4.0
# The ``proxy_host`` and ``proxy_port`` parameters
# .. versionadded:: 0.7.0
# The ``github_url`` parameter
#
# :param str username: your own GitHub username.
# :param str api_token: can be found at https://github.com/account
# (while logged in as that user):
# :param str access_token: can be used when no ``username`` and/or
# ``api_token`` is used. The ``access_token`` is the OAuth access
# token that is received after successful OAuth authentication.
# :param float requests_per_second: indicate the API rate limit you're
# operating under (1 per second per GitHub at the moment),
# or None to disable delays. The default is to disable delays (for
# backwards compatibility).
# :param str cache: a directory for caching GitHub responses.
# :param str proxy_host: the hostname for the HTTP proxy, if needed.
# :param str proxy_port: the hostname for the HTTP proxy, if needed (will
# default to 8080 if a proxy_host is set and no port is set).
# :param str github_url: the hostname to connect to, for GitHub
# Enterprise support
#
# """
#
# self.request = GithubRequest(username=username, api_token=api_token,
# requests_per_second=requests_per_second,
# access_token=access_token, cache=cache,
# proxy_host=proxy_host,
# proxy_port=proxy_port,
# github_url=github_url)
# self.issues = Issues(self.request)
# self.users = Users(self.request)
# self.repos = Repositories(self.request)
# self.commits = Commits(self.request)
# self.organizations = Organizations(self.request)
# self.teams = Teams(self.request)
# self.pull_requests = PullRequests(self.request)
#
# def project_for_user_repo(self, user, repo):
# """Return GitHub identifier for a user's repository.
#
# :param str user: repository owner
# :param str repo: repository name
#
# """
# return "/".join([user, repo])
#
# def get_all_blobs(self, project, tree_sha):
# """Get a list of all blobs for a specific tree.
#
# .. versionadded:: 0.3.0
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# blobs = self.request.get("blob/all", project, tree_sha)
# return blobs.get("blobs")
#
# def get_blob_info(self, project, tree_sha, path):
# """Get the blob for a file within a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
# :param str path: path within tree to fetch blob for
#
# """
# blob = self.request.get("blob/show", project, tree_sha, path)
# return blob.get("blob")
#
# def get_tree(self, project, tree_sha):
# """Get tree information for a specific tree.
#
# :param str project: GitHub project
# :param str tree_sha: object ID of tree
#
# """
# tree = self.request.get("tree/show", project, tree_sha)
# return tree.get("tree", [])
#
# def get_network_meta(self, project):
# """Get GitHub metadata associated with a project.
#
# :param str project: GitHub project
#
# """
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_meta"]), {})
#
# def get_network_data(self, project, nethash, start=None, end=None):
# """Get chunk of GitHub network data.
#
# :param str project: GitHub project
# :param str nethash: identifier provided by :meth:`get_network_meta`
# :param int start: optional start point for data
# :param int stop: optional end point for data
#
# """
# data = {"nethash": nethash}
# if start:
# data["start"] = start
# if end:
# data["end"] = end
#
# return self.request.raw_request("/".join([self.request.github_url,
# project,
# "network_data_chunk"]),
# data)
#
# Path: github2/request.py
# def charset_from_headers(headers):
# """Parse charset from headers.
#
# :param httplib2.Response headers: Request headers
# :return: Defined encoding, or default to ASCII
#
# """
# match = re.search("charset=([^ ;]+)", headers.get('content-type', ""))
# if match:
# charset = match.groups()[0]
# else:
# charset = "ascii"
# return charset
, which may contain function names, class names, or code. Output only the next line. | body = bytes(response.get_payload(), charset_from_headers(headers)) |
Based on the snippet: <|code_start|> """
return dt(year, month, day, hour, minute, second, tzinfo=tzutc())
def test_ghdate_to_datetime():
eq_(string_to_datetime('2011/05/22 00:24:15 -0700'),
dt_utz(2011, 5, 22, 7, 24, 15))
eq_(string_to_datetime('2009/04/18 13:04:09 -0700'),
dt_utz(2009, 4, 18, 20, 4, 9))
eq_(string_to_datetime('2009/11/12 21:15:17 -0800'),
dt_utz(2009, 11, 13, 5, 15, 17))
eq_(string_to_datetime('2009/11/12 21:16:20 -0800'),
dt_utz(2009, 11, 13, 5, 16, 20))
eq_(string_to_datetime('2010/04/17 17:24:29 -0700'),
dt_utz(2010, 4, 18, 0, 24, 29))
eq_(string_to_datetime('2010/05/18 06:10:36 -0700'),
dt_utz(2010, 5, 18, 13, 10, 36))
eq_(string_to_datetime('2010/05/25 21:59:37 -0700'),
dt_utz(2010, 5, 26, 4, 59, 37))
eq_(string_to_datetime('2010/05/26 17:08:41 -0700'),
dt_utz(2010, 5, 27, 0, 8, 41))
eq_(string_to_datetime('2010/06/20 06:13:37 -0700'),
dt_utz(2010, 6, 20, 13, 13, 37))
eq_(string_to_datetime('2010/07/28 12:56:51 -0700'),
dt_utz(2010, 7, 28, 19, 56, 51))
eq_(string_to_datetime('2010/09/20 21:32:49 -0700'),
dt_utz(2010, 9, 21, 4, 32, 49))
def test_datetime_to_ghdate():
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime as dt
from dateutil.tz import tzutc
from nose.tools import eq_
from github2 import core
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context (classes, functions, sometimes code) from other files:
# Path: github2/core.py
# LOGGER = logging.getLogger('github2.core')
# PY3K = sys.version_info[0] == 3
# PY27 = sys.version_info[:2] >= (2, 7)
# GITHUB_DATE_FORMAT = "%Y/%m/%d %H:%M:%S %z"
# COMMIT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
# GITHUB_TZ = tz.gettz("America/Los_Angeles")
# NAIVE = True
# def string_to_datetime(string):
# def _handle_naive_datetimes(f):
# def wrapper(datetime_):
# def datetime_to_ghdate(datetime_):
# def datetime_to_commitdate(datetime_):
# def datetime_to_isodate(datetime_):
# def requires_auth(f):
# def wrapper(self, *args, **kwargs):
# def enhanced_by_auth(f):
# def __init__(self, request):
# def make_request(self, command, *args, **kwargs):
# def get_value(self, *args, **kwargs):
# def get_values(self, *args, **kwargs):
# def doc_generator(docstring, attributes):
# def bullet(title, text):
# def __init__(self, help):
# def to_python(self, value):
# def __init__(self, *args, **kwargs):
# def to_python(self, value):
# def from_python(self, value):
# def __new__(cls, name, bases, attrs):
# def _contribute_method(name, func):
# def constructor(self, **kwargs):
# def iterate(self):
# def __getitem__(self, key):
# def __setitem__(self, key, value):
# def repr_string(string):
# class AuthError(Exception):
# class GithubCommand(object):
# class Attribute(object):
# class DateAttribute(Attribute):
# class BaseDataType(type):
# class BaseData(BaseDataType('BaseData', (object, ), {})):
#
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
. Output only the next line. | eq_(datetime_to_ghdate(dt_utz(2011, 5, 22, 7, 24, 15)), |
Based on the snippet: <|code_start|> eq_(datetime_to_ghdate(dt_utz(2010, 9, 21, 4, 32, 49)),
'2010/09/20 21:32:49 -0700')
def test_commitdate_to_datetime():
eq_(string_to_datetime('2011-05-22T00:24:15-07:00'),
dt_utz(2011, 5, 22, 7, 24, 15))
eq_(string_to_datetime('2011-04-09T10:07:30-07:00'),
dt_utz(2011, 4, 9, 17, 7, 30))
eq_(string_to_datetime('2011-02-19T07:16:11-08:00'),
dt_utz(2011, 2, 19, 15, 16, 11))
eq_(string_to_datetime('2010-12-21T12:34:27-08:00'),
dt_utz(2010, 12, 21, 20, 34, 27))
eq_(string_to_datetime('2011-04-09T10:20:05-07:00'),
dt_utz(2011, 4, 9, 17, 20, 5))
eq_(string_to_datetime('2011-04-09T10:05:58-07:00'),
dt_utz(2011, 4, 9, 17, 5, 58))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt_utz(2011, 4, 9, 16, 53, 0))
eq_(string_to_datetime('2011-04-09T10:00:21-07:00'),
dt_utz(2011, 4, 9, 17, 0, 21))
eq_(string_to_datetime('2010-12-16T15:10:59-08:00'),
dt_utz(2010, 12, 16, 23, 10, 59))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt_utz(2011, 4, 9, 16, 53, 0))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt_utz(2011, 4, 9, 16, 53, 0))
def test_datetime_to_commitdate():
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime as dt
from dateutil.tz import tzutc
from nose.tools import eq_
from github2 import core
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context (classes, functions, sometimes code) from other files:
# Path: github2/core.py
# LOGGER = logging.getLogger('github2.core')
# PY3K = sys.version_info[0] == 3
# PY27 = sys.version_info[:2] >= (2, 7)
# GITHUB_DATE_FORMAT = "%Y/%m/%d %H:%M:%S %z"
# COMMIT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
# GITHUB_TZ = tz.gettz("America/Los_Angeles")
# NAIVE = True
# def string_to_datetime(string):
# def _handle_naive_datetimes(f):
# def wrapper(datetime_):
# def datetime_to_ghdate(datetime_):
# def datetime_to_commitdate(datetime_):
# def datetime_to_isodate(datetime_):
# def requires_auth(f):
# def wrapper(self, *args, **kwargs):
# def enhanced_by_auth(f):
# def __init__(self, request):
# def make_request(self, command, *args, **kwargs):
# def get_value(self, *args, **kwargs):
# def get_values(self, *args, **kwargs):
# def doc_generator(docstring, attributes):
# def bullet(title, text):
# def __init__(self, help):
# def to_python(self, value):
# def __init__(self, *args, **kwargs):
# def to_python(self, value):
# def from_python(self, value):
# def __new__(cls, name, bases, attrs):
# def _contribute_method(name, func):
# def constructor(self, **kwargs):
# def iterate(self):
# def __getitem__(self, key):
# def __setitem__(self, key, value):
# def repr_string(string):
# class AuthError(Exception):
# class GithubCommand(object):
# class Attribute(object):
# class DateAttribute(Attribute):
# class BaseDataType(type):
# class BaseData(BaseDataType('BaseData', (object, ), {})):
#
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
. Output only the next line. | eq_(datetime_to_commitdate(dt_utz(2011, 5, 22, 7, 24, 15)), |
Given snippet: <|code_start|> eq_(datetime_to_commitdate(dt_utz(2011, 4, 9, 16, 53, 0)),
'2011-04-09T09:53:00-07:00')
def test_isodate_to_datetime():
eq_(string_to_datetime('2011-05-22T00:24:15Z'),
dt_utz(2011, 5, 22, 0, 24, 15))
eq_(string_to_datetime('2011-04-09T10:07:30Z'),
dt_utz(2011, 4, 9, 10, 7, 30))
eq_(string_to_datetime('2011-02-19T07:16:11Z'),
dt_utz(2011, 2, 19, 7, 16, 11))
eq_(string_to_datetime('2010-12-21T12:34:27Z'),
dt_utz(2010, 12, 21, 12, 34, 27))
eq_(string_to_datetime('2011-04-09T10:20:05Z'),
dt_utz(2011, 4, 9, 10, 20, 5))
eq_(string_to_datetime('2011-04-09T10:05:58Z'),
dt_utz(2011, 4, 9, 10, 5, 58))
eq_(string_to_datetime('2011-04-09T09:53:00Z'),
dt_utz(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T10:00:21Z'),
dt_utz(2011, 4, 9, 10, 0, 21))
eq_(string_to_datetime('2010-12-16T15:10:59Z'),
dt_utz(2010, 12, 16, 15, 10, 59))
eq_(string_to_datetime('2011-04-09T09:53:00Z'),
dt_utz(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T09:53:00Z'),
dt_utz(2011, 4, 9, 9, 53, 0))
def test_datetime_to_isodate():
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from datetime import datetime as dt
from dateutil.tz import tzutc
from nose.tools import eq_
from github2 import core
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context:
# Path: github2/core.py
# LOGGER = logging.getLogger('github2.core')
# PY3K = sys.version_info[0] == 3
# PY27 = sys.version_info[:2] >= (2, 7)
# GITHUB_DATE_FORMAT = "%Y/%m/%d %H:%M:%S %z"
# COMMIT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
# GITHUB_TZ = tz.gettz("America/Los_Angeles")
# NAIVE = True
# def string_to_datetime(string):
# def _handle_naive_datetimes(f):
# def wrapper(datetime_):
# def datetime_to_ghdate(datetime_):
# def datetime_to_commitdate(datetime_):
# def datetime_to_isodate(datetime_):
# def requires_auth(f):
# def wrapper(self, *args, **kwargs):
# def enhanced_by_auth(f):
# def __init__(self, request):
# def make_request(self, command, *args, **kwargs):
# def get_value(self, *args, **kwargs):
# def get_values(self, *args, **kwargs):
# def doc_generator(docstring, attributes):
# def bullet(title, text):
# def __init__(self, help):
# def to_python(self, value):
# def __init__(self, *args, **kwargs):
# def to_python(self, value):
# def from_python(self, value):
# def __new__(cls, name, bases, attrs):
# def _contribute_method(name, func):
# def constructor(self, **kwargs):
# def iterate(self):
# def __getitem__(self, key):
# def __setitem__(self, key, value):
# def repr_string(string):
# class AuthError(Exception):
# class GithubCommand(object):
# class Attribute(object):
# class DateAttribute(Attribute):
# class BaseDataType(type):
# class BaseData(BaseDataType('BaseData', (object, ), {})):
#
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
which might include code, classes, or functions. Output only the next line. | eq_(datetime_to_isodate(dt_utz(2011, 5, 22, 0, 24, 15)), |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
# Copyright (C) 2011-2012 James Rowe <jnrowe@gmail.com>
#
# This file is part of python-github2, and is made available under the 3-clause
# BSD license. See LICENSE for the full details.
def setup_module():
"""Enable timezone-aware datetime handling for this module's tests."""
core.NAIVE = False
def teardown_module():
"""Disable timezone-aware datetime handling when tests have completed."""
core.NAIVE = True
def dt_utz(year, month, day, hour, minute, second):
"""Produce a UTC-anchored datetime object.
:see: :class:`datetime.datetime`
"""
return dt(year, month, day, hour, minute, second, tzinfo=tzutc())
def test_ghdate_to_datetime():
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime as dt
from dateutil.tz import tzutc
from nose.tools import eq_
from github2 import core
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context (classes, functions, sometimes code) from other files:
# Path: github2/core.py
# LOGGER = logging.getLogger('github2.core')
# PY3K = sys.version_info[0] == 3
# PY27 = sys.version_info[:2] >= (2, 7)
# GITHUB_DATE_FORMAT = "%Y/%m/%d %H:%M:%S %z"
# COMMIT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
# GITHUB_TZ = tz.gettz("America/Los_Angeles")
# NAIVE = True
# def string_to_datetime(string):
# def _handle_naive_datetimes(f):
# def wrapper(datetime_):
# def datetime_to_ghdate(datetime_):
# def datetime_to_commitdate(datetime_):
# def datetime_to_isodate(datetime_):
# def requires_auth(f):
# def wrapper(self, *args, **kwargs):
# def enhanced_by_auth(f):
# def __init__(self, request):
# def make_request(self, command, *args, **kwargs):
# def get_value(self, *args, **kwargs):
# def get_values(self, *args, **kwargs):
# def doc_generator(docstring, attributes):
# def bullet(title, text):
# def __init__(self, help):
# def to_python(self, value):
# def __init__(self, *args, **kwargs):
# def to_python(self, value):
# def from_python(self, value):
# def __new__(cls, name, bases, attrs):
# def _contribute_method(name, func):
# def constructor(self, **kwargs):
# def iterate(self):
# def __getitem__(self, key):
# def __setitem__(self, key, value):
# def repr_string(string):
# class AuthError(Exception):
# class GithubCommand(object):
# class Attribute(object):
# class DateAttribute(Attribute):
# class BaseDataType(type):
# class BaseData(BaseDataType('BaseData', (object, ), {})):
#
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
. Output only the next line. | eq_(string_to_datetime('2011/05/22 00:24:15 -0700'), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.