| Instruction (string, lengths 362–7.83k) | output_code (string, lengths 1–945) |
|---|---|
Given snippet: <|code_start|> parser = argparse.ArgumentParser(description='Analyze trace file simulating the placement of the nodes.')
#parser.add_argument('integers', metavar='N', type=int, nargs='+',
# help='an integer for the accumulator')
parser.add_argument('--trace-file', required=True, action='store',
help='trace file to analyze')
parser.add_argument('--topology', required=True, action='store',
help='topology where the users are located into.')
parser.add_argument('--social-topology', required=True, action='store',
help='connections between users.')
parser.add_argument('--mapping', required=True, action='store',
help='mapping algorithm between social-users and topology nodes.')
parser.add_argument('--central-repository', dest='central_repository', action='store_true')
args = parser.parse_args()
print args
# Load important graphs
topology_graph = getattr(__import__('graphs.%s'%args.topology), args.topology).G
social_graph = getattr(__import__('graphs.%s'%args.social_topology), args.social_topology).G
topology_coords = {}
for node in topology_graph.nodes():
topology_coords[node] = (
random.randint(0, 100),
random.randint(0, 100)
)
#Initialize TopologyManager
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import argparse
import networkx
import random
import re
from topology_manager import TopologyManager
and context:
# Path: topology_manager.py
# class TopologyManager(object):
# def __init__(self, topology, social_graph, topology_nodes_position, enable_mobility = False, topology_file=None):
# assert type(topology_nodes_position) == dict
#
# self.topology = topology
# self.social_graph = social_graph
#
# self.enable_mobility = enable_mobility
#
# #Coordinates for the nodes and the users
# #First: filter nodes without caching capabilities
# tnp = {}
# for n in topology_nodes_position.keys():
# if self.has_request_capabilities(n):
# tnp = topology_nodes_position[n]
# topology_nodes_position = None
#
# self.coords = tnp
# self.coords_user = {}
#
# self.topology_nodes = {}
#
# self.method = None
# self.topology_file = topology_file
# self.initialize_paths()
#
# def initialize_paths(self):
# if os.path.exists('graphs/'+self.topology_file+'.routes'):
# self.paths = pickle.load(open('graphs/'+self.topology_file+'.routes', 'rb'))
# else:
# self.paths = SocialPaths(self.topology)
# pickle.dump(self.paths, open('graphs/'+self.topology_file+'.routes', 'wb'))
#
# def get_path(self, social_src, social_dst):
# return self.paths.calculate_path(self.topology_nodes[social_src], self.topology_nodes[social_dst])
#
# def set_method(self, method):
# #Decide which method to use
# assert method in ['random', 'geographical', 'onepublisher']
# self.method = method
#
# def update_user_position(self, user, position):
# # Assign new coords and then calculate user belonging to the topology_node.
# self.coords_user[user] = position
# if self.enable_mobility:
# #if random.randint(0, 2) == 0:
# self.topology_nodes[user] = self.closest_node(user)
#
# def update_user_node(self, user, node):
# self.topology_nodes[user] = node
# def get_user_node(self, user):
# return self.topology_nodes[user]
#
# def update_all_users_position(self):
# #TODO: add feature that users only can be putted in a node without the wr attribute as true
#
# if self.method == 'random':
# request_nodes = [n for n in self.topology.nodes() if self.has_request_capabilities(n)]
# res = []
# for user in self.social_graph.nodes():
# self.topology_nodes[user] = random.choice(request_nodes)
#
# elif self.method == 'geographical':
# for user in self.social_graph.nodes():
# self.topology_nodes[user] = self.closest_node(user)
#
# elif self.method == 'onepublisher':
# for user in self.social_graph.nodes():
# self.topology_nodes[user] = 1
# self.topology_nodes[0] = 0
# else:
# raise Exception('Not implemented method: %s'%self.method)
#
# def closest_node(self, user):
# x, y = self.coords_user[user]
# d = 100*100 # max distance
# n = -2
# for k,v in self.coords.items():
# x1, y1 = v
#
# # Euclidean distance
# a = numpy.array((x,y))
# b = numpy.array((x1,y1))
# d1 = numpy.linalg.norm(a-b)
#
# if d1 < d:
# d = d1
# n = k
# assert n != -1, "Remember to set original coordinates for CCN nodes. User %s found closest node the -1"%user
# return n
#
# def has_caching_capabilities(self, node):
# n = self.topology.node[node]
# return not (n.has_key('wc') and n['wc'] == True)
# def has_request_capabilities(self, node):
# n = self.topology.node[node]
# return not (n.has_key('wr') and n['wr'] == True)
#
# def __getitem__(self, key):
# return self.get_user_node(key)
# def __setitem__(self, key, value):
# return self.update_user_node(key, value)
which might include code, classes, or functions. Output only the next line. | manager = TopologyManager(topology_graph, social_graph, topology_coords, enable_mobility = False, topology_file=args.topology) |
Based on the snippet: <|code_start|>#
logging.basicConfig(filename='example.log',
level=logging.DEBUG,
format='%(asctime)-15s %(message)s'
)
<|code_end|>
, predict the immediate next line with the help of imports:
from cache_manager import CacheManager
import logging
and context (classes, functions, sometimes code) from other files:
# Path: cache_manager.py
# class CacheManager(object):
# def _init_strategy(self):
# pass
# def __init__(self, cache_policy, cache_size, social_graph, topology, topology_manager, threshold = None):
# self.cache_policy = cache_policy
# res = re.match('((?P<name>[a-zA-Z0-9_]*)(\((?P<params>([0-9]*\.?[0-9]*,? ?)*)\))?)', cache_policy)
# assert res != None
# self.cache_policy = res.group('name')
# try:
# self.cache_policy_params = [float(r) for r in res.group('params').split(',')]
# except AttributeError:
# self.cache_policy_params = []
#
# self.social_graph = social_graph
# self.CACHE_SIZE = cache_size
#
# self.topology_manager = topology_manager
# self.topology = topology
#
# self.stats = Stats()
#
# self.caches = {}
# self.prepare_caches(cache_size)
#
# ## INItIALIZE PRODUCER POLICY
# self._init_strategy()
#
# def prepare_caches(self, cache_size):
# ### REPLACEMENT POLICIES ##############################################
# #if cache_policy in ['subset_influentials', 'subset_noninfluentials']:
# # #recorremos todos los nodos, nos fijamos cuales caen y extramos los amigos del subgrafo social
# # if cache_policy == 'subset_influentials':
# # n_ = [n for n in social_graph.nodes() if pagerank[n] > threshold]
# # elif cache_policy == 'subset_noninfluentials':
# # n_ = [n for n in social_graph.nodes() if pagerank[n] <= threshold]
#
# # nodes_ = {}
# # for n in n_:
# # nodes_[str(n)] = n
#
# # for node in self.topology.nodes():
# # self.caches[node] = SubsetCache(cache_size, subset = nodes_)
#
# #elif cache_policy in ['subset_community2']:
# # partition = externmodules.community.best_partition(self.social_graph)
#
# # for node in topology.nodes():
# # users_in_node = [n for n in social_graph.nodes() if topology_manager[n] == node]
# # for vecino in networkx.neighbors(topology, node):
# # users_in_node += [n for n in social_graph.nodes() if topology_manager[n] == vecino]
#
# # self.caches[node] = CommunitySubsetCache(
# # cache_size,
# # users = users_in_node,
# # node_in_the_topology = node,
# # partition = partition
# # )
#
# #elif cache_policy in ['specialsemantic2']:
# # for node in topology.nodes():
# # self.caches[node] = SemanticCache2(cache_size, [0.5, 20, 100000])
# # self.caches[node] = SemanticCache3(cache_size, [0.5, 20, 100000])
# ### END REPLACEMENT POLICIES ##########################################
# rp = getattr(getattr(__import__('replacement_policies.%s'%self.cache_policy), self.cache_policy), self.cache_policy)
# for node in self.topology.nodes():
# if self.topology_manager.has_caching_capabilities(node):
# self.caches[node] = rp(cache_size)#, self.cache_policy_params)
#
# def post_production(self, content_name, social_publisher):
# self._post_production(content_name, social_publisher)
#
# def _post_production(self, content_name, social_publisher):
# """Function to be overloaded"""
# pass
#
# def __getitem__(self, key):
# return self.caches[key]
# def values(self):
# return self.caches.values()
#
# #TODO: change function name
# def _retrieve_from_caches(self, interest, path):
# self.stats.incr_interest()
# res = self.retrieve_from_caches(interest, path)
# # Move to _retrieve_from_caches
# if res[0]:
# self.stats.incr_w()
# logging.info("Resolved interest(%s) with %s, path(%s): hit request into node %s of the topology"%(self.__class__.__name__, interest, path, path[res[1]]))
# else:
# logging.info("Resolved interest(%s) with %s, path(%s): miss request"%(self.__class__.__name__, interest, path))
#
# self.stats.hops_walked(res[1], len(path)-1)
#
#
# def lookup_cache(self, node, interest):
# """Wrapper to lookup in the caches
# """
# if self.topology_manager.has_caching_capabilities(node):
# res = self.caches[node].lookup(interest)
# if res:
# self.stats.hit()
# else:
# self.stats.miss()
# else:
# res = False
# return res
# def store_cache(self, node, interest):
# if self.topology_manager.has_caching_capabilities(node):
# self.stats.incr_accepted(self.caches[node].store(interest))
#
# def print_caches(self):
# logging.debug('Inspection of the caches')
# for k in self.caches.keys():
# logging.debug("Cache in Node(%s): %s"%(k, [k for k in self.caches[k].keys()]))
#
# #REFACTORING OF THIS!
# def incr_publish(self):
# return self.stats.incr_publish()
# def stats_summary(self):
# self.print_caches()
# return self.stats.summary(self.caches)
. Output only the next line. | class LCE(CacheManager): |
Using the snippet: <|code_start|> dictionary. To fix this a list of all valid keys should be created
and checked here before setting the value. These are the keys used
in the ief file.
"""
headlist = ['Title', 'Path', 'Datafile', 'Results']
if key in headlist:
self.event_header[key] = value
else:
self.event_details[key] = value
def addIedFile(self, ied_path, name=''):
"""Add a new ied file.
Args:
ied_path(str): path to an ied file.
name=''(str): name for the ied file.
"""
if self.ied_data is None:
self.ied_data = []
self.ied_data.append({'name': name, 'file': ied_path})
def addSnapshot(self, snapshot_path, time):
"""Add a new snapshot.
Args:
snapshot_path(str): the path for the snapshot.
time(float): the time to assign to the snapshot.
"""
if self.snapshots is None:
self.snapshots = []
<|code_end|>
, determine the next line of code. You have imports:
import os
import logging
from ship.utils import utilfunctions as uf
from ship.utils import filetools as ft
and context (class names, function names, or code) available:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
. Output only the next line. | if not uf.isNumeric(time): |
Predict the next line after this snippet: <|code_start|>
return contents
def write(self, filepath=None, overwrite=False):
"""Write the contents of this file to disk.
Writes out to file in the format required for reading by ISIS/FMP.
Note:
If a filepath is not provided and the settings in this objects
PathHolder class have not been updated you will write over the
file that was loaded.
Args:
filepath=None(str): if a filename is provided it the file will be
written to that location. If not, the current settings in this
object path_holder object will be used.
overwrite=False(bool): if the file already exists it will raise
an IOError.
Raises:
IOError - If unable to write to file.
"""
if filepath is None:
filepath = self.path_holder.absolutePath()
if not overwrite and os.path.exists(filepath):
raise IOError('filepath %s already exists. Set overwrite=True to ignore this warning.' % filepath)
contents = self.getPrintableContents()
<|code_end|>
using the current file's imports:
import os
import logging
from ship.utils import utilfunctions as uf
from ship.utils import filetools as ft
and any relevant context from other files:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
. Output only the next line. | ft.writeFile(contents, filepath) |
Given the code snippet: <|code_start|> Summary:
Example use of the fmp package to update file paths in an .ief file
and save the ief file under a new name.
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
# Contains functions for updating file paths and reading/writing files
def iefExample():
"""update some key file paths in an ief file.
Updates the .dat file, .tcf file, and results file paths referenced by
the ief file and save it under a new ief file name.
"""
# Load the tuflow model with a tcf file
ief_file = r'C:\path\to\an\isis\ieffile.ief'
<|code_end|>
, generate the next line using the imports in this file:
import os
from ship.utils.fileloaders import fileloader as fl
from ship.utils import filetools
and context (functions, classes, or occasionally code) from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
. Output only the next line. | loader = fl.FileLoader() |
Given snippet: <|code_start|>
from __future__ import unicode_literals
# logging references with a __name__ set to this module.
logger = logging.getLogger(__name__)
def getFile(file_path):
"""Text file reader.
Reads a text file, appending each new line to a list.
Args:
filePath (str): File path for text file to load.
Returns:
List - contents of text file split by new lines.
Raises:
IOError: if problem in reading file.
TypeError: if string not given for file_path
"""
file_contents = []
line = ""
try:
with open(file_path, 'rU') as f:
for line in f:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import logging
from ship.utils import utilfunctions as uf
and context:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
which might include code, classes, or functions. Output only the next line. | file_contents.append(uf.encodeStr(line)) |
Predict the next line for this snippet: <|code_start|> 01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
def main():
utils.assertionErrors = 0
# FMP
print ('*************************************************************')
print ('Running fmp package integration tests...')
print ('*************************************************************')
dl = datload.DatLoadTests().runTests()
print ('\n*************************************************************')
print ('fmp package integration tests complete.')
print ('*************************************************************\n\n')
# TUFLOW
print ('*************************************************************')
print ('Running tuflow package integration tests...')
print ('*************************************************************\n')
<|code_end|>
with the help of current file imports:
from integration_tests import test_tuflowload as tuflowload
from integration_tests import test_tuflowupdate as tuflowupdate
from integration_tests import test_updatetcfmodelfile as tcfmodelfile
from integration_tests import test_datload as datload
from integration_tests.test_tuflowupdate import TestError
from integration_tests import utils
and context from other files:
# Path: integration_tests/test_tuflowload.py
# class TuflowLoadTests(object):
# def runTests(self):
# def loadTuflowModel(self, path):
# def test_nonExistingControlFiles(self):
# def test_writeTuflowModel(self):
# def test_deactiveLogic(self):
# def test_controlFileTypes(self):
# def test_seVals(self):
# def test_files(self):
# def test_variables(self):
# def test_allFilepaths(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# class TuflowUpdateTests(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_changeActiveStatus(self):
# def test_addPartToPartHolder(self):
# def test_removePartFromPartHolder(self):
# def test_addPartToLogicHolder(self):
# def test_removePartFromLogicHolder(self):
#
# Path: integration_tests/test_updatetcfmodelfile.py
# class UpdateTcfModelFile(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_addTcfModelFile(self):
# def test_removeTcfModelFile(self):
# def test_replaceTcfModelFile(self):
#
# Path: integration_tests/test_datload.py
# class DatLoadTests(object):
# def runTests(self):
# def loadDatModel(self, path):
# def test_unitCounts(self):
# def test_icsSetup(self):
# def test_datWrite(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# pass
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may contain function names, class names, or code. Output only the next line. | tt = tuflowload.TuflowLoadTests().runTests() |
Predict the next line after this snippet: <|code_start|>
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
def main():
utils.assertionErrors = 0
# FMP
print ('*************************************************************')
print ('Running fmp package integration tests...')
print ('*************************************************************')
dl = datload.DatLoadTests().runTests()
print ('\n*************************************************************')
print ('fmp package integration tests complete.')
print ('*************************************************************\n\n')
# TUFLOW
print ('*************************************************************')
print ('Running tuflow package integration tests...')
print ('*************************************************************\n')
tt = tuflowload.TuflowLoadTests().runTests()
<|code_end|>
using the current file's imports:
from integration_tests import test_tuflowload as tuflowload
from integration_tests import test_tuflowupdate as tuflowupdate
from integration_tests import test_updatetcfmodelfile as tcfmodelfile
from integration_tests import test_datload as datload
from integration_tests.test_tuflowupdate import TestError
from integration_tests import utils
and any relevant context from other files:
# Path: integration_tests/test_tuflowload.py
# class TuflowLoadTests(object):
# def runTests(self):
# def loadTuflowModel(self, path):
# def test_nonExistingControlFiles(self):
# def test_writeTuflowModel(self):
# def test_deactiveLogic(self):
# def test_controlFileTypes(self):
# def test_seVals(self):
# def test_files(self):
# def test_variables(self):
# def test_allFilepaths(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# class TuflowUpdateTests(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_changeActiveStatus(self):
# def test_addPartToPartHolder(self):
# def test_removePartFromPartHolder(self):
# def test_addPartToLogicHolder(self):
# def test_removePartFromLogicHolder(self):
#
# Path: integration_tests/test_updatetcfmodelfile.py
# class UpdateTcfModelFile(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_addTcfModelFile(self):
# def test_removeTcfModelFile(self):
# def test_replaceTcfModelFile(self):
#
# Path: integration_tests/test_datload.py
# class DatLoadTests(object):
# def runTests(self):
# def loadDatModel(self, path):
# def test_unitCounts(self):
# def test_icsSetup(self):
# def test_datWrite(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# pass
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | tu = tuflowupdate.TuflowUpdateTests().runTests() |
Given the following code snippet before the placeholder: <|code_start|> Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
def main():
utils.assertionErrors = 0
# FMP
print ('*************************************************************')
print ('Running fmp package integration tests...')
print ('*************************************************************')
dl = datload.DatLoadTests().runTests()
print ('\n*************************************************************')
print ('fmp package integration tests complete.')
print ('*************************************************************\n\n')
# TUFLOW
print ('*************************************************************')
print ('Running tuflow package integration tests...')
print ('*************************************************************\n')
tt = tuflowload.TuflowLoadTests().runTests()
tu = tuflowupdate.TuflowUpdateTests().runTests()
<|code_end|>
, predict the next line using imports from the current file:
from integration_tests import test_tuflowload as tuflowload
from integration_tests import test_tuflowupdate as tuflowupdate
from integration_tests import test_updatetcfmodelfile as tcfmodelfile
from integration_tests import test_datload as datload
from integration_tests.test_tuflowupdate import TestError
from integration_tests import utils
and context including class names, function names, and sometimes code from other files:
# Path: integration_tests/test_tuflowload.py
# class TuflowLoadTests(object):
# def runTests(self):
# def loadTuflowModel(self, path):
# def test_nonExistingControlFiles(self):
# def test_writeTuflowModel(self):
# def test_deactiveLogic(self):
# def test_controlFileTypes(self):
# def test_seVals(self):
# def test_files(self):
# def test_variables(self):
# def test_allFilepaths(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# class TuflowUpdateTests(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_changeActiveStatus(self):
# def test_addPartToPartHolder(self):
# def test_removePartFromPartHolder(self):
# def test_addPartToLogicHolder(self):
# def test_removePartFromLogicHolder(self):
#
# Path: integration_tests/test_updatetcfmodelfile.py
# class UpdateTcfModelFile(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_addTcfModelFile(self):
# def test_removeTcfModelFile(self):
# def test_replaceTcfModelFile(self):
#
# Path: integration_tests/test_datload.py
# class DatLoadTests(object):
# def runTests(self):
# def loadDatModel(self, path):
# def test_unitCounts(self):
# def test_icsSetup(self):
# def test_datWrite(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# pass
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | tu = tcfmodelfile.UpdateTcfModelFile().runTests() |
Predict the next line for this snippet: <|code_start|>
Note that one of the tests will attempt to write out the model to a folder
called 'test_output' in the integration_tests directory. This folder is
added to the .gitignore file. If your python path is different in may go
elsewhere as it uses the getcwd() function...so keep an eye on it!
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
def main():
utils.assertionErrors = 0
# FMP
print ('*************************************************************')
print ('Running fmp package integration tests...')
print ('*************************************************************')
<|code_end|>
with the help of current file imports:
from integration_tests import test_tuflowload as tuflowload
from integration_tests import test_tuflowupdate as tuflowupdate
from integration_tests import test_updatetcfmodelfile as tcfmodelfile
from integration_tests import test_datload as datload
from integration_tests.test_tuflowupdate import TestError
from integration_tests import utils
and context from other files:
# Path: integration_tests/test_tuflowload.py
# class TuflowLoadTests(object):
# def runTests(self):
# def loadTuflowModel(self, path):
# def test_nonExistingControlFiles(self):
# def test_writeTuflowModel(self):
# def test_deactiveLogic(self):
# def test_controlFileTypes(self):
# def test_seVals(self):
# def test_files(self):
# def test_variables(self):
# def test_allFilepaths(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# class TuflowUpdateTests(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_changeActiveStatus(self):
# def test_addPartToPartHolder(self):
# def test_removePartFromPartHolder(self):
# def test_addPartToLogicHolder(self):
# def test_removePartFromLogicHolder(self):
#
# Path: integration_tests/test_updatetcfmodelfile.py
# class UpdateTcfModelFile(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_addTcfModelFile(self):
# def test_removeTcfModelFile(self):
# def test_replaceTcfModelFile(self):
#
# Path: integration_tests/test_datload.py
# class DatLoadTests(object):
# def runTests(self):
# def loadDatModel(self, path):
# def test_unitCounts(self):
# def test_icsSetup(self):
# def test_datWrite(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# pass
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may contain function names, class names, or code. Output only the next line. | dl = datload.DatLoadTests().runTests() |
Using the snippet: <|code_start|>
Summary:
This is the main class for the integration tests.
It calls a suite of tests that work on an actual loaded TuflowModel to
check that it functions properly in the 'real world'.
Note that one of the tests will attempt to write out the model to a folder
called 'test_output' in the integration_tests directory. This folder is
added to the .gitignore file. If your python path is different in may go
elsewhere as it uses the getcwd() function...so keep an eye on it!
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
def main():
<|code_end|>
, determine the next line of code. You have imports:
from integration_tests import test_tuflowload as tuflowload
from integration_tests import test_tuflowupdate as tuflowupdate
from integration_tests import test_updatetcfmodelfile as tcfmodelfile
from integration_tests import test_datload as datload
from integration_tests.test_tuflowupdate import TestError
from integration_tests import utils
and context (class names, function names, or code) available:
# Path: integration_tests/test_tuflowload.py
# class TuflowLoadTests(object):
# def runTests(self):
# def loadTuflowModel(self, path):
# def test_nonExistingControlFiles(self):
# def test_writeTuflowModel(self):
# def test_deactiveLogic(self):
# def test_controlFileTypes(self):
# def test_seVals(self):
# def test_files(self):
# def test_variables(self):
# def test_allFilepaths(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# class TuflowUpdateTests(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_changeActiveStatus(self):
# def test_addPartToPartHolder(self):
# def test_removePartFromPartHolder(self):
# def test_addPartToLogicHolder(self):
# def test_removePartFromLogicHolder(self):
#
# Path: integration_tests/test_updatetcfmodelfile.py
# class UpdateTcfModelFile(object):
# def runTests(self):
# def loadTuflowModel(self):
# def test_addTcfModelFile(self):
# def test_removeTcfModelFile(self):
# def test_replaceTcfModelFile(self):
#
# Path: integration_tests/test_datload.py
# class DatLoadTests(object):
# def runTests(self):
# def loadDatModel(self, path):
# def test_unitCounts(self):
# def test_icsSetup(self):
# def test_datWrite(self):
#
# Path: integration_tests/test_tuflowupdate.py
# class TestError(ValueError):
# pass
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | utils.assertionErrors = 0 |
Based on the snippet: <|code_start|>from __future__ import unicode_literals
class DatLoadTests(object):
def runTests(self):
cwd = os.getcwd()
path = "integration_tests/test_data/model1/fmp/ship_test_v1-1.DAT"
main_path = os.path.normpath(os.path.join(cwd, path))
self.loadDatModel(main_path)
self.test_unitCounts()
self.test_icsSetup()
self.test_datWrite()
def loadDatModel(self, path):
print ('Loading FMP .dat model...')
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.utils import filetools as ft
from integration_tests import utils
and context (classes, functions, sometimes code) from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | loader = fileloader.FileLoader() |
Predict the next line for this snippet: <|code_start|> main_path = os.path.normpath(os.path.join(cwd, path))
self.loadDatModel(main_path)
self.test_unitCounts()
self.test_icsSetup()
self.test_datWrite()
def loadDatModel(self, path):
print ('Loading FMP .dat model...')
loader = fileloader.FileLoader()
self.dat = loader.loadFile(path)
print ('FMP model load complete.')
def test_unitCounts(self):
print ('Test unit counts...')
assert(len(self.dat.units) == 18)
headers = self.dat.unitsByType('header')
comments = self.dat.unitsByType('comment')
refhs = self.dat.unitsByType('refh')
rivers = self.dat.unitsByType('river')
bridges = self.dat.unitsByCategory('bridge')
junctions = self.dat.unitsByType('junction')
spills = self.dat.unitsByType('spill')
htbdys = self.dat.unitsByType('htbdy')
unknowns = self.dat.unitsByType('unknown')
ics = self.dat.unitsByType('initial_conditions')
gis = self.dat.unitsByType('gis_info')
<|code_end|>
with the help of current file imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.utils import filetools as ft
from integration_tests import utils
and context from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may contain function names, class names, or code. Output only the next line. | utils.softAssertion(len(headers), 1) |
Given the code snippet: <|code_start|> return False
elif data in b'? ':
return None
else:
# Todo: return something? (But that would be misleading!)
message = 'Illegal value for logical field: {!r}'
raise ValueError(message.format(data))
def _parse_memo_index(self, data):
if len(data) == 4:
return struct.unpack('<I', data)[0]
else:
try:
return int(data)
except ValueError:
if data.strip(b' \x00') == b'':
return 0
else:
raise ValueError(
'Memo index is not an integer: {!r}'.format(data))
def parseM(self, field, data):
"""Parse memo field (M, G, B or P)
Returns memo index (an integer), which can be used to look up
the corresponding memo in the memo file.
"""
memo = self.get_memo(self._parse_memo_index(data))
# Visual FoxPro allows binary data in memo fields.
# These should not be decoded as string.
<|code_end|>
, generate the next line using the imports in this file:
import sys
import datetime
import struct
from decimal import Decimal
from .memo import BinaryMemo
and context (functions, classes, or occasionally code) from other files:
# Path: ship/utils/dbfread/memo.py
# class BinaryMemo(VFPMemo):
# pass
. Output only the next line. | if isinstance(memo, BinaryMemo): |
Using the snippet: <|code_start|>
class TuflowLoadTests(object):
def runTests(self):
cwd = os.getcwd()
path1 = "integration_tests/test_data/model1/tuflow/runs/test_run1.tcf"
path2 = "integration_tests/test_data/model1/tuflow/runs/test_run_noexist.tcf"
main_path = os.path.normpath(os.path.join(cwd, path1))
missing_path = os.path.normpath(os.path.join(cwd, path2))
self.loadTuflowModel(missing_path)
self.test_nonExistingControlFiles()
del self.tuflow
self.loadTuflowModel(main_path)
utils.softAssertion(self.tuflow.missing_model_files, [])
# self.test_deactiveLogic()
self.test_writeTuflowModel()
self.test_controlFileTypes()
self.test_allFilepaths()
self.test_variables()
self.test_files()
self.test_seVals()
def loadTuflowModel(self, path):
print ('Loading tuflow model...')
<|code_end|>
, determine the next line of code. You have imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.utils import filetools as ft
from integration_tests import utils
and context (class names, function names, or code) available:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | loader = fileloader.FileLoader() |
Next line prediction: <|code_start|> os.path.join(cwd, 'asread/model1'),
os.path.join(cwd, 'asread/model1/tuflow'),
os.path.join(cwd, 'asread/model1/tuflow/runs'),
os.path.join(cwd, 'asread/model1/tuflow/model')
]
try:
for n in need_dirs:
if not os.path.isdir(n):
os.mkdir(n)
except IOError:
print ('\t Could not make test directeries - aborting test')
print ('\nFail!\n')
tuflow = copy.deepcopy(self.tuflow)
new_root = os.path.normpath(need_dirs[4]) # ending 'runs'
root_compare = os.path.normpath(need_dirs[3]) # ending 'tuflow'
tuflow.root = new_root
contents = {}
for ckey, cval in tuflow.control_files.items():
if not ckey in contents.keys():
contents[ckey] = {}
temp = cval.getPrintableContents()
for tkey, tval in temp.items():
# print ('root compare: ' + root_compare)
# print ('tkey: ' + tkey)
utils.softAssertionIn(root_compare, tkey)
contents[ckey][tkey] = tval
for ctype, c in contents.items():
for pkey, val in c.items():
<|code_end|>
. Use current file imports:
(import os
import copy
from ship.utils.fileloaders import fileloader
from ship.utils import filetools as ft
from integration_tests import utils)
and context including class names, function names, or small code snippets from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | ft.writeFile(val, pkey) |
Given the code snippet: <|code_start|>from __future__ import unicode_literals
class TuflowLoadTests(object):
def runTests(self):
cwd = os.getcwd()
path1 = "integration_tests/test_data/model1/tuflow/runs/test_run1.tcf"
path2 = "integration_tests/test_data/model1/tuflow/runs/test_run_noexist.tcf"
main_path = os.path.normpath(os.path.join(cwd, path1))
missing_path = os.path.normpath(os.path.join(cwd, path2))
self.loadTuflowModel(missing_path)
self.test_nonExistingControlFiles()
del self.tuflow
self.loadTuflowModel(main_path)
<|code_end|>
, generate the next line using the imports in this file:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.utils import filetools as ft
from integration_tests import utils
and context (functions, classes, or occasionally code) from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/utils/filetools.py
# def getFile(file_path):
# def writeFile(contents, file_path, add_newline=True):
# def finalFolder(path):
# def setFinalFolder(path, folder_name):
# def getFileName(in_path, with_extension=False):
# def directory(in_path):
# def __init__(self, path, root=None):
# def _setupVars(self, path):
# def finalFolder(self):
# def setFinalFolder(self, folder_name):
# def absolutePath(self, filename=None, relative_roots=[], normalize=True):
# def directory(self):
# def relativePath(self, with_extension=True, filename=None):
# def filenameAndExtension(self):
# def setAbsolutePath(self, absolute_path, keep_relative_root=False):
# def setFilename(self, filename, has_extension=False, keep_extension=False):
# def pathExists(self, ext=None):
# class PathHolder(object):
# ABSOLUTE, RELATIVE, DIRECTORY, NAME, EXTENSION, NAME_AND_EXTENSION = range(6)
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
. Output only the next line. | utils.softAssertion(self.tuflow.missing_model_files, []) |
Given the following code snippet before the placeholder: <|code_start|> return data[:end_of_memo]
# Alternative end of memo markers:
# \x1a\x1a
# \x0d\x0a
return data[:eom]
class DB4MemoFile(MemoFile):
"""dBase IV memo file"""
def __getitem__(self, index):
if index <= 0:
return None
# Todo: read this from the file header.
block_size = 512
self._seek(index * block_size)
memo_header = DB4MemoHeader.read(self.file)
data = self._read(memo_header.length)
# Todo: fields are terminated in different ways.
# \x1a is one of them
# \x1f seems to be another (dbase_8b.dbt)
return data.split(b'\x1f', 1)[0]
def find_memofile(dbf_filename):
for ext in ['.fpt', '.dbt']:
<|code_end|>
, predict the next line using imports from the current file:
from collections import namedtuple
from .ifiles import ifind
from .struct_parser import StructParser
and context including class names, function names, and sometimes code from other files:
# Path: ship/utils/dbfread/ifiles.py
# def ifind(pat, ext=None):
# """Look for a file in a case insensitive way.
#
# Returns filename it a matching file was found, or None if it was not.
# """
#
# if ext:
# pat = os.path.splitext(pat)[0] + ext
#
# files = iglob(pat)
# if files:
# return files[0] # Return an arbitrary file
# else:
# return None
. Output only the next line. | name = ifind(dbf_filename, ext=ext) |
Given the code snippet: <|code_start|> 'tcf': tuflowloader.TuflowLoader,
'dat': datloader.DatLoader,
'ied': datloader.DatLoader}
self.warnings = []
def loadFile(self, filepath, arg_dict={}):
"""Load a file from disk.
Args:
filepath (str): the path to the file to load.
arg_dict={}(Dict): contains keyword referenced arguments needed by
any of the loaders. E.g. the TuflowLoader can take some
scenario values.
Returns:
The object created by the individual file loaders. E.g. for .dat
files this will be an IsisUnitCollection. See the individual
ALoader implementations for details of return types.
Raises:
AttributeError: if the file type is not tcf/dat/ief/ied.
See Also:
:class:'ALoader'
:class:'IefLoader'
:class:'TuflowLoader'
:class:'DatLoader'
"""
<|code_end|>
, generate the next line using the imports in this file:
from ship.utils import utilfunctions as uuf
from ship.utils.fileloaders import tuflowloader
from ship.utils.fileloaders import iefloader
from ship.utils.fileloaders import datloader
import logging
and context (functions, classes, or occasionally code) from other files:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/fileloaders/tuflowloader.py
# class TuflowLoader(ALoader):
# def __init__(self):
# def _resetLoader(self):
# def loadFile(self, tcf_path, arg_dict={}):
# def loadModel(self, tcf_path, arg_dict={}):
# def loadControlFile(self, model_file):
# def _orderModel(self, tcf_path):
# def buildControlFiles(self, _load_list, model):
# def _fetchTuflowModel(self, root):
# def _readControlFile(self, raw_contents, root, control_part):
# def createUnknown(unknown_store, l):
# def addLogicAssociate(lpart, logic_stack):
# def parseDefineLogic(self, line, parent, key):
# def parseIfLogic(self, line, parent, root, key):
# def _addModelVariable(self, part):
# def getFile(self, path):
#
# Path: ship/utils/fileloaders/iefloader.py
# class IefLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def _addHeaderLine(self, event_header, value):
# def _addDetailsLine(self, event_details, contents, index):
# def _addSnapshotLine(self, snapshot, contents, index):
# def _addIedLine(self, ied_data, contents, index):
# def _loadDescription(self, description, contents, index):
# def _loadFile(self, filepath):
#
# Path: ship/utils/fileloaders/datloader.py
# class DatLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def buildDat(self, contents, arg_dict={}):
# def createUnknownSection(self):
# def getUnits(self):
# def updateSubContents(self):
# def __loadFile(self, filepath):
. Output only the next line. | ext = uuf.fileExtensionWithoutPeriod(filepath) |
Given snippet: <|code_start|>
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
from __future__ import unicode_literals
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
class FileLoader(object):
"""
"""
def __init__(self):
"""
"""
self._known_files = {'ief': iefloader.IefLoader,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from ship.utils import utilfunctions as uuf
from ship.utils.fileloaders import tuflowloader
from ship.utils.fileloaders import iefloader
from ship.utils.fileloaders import datloader
import logging
and context:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/fileloaders/tuflowloader.py
# class TuflowLoader(ALoader):
# def __init__(self):
# def _resetLoader(self):
# def loadFile(self, tcf_path, arg_dict={}):
# def loadModel(self, tcf_path, arg_dict={}):
# def loadControlFile(self, model_file):
# def _orderModel(self, tcf_path):
# def buildControlFiles(self, _load_list, model):
# def _fetchTuflowModel(self, root):
# def _readControlFile(self, raw_contents, root, control_part):
# def createUnknown(unknown_store, l):
# def addLogicAssociate(lpart, logic_stack):
# def parseDefineLogic(self, line, parent, key):
# def parseIfLogic(self, line, parent, root, key):
# def _addModelVariable(self, part):
# def getFile(self, path):
#
# Path: ship/utils/fileloaders/iefloader.py
# class IefLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def _addHeaderLine(self, event_header, value):
# def _addDetailsLine(self, event_details, contents, index):
# def _addSnapshotLine(self, snapshot, contents, index):
# def _addIedLine(self, ied_data, contents, index):
# def _loadDescription(self, description, contents, index):
# def _loadFile(self, filepath):
#
# Path: ship/utils/fileloaders/datloader.py
# class DatLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def buildDat(self, contents, arg_dict={}):
# def createUnknownSection(self):
# def getUnits(self):
# def updateSubContents(self):
# def __loadFile(self, filepath):
which might include code, classes, or functions. Output only the next line. | 'tcf': tuflowloader.TuflowLoader, |
Given snippet: <|code_start|> simple to load any type of file from one place.
Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
from __future__ import unicode_literals
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
class FileLoader(object):
"""
"""
def __init__(self):
"""
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from ship.utils import utilfunctions as uuf
from ship.utils.fileloaders import tuflowloader
from ship.utils.fileloaders import iefloader
from ship.utils.fileloaders import datloader
import logging
and context:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/fileloaders/tuflowloader.py
# class TuflowLoader(ALoader):
# def __init__(self):
# def _resetLoader(self):
# def loadFile(self, tcf_path, arg_dict={}):
# def loadModel(self, tcf_path, arg_dict={}):
# def loadControlFile(self, model_file):
# def _orderModel(self, tcf_path):
# def buildControlFiles(self, _load_list, model):
# def _fetchTuflowModel(self, root):
# def _readControlFile(self, raw_contents, root, control_part):
# def createUnknown(unknown_store, l):
# def addLogicAssociate(lpart, logic_stack):
# def parseDefineLogic(self, line, parent, key):
# def parseIfLogic(self, line, parent, root, key):
# def _addModelVariable(self, part):
# def getFile(self, path):
#
# Path: ship/utils/fileloaders/iefloader.py
# class IefLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def _addHeaderLine(self, event_header, value):
# def _addDetailsLine(self, event_details, contents, index):
# def _addSnapshotLine(self, snapshot, contents, index):
# def _addIedLine(self, ied_data, contents, index):
# def _loadDescription(self, description, contents, index):
# def _loadFile(self, filepath):
#
# Path: ship/utils/fileloaders/datloader.py
# class DatLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def buildDat(self, contents, arg_dict={}):
# def createUnknownSection(self):
# def getUnits(self):
# def updateSubContents(self):
# def __loadFile(self, filepath):
which might include code, classes, or functions. Output only the next line. | self._known_files = {'ief': iefloader.IefLoader, |
Given snippet: <|code_start|> Author:
Duncan Runnacles
Created:
01 Apr 2016
Copyright:
Duncan Runnacles 2016
TODO:
Updates:
"""
from __future__ import unicode_literals
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
class FileLoader(object):
"""
"""
def __init__(self):
"""
"""
self._known_files = {'ief': iefloader.IefLoader,
'tcf': tuflowloader.TuflowLoader,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from ship.utils import utilfunctions as uuf
from ship.utils.fileloaders import tuflowloader
from ship.utils.fileloaders import iefloader
from ship.utils.fileloaders import datloader
import logging
and context:
# Path: ship/utils/utilfunctions.py
# def formatFloat(value, no_of_dps, ignore_empty_str=True):
# def checkFileType(file_path, ext):
# def isNumeric(s):
# def encodeStr(value):
# def isString(value):
# def isList(value):
# def arrayToString(self, str_array):
# def findSubstringInList(substr, the_list):
# def findMax(val1, val2):
# def fileExtensionWithoutPeriod(filepath, name_only=False):
# def findWholeWord(w):
# def convertRunOptionsToSEDict(options):
# def getSEResolvedFilename(filename, se_vals):
# def enum(*sequential, **named):
# def __init__(self):
# def isEmpty(self):
# def enqueue(self, item):
# def dequeue(self):
# def size(self):
# def __init__(self, max_size=-1):
# def isEmpty(self):
# def add(self, item):
# def pop(self):
# def peek(self):
# def size(self):
# class FileQueue(object):
# class LoadStack(object):
#
# Path: ship/utils/fileloaders/tuflowloader.py
# class TuflowLoader(ALoader):
# def __init__(self):
# def _resetLoader(self):
# def loadFile(self, tcf_path, arg_dict={}):
# def loadModel(self, tcf_path, arg_dict={}):
# def loadControlFile(self, model_file):
# def _orderModel(self, tcf_path):
# def buildControlFiles(self, _load_list, model):
# def _fetchTuflowModel(self, root):
# def _readControlFile(self, raw_contents, root, control_part):
# def createUnknown(unknown_store, l):
# def addLogicAssociate(lpart, logic_stack):
# def parseDefineLogic(self, line, parent, key):
# def parseIfLogic(self, line, parent, root, key):
# def _addModelVariable(self, part):
# def getFile(self, path):
#
# Path: ship/utils/fileloaders/iefloader.py
# class IefLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def _addHeaderLine(self, event_header, value):
# def _addDetailsLine(self, event_details, contents, index):
# def _addSnapshotLine(self, snapshot, contents, index):
# def _addIedLine(self, ied_data, contents, index):
# def _loadDescription(self, description, contents, index):
# def _loadFile(self, filepath):
#
# Path: ship/utils/fileloaders/datloader.py
# class DatLoader(ATool, ALoader):
# def __init__(self):
# def loadFile(self, file_path, arg_dict={}):
# def buildDat(self, contents, arg_dict={}):
# def createUnknownSection(self):
# def getUnits(self):
# def updateSubContents(self):
# def __loadFile(self, filepath):
which might include code, classes, or functions. Output only the next line. | 'dat': datloader.DatLoader, |
Predict the next line for this snippet: <|code_start|>from __future__ import unicode_literals
class TestError(ValueError):
pass
class TuflowUpdateTests(object):
def runTests(self):
self.loadTuflowModel()
self.test_changeActiveStatus()
self.test_addPartToPartHolder()
self.test_removePartFromPartHolder()
self.test_addPartToLogicHolder()
self.test_removePartFromLogicHolder()
def loadTuflowModel(self):
print ('Loading tuflow model...')
path = "integration_tests/test_data/model1/tuflow/runs/test_run1.tcf"
path = os.path.normpath(os.path.join(os.getcwd(), path))
<|code_end|>
with the help of current file imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.tuflow import tuflowfactory as factory
from integration_tests import utils
and context from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/tuflow/tuflowfactory.py
# class TuflowFactory(object):
# def getTuflowPart(cls, line, parent, part_type=None, logic=None):
# def createModelVariableType(line, parent, **kwargs):
# def createUserVariableType(line, parent, **kwargs):
# def createBcEventVariable(line, parent, **kwargs):
# def createVariableType(line, parent, **kwargs):
# def createKeyValueType(line, parent, **kwargs):
# def createDataType(line, parent, **kwargs):
# def createResultType(line, parent, **kwargs):
# def createGisType(line, parent, **kwargs):
# def createModelType(line, parent, **kwargs):
# def createIfLogic(parent, commands, terms, comments):
# def createBlockLogic(parent, commands, terms, comments):
# def partsFromPipedFiles(part_type, parent, **kwargs):
# def assignSiblings(parts):
# def checkMultiTypes(line, part_type):
# def checkEstryAuto(line, parent):
# def checkIsComment(line):
# def takeParentType(path):
# def getExtension(path, upper=True):
# def breakLine(line):
# def separateComment(instruction):
# def resolveResult(result_part):
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may contain function names, class names, or code. Output only the next line. | loader = fileloader.FileLoader() |
Here is a snippet: <|code_start|>
# Make sure we know what it is before
zpt_part = control.contains(command="Set Zpts", variable="2",
parent_filename="test_trd2")
zln_part = control.contains(command="Z Line THIN", filename="zln_shiptest_trd_v2")
parts = zpt_part + zln_part
utils.softAssertion(len(parts), 3)
trd = control.contains(filename="test_trd2")[0]
trd.active = False
# Then check after
zpt_part = control.contains(command="Set Zpts", variable="2",
parent_filename="test_trd2")
zln_part = control.contains(command="Z Line THIN", filename="zln_shiptest_trd_v2")
parts = zpt_part + zln_part
utils.softAssertion(len(parts), 0)
print ('Done')
def test_addPartToPartHolder(self):
print ('Testing add part to PartHolder...')
# tuflow = copy.deepcopy(self.tuflow)
tuflow = self.tuflow
control = tuflow.control_files['TGC']
tgc = None
for c in control.control_files:
if c.filename == 'test_tgc1': tgc = c
line = "Timestep == 12 ! timestep 12 comment"
<|code_end|>
. Write the next line using the current file imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.tuflow import tuflowfactory as factory
from integration_tests import utils
and context from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/tuflow/tuflowfactory.py
# class TuflowFactory(object):
# def getTuflowPart(cls, line, parent, part_type=None, logic=None):
# def createModelVariableType(line, parent, **kwargs):
# def createUserVariableType(line, parent, **kwargs):
# def createBcEventVariable(line, parent, **kwargs):
# def createVariableType(line, parent, **kwargs):
# def createKeyValueType(line, parent, **kwargs):
# def createDataType(line, parent, **kwargs):
# def createResultType(line, parent, **kwargs):
# def createGisType(line, parent, **kwargs):
# def createModelType(line, parent, **kwargs):
# def createIfLogic(parent, commands, terms, comments):
# def createBlockLogic(parent, commands, terms, comments):
# def partsFromPipedFiles(part_type, parent, **kwargs):
# def assignSiblings(parts):
# def checkMultiTypes(line, part_type):
# def checkEstryAuto(line, parent):
# def checkIsComment(line):
# def takeParentType(path):
# def getExtension(path, upper=True):
# def breakLine(line):
# def separateComment(instruction):
# def resolveResult(result_part):
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may include functions, classes, or code. Output only the next line. | varpart = factory.TuflowFactory.getTuflowPart(line, tgc)[0] |
Here is a snippet: <|code_start|>from __future__ import unicode_literals
class TestError(ValueError):
pass
class TuflowUpdateTests(object):
def runTests(self):
self.loadTuflowModel()
self.test_changeActiveStatus()
self.test_addPartToPartHolder()
self.test_removePartFromPartHolder()
self.test_addPartToLogicHolder()
self.test_removePartFromLogicHolder()
def loadTuflowModel(self):
print ('Loading tuflow model...')
path = "integration_tests/test_data/model1/tuflow/runs/test_run1.tcf"
path = os.path.normpath(os.path.join(os.getcwd(), path))
loader = fileloader.FileLoader()
self.tuflow = loader.loadFile(path)
<|code_end|>
. Write the next line using the current file imports:
import os
import copy
from ship.utils.fileloaders import fileloader
from ship.tuflow import tuflowfactory as factory
from integration_tests import utils
and context from other files:
# Path: ship/utils/fileloaders/fileloader.py
# class FileLoader(object):
# def __init__(self):
# def loadFile(self, filepath, arg_dict={}):
#
# Path: ship/tuflow/tuflowfactory.py
# class TuflowFactory(object):
# def getTuflowPart(cls, line, parent, part_type=None, logic=None):
# def createModelVariableType(line, parent, **kwargs):
# def createUserVariableType(line, parent, **kwargs):
# def createBcEventVariable(line, parent, **kwargs):
# def createVariableType(line, parent, **kwargs):
# def createKeyValueType(line, parent, **kwargs):
# def createDataType(line, parent, **kwargs):
# def createResultType(line, parent, **kwargs):
# def createGisType(line, parent, **kwargs):
# def createModelType(line, parent, **kwargs):
# def createIfLogic(parent, commands, terms, comments):
# def createBlockLogic(parent, commands, terms, comments):
# def partsFromPipedFiles(part_type, parent, **kwargs):
# def assignSiblings(parts):
# def checkMultiTypes(line, part_type):
# def checkEstryAuto(line, parent):
# def checkIsComment(line):
# def takeParentType(path):
# def getExtension(path, upper=True):
# def breakLine(line):
# def separateComment(instruction):
# def resolveResult(result_part):
#
# Path: integration_tests/utils.py
# def softAssertion(var, value, testEqual=True):
# def softAssertionIn(var, value, testIn=True):
, which may include functions, classes, or code. Output only the next line. | utils.softAssertion(self.tuflow.missing_model_files, []) |
Given snippet: <|code_start|>
class FunctionFamilyDetector(object):
def __init__(self,kb,language,family):
self.language = language
self.kb = kb
self.family = family
def __str__(self):
return '<' + self.language.id + ': ' + \
'function words (family=' + self.family + ')>'
def __call__(self,word):
return (word in self.kb)
class FunctionWord(models.Model):
""" a word of particular significance to a parser """
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.db import models
from conceptnet.corpus.models import Language
from conceptnet.models import Relation
and context:
# Path: conceptnet/corpus/models.py
# class Language(models.Model):
# """
# A database object representing a language.
#
# Instances of Language can be used in filter expressions to select only
# objects that apply to a particular language. For example:
#
# >>> en = Language.get('en')
# >>> english_sentences = Sentence.objects.filter(language=en)
# """
# id = models.CharField(max_length=16,primary_key=True)
# name = models.TextField(blank=True)
# sentence_count = models.IntegerField(default=0)
#
# def __str__(self):
# return "%s (%s)" % (self.name, self.id)
#
# @staticmethod
# def get(id):
# """
# Get a language from its ISO language code.
#
# Some relevant language codes::
#
# en = English
# pt = Portuguese
# ko = Korean
# ja = Japanese
# nl = Dutch
# es = Spanish
# fr = French
# ar = Arabic
# zh = Chinese
# """
# if isinstance(id,Language): return id
# return get_lang(id)
#
# @property
# def nl(self):
# """
# A collection of natural language tools for a language.
#
# See :mod:`simplenlp` for more information on using these tools.
# """
# return get_nl(self.id)
#
# Path: conceptnet/models.py
# class Relation(models.Model):
# name = models.CharField(max_length=128,unique=True)
# description = models.CharField(max_length=255, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# @classmethod
# def get(cls, name):
# # Check if the parameter is already a Relation. We don't use
# # isinstance in case of accidental multiple imports (e.g.,
# # conceptnet.models vs conceptnet4.models).
# if hasattr(name, 'id'):
# return name
# return cls.objects.get(name=name)
which might include code, classes, or functions. Output only the next line. | language = models.ForeignKey(Language) |
Continue the code snippet: <|code_start|> word = models.TextField()
unique_together = (('language', 'word'),)
def __str__(self):
return "<" + self.language.id + ":" + self.word + ">"
class Meta:
db_table = 'functionwords'
class FunctionFamily(models.Model):
""" defines a family of function words """
family = models.TextField()
f_word = models.ForeignKey(FunctionWord)
unique_together = (('family', 'f_word'),)
def __str__(self):
return self.family + ": " + str(self.f_word)
class Meta:
db_table = 'functionfamilies'
@staticmethod
def build_function_detector(language, family):
# Prepare the kb
words = list(FunctionFamily.objects.filter(family=family,f_word__language=language).values_list('f_word__word', flat=True))
return FunctionFamilyDetector(words,language,family)
class ParsingPattern(models.Model):
pattern = models.TextField(blank=False)
<|code_end|>
. Use current file imports:
from django.db import models
from conceptnet.corpus.models import Language
from conceptnet.models import Relation
and context (classes, functions, or code) from other files:
# Path: conceptnet/corpus/models.py
# class Language(models.Model):
# """
# A database object representing a language.
#
# Instances of Language can be used in filter expressions to select only
# objects that apply to a particular language. For example:
#
# >>> en = Language.get('en')
# >>> english_sentences = Sentence.objects.filter(language=en)
# """
# id = models.CharField(max_length=16,primary_key=True)
# name = models.TextField(blank=True)
# sentence_count = models.IntegerField(default=0)
#
# def __str__(self):
# return "%s (%s)" % (self.name, self.id)
#
# @staticmethod
# def get(id):
# """
# Get a language from its ISO language code.
#
# Some relevant language codes::
#
# en = English
# pt = Portuguese
# ko = Korean
# ja = Japanese
# nl = Dutch
# es = Spanish
# fr = French
# ar = Arabic
# zh = Chinese
# """
# if isinstance(id,Language): return id
# return get_lang(id)
#
# @property
# def nl(self):
# """
# A collection of natural language tools for a language.
#
# See :mod:`simplenlp` for more information on using these tools.
# """
# return get_nl(self.id)
#
# Path: conceptnet/models.py
# class Relation(models.Model):
# name = models.CharField(max_length=128,unique=True)
# description = models.CharField(max_length=255, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# @classmethod
# def get(cls, name):
# # Check if the parameter is already a Relation. We don't use
# # isinstance in case of accidental multiple imports (e.g.,
# # conceptnet.models vs conceptnet4.models).
# if hasattr(name, 'id'):
# return name
# return cls.objects.get(name=name)
. Output only the next line. | predtype = models.ForeignKey(Relation) |
Here is a snippet: <|code_start|>| <NP>:t1 <ADVP>:adv (<text "contain"> | <text "contains">):x1 <NP>:t2
=> dict(frame=words(SLOT1, adv, x1, SLOT2),
relation="HasA", text1=t1, text2=t2, adv=adv)
| <NP>:t1 <BE>:x1 <ADVP>:adv <NP>:t2 <POST>
=> dict(frame=words(SLOT1, x1, adv, SLOT2),
relation="IsA", text1=t1, text2=t2, adv=adv)
| <NP>:t1 <text "can">:x2 <ADVP>:adv <VP>:t2
=> dict(frame=words(SLOT1, x2, adv, t2),
relation="CapableOf", text1=t1, text2=t2)
| <NP>:t1 (<text "ca n't"> | <text "cannot">):x2 <VP>:t2
=> dict(frame=words(SLOT1, x2, t2),
relation="CapableOf", text1=t1, text2=t2, adv="not")
| <NP>:t1 <ADVP>:adv <VP>:t2
=> dict(frame=words(SLOT1, adv, SLOT2),
relation="CapableOf", text1=t1, text2=t2, adv=adv)
)
"""
parser = PatternParserBase.makeGrammar(metapatterns, globals(), name="Metachunker")
def parse(tagged_sent):
try:
return parser(tagged_sent).apply("assertion")
except ParseError:
return None
print parser("ball_NN").apply("NN")
print parser("Sometimes_RB ball_NN causes_VBZ competition_NN").apply("assertion")
<|code_end|>
. Write the next line using the current file imports:
from pymeta.grammar import OMeta, ParseError
from conceptnet.corpus.models import Sentence, TaggedSentence
from itertools import chain
import sys
and context from other files:
# Path: conceptnet/corpus/models.py
# class Sentence(models.Model, ScoredModel):
# """
# A statement entered by a contributor, in unparsed natural language.
# """
# text = models.TextField(blank=False)
# creator = models.ForeignKey(User)
# created_on = models.DateTimeField(default=datetime.now)
# language = models.ForeignKey(Language)
# activity = models.ForeignKey(Activity)
# score = models.IntegerField(default=0)
# votes = generic.GenericRelation(Vote)
#
# def __unicode__(self):
# return u'<' + self.language.id + u': ' + \
# u'"' + self.text + u'"' + \
# u'(by:' + unicode(self.creator_id) + \
# u' activity:' + self.activity.name + \
# u')>'
#
#
# def update_consistency(self):
# """
# Assume that the creator of this sentence voted for it, and calculate
# the score.
# """
# try:
# if self.creator is not None and self.get_rating(self.creator) is None:
# if self.creator.username != 'verbosity':
# Vote.objects.record_vote(self, self.creator, 1)
# self.update_score()
# except User.DoesNotExist:
# self.creator = User.objects.get(username='_ghost')
# Vote.objects.record_vote(self, self.creator, 1)
# self.update_score()
#
# class TaggedSentence(models.Model):
# """
# The results of running a sentence through a tagger such as MXPOST.
#
# We could use this as a step in parsing ConceptNet, but we currently don't.
# """
# text = models.TextField()
# language = models.ForeignKey(Language)
# sentence = models.ForeignKey(Sentence, primary_key=True)
#
# def tagged_words(self):
# for part in self.text.split(" "):
# word, tag = part.rsplit("/", 1)
# yield word, tag
#
# def __unicode__(self):
# return self.text
, which may include functions, classes, or code. Output only the next line. | for sent in TaggedSentence.objects.all(): |
Next line prediction: <|code_start|>
def update_scores():
queryset_foreach(Assertion, lambda x: x.update_score(),
batch_size=100)
<|code_end|>
. Use current file imports:
from csc_utils.batch import queryset_foreach
from conceptnet.models import Sentence, Assertion, RawAssertion
and context including class names, function names, or small code snippets from other files:
# Path: conceptnet/models.py
# DEFAULT_LANGUAGE = en = Language(id='en', name='English')
# class TimestampedModel(models.Model):
# class Meta:
# class UserData(TimestampedModel):
# class Meta:
# class Batch(TimestampedModel):
# class Relation(models.Model):
# class Frame(models.Model):
# class Feature(object):
# class LeftFeature(Feature):
# class RightFeature(Feature):
# class Proposition(object):
# class Concept(models.Model):
# class Meta:
# class UsefulAssertionManager(models.Manager):
# class SurfaceForm(models.Model):
# class Meta:
# class Assertion(models.Model, ScoredModel):
# class Meta:
# class RawAssertion(TimestampedModel, ScoredModel):
# class Meta:
# class Meta:
# def save(self, **kwargs):
# def __unicode__(self):
# def __unicode__(self):
# def get(cls, name):
# def preferred(self):
# def fill_in(self, a, b):
# def __unicode__(self):
# def re_pattern(self):
# def match(self, text):
# def display(self):
# def match_sentence(text, language):
# def __init__(self, relation, concept):
# def to_tuple(self):
# def language(self):
# def __hash__(self): # Features should be immutable.
# def __cmp__(self, other):
# def from_tuple(tup, lang=DEFAULT_LANGUAGE, lemmatize=False):
# def from_obj_tuple(typ, relation, concept):
# def frame(self):
# def fill_in(self, newconcept):
# def matching_assertions(self):
# def matching_raw(self):
# def nl_frame(self, gap=None):
# def nl_statement(self, gap='...'):
# def _matching_assertions(self):
# def _matching_raw(self):
# def nl_parts(self, gap='...'):
# def direction(self):
# def __repr__(self):
# def __unicode__(self):
# def fill_in(self, newconcept):
# def matching_assertions(self):
# def matching_raw(self):
# def __unicode__(self):
# def fill_in(self, newconcept):
# def matching_assertions(self):
# def matching_raw(self):
# def ensure_concept(concept):
# def __init__(self, concept1, rel, concept2, lang):
# def __unicode__(self):
# def nl_question_bad(self):
# def right_feature(self):
# def left_feature(self):
# def nl_parts(self):
# def nl_parts_topdown(self):
# def save(self, *a, **kw):
# def canonical_name(self):
# def __unicode__(self):
# def get_assertions(self, useful_only=True):
# def get_assertions_forward(self, useful_only=True):
# def get_assertions_reverse(self, useful_only=True):
# def raw_assertions(self):
# def raw_assertions_no_dupes(self, n=10, related=None):
# def get_my_right_features(self, useful_only=True):
# def get_my_left_features(self, useful_only=True):
# def has_feature(self, feature):
# def score_for_feature(self, feature):
# def group_assertions_by_feature(self, useful_only=True):
# def top_assertions_by_feature(self, limit=50, useful_only=True):
# def some_surface(self):
# def get(cls, text, language, auto_create=False):
# def get_raw(cls, normalized_text, language, auto_create=False):
# def exists(cls, text, language, is_raw=False):
# def exists_raw(cls, normalized_text, language):
# def get_by_id(cls, id):
# def update_num_assertions(self):
# def get_absolute_url(self):
# def get_query_set(self):
# def get(text, lang, auto_create=False):
# def update_raw(self):
# def update(self, stem, residue):
# def urltext(self):
# def __unicode__(self):
# def best_raw(self):
# def nl_repr(self, wrap_text=lambda assertion, text: text):
# def update_raw_cache(self):
# def update_score(self):
# def creator(self):
# def polarity(self):
# def __unicode__(self):
# def get_filtered(cls, *a, **kw):
# def get_absolute_url(self):
# def denormalize_num_assertions(sender, instance, created=False, **kwargs):
# def relation(self): return self.frame.relation
# def text1(self): return self.surface1.text
# def text2(self): return self.surface2.text
# def __unicode__(self):
# def nl_repr(self, wrap_text=lambda assertion, text: text):
# def main_sentence(self):
# def surface(self, idx):
# def correct_assertion(self, frame, surf1, surf2):
# def update_assertion(self):
# def make(user, frame, text1, text2, activity, vote=1):
# def update_score(self):
# def get_absolute_url(self):
. Output only the next line. | queryset_foreach(RawAssertion, lambda x: x.update_score(), |
Using the snippet: <|code_start|>class AssertionVote(models.Model):
"""
A vote on an Assertion by a User.
This is temporarily a view of the big Votes table:
CREATE VIEW temp_assertion_votes AS
SELECT id, user_id, object_id AS assertion_id, vote
FROM votes WHERE content_type_id=68;
"""
user = models.ForeignKey(User)
assertion = models.ForeignKey(Assertion)
vote = models.SmallIntegerField(choices=SCORES)
class Meta:
db_table = 'temp_assertion_votes'
'''
class RawAssertion(TimestampedModel, ScoredModel):
"""
A RawAssertion represents the connection between an :class:`Assertion` and
natural language. Where an Assertion describes a :class:`Relation` between
two :class:`Concepts`, a RawAssertion describes a sentence :class:`Frame`
that connects the :class:`SurfaceForms` of those concepts.
A RawAssertion also represents how a particular :class:`Sentence` can
be interpreted to make an Assertion. :attr:`surface1` and :attr:`surface2`
generally come from chunks of a sentence that someone entered into Open
Mind.
"""
<|code_end|>
, determine the next line of code. You have imports:
from django.db import models
from django.db.models import Q
from conceptnet.corpus.models import Language, Sentence, User, ScoredModel, Frequency
from events.models import Event, Activity
from voting.models import Vote, SCORES
from django.contrib.contenttypes import generic
from csc_utils.cache import cached
from datetime import datetime
from urllib import quote as urlquote
from django.db.models import F
import re
and context (class names, function names, or code) available:
# Path: conceptnet/corpus/models.py
# class ScoredModel(object):
# class Language(models.Model):
# class Sentence(models.Model, ScoredModel):
# class TaggedSentence(models.Model):
# class DependencyParse(models.Model):
# class Frequency(models.Model):
# class Meta:
# def get_rating(self, user):
# def set_rating(self, user, val, activity=None):
# def update_score(self):
# def denormalize_votes(sender, instance, created=False, **kwargs):
# def get_lang(lang_code):
# def __str__(self):
# def get(id):
# def nl(self):
# def __unicode__(self):
# def update_consistency(self):
# def tagged_words(self):
# def __unicode__(self):
# def from_string(sentence_id, depstring):
# def __unicode__(self):
# def __unicode__(self):
# _PARSE_RE = re.compile(r"(.+)\((.*)-(\d+)'*, (.*)-(\d+)'*\)")
. Output only the next line. | sentence = models.ForeignKey(Sentence, null=True) |
Continue the code snippet: <|code_start|>__version__ = "4.0rc2"
DEFAULT_LANGUAGE = en = Language(id='en', name='English')
class TimestampedModel(models.Model):
created = models.DateTimeField(default=datetime.now)
updated = models.DateTimeField()
def save(self, **kwargs):
self.updated = datetime.now()
super(TimestampedModel, self).save(**kwargs)
class Meta:
abstract = True
class UserData(TimestampedModel):
<|code_end|>
. Use current file imports:
from django.db import models
from django.db.models import Q
from conceptnet.corpus.models import Language, Sentence, User, ScoredModel, Frequency
from events.models import Event, Activity
from voting.models import Vote, SCORES
from django.contrib.contenttypes import generic
from csc_utils.cache import cached
from datetime import datetime
from urllib import quote as urlquote
from django.db.models import F
import re
and context (classes, functions, or code) from other files:
# Path: conceptnet/corpus/models.py
# class ScoredModel(object):
# class Language(models.Model):
# class Sentence(models.Model, ScoredModel):
# class TaggedSentence(models.Model):
# class DependencyParse(models.Model):
# class Frequency(models.Model):
# class Meta:
# def get_rating(self, user):
# def set_rating(self, user, val, activity=None):
# def update_score(self):
# def denormalize_votes(sender, instance, created=False, **kwargs):
# def get_lang(lang_code):
# def __str__(self):
# def get(id):
# def nl(self):
# def __unicode__(self):
# def update_consistency(self):
# def tagged_words(self):
# def __unicode__(self):
# def from_string(sentence_id, depstring):
# def __unicode__(self):
# def __unicode__(self):
# _PARSE_RE = re.compile(r"(.+)\((.*)-(\d+)'*, (.*)-(\d+)'*\)")
. Output only the next line. | user = models.ForeignKey(User) |
Given snippet: <|code_start|>
# use get_or_create so it's atomic
surface_form, _ = SurfaceForm.objects.get_or_create(concept=concept,
text=text, residue=residue, language=lang)
return surface_form
def update_raw(self):
for raw in self.left_rawassertion_set.all():
raw.update_assertion()
for raw in self.right_rawassertion_set.all():
raw.update_assertion()
def update(self, stem, residue):
self.concept = Concept.get_raw(stem, self.language, auto_create=True)
self.residue = residue
self.save()
self.update_raw()
return self
@property
def urltext(self):
return urlquote(self.text)
def __unicode__(self):
return self.text
class Meta:
unique_together = (('language', 'text'),)
ordering = ['-use_count']
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.db import models
from django.db.models import Q
from conceptnet.corpus.models import Language, Sentence, User, ScoredModel, Frequency
from events.models import Event, Activity
from voting.models import Vote, SCORES
from django.contrib.contenttypes import generic
from csc_utils.cache import cached
from datetime import datetime
from urllib import quote as urlquote
from django.db.models import F
import re
and context:
# Path: conceptnet/corpus/models.py
# class ScoredModel(object):
# class Language(models.Model):
# class Sentence(models.Model, ScoredModel):
# class TaggedSentence(models.Model):
# class DependencyParse(models.Model):
# class Frequency(models.Model):
# class Meta:
# def get_rating(self, user):
# def set_rating(self, user, val, activity=None):
# def update_score(self):
# def denormalize_votes(sender, instance, created=False, **kwargs):
# def get_lang(lang_code):
# def __str__(self):
# def get(id):
# def nl(self):
# def __unicode__(self):
# def update_consistency(self):
# def tagged_words(self):
# def __unicode__(self):
# def from_string(sentence_id, depstring):
# def __unicode__(self):
# def __unicode__(self):
# _PARSE_RE = re.compile(r"(.+)\((.*)-(\d+)'*, (.*)-(\d+)'*\)")
which might include code, classes, or functions. Output only the next line. | class Assertion(models.Model, ScoredModel): |
Predict the next line after this snippet: <|code_start|>
class Relation(models.Model):
name = models.CharField(max_length=128,unique=True)
description = models.CharField(max_length=255, null=True, blank=True)
def __unicode__(self):
return self.name
@classmethod
def get(cls, name):
# Check if the parameter is already a Relation. We don't use
# isinstance in case of accidental multiple imports (e.g.,
# conceptnet.models vs conceptnet4.models).
if hasattr(name, 'id'):
return name
return cls.objects.get(name=name)
class Frame(models.Model):
"""
A Frame is a natural-language template containing two slots, representing a
way that a :class:`Relation` could be expressed in language.
It can be used
for pattern matching to create a :class:`RawAssertion`, or to express an
existing :class:`RawAssertion` as a sentence.
"""
language = models.ForeignKey(Language)
text = models.TextField()
relation = models.ForeignKey(Relation)
goodness = models.IntegerField()
<|code_end|>
using the current file's imports:
from django.db import models
from django.db.models import Q
from conceptnet.corpus.models import Language, Sentence, User, ScoredModel, Frequency
from events.models import Event, Activity
from voting.models import Vote, SCORES
from django.contrib.contenttypes import generic
from csc_utils.cache import cached
from datetime import datetime
from urllib import quote as urlquote
from django.db.models import F
import re
and any relevant context from other files:
# Path: conceptnet/corpus/models.py
# class ScoredModel(object):
# class Language(models.Model):
# class Sentence(models.Model, ScoredModel):
# class TaggedSentence(models.Model):
# class DependencyParse(models.Model):
# class Frequency(models.Model):
# class Meta:
# def get_rating(self, user):
# def set_rating(self, user, val, activity=None):
# def update_score(self):
# def denormalize_votes(sender, instance, created=False, **kwargs):
# def get_lang(lang_code):
# def __str__(self):
# def get(id):
# def nl(self):
# def __unicode__(self):
# def update_consistency(self):
# def tagged_words(self):
# def __unicode__(self):
# def from_string(sentence_id, depstring):
# def __unicode__(self):
# def __unicode__(self):
# _PARSE_RE = re.compile(r"(.+)\((.*)-(\d+)'*, (.*)-(\d+)'*\)")
. Output only the next line. | frequency = models.ForeignKey(Frequency) |
Using the snippet: <|code_start|>
def get_config_file():
return path.join(get_config_directory(), 'canaryd.conf')
def get_scripts_directory():
return path.join(get_config_directory(), 'scripts')
def _get_settings(config_file=None):
'''
Load the config from the filesystem if provided, with defaults.
'''
config_file = config_file or get_config_file()
settings = CanarydSettings()
parser = RawConfigParser()
if not path.exists(config_file):
return settings
try:
parser.read(config_file)
except ConfigParserError as e:
logger.critical('Error in config file ({0}): {1}'.format(
config_file, e.message,
))
<|code_end|>
, determine the next line of code. You have imports:
import platform
from collections import defaultdict
from os import environ, geteuid, makedirs, path
from canaryd_packages import click, six
from canaryd_packages.six.moves.configparser import (
DuplicateSectionError,
Error as ConfigParserError,
RawConfigParser,
)
from canaryd.exceptions import ConfigError
from canaryd.log import logger
and context (class names, function names, or code) available:
# Path: canaryd/exceptions.py
# class ConfigError(CanarydError):
# '''
# Triggered when the config file is invalid/broken/missing.
# '''
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
. Output only the next line. | raise ConfigError('Config file error') |
Using the snippet: <|code_start|> log_file_rotation = None
# Number of files to rotate
log_file_rotation_count = 5
# syslog facility to log to
syslog_facility = None
debug = False
collect_interval_s = 30
# Only collect slow plugin data this often
slow_collect_interval_s = 900
# API access key
api_key = API_KEY
# ID of (hopefully this) server attached to this key
server_id = SERVER_ID
def __init__(self, **kwargs):
self.update(kwargs)
self.plugin_settings = defaultdict(dict)
# If no log file specified, we're root and /var/log exists, use that
if (
self.log_file is None
and geteuid() <= 0
and path.exists(path.join('/', 'var', 'log'))
):
<|code_end|>
, determine the next line of code. You have imports:
import platform
from collections import defaultdict
from os import environ, geteuid, makedirs, path
from canaryd_packages import click, six
from canaryd_packages.six.moves.configparser import (
DuplicateSectionError,
Error as ConfigParserError,
RawConfigParser,
)
from canaryd.exceptions import ConfigError
from canaryd.log import logger
and context (class names, function names, or code) available:
# Path: canaryd/exceptions.py
# class ConfigError(CanarydError):
# '''
# Triggered when the config file is invalid/broken/missing.
# '''
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
. Output only the next line. | logger.debug('Root user, so setting log file to /var/log/canaryd.log') |
Given the code snippet: <|code_start|>from __future__ import division
def get_ps_cpu_stats():
'''
Uses ps + awk to total CPU usage, then divide by # CPUs to get the final %.
'''
<|code_end|>
, generate the next line using the imports in this file:
import re
import sys
from multiprocessing import cpu_count
from os import path, sep as os_sep
from canaryd_packages import six
from canaryd.subprocess import get_command_output
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/subprocess.py
# def get_command_output(command, *args, **kwargs):
# logger.debug('Executing command: {0}'.format(command))
#
# if not kwargs.get('shell', False):
# command = ensure_command_tuple(command)
#
# try:
# output = check_output( # noqa: F405
# command,
# close_fds=True,
# stderr=STDOUT, # noqa: F405
# *args, **kwargs
# )
# except CalledProcessError as e: # noqa: F405
# e.output = decode_output(e.output)
# raise e
#
# output = decode_output(output)
# return output
. Output only the next line. | output = get_command_output( |
Given the following code snippet before the placeholder: <|code_start|>
if time_taken < interval:
sleep(interval - time_taken)
def _daemon_loop(iteration, previous_states, settings):
slow_plugin_iter_interval = round(
settings.slow_collect_interval_s / settings.collect_interval_s,
)
do_slow_plugins = iteration % slow_plugin_iter_interval == 0
logger.info('Getting plugin (include_slow={0}) states...'.format(do_slow_plugins))
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings, include_slow=do_slow_plugins)
states = get_plugin_states(plugins, settings)
state_changes = []
for plugin, status_data in states:
status, data = status_data
# Plugin ran OK and we have state!
if status is True:
previous_state = previous_states.get(plugin, {})
# If the previous state was good - ie not an Exception instance - this
# prevents first-failing then working plugin from generating addition
# events on first successful run.
if isinstance(previous_state, dict):
<|code_end|>
, predict the next line using imports from the current file:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
. Output only the next line. | state_diff = get_state_diff(plugin, data, previous_state) |
Predict the next line after this snippet: <|code_start|>from __future__ import division
def _sleep_until_interval(start, interval):
time_taken = time() - start
if time_taken < interval:
sleep(interval - time_taken)
def _daemon_loop(iteration, previous_states, settings):
slow_plugin_iter_interval = round(
settings.slow_collect_interval_s / settings.collect_interval_s,
)
do_slow_plugins = iteration % slow_plugin_iter_interval == 0
<|code_end|>
using the current file's imports:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and any relevant context from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
. Output only the next line. | logger.info('Getting plugin (include_slow={0}) states...'.format(do_slow_plugins)) |
Predict the next line for this snippet: <|code_start|>from __future__ import division
def _sleep_until_interval(start, interval):
time_taken = time() - start
if time_taken < interval:
sleep(interval - time_taken)
def _daemon_loop(iteration, previous_states, settings):
slow_plugin_iter_interval = round(
settings.slow_collect_interval_s / settings.collect_interval_s,
)
do_slow_plugins = iteration % slow_plugin_iter_interval == 0
logger.info('Getting plugin (include_slow={0}) states...'.format(do_slow_plugins))
# Load the plugin list
<|code_end|>
with the help of current file imports:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and context from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
, which may contain function names, class names, or code. Output only the next line. | plugins = get_and_prepare_working_plugins(settings, include_slow=do_slow_plugins) |
Predict the next line after this snippet: <|code_start|>from __future__ import division
def _sleep_until_interval(start, interval):
time_taken = time() - start
if time_taken < interval:
sleep(interval - time_taken)
def _daemon_loop(iteration, previous_states, settings):
slow_plugin_iter_interval = round(
settings.slow_collect_interval_s / settings.collect_interval_s,
)
do_slow_plugins = iteration % slow_plugin_iter_interval == 0
logger.info('Getting plugin (include_slow={0}) states...'.format(do_slow_plugins))
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings, include_slow=do_slow_plugins)
<|code_end|>
using the current file's imports:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and any relevant context from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
. Output only the next line. | states = get_plugin_states(plugins, settings) |
Given the code snippet: <|code_start|> else:
state_changes.append((plugin, ('SYNC', data)))
# Plugin state collected OK so now check for any specific events to
# send in addition.
plugin_events = plugin.get_events(settings)
if plugin_events:
state_changes.append((plugin, ('EVENTS', plugin_events)))
# Plugin raised an exception, fail!
else:
logger.critical((
'Unexpected exception while getting {0} state: '
'{1}({2})'
).format(plugin.name, data.__class__.__name__, data))
# Send the failed exception to the server, generating a warning
exception_data = {
'class': data.__class__.__name__,
'message': '{0}'.format(data),
'traceback': getattr(data, '_traceback'),
}
state_changes.append((plugin, ('ERROR', exception_data)))
# Always set the previous state - this means if we errored the next time
# we succeed we'll do a SYNC with the server.
previous_states[plugin] = data
logger.info('Uploading state changes...')
<|code_end|>
, generate the next line using the imports in this file:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
. Output only the next line. | settings_changes = backoff( |
Predict the next line for this snippet: <|code_start|> state_changes.append((plugin, ('SYNC', data)))
# Plugin state collected OK so now check for any specific events to
# send in addition.
plugin_events = plugin.get_events(settings)
if plugin_events:
state_changes.append((plugin, ('EVENTS', plugin_events)))
# Plugin raised an exception, fail!
else:
logger.critical((
'Unexpected exception while getting {0} state: '
'{1}({2})'
).format(plugin.name, data.__class__.__name__, data))
# Send the failed exception to the server, generating a warning
exception_data = {
'class': data.__class__.__name__,
'message': '{0}'.format(data),
'traceback': getattr(data, '_traceback'),
}
state_changes.append((plugin, ('ERROR', exception_data)))
# Always set the previous state - this means if we errored the next time
# we succeed we'll do a SYNC with the server.
previous_states[plugin] = data
logger.info('Uploading state changes...')
settings_changes = backoff(
<|code_end|>
with the help of current file imports:
from time import sleep, time
from canaryd.diff import get_state_diff
from canaryd.log import logger
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, upload_state_changes
and context from other files:
# Path: canaryd/diff.py
# def get_state_diff(plugin, plugin_state, previous_plugin_state):
# '''
# Diffs two state dicts and returns a list of changes.
#
# Changes:
# All come as a tuple of ``(plugin, type, key, data=None)``.
# '''
#
# changes = []
#
# # Look through the previous state to find any items that have been removed
# for key, previous_item in six.iteritems(previous_plugin_state):
# if key not in plugin_state:
# state_diff = dict(
# (k, (v, None))
# for k, v in six.iteritems(previous_item)
# )
#
# changes.append(Change(plugin, 'deleted', key, data=state_diff))
#
# # Loop the new state
# for key, item in six.iteritems(plugin_state):
# previous_item = previous_plugin_state.get(key)
#
# # Add anything that doesn't exist
# if not previous_item:
# state_diff = dict(
# (k, (None, v))
# for k, v in six.iteritems(item)
# )
# changes.append(Change(plugin, 'added', key, data=state_diff))
# continue
#
# # Create the diff, which is a key -> (old, new) values
# all_keys = list(six.iterkeys(item)) + list(six.iterkeys(previous_item))
# all_keys = set(all_keys)
#
# state_diff = dict(
# (k, (previous_item.get(k), item.get(k)))
# for k in all_keys
# if item.get(k) != previous_item.get(k)
# )
#
# # If something changed - send the event!
# if state_diff:
# # If this plugin disables diffs, remake the state diff w/everything
# if not plugin.diff_updates:
# state_diff = dict(
# (k, (previous_item.get(k), v))
# for k, v in six.iteritems(item)
# )
#
# changes.append(Change(plugin, 'updated', key, data=state_diff))
#
# return changes
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def upload_state_changes(state_changes, settings):
# '''
# Uploads partial state to api.servicecanary.com.
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/state'.format(settings.server_id),
# make_sync_or_changes_dict(state_changes),
# settings,
# )
, which may contain function names, class names, or code. Output only the next line. | upload_state_changes, |
Here is a snippet: <|code_start|>
DEB_REGEX = r'^[a-z]+\s+([a-zA-Z0-9\+\-\.]+):?[a-zA-Z0-9]*\s+([a-zA-Z0-9:~\.\-\+]+).+$'
RPM_REGEX = r'^([a-zA-Z0-9_\-\+]+)\-([0-9a-z\.\-]+)\.[a-z0-9_\.]+$'
PKG_REGEX = r'^([a-zA-Z0-9_\-\+]+)\-([0-9a-z\.]+)'
def get_parse_packages(timeout, package_type, command, regex, lower=True):
<|code_end|>
. Write the next line using the current file imports:
import re
from canaryd_packages import six
from canaryd.subprocess import get_command_output
and context from other files:
# Path: canaryd/subprocess.py
# def get_command_output(command, *args, **kwargs):
# logger.debug('Executing command: {0}'.format(command))
#
# if not kwargs.get('shell', False):
# command = ensure_command_tuple(command)
#
# try:
# output = check_output( # noqa: F405
# command,
# close_fds=True,
# stderr=STDOUT, # noqa: F405
# *args, **kwargs
# )
# except CalledProcessError as e: # noqa: F405
# e.output = decode_output(e.output)
# raise e
#
# output = decode_output(output)
# return output
, which may include functions, classes, or code. Output only the next line. | output = get_command_output( |
Here is a snippet: <|code_start|>
def check_root(message, exit=False):
# If we're root, we're all good!
if geteuid() <= 0:
return
message = click.style(message, 'yellow')
# If exit, just fail
if exit:
<|code_end|>
. Write the next line using the current file imports:
from os import geteuid
from canaryd_packages import click
from canaryd.exceptions import CanarydError, UserCancelError
and context from other files:
# Path: canaryd/exceptions.py
# class CanarydError(ClickException):
# '''
# Generic canaryd exception.
# '''
#
# def __init__(self, message='exiting!', *args, **kwargs):
# return super(CanarydError, self).__init__(message, *args, **kwargs)
#
# class UserCancelError(CanarydError):
# '''
# Triggered when a user cancels an action.
# '''
#
# def __init__(self):
# return super(UserCancelError, self).__init__('User cancelled')
, which may include functions, classes, or code. Output only the next line. | raise CanarydError(message) |
Given the following code snippet before the placeholder: <|code_start|>
def check_root(message, exit=False):
# If we're root, we're all good!
if geteuid() <= 0:
return
message = click.style(message, 'yellow')
# If exit, just fail
if exit:
raise CanarydError(message)
# Warn the user about the implications of running as non-root
click.echo(message)
# Confirm this is OK!
if not click.confirm('Do you wish to continue?'):
<|code_end|>
, predict the next line using imports from the current file:
from os import geteuid
from canaryd_packages import click
from canaryd.exceptions import CanarydError, UserCancelError
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/exceptions.py
# class CanarydError(ClickException):
# '''
# Generic canaryd exception.
# '''
#
# def __init__(self, message='exiting!', *args, **kwargs):
# return super(CanarydError, self).__init__(message, *args, **kwargs)
#
# class UserCancelError(CanarydError):
# '''
# Triggered when a user cancels an action.
# '''
#
# def __init__(self):
# return super(UserCancelError, self).__init__('User cancelled')
. Output only the next line. | raise UserCancelError() |
Given snippet: <|code_start|>
def get_docker_containers(timeout):
containers = {}
try:
output = get_command_output(
'docker inspect `docker ps -qa`',
shell=True,
timeout=timeout,
)
# Either Docker is down or there are no containers
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
from canaryd.subprocess import CalledProcessError, get_command_output
and context:
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
which might include code, classes, or functions. Output only the next line. | except CalledProcessError: |
Predict the next line after this snippet: <|code_start|>
def get_docker_containers(timeout):
containers = {}
try:
<|code_end|>
using the current file's imports:
import json
from canaryd.subprocess import CalledProcessError, get_command_output
and any relevant context from other files:
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | output = get_command_output( |
Given the following code snippet before the placeholder: <|code_start|>
def setUpModule():
get_plugins()
class TestServicesEvents(TestCase):
def setUp(self):
self.plugin = get_plugin_by_name('services')
def test_should_apply_change_up_ports_only(self):
<|code_end|>
, predict the next line using imports from the current file:
from unittest import TestCase
from canaryd.diff import Change
from canaryd.plugin import get_plugin_by_name, get_plugins
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/diff.py
# class Change(object):
# def __init__(self, plugin, type_, key, data=None):
# self.plugin = plugin
# self.type = type_
# self.key = key
#
# # COMPAT w/canaryd < 0.2
# # Legacy support where added/deleted would either have data/None,
# # rather than data changes (where every key: (old_value, new_value)).
# if data and self.type in ('added', 'deleted') and not all(
# isinstance(item, (tuple, list)) and len(item) == 2
# for _, item in six.iteritems(data)
# ):
# if self.type == 'added':
# data = dict(
# (key, (None, value))
# for key, value in six.iteritems(data)
# )
#
# elif self.type == 'deleted':
# data = dict(
# (key, (value, None))
# for key, value in six.iteritems(data)
# )
#
# logger.info('Converted legacy data: {0}'.format(data))
#
# self.data = data
#
# def serialise(self):
# return self.type, self.key, self.data
#
# def __repr__(self):
# return 'Change: {0}'.format((self.plugin, self.type, self.key, self.data))
#
# Path: canaryd/plugin.py
# def get_plugin_by_name(plugin_name):
# if plugin_name not in NAME_TO_PLUGIN:
# raise NoPluginError('Missing plugin: {0}'.format(plugin_name))
#
# return NAME_TO_PLUGIN[plugin_name]
#
# def get_plugins():
# '''
# Get the list of installed plugins.
# '''
#
# return PLUGINS
. Output only the next line. | change = Change( |
Next line prediction: <|code_start|>
def setUpModule():
get_plugins()
class TestServicesEvents(TestCase):
def setUp(self):
<|code_end|>
. Use current file imports:
(from unittest import TestCase
from canaryd.diff import Change
from canaryd.plugin import get_plugin_by_name, get_plugins)
and context including class names, function names, or small code snippets from other files:
# Path: canaryd/diff.py
# class Change(object):
# def __init__(self, plugin, type_, key, data=None):
# self.plugin = plugin
# self.type = type_
# self.key = key
#
# # COMPAT w/canaryd < 0.2
# # Legacy support where added/deleted would either have data/None,
# # rather than data changes (where every key: (old_value, new_value)).
# if data and self.type in ('added', 'deleted') and not all(
# isinstance(item, (tuple, list)) and len(item) == 2
# for _, item in six.iteritems(data)
# ):
# if self.type == 'added':
# data = dict(
# (key, (None, value))
# for key, value in six.iteritems(data)
# )
#
# elif self.type == 'deleted':
# data = dict(
# (key, (value, None))
# for key, value in six.iteritems(data)
# )
#
# logger.info('Converted legacy data: {0}'.format(data))
#
# self.data = data
#
# def serialise(self):
# return self.type, self.key, self.data
#
# def __repr__(self):
# return 'Change: {0}'.format((self.plugin, self.type, self.key, self.data))
#
# Path: canaryd/plugin.py
# def get_plugin_by_name(plugin_name):
# if plugin_name not in NAME_TO_PLUGIN:
# raise NoPluginError('Missing plugin: {0}'.format(plugin_name))
#
# return NAME_TO_PLUGIN[plugin_name]
#
# def get_plugins():
# '''
# Get the list of installed plugins.
# '''
#
# return PLUGINS
. Output only the next line. | self.plugin = get_plugin_by_name('services') |
Based on the snippet: <|code_start|>
class TestPluginRealStates(TestCase):
def test_meta_plugin(self):
plugin = get_plugin_by_name('meta')
plugin.get_state({})
@six.add_metaclass(JsonTest)
class TestPluginStates(TestCase):
jsontest_files = path.join('tests/plugins')
<|code_end|>
, predict the immediate next line with the help of imports:
from contextlib import contextmanager
from os import path
from unittest import TestCase
from dictdiffer import diff
from jsontest import JsonTest
from mock import patch
from canaryd_packages import six
from canaryd.plugin import get_plugin_by_name
from canaryd.settings import CanarydSettings
and context (classes, functions, sometimes code) from other files:
# Path: canaryd/plugin.py
# def get_plugin_by_name(plugin_name):
# if plugin_name not in NAME_TO_PLUGIN:
# raise NoPluginError('Missing plugin: {0}'.format(plugin_name))
#
# return NAME_TO_PLUGIN[plugin_name]
#
# Path: canaryd/settings.py
# class CanarydSettings(object):
# api_base = API_BASE
# api_version = API_VERSION
#
# log_file = None
# # Rotate logs by this size (in bytes!) or a TimedRotatingFileHandler "when"
# log_file_rotation = None
# # Number of files to rotate
# log_file_rotation_count = 5
#
# # syslog facility to log to
# syslog_facility = None
#
# debug = False
#
# collect_interval_s = 30
# # Only collect slow plugin data this often
# slow_collect_interval_s = 900
#
# # API access key
# api_key = API_KEY
#
# # ID of (hopefully this) server attached to this key
# server_id = SERVER_ID
#
# def __init__(self, **kwargs):
# self.update(kwargs)
#
# self.plugin_settings = defaultdict(dict)
#
# # If no log file specified, we're root and /var/log exists, use that
# if (
# self.log_file is None
# and geteuid() <= 0
# and path.exists(path.join('/', 'var', 'log'))
# ):
# logger.debug('Root user, so setting log file to /var/log/canaryd.log')
# self.log_file = path.join('/', 'var', 'log', 'canaryd.log')
#
# def __getattr__(self, key):
# try:
# return super(CanarydSettings, self).__getattr__(key)
# except AttributeError:
# pass
#
# def to_dict(self):
# return dict(
# (key, value)
# for key, value in self.__dict__.items()
# if key != 'plugin_settings'
# )
#
# def update(self, data):
# changed_keys = []
#
# for key, value in six.iteritems(data):
# if key == 'plugin_settings':
# raise ValueError('Cannot update plugin_settings directly!')
#
# if getattr(self, key, None) != value:
# setattr(self, key, value)
# changed_keys.append(key)
#
# logger.debug('Settings updated: {0} <= {1}'.format(changed_keys, data))
# return changed_keys
#
# def update_plugin_settings(self, plugin_name, data):
# self.plugin_settings[plugin_name].update(data)
# logger.debug('Plugin settings updated: {0} <= {1}'.format(plugin_name, data))
#
# def get_plugin_settings(self, plugin_name):
# return self.plugin_settings.get(plugin_name, {})
. Output only the next line. | test_settings = CanarydSettings() |
Predict the next line for this snippet: <|code_start|>
Args:
change (Change): a change object
Returns:
status (bool): whether the change should be applied
'''
return True
@staticmethod
def generate_issues_from_change(change, settings):
'''
Plugin can generate issue type events from a given change. Executed on
the server side on receiving state.
Args:
change (Change): a change object
settings (dict): canaryd daemon settings
Yields:
events (tuple): ``(event_type, event_message, event_data)``
'''
def get_timeout(self, settings):
# Set an alarm: a plugin can only run for MAX half the interval time
return int(round(settings.collect_interval_s / 2))
def log(self, message):
message = '[{0}]: {1}'.format(self.name, message)
<|code_end|>
with the help of current file imports:
import re
import traceback
from distutils.spawn import find_executable
from canaryd_packages import six
from canaryd.log import logger, print_exception
from canaryd.subprocess import get_command_output
and context from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def get_command_output(command, *args, **kwargs):
# logger.debug('Executing command: {0}'.format(command))
#
# if not kwargs.get('shell', False):
# command = ensure_command_tuple(command)
#
# try:
# output = check_output( # noqa: F405
# command,
# close_fds=True,
# stderr=STDOUT, # noqa: F405
# *args, **kwargs
# )
# except CalledProcessError as e: # noqa: F405
# e.output = decode_output(e.output)
# raise e
#
# output = decode_output(output)
# return output
, which may contain function names, class names, or code. Output only the next line. | logger.debug(message) |
Predict the next line for this snippet: <|code_start|>
def get_plugins():
'''
Get the list of installed plugins.
'''
return PLUGINS
def get_plugin_names():
return six.iterkeys(NAME_TO_PLUGIN)
def get_plugin_by_name(plugin_name):
if plugin_name not in NAME_TO_PLUGIN:
raise NoPluginError('Missing plugin: {0}'.format(plugin_name))
return NAME_TO_PLUGIN[plugin_name]
def prepare_plugin(plugin, settings):
logger.debug('Preparing plugin: {0}'.format(plugin))
try:
plugin.prepare(settings)
except Plugin.PrepareError as e:
logger.info('Plugin prepare failed: {0}: {1}'.format(
plugin.name, e.args,
))
<|code_end|>
with the help of current file imports:
import re
import traceback
from distutils.spawn import find_executable
from canaryd_packages import six
from canaryd.log import logger, print_exception
from canaryd.subprocess import get_command_output
and context from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def get_command_output(command, *args, **kwargs):
# logger.debug('Executing command: {0}'.format(command))
#
# if not kwargs.get('shell', False):
# command = ensure_command_tuple(command)
#
# try:
# output = check_output( # noqa: F405
# command,
# close_fds=True,
# stderr=STDOUT, # noqa: F405
# *args, **kwargs
# )
# except CalledProcessError as e: # noqa: F405
# e.output = decode_output(e.output)
# raise e
#
# output = decode_output(output)
# return output
, which may contain function names, class names, or code. Output only the next line. | print_exception(debug_only=True) |
Predict the next line for this snippet: <|code_start|> def log(self, message):
message = '[{0}]: {1}'.format(self.name, message)
logger.debug(message)
def get_events(self, settings):
'''
Plugin can generate raw events here, ie for things that don't fit into
the spec/state model. This should be a generator yielding event tuples:
Args:
settings (dict): canaryd daemon settings
Returns:
events (list): ``[(event_type, event_message, event_data), ...]``
'''
return []
def get_state(self, settings):
'''
Get the current state for this plugin. Returns structured objects that
match the spec defined on the plugin.
Args:
settings (dict): canaryd daemon settings
Returns:
state (dict): dictionary of current state items, matching plugin spec
'''
<|code_end|>
with the help of current file imports:
import re
import traceback
from distutils.spawn import find_executable
from canaryd_packages import six
from canaryd.log import logger, print_exception
from canaryd.subprocess import get_command_output
and context from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def get_command_output(command, *args, **kwargs):
# logger.debug('Executing command: {0}'.format(command))
#
# if not kwargs.get('shell', False):
# command = ensure_command_tuple(command)
#
# try:
# output = check_output( # noqa: F405
# command,
# close_fds=True,
# stderr=STDOUT, # noqa: F405
# *args, **kwargs
# )
# except CalledProcessError as e: # noqa: F405
# e.output = decode_output(e.output)
# raise e
#
# output = decode_output(output)
# return output
, which may contain function names, class names, or code. Output only the next line. | data = get_command_output( |
Given the following code snippet before the placeholder: <|code_start|>
def disable_script(script, raise_if_noop=True):
scripts_directory = get_scripts_directory()
source_script = path.join(scripts_directory, 'available', script)
link_name = path.join(scripts_directory, 'enabled', script)
if not path.exists(source_script):
raise NoScriptFoundError(source_script)
if not path.exists(link_name):
if not raise_if_noop:
return
raise NoScriptChangesError()
if not path.islink(link_name):
raise ScriptNotLinkError(
'Script {0} is not a link. You should move it to: {1}.'.format(
link_name, source_script,
),
)
remove(link_name)
def ensure_scripts_directory():
# Make sure the scripts directory exists
scripts_directory = get_scripts_directory()
if not path.exists(scripts_directory):
<|code_end|>
, predict the next line using imports from the current file:
from distutils.spawn import find_executable
from os import access, listdir, makedirs, path, remove, symlink, X_OK
from shutil import copy
from canaryd.log import logger
from canaryd.settings import get_scripts_directory
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_scripts_directory():
# return path.join(get_config_directory(), 'scripts')
. Output only the next line. | logger.debug('Creating scripts directory: {0}'.format(scripts_directory)) |
Next line prediction: <|code_start|> settings[key] = value
return settings
def _get_scripts(dirname):
scripts = []
all_settings = {}
script_names = listdir(dirname)
for name in script_names:
if any(name.endswith(e) for e in INVALID_EXTENSIONS):
continue
full_path = path.join(dirname, name)
if not access(full_path, X_OK):
continue
scripts.append(name)
settings = _extract_script_settings(full_path)
if settings:
all_settings[name] = settings
return scripts, all_settings
def get_scripts(settings):
script_settings = {}
available_scripts, available_script_settings = _get_scripts(
<|code_end|>
. Use current file imports:
(from distutils.spawn import find_executable
from os import access, listdir, makedirs, path, remove, symlink, X_OK
from shutil import copy
from canaryd.log import logger
from canaryd.settings import get_scripts_directory)
and context including class names, function names, or small code snippets from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_scripts_directory():
# return path.join(get_config_directory(), 'scripts')
. Output only the next line. | path.join(get_scripts_directory(), 'available'), |
Here is a snippet: <|code_start|> if status is not True:
continue
working_states.append((plugin, ('SYNC', data)))
# Sync this state and get settings
logger.info('Syncing initial state...')
remote_settings = backoff(
sync_states, working_states, settings,
error_message='Could not sync state',
max_wait=settings.collect_interval_s,
)
# Update settings w/remote ones
settings.update(remote_settings)
# Run the loop
logger.info('Starting daemon loop...')
# Make previous states dict
previous_states = dict(
(plugin, status_data[1])
for plugin, status_data in working_states
)
# Now that we've settings - setup graceful (clean shutdown) exit handling
signal.signal(signal.SIGTERM, handle_graceful_quit)
signal.signal(signal.SIGINT, handle_graceful_quit)
try:
<|code_end|>
. Write the next line using the current file imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
, which may include functions, classes, or code. Output only the next line. | run_daemon(previous_states, settings, start_time=start_time) |
Given snippet: <|code_start|># Desc: entry point for canaryd
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
which might include code, classes, or functions. Output only the next line. | logger.info('Starting canaryd v{0}'.format(__version__)) |
Based on the snippet: <|code_start|># canaryd
# File: canaryd/__main__.py
# Desc: entry point for canaryd
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context (classes, functions, sometimes code) from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | log_level = setup_logging(verbose, debug) |
Predict the next line after this snippet: <|code_start|> raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
<|code_end|>
using the current file's imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and any relevant context from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | setup_logging_from_settings(settings) |
Given the following code snippet before the placeholder: <|code_start|>
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
backoff(
ping, settings,
error_message='Could not ping',
max_wait=settings.collect_interval_s,
)
# Load the plugin list
<|code_end|>
, predict the next line using imports from the current file:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | plugins = get_and_prepare_working_plugins(settings) |
Based on the snippet: <|code_start|> logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
backoff(
ping, settings,
error_message='Could not ping',
max_wait=settings.collect_interval_s,
)
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings)
# Get the initial state
logger.info('Getting initial state...')
start_time = time()
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context (classes, functions, sometimes code) from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | states = get_plugin_states(plugins, settings) |
Predict the next line for this snippet: <|code_start|> prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
<|code_end|>
with the help of current file imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
, which may contain function names, class names, or code. Output only the next line. | backoff( |
Given the code snippet: <|code_start|> message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
backoff(
<|code_end|>
, generate the next line using the imports in this file:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | ping, settings, |
Using the snippet: <|code_start|> working_states.append((plugin, ('SYNC', data)))
# Sync this state and get settings
logger.info('Syncing initial state...')
remote_settings = backoff(
sync_states, working_states, settings,
error_message='Could not sync state',
max_wait=settings.collect_interval_s,
)
# Update settings w/remote ones
settings.update(remote_settings)
# Run the loop
logger.info('Starting daemon loop...')
# Make previous states dict
previous_states = dict(
(plugin, status_data[1])
for plugin, status_data in working_states
)
# Now that we've settings - setup graceful (clean shutdown) exit handling
signal.signal(signal.SIGTERM, handle_graceful_quit)
signal.signal(signal.SIGINT, handle_graceful_quit)
try:
run_daemon(previous_states, settings, start_time=start_time)
except GracefulExitRequested:
<|code_end|>
, determine the next line of code. You have imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context (class names, function names, or code) available:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | shutdown(settings) # we're exiting, so only one shot at this |
Given the following code snippet before the placeholder: <|code_start|> logger.info('Ping API...')
backoff(
ping, settings,
error_message='Could not ping',
max_wait=settings.collect_interval_s,
)
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings)
# Get the initial state
logger.info('Getting initial state...')
start_time = time()
states = get_plugin_states(plugins, settings)
# Filter out the non-working plugins and wrap as a (command, data) tuple
# we don't track errors on the initial sync because often canaryd starts
# early on a server meaning some things aren't up. The next state collection
# will collect and sync these.
working_states = []
for plugin, status_data in states:
status, data = status_data
if status is not True:
continue
working_states.append((plugin, ('SYNC', data)))
# Sync this state and get settings
logger.info('Syncing initial state...')
remote_settings = backoff(
<|code_end|>
, predict the next line using imports from the current file:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | sync_states, working_states, settings, |
Given the following code snippet before the placeholder: <|code_start|>
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
<|code_end|>
, predict the next line using imports from the current file:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | ensure_config_directory() |
Predict the next line for this snippet: <|code_start|>
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
<|code_end|>
with the help of current file imports:
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
and context from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
, which may contain function names, class names, or code. Output only the next line. | settings = get_settings() |
Next line prediction: <|code_start|># canaryd
# File: canaryd/__main__.py
# Desc: entry point for canaryd
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
<|code_end|>
. Use current file imports:
(import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__)
and context including class names, function names, or small code snippets from other files:
# Path: canaryd/daemon.py
# def run_daemon(previous_states, settings, start_time=None):
# if start_time:
# _sleep_until_interval(
# start_time, settings.collect_interval_s,
# )
#
# iterations = 1 # start 1 for the initial sync
#
# while True:
# start = time()
#
# _daemon_loop(iterations, previous_states, settings)
# iterations += 1
#
# _sleep_until_interval(
# start, settings.collect_interval_s,
# )
#
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/plugin.py
# def get_and_prepare_working_plugins(settings, include_slow=True):
# all_plugins = get_plugins()
# working_plugins = []
#
# for plugin in all_plugins:
# if not include_slow and plugin.is_slow:
# continue
#
# status = prepare_plugin(plugin, settings)
#
# if status is True:
# working_plugins.append(plugin)
#
# logger.info('Loaded plugins: {0}'.format(', '.join([
# plugin.name
# for plugin in working_plugins
# ])))
#
# return working_plugins
#
# def get_plugin_states(plugins, settings):
# '''
# Gets state output from multiple plugins.
# '''
#
# return [
# (plugin, get_plugin_state(plugin, settings))
# for plugin in plugins
# ]
#
# Path: canaryd/remote.py
# def backoff(function, *args, **kwargs):
# data = None
# interval = 0
#
# error_message = kwargs.pop('error_message', 'API error')
# max_wait = kwargs.pop('max_wait', 300)
#
# while data is None:
# try:
# return function(*args, **kwargs)
#
# except ApiError as e:
# if interval + 10 <= max_wait:
# interval += 10
#
# e.log()
#
# logger.critical('{0}, retrying in {1}s'.format(error_message, interval))
# sleep(interval)
#
# def ping(settings):
# return make_api_request(
# get_session().get,
# 'server/{0}/ping'.format(settings.server_id),
# settings=settings,
# )
#
# def shutdown(settings):
# return make_api_request(
# get_session().post,
# 'server/{0}/shutdown'.format(settings.server_id),
# settings=settings,
# )
#
# def sync_states(states, settings):
# '''
# Uploads a full state to api.servicecanary.com and returns any data sent back
# from the server (settings).
# '''
#
# return _upload_states_return_settings(
# 'server/{0}/sync'.format(settings.server_id),
# make_sync_or_changes_dict(states),
# settings,
# json={
# 'hostname': socket.gethostname(),
# 'canaryd_version': __version__,
# },
# )
#
# Path: canaryd/settings.py
# def ensure_config_directory():
# # Make sure the config directory exists
# config_directory = get_config_directory()
#
# if not path.exists(config_directory):
# logger.debug('Creating config directory: {0}'.format(config_directory))
# makedirs(config_directory)
#
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# Path: canaryd/version.py
. Output only the next line. | version=__version__, |
Predict the next line after this snippet: <|code_start|>
# We *require* procfs to check PID -> port mappings
HAS_PROCFS = path.exists('/proc')
PID_TO_PORTS = defaultdict(set)
def _get_lsof_pid_to_listens(timeout):
output = get_command_output(
'lsof -i -n -P -b -l -L -s TCP:LISTEN',
timeout=timeout,
)
for line in output.splitlines():
# Skip bad/error lines
if 'no pwd entry' in line:
continue
try:
_, pid, _, _, ip_type, _, _, _, ip_host, _ = line.split(None, 9)
ip_type = ip_type.lower()
pid = int(pid)
# Work out the host:port bit
host, port = ip_host.rsplit(':', 1)
port = int(port)
host_port_tuple = (ip_type, host, port)
PID_TO_PORTS[pid].add(host_port_tuple)
except ValueError:
<|code_end|>
using the current file's imports:
import re
from collections import defaultdict
from os import listdir, path, sep as os_sep
from canaryd.log import logger
from canaryd.subprocess import CalledProcessError, get_command_output
and any relevant context from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | logger.warning('Dodgy lsof line ignored: "{0}"'.format(line)) |
Next line prediction: <|code_start|> bits = line.split()
proto, _, _, local_address, _, _, program = bits
# Get the pid from PID/PROGRAM
pid = program.split('/')[0]
pid = int(pid)
# Work out the host:port bit
host, port = local_address.rsplit(':', 1)
port = int(port)
ip_type = 'ipv6' if proto == 'tcp6' else 'ipv4'
host_port_tuple = (ip_type, host, port)
PID_TO_PORTS[pid].add(host_port_tuple)
except ValueError:
logger.warning('Dodgy netstat line ignored: "{0}"'.format(line))
def get_pid_to_listens(timeout):
if not HAS_PROCFS:
return PID_TO_PORTS
# Loop through the results and cleanup any PIDs that don't exist
for pid in list(PID_TO_PORTS.keys()):
if not path.exists('/proc/{0}'.format(pid)):
PID_TO_PORTS.pop(pid)
try:
_get_lsof_pid_to_listens(timeout=timeout)
<|code_end|>
. Use current file imports:
(import re
from collections import defaultdict
from os import listdir, path, sep as os_sep
from canaryd.log import logger
from canaryd.subprocess import CalledProcessError, get_command_output)
and context including class names, function names, or small code snippets from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | except (CalledProcessError, OSError): |
Given the following code snippet before the placeholder: <|code_start|>
# We ignore these as they regularly get deleted/added as part of normal OSX
# lifecycle - and as such any events generated are not of use.
LAUNCHCTL_IGNORE_NAMES = ('oneshot', 'mdworker', 'mbfloagent')
# Systemd service types to ignore
SYSTEMD_IGNORE_TYPES = ('oneshot',)
SYSTEMD_REGEX = re.compile(r'^([a-z\-]+)\.service\s+[a-z\-]+\s+[a-z]+\s+([a-z]+)')
UPSTART_REGEX = re.compile(r'^([a-z\-]+) [a-z]+\/([a-z]+),?\s?(process)?\s?([0-9]+)?')
SUPERVISOR_REGEX = re.compile(r'([a-z\-]+)\s+([A-Z]+)\s+pid\s([0-9]+)')
INITD_USAGE_REGEX = re.compile(r'Usage:[^\n]+status')
INITD_STATUS_REGEX = re.compile(r'\(pid\s+([0-9]+)\)')
# Known init scripts that either don't support status or don't handle it well/quickly
IGNORE_INIT_SCRIPTS = [
'networking', 'udev-post', 'halt', 'killall',
'kcare', 'vz',
]
# We *require* procfs to check PID -> port mappings
HAS_PROCFS = path.exists('/proc')
PID_TO_PORTS = defaultdict(set)
def _get_lsof_pid_to_listens(timeout):
<|code_end|>
, predict the next line using imports from the current file:
import re
from collections import defaultdict
from os import listdir, path, sep as os_sep
from canaryd.log import logger
from canaryd.subprocess import CalledProcessError, get_command_output
and context including class names, function names, and sometimes code from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | output = get_command_output( |
Using the snippet: <|code_start|>
class Change(object):
def __init__(self, plugin, type_, key, data=None):
self.plugin = plugin
self.type = type_
self.key = key
# COMPAT w/canaryd < 0.2
# Legacy support where added/deleted would either have data/None,
# rather than data changes (where every key: (old_value, new_value)).
if data and self.type in ('added', 'deleted') and not all(
isinstance(item, (tuple, list)) and len(item) == 2
for _, item in six.iteritems(data)
):
if self.type == 'added':
data = dict(
(key, (None, value))
for key, value in six.iteritems(data)
)
elif self.type == 'deleted':
data = dict(
(key, (value, None))
for key, value in six.iteritems(data)
)
<|code_end|>
, determine the next line of code. You have imports:
from canaryd_packages import six
from canaryd.log import logger
and context (class names, function names, or code) available:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
. Output only the next line. | logger.info('Converted legacy data: {0}'.format(data)) |
Given the code snippet: <|code_start|>
if hasattr(obj, 'serialise'):
return obj.serialise()
if hasattr(obj, 'isoformat'):
return obj.isoformat()
if isinstance(obj, six.binary_type):
return obj.decode()
return JSONEncoder.default(self, obj)
def backoff(function, *args, **kwargs):
data = None
interval = 0
error_message = kwargs.pop('error_message', 'API error')
max_wait = kwargs.pop('max_wait', 300)
while data is None:
try:
return function(*args, **kwargs)
except ApiError as e:
if interval + 10 <= max_wait:
interval += 10
e.log()
<|code_end|>
, generate the next line using the imports in this file:
import socket
from json import dumps as json_dumps, JSONEncoder
from time import sleep
from canaryd_packages import requests, six
from canaryd.log import logger
from canaryd.settings import get_settings, VALID_STATUSES
from canaryd.version import __version__
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# VALID_STATUSES = ('DIFF', 'SYNC', 'ERROR', 'EVENTS')
#
# Path: canaryd/version.py
. Output only the next line. | logger.critical('{0}, retrying in {1}s'.format(error_message, interval)) |
Given the code snippet: <|code_start|> logger.debug('Response data: {0}'.format(self.content))
def get_session():
global SESSION
if not SESSION:
SESSION = requests.Session()
return SESSION
def make_sync_or_changes_dict(states):
plugin_to_state = {}
for plugin, (status, state) in states:
if status not in VALID_STATUSES:
raise TypeError('Unknown status for {0} plugin: {1}/{2}'.format(
plugin.name, status, state,
))
plugin_to_state[plugin.name] = (status, state)
return plugin_to_state
def make_api_request(
method, endpoint,
api_key=None, settings=None, json=None, **kwargs
):
<|code_end|>
, generate the next line using the imports in this file:
import socket
from json import dumps as json_dumps, JSONEncoder
from time import sleep
from canaryd_packages import requests, six
from canaryd.log import logger
from canaryd.settings import get_settings, VALID_STATUSES
from canaryd.version import __version__
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# VALID_STATUSES = ('DIFF', 'SYNC', 'ERROR', 'EVENTS')
#
# Path: canaryd/version.py
. Output only the next line. | settings = settings or get_settings() |
Given the code snippet: <|code_start|> self.name = name
self.message = message
self.content = content
def log(self):
logger.critical(
'{0}: {1}{2}'.format(
self.status_code,
self.name,
'({0})'.format(self.message) if self.message else '',
),
)
if self.content:
logger.debug('Response data: {0}'.format(self.content))
def get_session():
global SESSION
if not SESSION:
SESSION = requests.Session()
return SESSION
def make_sync_or_changes_dict(states):
plugin_to_state = {}
for plugin, (status, state) in states:
<|code_end|>
, generate the next line using the imports in this file:
import socket
from json import dumps as json_dumps, JSONEncoder
from time import sleep
from canaryd_packages import requests, six
from canaryd.log import logger
from canaryd.settings import get_settings, VALID_STATUSES
from canaryd.version import __version__
and context (functions, classes, or occasionally code) from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# VALID_STATUSES = ('DIFF', 'SYNC', 'ERROR', 'EVENTS')
#
# Path: canaryd/version.py
. Output only the next line. | if status not in VALID_STATUSES: |
Next line prediction: <|code_start|>def _upload_states_return_settings(url, states, settings, json=None):
json = json or states
if json is not states:
json['states'] = states
response_data = make_api_request(
get_session().post, url,
settings=settings,
json=json,
# Explicitly set the max (matching server) timeout for syncing states
# to avoid any sync "thrashing".
timeout=600,
)
return response_data['settings']
def sync_states(states, settings):
'''
Uploads a full state to api.servicecanary.com and returns any data sent back
from the server (settings).
'''
return _upload_states_return_settings(
'server/{0}/sync'.format(settings.server_id),
make_sync_or_changes_dict(states),
settings,
json={
'hostname': socket.gethostname(),
<|code_end|>
. Use current file imports:
(import socket
from json import dumps as json_dumps, JSONEncoder
from time import sleep
from canaryd_packages import requests, six
from canaryd.log import logger
from canaryd.settings import get_settings, VALID_STATUSES
from canaryd.version import __version__)
and context including class names, function names, or small code snippets from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
#
# Path: canaryd/settings.py
# def get_settings(config_file=None):
# '''
# Cached/public version of _get_settings.
# '''
#
# global SETTINGS
#
# if SETTINGS is None:
# SETTINGS = _get_settings(config_file=config_file)
#
# return SETTINGS
#
# VALID_STATUSES = ('DIFF', 'SYNC', 'ERROR', 'EVENTS')
#
# Path: canaryd/version.py
. Output only the next line. | 'canaryd_version': __version__, |
Based on the snippet: <|code_start|>
# Not ideal but using the vendored in (to requests) chardet package
if os.name == 'posix' and sys.version_info[0] < 3:
else:
def ensure_command_tuple(command):
if not isinstance(command, (list, tuple)):
return shlex.split(command)
return command
def decode_output(output):
if isinstance(output, six.binary_type):
encoding = chardet.detect(output)['encoding']
if encoding:
output = output.decode(encoding)
else:
output = output.decode()
return output
def get_command_output(command, *args, **kwargs):
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import shlex
import sys
from canaryd_packages import six
from canaryd_packages.requests.packages import chardet
from canaryd.log import logger
from canaryd_packages.subprocess32 import * # noqa: F403
from subprocess import * # noqa: F403
and context (classes, functions, sometimes code) from other files:
# Path: canaryd/log.py
# STDOUT_LOG_LEVELS = (logging.DEBUG, logging.INFO)
# STDERR_LOG_LEVELS = (logging.WARNING, logging.ERROR, logging.CRITICAL)
# class LogFilter(logging.Filter):
# class LogFormatter(logging.Formatter):
# def __init__(self, *levels):
# def filter(self, record):
# def format(self, record):
# def setup_logging(verbose, debug, default_level=logging.CRITICAL):
# def setup_logging_from_settings(settings):
# def print_exception(debug_only=False):
. Output only the next line. | logger.debug('Executing command: {0}'.format(command)) |
Predict the next line after this snippet: <|code_start|>#!/usr/bin/env python
# Run once an hour, smartctl can be intensive
# CANARYD_INTERVAL = 3600
SMART_RETURN_BITS = {
0: False, # command line parse error
1: 'device open failed',
2: 'SMART command failed',
3: 'disk failing',
# We track thresholds via stdout
4: True, # pre-fail attrs <= thresh
5: True, # pre-fail attrs <= thresh in past
# We track errors via stdout
6: True, # error log
7: True, # self-test errors
}
try:
# Ensure smartctl is present & working
get_command_output(
'smartctl --version',
)
<|code_end|>
using the current file's imports:
import sys
from canaryd.subprocess import CalledProcessError, get_command_output
and any relevant context from other files:
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | except (CalledProcessError, OSError): |
Continue the code snippet: <|code_start|>#!/usr/bin/env python
# Run once an hour, smartctl can be intensive
# CANARYD_INTERVAL = 3600
SMART_RETURN_BITS = {
0: False, # command line parse error
1: 'device open failed',
2: 'SMART command failed',
3: 'disk failing',
# We track thresholds via stdout
4: True, # pre-fail attrs <= thresh
5: True, # pre-fail attrs <= thresh in past
# We track errors via stdout
6: True, # error log
7: True, # self-test errors
}
try:
# Ensure smartctl is present & working
<|code_end|>
. Use current file imports:
import sys
from canaryd.subprocess import CalledProcessError, get_command_output
and context (classes, functions, or code) from other files:
# Path: canaryd/subprocess.py
# def ensure_command_tuple(command):
# def decode_output(output):
# def get_command_output(command, *args, **kwargs):
. Output only the next line. | get_command_output( |
Given the following code snippet before the placeholder: <|code_start|>
class Contact(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
<|code_end|>
, predict the next line using imports from the current file:
import json
from mainhandler import Handler
from ..utilities.utils import get_gist_data
and context including class names, function names, and sometimes code from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def get_gist_data(gist_id, gist_filename):
# gist_data = urlfetch.fetch("https://gist.github.com/sagarsane/" + gist_id + "/raw/" + gist_filename + ".json").content
# gist_data = json.loads(gist_data)
# return gist_data
. Output only the next line. | self.write(json.dumps(get_gist_data('3785786','contact_data'))) |
Given the following code snippet before the placeholder: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class Achievements(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
<|code_end|>
, predict the next line using imports from the current file:
import json
from mainhandler import Handler
from ..utilities.utils import get_gist_data
and context including class names, function names, and sometimes code from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def get_gist_data(gist_id, gist_filename):
# gist_data = urlfetch.fetch("https://gist.github.com/sagarsane/" + gist_id + "/raw/" + gist_filename + ".json").content
# gist_data = json.loads(gist_data)
# return gist_data
. Output only the next line. | self.write(json.dumps(get_gist_data('3785895','achievements_data'))) |
Here is a snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class Experience(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
<|code_end|>
. Write the next line using the current file imports:
import json
from mainhandler import Handler
from ..utilities.utils import get_gist_data
and context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def get_gist_data(gist_id, gist_filename):
# gist_data = urlfetch.fetch("https://gist.github.com/sagarsane/" + gist_id + "/raw/" + gist_filename + ".json").content
# gist_data = json.loads(gist_data)
# return gist_data
, which may include functions, classes, or code. Output only the next line. | self.write(json.dumps(get_gist_data('3735396','experience_data'))) |
Given the code snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class GithubActivity(Handler):
def get(self):
activity = {}
entries = []
<|code_end|>
, generate the next line using the imports in this file:
import json
from mainhandler import Handler
from app.utilities.utils import setUser, getFollowers, getFollowing, getWatchedRepos
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def setUser(user="", password=""):
# return Github(user, password).get_user()
#
# def getFollowers(user):
# followers = []
# for f in user.get_followers():
# ret = {}
# ret['type'] = "Followers"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# followers.append(ret)
# return followers
#
# def getFollowing(user):
# following = []
# for f in user.get_following():
# ret = {}
# ret['type'] = "Following"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# following.append(ret)
# return following
#
# def getWatchedRepos(user):
# watched = []
# for w in user.get_watched():
# ret = {}
# ret['type'] = "Watched"
# ret['name'] = w.name + " - " + w.description
# ret['url'] = w.html_url
# watched.append(ret)
# return watched
. Output only the next line. | user = setUser("<username>", "<password>") |
Given the code snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class GithubActivity(Handler):
def get(self):
activity = {}
entries = []
user = setUser("<username>", "<password>")
<|code_end|>
, generate the next line using the imports in this file:
import json
from mainhandler import Handler
from app.utilities.utils import setUser, getFollowers, getFollowing, getWatchedRepos
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def setUser(user="", password=""):
# return Github(user, password).get_user()
#
# def getFollowers(user):
# followers = []
# for f in user.get_followers():
# ret = {}
# ret['type'] = "Followers"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# followers.append(ret)
# return followers
#
# def getFollowing(user):
# following = []
# for f in user.get_following():
# ret = {}
# ret['type'] = "Following"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# following.append(ret)
# return following
#
# def getWatchedRepos(user):
# watched = []
# for w in user.get_watched():
# ret = {}
# ret['type'] = "Watched"
# ret['name'] = w.name + " - " + w.description
# ret['url'] = w.html_url
# watched.append(ret)
# return watched
. Output only the next line. | entries = entries + getFollowers(user) |
Given snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class GithubActivity(Handler):
def get(self):
activity = {}
entries = []
user = setUser("<username>", "<password>")
entries = entries + getFollowers(user)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
from mainhandler import Handler
from app.utilities.utils import setUser, getFollowers, getFollowing, getWatchedRepos
and context:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def setUser(user="", password=""):
# return Github(user, password).get_user()
#
# def getFollowers(user):
# followers = []
# for f in user.get_followers():
# ret = {}
# ret['type'] = "Followers"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# followers.append(ret)
# return followers
#
# def getFollowing(user):
# following = []
# for f in user.get_following():
# ret = {}
# ret['type'] = "Following"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# following.append(ret)
# return following
#
# def getWatchedRepos(user):
# watched = []
# for w in user.get_watched():
# ret = {}
# ret['type'] = "Watched"
# ret['name'] = w.name + " - " + w.description
# ret['url'] = w.html_url
# watched.append(ret)
# return watched
which might include code, classes, or functions. Output only the next line. | entries = entries + getFollowing(user) |
Next line prediction: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class GithubActivity(Handler):
def get(self):
activity = {}
entries = []
user = setUser("<username>", "<password>")
entries = entries + getFollowers(user)
entries = entries + getFollowing(user)
<|code_end|>
. Use current file imports:
(import json
from mainhandler import Handler
from app.utilities.utils import setUser, getFollowers, getFollowing, getWatchedRepos)
and context including class names, function names, or small code snippets from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def setUser(user="", password=""):
# return Github(user, password).get_user()
#
# def getFollowers(user):
# followers = []
# for f in user.get_followers():
# ret = {}
# ret['type'] = "Followers"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# followers.append(ret)
# return followers
#
# def getFollowing(user):
# following = []
# for f in user.get_following():
# ret = {}
# ret['type'] = "Following"
# namedUser = Github().get_user(f.login)
# if namedUser.name:
# ret['name'] = namedUser.name + " - (" + f.login + ")"
# else:
# ret['name'] = f.login
# ret['url'] = "https://github.com/" + f.login
# following.append(ret)
# return following
#
# def getWatchedRepos(user):
# watched = []
# for w in user.get_watched():
# ret = {}
# ret['type'] = "Watched"
# ret['name'] = w.name + " - " + w.description
# ret['url'] = w.html_url
# watched.append(ret)
# return watched
. Output only the next line. | entries = entries + getWatchedRepos(user) |
Here is a snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class Projects(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
<|code_end|>
. Write the next line using the current file imports:
import json
from mainhandler import Handler
from ..utilities.utils import get_gist_data
and context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def get_gist_data(gist_id, gist_filename):
# gist_data = urlfetch.fetch("https://gist.github.com/sagarsane/" + gist_id + "/raw/" + gist_filename + ".json").content
# gist_data = json.loads(gist_data)
# return gist_data
, which may include functions, classes, or code. Output only the next line. | self.write(json.dumps(get_gist_data('3734919','projects_data'))) |
Here is a snippet: <|code_start|>'''
Created on Oct 1, 2012
@author: Sagar
'''
class Education(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
<|code_end|>
. Write the next line using the current file imports:
import json
from mainhandler import Handler
from ..utilities.utils import get_gist_data
and context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/utils.py
# def get_gist_data(gist_id, gist_filename):
# gist_data = urlfetch.fetch("https://gist.github.com/sagarsane/" + gist_id + "/raw/" + gist_filename + ".json").content
# gist_data = json.loads(gist_data)
# return gist_data
, which may include functions, classes, or code. Output only the next line. | self.write(json.dumps(get_gist_data('3735401','education_data'))) |
Given the code snippet: <|code_start|> "media" : "",
"thumbnail" : "",
"credit" : "",
"caption" : ""
},
}
columns = row['f']
an_info['startDate'] = an_info['endDate'] = happened_date
check_link = columns[8]['v'].split("#")
if len(check_link) > 0:
an_info['headline'] = "<a href = '" + columns[8]['v'] + "' target='_blank'>" + columns[0]['v'] + "</a>"
else:
an_info['headline'] = "<a href = '" + columns[8]['v'] + "' target='_blank'>" + columns[0]['v'] + "</a>"
an_info['text'] = columns[4]['v'] + "-" + columns[7]['v']
an_info['tag'] = columns[0]['v']
an_info['asset']['caption'] = "<a href = '" + columns[8]['v'] + "' target='_blank'></a>"
an_info['asset']['credit'] = columns[6]['v']
#+ columns[5]['v'] + " - " + columns[6]['v']
split_date = columns[9]['v'].split(' ')
happened_date1 = ",".join(split_date[0].split("-"))
happened_date = happened_date1 + "," + ",".join(split_date[1].split(":"))
info.append(an_info)
timeline['date'] = info
timeline['era'] = era_info
timeline['chart'] = chart_info
data['timeline'] = timeline
return data
<|code_end|>
, generate the next line using the imports in this file:
import httplib2
import json
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
from mainhandler import Handler
from ..utilities.github_archive_bigquery_handler import execute_bigquery_githubarchive, execute_bigquery_toprepos, execute_bigquery_toplanguages, execute_bigquery_mostactive_users
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/github_archive_bigquery_handler.py
# def execute_bigquery_githubarchive(service):
# query = """SELECT type, actor_attributes_login, payload_action, payload_pull_request_title, repository_name, repository_url, repository_description, repository_language, url, created_at as date
# FROM [githubarchive:github.timeline]
# WHERE (type="PullRequestReviewCommentEvent" OR type="IssuesEvent" OR type="IssueCommentEvent" OR type="PullRequestEvent") AND repository_owner!="sagarsane"
# AND actor_attributes_login="sagarsane"
# AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-07-01 00:00:00')
# GROUP BY type, actor_attributes_login, payload_action, payload_pull_request_title, repository_url, repository_description, repository_name, repository_language, url, date
# ORDER BY date DESC LIMIT 100;"""
# #return runSyncQuery(service, '<project_id>', 'test', query)
# return queryTableData(service, '<project_id>', 'test', 'github_query_result', startIndex=0)
#
# def execute_bigquery_toprepos(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_repos', startIndex=0)
#
# def execute_bigquery_toplanguages(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_languages', startIndex=0)
#
# def execute_bigquery_mostactive_users(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'mostactive_users', startIndex=0)
. Output only the next line. | class GithubArchiveHTML(Handler): |
Given the code snippet: <|code_start|> an_info['text'] = columns[4]['v'] + "-" + columns[7]['v']
an_info['tag'] = columns[0]['v']
an_info['asset']['caption'] = "<a href = '" + columns[8]['v'] + "' target='_blank'></a>"
an_info['asset']['credit'] = columns[6]['v']
#+ columns[5]['v'] + " - " + columns[6]['v']
split_date = columns[9]['v'].split(' ')
happened_date1 = ",".join(split_date[0].split("-"))
happened_date = happened_date1 + "," + ",".join(split_date[1].split(":"))
info.append(an_info)
timeline['date'] = info
timeline['era'] = era_info
timeline['chart'] = chart_info
data['timeline'] = timeline
return data
class GithubArchiveHTML(Handler):
def get(self):
#self.render_front("timeline.html")
self.render("metro_github_archive.html")
class GithubArchiveBigQuery(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
"""Local_parser_result
self.write(json.dumps(local_archive))
"""
"""Remote_parser_result"""
service = build("bigquery", "v2", http=http)
<|code_end|>
, generate the next line using the imports in this file:
import httplib2
import json
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
from mainhandler import Handler
from ..utilities.github_archive_bigquery_handler import execute_bigquery_githubarchive, execute_bigquery_toprepos, execute_bigquery_toplanguages, execute_bigquery_mostactive_users
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/github_archive_bigquery_handler.py
# def execute_bigquery_githubarchive(service):
# query = """SELECT type, actor_attributes_login, payload_action, payload_pull_request_title, repository_name, repository_url, repository_description, repository_language, url, created_at as date
# FROM [githubarchive:github.timeline]
# WHERE (type="PullRequestReviewCommentEvent" OR type="IssuesEvent" OR type="IssueCommentEvent" OR type="PullRequestEvent") AND repository_owner!="sagarsane"
# AND actor_attributes_login="sagarsane"
# AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-07-01 00:00:00')
# GROUP BY type, actor_attributes_login, payload_action, payload_pull_request_title, repository_url, repository_description, repository_name, repository_language, url, date
# ORDER BY date DESC LIMIT 100;"""
# #return runSyncQuery(service, '<project_id>', 'test', query)
# return queryTableData(service, '<project_id>', 'test', 'github_query_result', startIndex=0)
#
# def execute_bigquery_toprepos(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_repos', startIndex=0)
#
# def execute_bigquery_toplanguages(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_languages', startIndex=0)
#
# def execute_bigquery_mostactive_users(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'mostactive_users', startIndex=0)
. Output only the next line. | parsed_archive = execute_bigquery_githubarchive(service) |
Given the code snippet: <|code_start|> self.response.headers['Content-Type'] = 'application/json'
"""Local_parser_result
self.write(json.dumps(local_archive))
"""
"""Remote_parser_result"""
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_githubarchive(service)
self.write(json.dumps(parsed_archive))
class MostActiveUsers(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_mostactive_users(service)
self.write(json.dumps(parsed_archive))
class TopLanguages(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_toplanguages(service)
self.write(json.dumps(parsed_archive))
class TopRepos(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
<|code_end|>
, generate the next line using the imports in this file:
import httplib2
import json
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
from mainhandler import Handler
from ..utilities.github_archive_bigquery_handler import execute_bigquery_githubarchive, execute_bigquery_toprepos, execute_bigquery_toplanguages, execute_bigquery_mostactive_users
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/github_archive_bigquery_handler.py
# def execute_bigquery_githubarchive(service):
# query = """SELECT type, actor_attributes_login, payload_action, payload_pull_request_title, repository_name, repository_url, repository_description, repository_language, url, created_at as date
# FROM [githubarchive:github.timeline]
# WHERE (type="PullRequestReviewCommentEvent" OR type="IssuesEvent" OR type="IssueCommentEvent" OR type="PullRequestEvent") AND repository_owner!="sagarsane"
# AND actor_attributes_login="sagarsane"
# AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-07-01 00:00:00')
# GROUP BY type, actor_attributes_login, payload_action, payload_pull_request_title, repository_url, repository_description, repository_name, repository_language, url, date
# ORDER BY date DESC LIMIT 100;"""
# #return runSyncQuery(service, '<project_id>', 'test', query)
# return queryTableData(service, '<project_id>', 'test', 'github_query_result', startIndex=0)
#
# def execute_bigquery_toprepos(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_repos', startIndex=0)
#
# def execute_bigquery_toplanguages(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_languages', startIndex=0)
#
# def execute_bigquery_mostactive_users(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'mostactive_users', startIndex=0)
. Output only the next line. | parsed_archive = execute_bigquery_toprepos(service) |
Given the following code snippet before the placeholder: <|code_start|>class GithubArchiveHTML(Handler):
def get(self):
#self.render_front("timeline.html")
self.render("metro_github_archive.html")
class GithubArchiveBigQuery(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
"""Local_parser_result
self.write(json.dumps(local_archive))
"""
"""Remote_parser_result"""
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_githubarchive(service)
self.write(json.dumps(parsed_archive))
class MostActiveUsers(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_mostactive_users(service)
self.write(json.dumps(parsed_archive))
class TopLanguages(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
<|code_end|>
, predict the next line using imports from the current file:
import httplib2
import json
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
from mainhandler import Handler
from ..utilities.github_archive_bigquery_handler import execute_bigquery_githubarchive, execute_bigquery_toprepos, execute_bigquery_toplanguages, execute_bigquery_mostactive_users
and context including class names, function names, and sometimes code from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/github_archive_bigquery_handler.py
# def execute_bigquery_githubarchive(service):
# query = """SELECT type, actor_attributes_login, payload_action, payload_pull_request_title, repository_name, repository_url, repository_description, repository_language, url, created_at as date
# FROM [githubarchive:github.timeline]
# WHERE (type="PullRequestReviewCommentEvent" OR type="IssuesEvent" OR type="IssueCommentEvent" OR type="PullRequestEvent") AND repository_owner!="sagarsane"
# AND actor_attributes_login="sagarsane"
# AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-07-01 00:00:00')
# GROUP BY type, actor_attributes_login, payload_action, payload_pull_request_title, repository_url, repository_description, repository_name, repository_language, url, date
# ORDER BY date DESC LIMIT 100;"""
# #return runSyncQuery(service, '<project_id>', 'test', query)
# return queryTableData(service, '<project_id>', 'test', 'github_query_result', startIndex=0)
#
# def execute_bigquery_toprepos(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_repos', startIndex=0)
#
# def execute_bigquery_toplanguages(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_languages', startIndex=0)
#
# def execute_bigquery_mostactive_users(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'mostactive_users', startIndex=0)
. Output only the next line. | parsed_archive = execute_bigquery_toplanguages(service) |
Here is a snippet: <|code_start|>
timeline['date'] = info
timeline['era'] = era_info
timeline['chart'] = chart_info
data['timeline'] = timeline
return data
class GithubArchiveHTML(Handler):
def get(self):
#self.render_front("timeline.html")
self.render("metro_github_archive.html")
class GithubArchiveBigQuery(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
"""Local_parser_result
self.write(json.dumps(local_archive))
"""
"""Remote_parser_result"""
service = build("bigquery", "v2", http=http)
parsed_archive = execute_bigquery_githubarchive(service)
self.write(json.dumps(parsed_archive))
class MostActiveUsers(Handler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
service = build("bigquery", "v2", http=http)
<|code_end|>
. Write the next line using the current file imports:
import httplib2
import json
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
from mainhandler import Handler
from ..utilities.github_archive_bigquery_handler import execute_bigquery_githubarchive, execute_bigquery_toprepos, execute_bigquery_toplanguages, execute_bigquery_mostactive_users
and context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/utilities/github_archive_bigquery_handler.py
# def execute_bigquery_githubarchive(service):
# query = """SELECT type, actor_attributes_login, payload_action, payload_pull_request_title, repository_name, repository_url, repository_description, repository_language, url, created_at as date
# FROM [githubarchive:github.timeline]
# WHERE (type="PullRequestReviewCommentEvent" OR type="IssuesEvent" OR type="IssueCommentEvent" OR type="PullRequestEvent") AND repository_owner!="sagarsane"
# AND actor_attributes_login="sagarsane"
# AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-07-01 00:00:00')
# GROUP BY type, actor_attributes_login, payload_action, payload_pull_request_title, repository_url, repository_description, repository_name, repository_language, url, date
# ORDER BY date DESC LIMIT 100;"""
# #return runSyncQuery(service, '<project_id>', 'test', query)
# return queryTableData(service, '<project_id>', 'test', 'github_query_result', startIndex=0)
#
# def execute_bigquery_toprepos(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_repos', startIndex=0)
#
# def execute_bigquery_toplanguages(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'top_languages', startIndex=0)
#
# def execute_bigquery_mostactive_users(service):
# return queryTableData(service, "<project_id>", 'windows8app', 'mostactive_users', startIndex=0)
, which may include functions, classes, or code. Output only the next line. | parsed_archive = execute_bigquery_mostactive_users(service) |
Given the code snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
<|code_end|>
, generate the next line using the imports in this file:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
. Output only the next line. | ('/education', Education), |
Predict the next line for this snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
<|code_end|>
with the help of current file imports:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
, which may contain function names, class names, or code. Output only the next line. | ('/contact', Contact), |
Given the code snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
('/contact', Contact),
<|code_end|>
, generate the next line using the imports in this file:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context (functions, classes, or occasionally code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
. Output only the next line. | ('/achievements', Achievements), |
Based on the snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
('/contact', Contact),
('/achievements', Achievements),
<|code_end|>
, predict the immediate next line with the help of imports:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context (classes, functions, sometimes code) from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
. Output only the next line. | ('/github_activity', GithubActivity), |
Using the snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
('/contact', Contact),
('/achievements', Achievements),
('/github_activity', GithubActivity),
('/github_archive', GithubArchiveHTML),
<|code_end|>
, determine the next line of code. You have imports:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context (class names, function names, or code) available:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
. Output only the next line. | ('/archive_data', GithubArchiveBigQuery), |
Predict the next line after this snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
('/contact', Contact),
('/achievements', Achievements),
('/github_activity', GithubActivity),
<|code_end|>
using the current file's imports:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and any relevant context from other files:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
. Output only the next line. | ('/github_archive', GithubArchiveHTML), |
Given snippet: <|code_start|>
app = webapp2.WSGIApplication([('/home', MainPage),
('/projects', Projects),
('/experience', Experience),
('/education', Education),
('/contact', Contact),
('/achievements', Achievements),
('/github_activity', GithubActivity),
('/github_archive', GithubArchiveHTML),
('/archive_data', GithubArchiveBigQuery),
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import webapp2
from mainhandler import Handler
from app.content.index import MainPage
from app.content.experience import Experience
from app.content.education import Education
from app.content.projects import Projects
from app.content.contact import Contact
from app.content.achievements import Achievements
from app.utilities.githubactivity import GithubActivity
from app.utilities.githubarchive import GithubArchiveBigQuery, GithubArchiveHTML, TopRepos, TopLanguages, MostActiveUsers
and context:
# Path: mainhandler.py
# class Handler(webapp2.RequestHandler):
# def write(self, *a, **kw):
# self.response.out.write(*a, **kw)
#
# def render_str(self, template, **params):
# t = jinja_env.get_template(template)
# return t.render(params)
#
# def render(self, template, **kw):
# self.write(self.render_str(template, **kw))
#
# def render_front(self, template):
# self.render(template)
#
# Path: app/content/index.py
# class MainPage(Handler):
# def get(self):
# self.render_front("index.html")
#
# Path: app/content/experience.py
# class Experience(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735396','experience_data')))
#
# Path: app/content/education.py
# class Education(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3735401','education_data')))
#
# Path: app/content/projects.py
# class Projects(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3734919','projects_data')))
#
# Path: app/content/contact.py
# class Contact(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785786','contact_data')))
#
# Path: app/content/achievements.py
# class Achievements(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(get_gist_data('3785895','achievements_data')))
#
# Path: app/utilities/githubactivity.py
# class GithubActivity(Handler):
# def get(self):
# activity = {}
# entries = []
# user = setUser("<username>", "<password>")
# entries = entries + getFollowers(user)
# entries = entries + getFollowing(user)
# entries = entries + getWatchedRepos(user)
# activity['activities'] = entries
# self.response.headers['Content-Type'] = 'application/json'
# self.write(json.dumps(activity))
#
# Path: app/utilities/githubarchive.py
# class GithubArchiveBigQuery(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# """Local_parser_result
# self.write(json.dumps(local_archive))
# """
# """Remote_parser_result"""
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_githubarchive(service)
# self.write(json.dumps(parsed_archive))
#
# class GithubArchiveHTML(Handler):
# def get(self):
# #self.render_front("timeline.html")
# self.render("metro_github_archive.html")
#
# class TopRepos(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toprepos(service)
# self.write(json.dumps(parsed_archive))
#
# class TopLanguages(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_toplanguages(service)
# self.write(json.dumps(parsed_archive))
#
# class MostActiveUsers(Handler):
# def get(self):
# self.response.headers['Content-Type'] = 'application/json'
# service = build("bigquery", "v2", http=http)
# parsed_archive = execute_bigquery_mostactive_users(service)
# self.write(json.dumps(parsed_archive))
which might include code, classes, or functions. Output only the next line. | ('/toprepos', TopRepos), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.