Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Next line prediction: <|code_start|>
class Scenario(IsisScenario):
description = "making toast in isisworld"
author = "dustin smith"
version = "1"
def environment():
#k = kitchen(length=15, width=15, height=10)
#put_in_world(k)
b1 = block()
put_in_world(b1)
<|code_end|>
. Use current file imports:
(from src.isis_scenario import IsisScenario)
and context including class names, function names, or small code snippets from other files:
# Path: src/isis_scenario.py
# class IsisScenario(object):
#
# def __init__(self,filename):
# # _taskDict stores a mapping of task names to task methods
# self._taskDict = {}
# if hasattr(self,'name'):
# self.name = self.name
# else:
# self.name = filename
#
# self.description = "No description"
# self.author = "Unknown"
# # envDict stores a mapping of variable names to isisObject and isisAgent instances
# self.envDict = {}
# self._loadTaskFile(self.name)
#
# def _loadTaskFile(self,functionName):
# print "Loading: %s" % functionName
#
# # load all of the tasks
# task_functions = filter(lambda x: x[0:4] == "task",dir(self))
#
# if len(task_functions) == 0:
# raise IsisParseProblem("No tasks defined.",functionName)
#
# # create IsisTasks for each of the def task_* in the scenario file.
# print "Self dictionary", self.__dict__.keys()
# for tf in task_functions:
# new_task = IsisTask(self)
# new_task.executeTaskCode(tf,self.__getattribute__(tf))
# # add the task to the dictionary
# self._taskDict[new_task.name] = new_task
#
# def loadScenario(self, baseNode):
# if True:#try:
# load_objects(self, baseNode)
# return True
# else:#except Exception, e:
# raise IsisParseProblem(str(e),"%s in def environment()" % self.name)
# return False
#
# def getTaskByName(self,taskName):
# return self._taskDict[taskName]
#
# def getTaskList(self):
# return self._taskDict.keys()
#
# def __del__(self):
# """ Delete tasks """
# for task in self._taskDict.values():
# del task
. Output only the next line. | b2 = block() |
Based on the snippet: <|code_start|> k = kitchen(length=15, width=15, height=10)
put_in_world(k)
f = fridge()
put_in(f, k)
b = butter()
put_in(b, f)
ov = oven()
put_in(ov, k)
ta = table(scale=7)
put_in(ta, k)
ta2 = table(scale=7)
put_in(ta2, k)
fp = frying_pan()
put_on(fp, ta2)
# ta4 = table(scale=7)
# ta4.scale = 1
# put_in(ta4, k)
<|code_end|>
, predict the immediate next line with the help of imports:
from src.isis_scenario import IsisScenario
and context (classes, functions, sometimes code) from other files:
# Path: src/isis_scenario.py
# class IsisScenario(object):
#
# def __init__(self,filename):
# # _taskDict stores a mapping of task names to task methods
# self._taskDict = {}
# if hasattr(self,'name'):
# self.name = self.name
# else:
# self.name = filename
#
# self.description = "No description"
# self.author = "Unknown"
# # envDict stores a mapping of variable names to isisObject and isisAgent instances
# self.envDict = {}
# self._loadTaskFile(self.name)
#
# def _loadTaskFile(self,functionName):
# print "Loading: %s" % functionName
#
# # load all of the tasks
# task_functions = filter(lambda x: x[0:4] == "task",dir(self))
#
# if len(task_functions) == 0:
# raise IsisParseProblem("No tasks defined.",functionName)
#
# # create IsisTasks for each of the def task_* in the scenario file.
# print "Self dictionary", self.__dict__.keys()
# for tf in task_functions:
# new_task = IsisTask(self)
# new_task.executeTaskCode(tf,self.__getattribute__(tf))
# # add the task to the dictionary
# self._taskDict[new_task.name] = new_task
#
# def loadScenario(self, baseNode):
# if True:#try:
# load_objects(self, baseNode)
# return True
# else:#except Exception, e:
# raise IsisParseProblem(str(e),"%s in def environment()" % self.name)
# return False
#
# def getTaskByName(self,taskName):
# return self._taskDict[taskName]
#
# def getTaskList(self):
# return self._taskDict.keys()
#
# def __del__(self):
# """ Delete tasks """
# for task in self._taskDict.values():
# del task
. Output only the next line. | t = toaster() |
Given the following code snippet before the placeholder: <|code_start|>
class Scenario(IsisScenario):
description = "picking up a knife"
author = "dustin smith"
version = "1"
def environment():
k = kitchen()
put_in_world(k)
ta = table()
<|code_end|>
, predict the next line using imports from the current file:
from src.isis_scenario import IsisScenario
and context including class names, function names, and sometimes code from other files:
# Path: src/isis_scenario.py
# class IsisScenario(object):
#
# def __init__(self,filename):
# # _taskDict stores a mapping of task names to task methods
# self._taskDict = {}
# if hasattr(self,'name'):
# self.name = self.name
# else:
# self.name = filename
#
# self.description = "No description"
# self.author = "Unknown"
# # envDict stores a mapping of variable names to isisObject and isisAgent instances
# self.envDict = {}
# self._loadTaskFile(self.name)
#
# def _loadTaskFile(self,functionName):
# print "Loading: %s" % functionName
#
# # load all of the tasks
# task_functions = filter(lambda x: x[0:4] == "task",dir(self))
#
# if len(task_functions) == 0:
# raise IsisParseProblem("No tasks defined.",functionName)
#
# # create IsisTasks for each of the def task_* in the scenario file.
# print "Self dictionary", self.__dict__.keys()
# for tf in task_functions:
# new_task = IsisTask(self)
# new_task.executeTaskCode(tf,self.__getattribute__(tf))
# # add the task to the dictionary
# self._taskDict[new_task.name] = new_task
#
# def loadScenario(self, baseNode):
# if True:#try:
# load_objects(self, baseNode)
# return True
# else:#except Exception, e:
# raise IsisParseProblem(str(e),"%s in def environment()" % self.name)
# return False
#
# def getTaskByName(self,taskName):
# return self._taskDict[taskName]
#
# def getTaskList(self):
# return self._taskDict.keys()
#
# def __del__(self):
# """ Delete tasks """
# for task in self._taskDict.values():
# del task
. Output only the next line. | put_in(ta, k) |
Next line prediction: <|code_start|>from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
def test_most_likely_choice():
n = 100
some_list = [('a', 0.99), ('b', 0.01)]
vals = [utils.most_likely_choice(some_list) for i in range(n)]
a_count = vals.count('a')
b_count = vals.count('b')
assert a_count == n
assert b_count == 0
with pytest.raises(ValueError):
utils.most_likely_choice([('a', -1)])
with pytest.raises(ValueError):
utils.most_likely_choice([])
<|code_end|>
. Use current file imports:
(import random
import pytest
from concept_formation import utils
from concept_formation.cobweb3 import ContinuousValue)
and context including class names, function names, or small code snippets from other files:
# Path: concept_formation/utils.py
# def c4(n):
# def isNumber(n):
# def mean(values):
# def std(values):
# def weighted_choice(choices):
# def most_likely_choice(choices):
#
# Path: concept_formation/cobweb3.py
# class Cobweb3Tree(CobwebTree):
# class Cobweb3Node(CobwebNode):
# def __init__(self, scaling=0.5, inner_attr_scaling=True):
# def clear(self):
# def get_inner_attr(self, attr):
# def update_scales(self, instance):
# def cobweb(self, instance):
# def ifit(self, instance):
# def increment_counts(self, instance):
# def update_counts_from_node(self, node):
# def expected_correct_guesses(self):
# def pretty_print(self, depth=0):
# def get_weighted_values(self, attr, allow_none=True):
# def predict(self, attr, choice_fn="most likely", allow_none=True):
# def probability(self, attr, val):
# def log_likelihood(self, child_leaf):
# def is_exact_match(self, instance):
# def output_json(self):
. Output only the next line. | def test_weighted_choice(): |
Continue the code snippet: <|code_start|>from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
def test_most_likely_choice():
n = 100
some_list = [('a', 0.99), ('b', 0.01)]
vals = [utils.most_likely_choice(some_list) for i in range(n)]
a_count = vals.count('a')
b_count = vals.count('b')
<|code_end|>
. Use current file imports:
import random
import pytest
from concept_formation import utils
from concept_formation.cobweb3 import ContinuousValue
and context (classes, functions, or code) from other files:
# Path: concept_formation/utils.py
# def c4(n):
# def isNumber(n):
# def mean(values):
# def std(values):
# def weighted_choice(choices):
# def most_likely_choice(choices):
#
# Path: concept_formation/cobweb3.py
# class Cobweb3Tree(CobwebTree):
# class Cobweb3Node(CobwebNode):
# def __init__(self, scaling=0.5, inner_attr_scaling=True):
# def clear(self):
# def get_inner_attr(self, attr):
# def update_scales(self, instance):
# def cobweb(self, instance):
# def ifit(self, instance):
# def increment_counts(self, instance):
# def update_counts_from_node(self, node):
# def expected_correct_guesses(self):
# def pretty_print(self, depth=0):
# def get_weighted_values(self, attr, allow_none=True):
# def predict(self, attr, choice_fn="most likely", allow_none=True):
# def probability(self, attr, val):
# def log_likelihood(self, child_leaf):
# def is_exact_match(self, instance):
# def output_json(self):
. Output only the next line. | assert a_count == n |
Here is a snippet: <|code_start|>from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
seed(0)
num_clusters = 4
num_samples = 30
sigma = 1
xmean = [uniform(-8, 8) for i in range(num_clusters)]
ymean = [uniform(-8, 8) for i in range(num_clusters)]
label = ['bo', 'bo', 'bo', 'bo', 'bo', 'bo', 'bo']
shuffle(label)
label = label[0:num_clusters]
data = []
<|code_end|>
. Write the next line using the current file imports:
from random import normalvariate
from random import shuffle
from random import uniform
from random import seed
from matplotlib.patches import Ellipse
from concept_formation.cobweb3 import Cobweb3Tree
from concept_formation.cobweb3 import cv_key as cv
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
and context from other files:
# Path: concept_formation/cobweb3.py
# class Cobweb3Tree(CobwebTree):
# """
# The Cobweb3Tree contains the knowledge base of a partiucluar instance of
# the Cobweb/3 algorithm and can be used to fit and categorize instances.
# Cobweb/3's main difference over Cobweb is the ability to handle numerical
# attributes by applying an assumption that they should follow a normal
# distribution. For the purposes of Cobweb/3's core algorithms a numeric
# attribute is any value where ``isinstance(instance[attr], Number)`` returns
# ``True``.
#
# The scaling parameter determines whether online normalization of continuous
# attributes is used, and to what standard deviation the values are scaled
# to. Scaling divides the std of each attribute by the std of the attribute
# in the root divided by the scaling constant (i.e., :math:`\\sigma_{root} /
# scaling` when making category utility calculations. Scaling is useful to
# balance the weight of different numerical attributes, without scaling the
# magnitude of numerical attributes can affect category utility calculation
# meaning numbers that are naturally larger will recieve preference in the
# category utility calculation.
#
# :param scaling: The number of standard deviations numeric attributes
# are scaled to. By default this value is 0.5 (half a standard
# deviation), which is the max std of nominal values. If disabiling
# scaling is desirable, then it can be set to False or None.
# :type scaling: a float greater than 0.0, None, or False
# :param inner_attr_scaling: Whether to use the inner most attribute name
# when scaling numeric attributes. For example, if `('attr', '?o1')` was
# an attribute, then the inner most attribute would be 'attr'. When using
# inner most attributes, some objects might have multiple attributes
# (i.e., 'attr' for different objects) that contribute to the scaling.
# :param inner_attr_scaling: boolean
# """
#
# def __init__(self, scaling=0.5, inner_attr_scaling=True):
# """
# The tree constructor.
# """
# self.root = Cobweb3Node()
# self.root.tree = self
# self.scaling = scaling
# self.inner_attr_scaling = inner_attr_scaling
# self.attr_scales = {}
#
# def clear(self):
# """
# Clears the concepts of the tree, but maintains the scaling parameter.
# """
# self.root = Cobweb3Node()
# self.root.tree = self
# self.attr_scales = {}
#
# def get_inner_attr(self, attr):
# """
# Extracts the inner most attribute name from the provided attribute, if
# the attribute is a tuple and inner_attr_scaling is on. Otherwise it
# just returns the attribute. This is used to for normalizing attributes.
#
# >>> t = Cobweb3Tree()
# >>> t.get_inner_attr(('a', '?object1'))
# 'a'
# >>> t.get_inner_attr('a')
# 'a'
# """
# if isinstance(attr, tuple) and self.inner_attr_scaling:
# return attr[0]
# else:
# return attr
#
# def update_scales(self, instance):
# """
# Reads through all the attributes in an instance and updates the
# tree scales object so that the attributes can be properly scaled.
# """
# for attr in instance:
# if isNumber(instance[attr]):
# inner_attr = self.get_inner_attr(attr)
# if inner_attr not in self.attr_scales:
# self.attr_scales[inner_attr] = ContinuousValue()
# self.attr_scales[inner_attr].update(instance[attr])
#
# def cobweb(self, instance):
# """
# A modification of the cobweb function to update the scales object
# first, so that attribute values can be properly scaled.
# """
# self.update_scales(instance)
# return super(Cobweb3Tree, self).cobweb(instance)
#
# def ifit(self, instance):
# """
# Incrementally fit a new instance into the tree and return its resulting
# concept.
#
# The cobweb3 version of the :meth:`CobwebTree.ifit` function. This
# version keeps track of all of the continuous
#
# :param instance: An instance to be categorized into the tree.
# :type instance: :ref:`Instance<instance-rep>`
# :return: A concept describing the instance
# :rtype: Cobweb3Node
#
# .. seealso:: :meth:`CobwebTree.cobweb`
# """
# self._sanity_check_instance(instance)
# return self.cobweb(instance)
#
# Path: concept_formation/cobweb3.py
# class Cobweb3Tree(CobwebTree):
# class Cobweb3Node(CobwebNode):
# def __init__(self, scaling=0.5, inner_attr_scaling=True):
# def clear(self):
# def get_inner_attr(self, attr):
# def update_scales(self, instance):
# def cobweb(self, instance):
# def ifit(self, instance):
# def increment_counts(self, instance):
# def update_counts_from_node(self, node):
# def expected_correct_guesses(self):
# def pretty_print(self, depth=0):
# def get_weighted_values(self, attr, allow_none=True):
# def predict(self, attr, choice_fn="most likely", allow_none=True):
# def probability(self, attr, val):
# def log_likelihood(self, child_leaf):
# def is_exact_match(self, instance):
# def output_json(self):
, which may include functions, classes, or code. Output only the next line. | actual = [] |
Given the code snippet: <|code_start|>
flag_cache = SimpleCache(threshold=1000, default_timeout=300)
country_cache = SimpleCache(threshold=1000, default_timeout=300)
sensor_cache = SimpleCache(threshold=1000, default_timeout=300)
def is_private_addr(ip):
# 10.0.0.0/8
# 127.0.0.0/8
# 172.16.0.0/12
# 192.168.0.0/16
# fc00::/7 (unique local addresses)
# ::1/128 (localhost)
try:
ip_obj = ip_address(ip)
# Make exception for ::ffff/96 (ipv4-mapped)
if ip_obj.version == 6 and ip_obj.ipv4_mapped:
<|code_end|>
, generate the next line using the imports in this file:
import requests
import os
import struct
from flask import current_app as app, url_for
from mhn.ui import constants
from config import MHN_SERVER_HOME
from werkzeug.contrib.cache import SimpleCache
from ipaddress import ip_address
from mhn.api.models import Sensor
and context (functions, classes, or occasionally code) from other files:
# Path: mhn/ui/constants.py
# DEFAULT_FLAG_URL = 'img/unknown.png'
# DEFAULT_COUNTRY_NAME = 'Unknown'
#
# Path: mhn/api/models.py
# class Sensor(db.Model, APIModel):
#
# # Defines some properties on the fields:
# # required: Is required for creating object via
# # a POST request.
# # editable: Can be edited via a PUT request.
# all_fields = {
# 'uuid': {'required': False, 'editable': False},
# 'name': {'required': True, 'editable': True},
# 'created_date': {'required': False, 'editable': False},
# 'ip': {'required': False, 'editable': True},
# 'hostname': {'required': True, 'editable': True},
# 'honeypot': {'required': True, 'editable': False}
# }
#
# __tablename__ = 'sensors'
#
# id = db.Column(db.Integer, primary_key=True)
# uuid = db.Column(db.String(36), unique=True)
# name = db.Column(db.String(50))
# created_date = db.Column(
# db.DateTime(), default=datetime.utcnow)
# ip = db.Column(db.String(15))
# hostname = db.Column(db.String(50))
# identifier = db.Column(db.String(50), unique=True)
# honeypot = db.Column(db.String(50))
#
# def __init__(
# self, uuid=None, name=None, created_date=None, honeypot=None,
# ip=None, hostname=None, identifier=None, **args):
# self.uuid = uuid
# self.name = name
# self.created_date = created_date
# self.ip = ip
# self.hostname = hostname
# self.identifier = identifier
# self.honeypot = honeypot
#
# def __repr__(self):
# return '<Sensor>{}'.format(self.to_dict())
#
# def to_dict(self):
# return dict(
# uuid=self.uuid, name=self.name, honeypot=self.honeypot,
# created_date=str(self.created_date), ip=self.ip,
# hostname=self.hostname, identifier=self.uuid,
# # Extending with info from Mnemosyne.
# secret=self.authkey.secret, publish=self.authkey.publish)
#
# def new_auth_dict(self):
# el = string.ascii_letters + string.digits
# rand_str = lambda n: ''.join(choice(el) for _ in range(n))
# return dict(secret=rand_str(16), owner="chn",
# identifier=self.uuid, honeypot=self.honeypot,
# subscribe=[], publish=Sensor.get_channels(self.honeypot))
#
# @property
# def attacks_count(self):
# return Clio().counts.get_count(identifier=self.uuid)
#
# @property
# def authkey(self):
# return Clio().authkey.get(identifier=self.uuid)
#
# @staticmethod
# def get_channels(honeypot):
# from mhn import mhn
# return mhn.config.get('HONEYPOT_CHANNELS', {}).get(honeypot, [])
. Output only the next line. | return False |
Using the snippet: <|code_start|>
flag_cache = SimpleCache(threshold=1000, default_timeout=300)
country_cache = SimpleCache(threshold=1000, default_timeout=300)
sensor_cache = SimpleCache(threshold=1000, default_timeout=300)
def is_private_addr(ip):
# 10.0.0.0/8
# 127.0.0.0/8
# 172.16.0.0/12
# 192.168.0.0/16
# fc00::/7 (unique local addresses)
# ::1/128 (localhost)
try:
ip_obj = ip_address(ip)
# Make exception for ::ffff/96 (ipv4-mapped)
if ip_obj.version == 6 and ip_obj.ipv4_mapped:
return False
if ip_obj.is_private:
return True
except Exception as e:
print('Error (%s) on is_private_addr: %s' % (e, ip))
return False
def get_flag_ip(ipaddr):
<|code_end|>
, determine the next line of code. You have imports:
import requests
import os
import struct
from flask import current_app as app, url_for
from mhn.ui import constants
from config import MHN_SERVER_HOME
from werkzeug.contrib.cache import SimpleCache
from ipaddress import ip_address
from mhn.api.models import Sensor
and context (class names, function names, or code) available:
# Path: mhn/ui/constants.py
# DEFAULT_FLAG_URL = 'img/unknown.png'
# DEFAULT_COUNTRY_NAME = 'Unknown'
#
# Path: mhn/api/models.py
# class Sensor(db.Model, APIModel):
#
# # Defines some properties on the fields:
# # required: Is required for creating object via
# # a POST request.
# # editable: Can be edited via a PUT request.
# all_fields = {
# 'uuid': {'required': False, 'editable': False},
# 'name': {'required': True, 'editable': True},
# 'created_date': {'required': False, 'editable': False},
# 'ip': {'required': False, 'editable': True},
# 'hostname': {'required': True, 'editable': True},
# 'honeypot': {'required': True, 'editable': False}
# }
#
# __tablename__ = 'sensors'
#
# id = db.Column(db.Integer, primary_key=True)
# uuid = db.Column(db.String(36), unique=True)
# name = db.Column(db.String(50))
# created_date = db.Column(
# db.DateTime(), default=datetime.utcnow)
# ip = db.Column(db.String(15))
# hostname = db.Column(db.String(50))
# identifier = db.Column(db.String(50), unique=True)
# honeypot = db.Column(db.String(50))
#
# def __init__(
# self, uuid=None, name=None, created_date=None, honeypot=None,
# ip=None, hostname=None, identifier=None, **args):
# self.uuid = uuid
# self.name = name
# self.created_date = created_date
# self.ip = ip
# self.hostname = hostname
# self.identifier = identifier
# self.honeypot = honeypot
#
# def __repr__(self):
# return '<Sensor>{}'.format(self.to_dict())
#
# def to_dict(self):
# return dict(
# uuid=self.uuid, name=self.name, honeypot=self.honeypot,
# created_date=str(self.created_date), ip=self.ip,
# hostname=self.hostname, identifier=self.uuid,
# # Extending with info from Mnemosyne.
# secret=self.authkey.secret, publish=self.authkey.publish)
#
# def new_auth_dict(self):
# el = string.ascii_letters + string.digits
# rand_str = lambda n: ''.join(choice(el) for _ in range(n))
# return dict(secret=rand_str(16), owner="chn",
# identifier=self.uuid, honeypot=self.honeypot,
# subscribe=[], publish=Sensor.get_channels(self.honeypot))
#
# @property
# def attacks_count(self):
# return Clio().counts.get_count(identifier=self.uuid)
#
# @property
# def authkey(self):
# return Clio().authkey.get(identifier=self.uuid)
#
# @staticmethod
# def get_channels(honeypot):
# from mhn import mhn
# return mhn.config.get('HONEYPOT_CHANNELS', {}).get(honeypot, [])
. Output only the next line. | if is_private_addr(ipaddr): |
Predict the next line after this snippet: <|code_start|> # end def
def to_dict(self):
data = super().to_dict()
data["leader"] = self.leader
data["value"] = self.value
return data
# end def
# end class
class NewLeaderMessage(Message): # pragma: no cover
def __init__(self, sequence_no, node, leader, value):
super().__init__(VOTE, sequence_no, node)
self.leader = leader
self.value = value
# end def
# end class
class Acknowledge(Message):
def __init__(self, sequence_no, node, sender, raw):
super().__init__(ACKNOWLEDGE, sequence_no, node)
self.sender = sender
self.raw = raw
# end def
@classmethod
def from_dict(cls, data):
kwargs = {
<|code_end|>
using the current file's imports:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and any relevant context from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
. Output only the next line. | "sequence_no": data["sequence_no"], |
Continue the code snippet: <|code_start|> @classmethod
def from_dict(cls, data):
raise NotImplementedError("LeaderChangeMessage")
kwargs = {
"type": data["type"],
"sequence_no": data["sequence_no"],
}
return cls(**kwargs)
# end def
def to_dict(self):
raise NotImplementedError("LeaderChangeMessage")
return {
"type": self.type,
"sequence_no": self.sequence_no,
}
# end def
# end class
class ProposeMessage(Message):
def __init__(self, sequence_no, node, leader, proposal, value_store):
super(ProposeMessage, self).__init__(PROPOSE, sequence_no, node)
self.leader = leader
self.proposal = proposal
assert isinstance(value_store, list)
self.value_store = value_store
@classmethod
def from_dict(cls, data):
<|code_end|>
. Use current file imports:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context (classes, functions, or code) from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
. Output only the next line. | value_store = [] |
Predict the next line for this snippet: <|code_start|> return cls(**kwargs)
# end def
def to_dict(self):
data = super().to_dict()
data["value"] = self.value
return data
# end def
# end class
class LeaderChangeMessage(Message): # pragma: no cover
def __init__(self, sequence_no, node_num, leader, P):
raise NotImplementedError("LeaderChangeMessage")
super(LeaderChangeMessage, self).__init__(LEADER_CHANGE, sequence_no)
# end def
@classmethod
def from_dict(cls, data):
raise NotImplementedError("LeaderChangeMessage")
kwargs = {
"type": data["type"],
"sequence_no": data["sequence_no"],
}
return cls(**kwargs)
# end def
def to_dict(self):
raise NotImplementedError("LeaderChangeMessage")
return {
<|code_end|>
with the help of current file imports:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
, which may contain function names, class names, or code. Output only the next line. | "type": self.type, |
Given the code snippet: <|code_start|> # end def
def to_dict(self):
raise NotImplementedError("LeaderChangeMessage")
return {
"type": self.type,
"sequence_no": self.sequence_no,
}
# end def
# end class
class ProposeMessage(Message):
def __init__(self, sequence_no, node, leader, proposal, value_store):
super(ProposeMessage, self).__init__(PROPOSE, sequence_no, node)
self.leader = leader
self.proposal = proposal
assert isinstance(value_store, list)
self.value_store = value_store
@classmethod
def from_dict(cls, data):
value_store = []
for v in data.get("value_store", []):
msg = InitMessage.from_dict(v)
# value_store[msg.node] = msg
value_store.append(msg)
# end for
kwargs = {
"sequence_no": data["sequence_no"],
<|code_end|>
, generate the next line using the imports in this file:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context (functions, classes, or occasionally code) from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
. Output only the next line. | "node": data.get("node"), |
Given the following code snippet before the placeholder: <|code_start|> "proposal": data.get("proposal"),
"value_store": value_store
}
return cls(**kwargs)
# end def
def to_dict(self):
data = super().to_dict()
data["leader"] = self.leader
data["proposal"] = self.proposal
data["value_store"] = [x.to_dict() if hasattr(x, "to_dict") else x for x in self.value_store]
return data
# end def
# end class
class PrevoteMessage(Message):
def __init__(self, sequence_no, node, leader, value):
super().__init__(PREVOTE, sequence_no, node)
self.leader = leader
self.value = value
# end if
@classmethod
def from_dict(cls, data):
kwargs = {
"sequence_no": data["sequence_no"],
"node": data["node"],
"leader": data["leader"],
"value": data["value"],
<|code_end|>
, predict the next line using imports from the current file:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context including class names, function names, and sometimes code from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
. Output only the next line. | } |
Here is a snippet: <|code_start|> def __init__(self, sequence_no, node, leader, proposal, value_store):
super(ProposeMessage, self).__init__(PROPOSE, sequence_no, node)
self.leader = leader
self.proposal = proposal
assert isinstance(value_store, list)
self.value_store = value_store
@classmethod
def from_dict(cls, data):
value_store = []
for v in data.get("value_store", []):
msg = InitMessage.from_dict(v)
# value_store[msg.node] = msg
value_store.append(msg)
# end for
kwargs = {
"sequence_no": data["sequence_no"],
"node": data.get("node"),
"leader": data.get("leader"),
"proposal": data.get("proposal"),
"value_store": value_store
}
return cls(**kwargs)
# end def
def to_dict(self):
data = super().to_dict()
data["leader"] = self.leader
data["proposal"] = self.proposal
data["value_store"] = [x.to_dict() if hasattr(x, "to_dict") else x for x in self.value_store]
<|code_end|>
. Write the next line using the current file imports:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
, which may include functions, classes, or code. Output only the next line. | return data |
Here is a snippet: <|code_start|>class LeaderChangeMessage(Message): # pragma: no cover
def __init__(self, sequence_no, node_num, leader, P):
raise NotImplementedError("LeaderChangeMessage")
super(LeaderChangeMessage, self).__init__(LEADER_CHANGE, sequence_no)
# end def
@classmethod
def from_dict(cls, data):
raise NotImplementedError("LeaderChangeMessage")
kwargs = {
"type": data["type"],
"sequence_no": data["sequence_no"],
}
return cls(**kwargs)
# end def
def to_dict(self):
raise NotImplementedError("LeaderChangeMessage")
return {
"type": self.type,
"sequence_no": self.sequence_no,
}
# end def
# end class
class ProposeMessage(Message):
def __init__(self, sequence_no, node, leader, proposal, value_store):
super(ProposeMessage, self).__init__(PROPOSE, sequence_no, node)
self.leader = leader
<|code_end|>
. Write the next line using the current file imports:
from luckydonaldUtils.logger import logging
from .enums import UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
and context from other files:
# Path: code/node/enums.py
# UNSET = 0
#
# INIT = 1
#
# LEADER_CHANGE = 5
#
# PROPOSE = 2
#
# PREVOTE = 3
#
# VOTE = 4
#
# ACKNOWLEDGE = -1
, which may include functions, classes, or code. Output only the next line. | self.proposal = proposal |
Predict the next line after this snippet: <|code_start|>def send_message(msg):
logger.debug(msg)
assert isinstance(msg, Message)
data = msg.to_dict()
data_string = json.dumps(data)
broadcast(data_string)
loggert = logging.getLogger("request")
def print_url(r, *args, **kwargs):
loggert.info(r.url)
# end def
while (True):
try:
requests.put(DATABASE_URL, data=data_string, hooks=dict(response=print_url))
break
except requests.RequestException as e:
logger.warning("Failed to report message to db: {e}".format(e=e))
# end def
return
# end def
def broadcast(message):
if not isinstance(message, str):
raise TypeError("Parameter `message` is not type `str` but {type}: {msg}".format(type=type(message), msg=message))
hosts = ServiceInfos().other_hostnames()
# msg = MSG_FORMAT.format(length=len(message), msg=message)
message += "\n"
msg = "ANSWER " + str(len(message)) + "\n" + message
logger.debug("Prepared sending to *:{port}:\n{msg}".format(port=NODE_PORT, msg=msg))
msg = bytes(msg, "utf-8")
<|code_end|>
using the current file's imports:
import socket
import json
import requests
from time import sleep
from luckydonaldUtils.logger import logging
from ..env import NODE_PORT, DATABASE_URL
from ..messages import Message
from ..todo import logger
from ..dockerus import ServiceInfos
and any relevant context from other files:
# Path: code/node/env.py
# NODE_PORT = int(os.environ.get("NODE_PORT", None))
#
# DATABASE_URL = "http://api/dump/"
#
# Path: code/node/messages.py
# class Message(object):
# def __init__(self, type, sequence_no, node):
# if type is None:
# type = UNSET
# # end if
# assert isinstance(type, int)
# self.type = type
# self.sequence_no = sequence_no
# self.node = node # i
#
#
# @classmethod
# def from_dict(cls, data):
# assert "type" in data
# type = data["type"]
# assert type in [UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE]
# if type == INIT:
# return InitMessage.from_dict(data)
# # end def
# if type == LEADER_CHANGE: # pragma: no cover
# return LeaderChangeMessage.from_dict(data)
# # end def
# if type == PROPOSE:
# return ProposeMessage.from_dict(data)
# # end def
# if type == PREVOTE:
# return PrevoteMessage.from_dict(data)
# # end def
# if type == VOTE:
# return VoteMessage.from_dict(data)
# # end def
# if type == ACKNOWLEDGE:
# return Acknowledge.from_dict(data)
# # end def
# return cls(**{
# "type": data["type"],
# "sequence_no": data["sequence_no"],
# "node": data["node"]
# })
# # end def
#
# def to_dict(self):
# return {
# "type": self.type,
# "sequence_no": self.sequence_no,
# "node": self.node
# }
# # end def
#
# def __str__(self):
# data = self.to_dict()
# return "{class_name}({values})".format(
# class_name=self.__class__.__name__,
# values=", ".join(["{key}={value!r}".format(key=k, value=data[k]) for k in sorted(data)])
# )
#
# Path: code/node/todo.py
# def get_sensor_value():
# def timeout():
. Output only the next line. | for node_host in hosts: |
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*-
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
MSG_FORMAT = "ANSWER {length}\n{msg}\n"
def send_message(msg):
logger.debug(msg)
assert isinstance(msg, Message)
data = msg.to_dict()
data_string = json.dumps(data)
broadcast(data_string)
loggert = logging.getLogger("request")
def print_url(r, *args, **kwargs):
<|code_end|>
, predict the next line using imports from the current file:
import socket
import json
import requests
from time import sleep
from luckydonaldUtils.logger import logging
from ..env import NODE_PORT, DATABASE_URL
from ..messages import Message
from ..todo import logger
from ..dockerus import ServiceInfos
and context including class names, function names, and sometimes code from other files:
# Path: code/node/env.py
# NODE_PORT = int(os.environ.get("NODE_PORT", None))
#
# DATABASE_URL = "http://api/dump/"
#
# Path: code/node/messages.py
# class Message(object):
# def __init__(self, type, sequence_no, node):
# if type is None:
# type = UNSET
# # end if
# assert isinstance(type, int)
# self.type = type
# self.sequence_no = sequence_no
# self.node = node # i
#
#
# @classmethod
# def from_dict(cls, data):
# assert "type" in data
# type = data["type"]
# assert type in [UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE]
# if type == INIT:
# return InitMessage.from_dict(data)
# # end def
# if type == LEADER_CHANGE: # pragma: no cover
# return LeaderChangeMessage.from_dict(data)
# # end def
# if type == PROPOSE:
# return ProposeMessage.from_dict(data)
# # end def
# if type == PREVOTE:
# return PrevoteMessage.from_dict(data)
# # end def
# if type == VOTE:
# return VoteMessage.from_dict(data)
# # end def
# if type == ACKNOWLEDGE:
# return Acknowledge.from_dict(data)
# # end def
# return cls(**{
# "type": data["type"],
# "sequence_no": data["sequence_no"],
# "node": data["node"]
# })
# # end def
#
# def to_dict(self):
# return {
# "type": self.type,
# "sequence_no": self.sequence_no,
# "node": self.node
# }
# # end def
#
# def __str__(self):
# data = self.to_dict()
# return "{class_name}({values})".format(
# class_name=self.__class__.__name__,
# values=", ".join(["{key}={value!r}".format(key=k, value=data[k]) for k in sorted(data)])
# )
#
# Path: code/node/todo.py
# def get_sensor_value():
# def timeout():
. Output only the next line. | loggert.info(r.url) |
Continue the code snippet: <|code_start|> assert isinstance(msg, Message)
data = msg.to_dict()
data_string = json.dumps(data)
broadcast(data_string)
loggert = logging.getLogger("request")
def print_url(r, *args, **kwargs):
loggert.info(r.url)
# end def
while (True):
try:
requests.put(DATABASE_URL, data=data_string, hooks=dict(response=print_url))
break
except requests.RequestException as e:
logger.warning("Failed to report message to db: {e}".format(e=e))
# end def
return
# end def
def broadcast(message):
if not isinstance(message, str):
raise TypeError("Parameter `message` is not type `str` but {type}: {msg}".format(type=type(message), msg=message))
hosts = ServiceInfos().other_hostnames()
# msg = MSG_FORMAT.format(length=len(message), msg=message)
message += "\n"
msg = "ANSWER " + str(len(message)) + "\n" + message
logger.debug("Prepared sending to *:{port}:\n{msg}".format(port=NODE_PORT, msg=msg))
msg = bytes(msg, "utf-8")
for node_host in hosts:
sent = -1
<|code_end|>
. Use current file imports:
import socket
import json
import requests
from time import sleep
from luckydonaldUtils.logger import logging
from ..env import NODE_PORT, DATABASE_URL
from ..messages import Message
from ..todo import logger
from ..dockerus import ServiceInfos
and context (classes, functions, or code) from other files:
# Path: code/node/env.py
# NODE_PORT = int(os.environ.get("NODE_PORT", None))
#
# DATABASE_URL = "http://api/dump/"
#
# Path: code/node/messages.py
# class Message(object):
# def __init__(self, type, sequence_no, node):
# if type is None:
# type = UNSET
# # end if
# assert isinstance(type, int)
# self.type = type
# self.sequence_no = sequence_no
# self.node = node # i
#
#
# @classmethod
# def from_dict(cls, data):
# assert "type" in data
# type = data["type"]
# assert type in [UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE]
# if type == INIT:
# return InitMessage.from_dict(data)
# # end def
# if type == LEADER_CHANGE: # pragma: no cover
# return LeaderChangeMessage.from_dict(data)
# # end def
# if type == PROPOSE:
# return ProposeMessage.from_dict(data)
# # end def
# if type == PREVOTE:
# return PrevoteMessage.from_dict(data)
# # end def
# if type == VOTE:
# return VoteMessage.from_dict(data)
# # end def
# if type == ACKNOWLEDGE:
# return Acknowledge.from_dict(data)
# # end def
# return cls(**{
# "type": data["type"],
# "sequence_no": data["sequence_no"],
# "node": data["node"]
# })
# # end def
#
# def to_dict(self):
# return {
# "type": self.type,
# "sequence_no": self.sequence_no,
# "node": self.node
# }
# # end def
#
# def __str__(self):
# data = self.to_dict()
# return "{class_name}({values})".format(
# class_name=self.__class__.__name__,
# values=", ".join(["{key}={value!r}".format(key=k, value=data[k]) for k in sorted(data)])
# )
#
# Path: code/node/todo.py
# def get_sensor_value():
# def timeout():
. Output only the next line. | while not sent == 1: |
Here is a snippet: <|code_start|> while (True):
try:
requests.put(DATABASE_URL, data=data_string, hooks=dict(response=print_url))
break
except requests.RequestException as e:
logger.warning("Failed to report message to db: {e}".format(e=e))
# end def
return
# end def
def broadcast(message):
if not isinstance(message, str):
raise TypeError("Parameter `message` is not type `str` but {type}: {msg}".format(type=type(message), msg=message))
hosts = ServiceInfos().other_hostnames()
# msg = MSG_FORMAT.format(length=len(message), msg=message)
message += "\n"
msg = "ANSWER " + str(len(message)) + "\n" + message
logger.debug("Prepared sending to *:{port}:\n{msg}".format(port=NODE_PORT, msg=msg))
msg = bytes(msg, "utf-8")
for node_host in hosts:
sent = -1
while not sent == 1:
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: # UDP SOCK_DGRAM
sock.connect((node_host, NODE_PORT))
sock.sendall(msg)
logger.log(
msg="Sending to {host}:{port} succeeded.".format(host=node_host, port=NODE_PORT),
level=(logging.SUCCESS if sent == 0 else logging.DEBUG)
<|code_end|>
. Write the next line using the current file imports:
import socket
import json
import requests
from time import sleep
from luckydonaldUtils.logger import logging
from ..env import NODE_PORT, DATABASE_URL
from ..messages import Message
from ..todo import logger
from ..dockerus import ServiceInfos
and context from other files:
# Path: code/node/env.py
# NODE_PORT = int(os.environ.get("NODE_PORT", None))
#
# DATABASE_URL = "http://api/dump/"
#
# Path: code/node/messages.py
# class Message(object):
# def __init__(self, type, sequence_no, node):
# if type is None:
# type = UNSET
# # end if
# assert isinstance(type, int)
# self.type = type
# self.sequence_no = sequence_no
# self.node = node # i
#
#
# @classmethod
# def from_dict(cls, data):
# assert "type" in data
# type = data["type"]
# assert type in [UNSET, INIT, LEADER_CHANGE, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE]
# if type == INIT:
# return InitMessage.from_dict(data)
# # end def
# if type == LEADER_CHANGE: # pragma: no cover
# return LeaderChangeMessage.from_dict(data)
# # end def
# if type == PROPOSE:
# return ProposeMessage.from_dict(data)
# # end def
# if type == PREVOTE:
# return PrevoteMessage.from_dict(data)
# # end def
# if type == VOTE:
# return VoteMessage.from_dict(data)
# # end def
# if type == ACKNOWLEDGE:
# return Acknowledge.from_dict(data)
# # end def
# return cls(**{
# "type": data["type"],
# "sequence_no": data["sequence_no"],
# "node": data["node"]
# })
# # end def
#
# def to_dict(self):
# return {
# "type": self.type,
# "sequence_no": self.sequence_no,
# "node": self.node
# }
# # end def
#
# def __str__(self):
# data = self.to_dict()
# return "{class_name}({values})".format(
# class_name=self.__class__.__name__,
# values=", ".join(["{key}={value!r}".format(key=k, value=data[k]) for k in sorted(data)])
# )
#
# Path: code/node/todo.py
# def get_sensor_value():
# def timeout():
, which may include functions, classes, or code. Output only the next line. | ) |
Continue the code snippet: <|code_start|>
class DBProposeMessage(DBMessage):
_discriminator_ = PROPOSE
proposal = orm.Required(VALUE_TYPE)
value_store = orm.Required(orm.Json) # json
def from_db(self):
return messages.ProposeMessage(
sequence_no=self.sequence_no, node=self.node, leader=self.leader, proposal=self.proposal,
value_store=self.value_store
)
# end def
@classmethod
def to_db(cls, msg):
assert isinstance(msg, messages.ProposeMessage)
return super().to_db(msg)
# end def
# end class
class DBPrevoteMessage(DBMessage):
_discriminator_ = PREVOTE
@classmethod
def to_db(cls, msg):
assert isinstance(msg, messages.PrevoteMessage)
return super().to_db(msg)
# end def
<|code_end|>
. Use current file imports:
from datetime import datetime
from pony import orm
from node import messages
from node.enums import UNSET, INIT, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
from .env import POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS, POSTGRES_DB
import logging
and context (classes, functions, or code) from other files:
# Path: code/api/env.py
# POSTGRES_HOST = os.environ.get("POSTGRES_HOST", None)
#
# POSTGRES_USER = os.environ.get("POSTGRES_USER", None)
#
# POSTGRES_PASS = os.environ.get("POSTGRES_PASS", None)
#
# POSTGRES_DB = os.environ.get("POSTGRES_DB", None)
. Output only the next line. | def from_db(self): |
Given the code snippet: <|code_start|>class DBVoteMessage(DBMessage):
_discriminator_ = VOTE
@classmethod
def to_db(cls, msg):
assert isinstance(msg, messages.VoteMessage)
return super().to_db(msg)
# end def
def from_db(self):
return messages.VoteMessage(sequence_no=self.sequence_no, node=self.node, leader=self.leader, value=self.value)
# end def
# end class
class DBAcknowledge(DBMessage):
_discriminator_ = ACKNOWLEDGE
@classmethod
def to_db(cls, msg):
assert isinstance(msg, messages.Acknowledge)
return super().to_db(msg)
# end def
def from_db(self):
return messages.Acknowledge(sequence_no=self.sequence_no, node=self.node, sender=self.sender, raw=self.raw)
# end def
# end class
<|code_end|>
, generate the next line using the imports in this file:
from datetime import datetime
from pony import orm
from node import messages
from node.enums import UNSET, INIT, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
from .env import POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS, POSTGRES_DB
import logging
and context (functions, classes, or occasionally code) from other files:
# Path: code/api/env.py
# POSTGRES_HOST = os.environ.get("POSTGRES_HOST", None)
#
# POSTGRES_USER = os.environ.get("POSTGRES_USER", None)
#
# POSTGRES_PASS = os.environ.get("POSTGRES_PASS", None)
#
# POSTGRES_DB = os.environ.get("POSTGRES_DB", None)
. Output only the next line. | MSG_TYPE_CLASS_MAP = { |
Given the code snippet: <|code_start|> return clazz.from_dict(self.as_dict())
# end def
@classmethod
def to_db(cls, msg):
return cls(**msg.to_dict())
# end def
# end class
class DBInitMessage(DBMessage):
_discriminator_ = INIT
def from_db(self):
return messages.InitMessage(sequence_no=self.sequence_no, node=self.node, value=self.value)
# end def
@classmethod
def to_db(cls, msg):
assert isinstance(msg, messages.InitMessage)
return super().to_db(msg)
# end def
# end class
class DBProposeMessage(DBMessage):
_discriminator_ = PROPOSE
proposal = orm.Required(VALUE_TYPE)
value_store = orm.Required(orm.Json) # json
<|code_end|>
, generate the next line using the imports in this file:
from datetime import datetime
from pony import orm
from node import messages
from node.enums import UNSET, INIT, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
from .env import POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS, POSTGRES_DB
import logging
and context (functions, classes, or occasionally code) from other files:
# Path: code/api/env.py
# POSTGRES_HOST = os.environ.get("POSTGRES_HOST", None)
#
# POSTGRES_USER = os.environ.get("POSTGRES_USER", None)
#
# POSTGRES_PASS = os.environ.get("POSTGRES_PASS", None)
#
# POSTGRES_DB = os.environ.get("POSTGRES_DB", None)
. Output only the next line. | def from_db(self): |
Predict the next line for this snippet: <|code_start|>VALUE_TYPE = float
MSG_TYPE_TYPE = int
NODE_TYPE = int
SEQUENCE_TYPE = int
# https://editor.ponyorm.com/user/luckydonald/pbft
# Last permalink:
# https://editor.ponyorm.com/user/luckydonald/pbft_2
class DBMessage(db.Entity):
type = orm.Discriminator(MSG_TYPE_TYPE)
date = orm.Required(datetime, sql_default='CURRENT_TIMESTAMP')
sequence_no = orm.Required(SEQUENCE_TYPE)
node = orm.Optional(NODE_TYPE)
value = orm.Optional(VALUE_TYPE)
leader = orm.Optional(NODE_TYPE)
sender = orm.Optional(NODE_TYPE)
raw = orm.Optional(orm.Json)
_discriminator_ = UNSET
def from_db(self):
clazz = MSG_TYPE_CLASS_MAP[self.type]
assert issubclass(clazz, messages.Message)
return clazz.from_dict(self.as_dict())
# end def
@classmethod
def to_db(cls, msg):
<|code_end|>
with the help of current file imports:
from datetime import datetime
from pony import orm
from node import messages
from node.enums import UNSET, INIT, PROPOSE, PREVOTE, VOTE, ACKNOWLEDGE
from .env import POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS, POSTGRES_DB
import logging
and context from other files:
# Path: code/api/env.py
# POSTGRES_HOST = os.environ.get("POSTGRES_HOST", None)
#
# POSTGRES_USER = os.environ.get("POSTGRES_USER", None)
#
# POSTGRES_PASS = os.environ.get("POSTGRES_PASS", None)
#
# POSTGRES_DB = os.environ.get("POSTGRES_DB", None)
, which may contain function names, class names, or code. Output only the next line. | return cls(**msg.to_dict()) |
Given snippet: <|code_start|> logger.info("Added {}: {id}".format(msg, id=msg.id))
return "ok: {}".format(msg)
else:
return "fail: None"
# end if
except Exception as e:
logger.exception("lel")
raise
# end def
@app.route(API_V1+"/get_value")
@app.route(API_V1+"/get_value/")
@orm.db_session
def get_value():
"""
Gets latest value they decided on, and the most recent measured value of each node.
Only considers events in the last 10 seconds.
> {"summary": 3.456, "1": 2.345, "2": 3.456, "3": 4.567, "4": 5.678}}
:return:
"""
latest_vote = orm.select(m for m in DBVoteMessage if m.date > orm.raw_sql("NOW() - '10 seconds'::INTERVAL")).order_by(orm.desc(DBVoteMessage.date)).first()
if not latest_vote:
return jsonify({}, allow_all_origin=True)
# end if
assert isinstance(latest_vote, DBVoteMessage)
latest_values = DBMessage.select_by_sql("""
SELECT DISTINCT ON (m.node) * FROM (
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
, which might include code, classes, or functions. Output only the next line.
Given the code snippet: <|code_start|> logger.info("Added {}: {id}".format(msg, id=msg.id))
return "ok: {}".format(msg)
else:
return "fail: None"
# end if
except Exception as e:
logger.exception("lel")
raise
# end def
@app.route(API_V1+"/get_value")
@app.route(API_V1+"/get_value/")
@orm.db_session
def get_value():
"""
Gets latest value they decided on, and the most recent measured value of each node.
Only considers events in the last 10 seconds.
> {"summary": 3.456, "1": 2.345, "2": 3.456, "3": 4.567, "4": 5.678}}
:return:
"""
latest_vote = orm.select(m for m in DBVoteMessage if m.date > orm.raw_sql("NOW() - '10 seconds'::INTERVAL")).order_by(orm.desc(DBVoteMessage.date)).first()
if not latest_vote:
return jsonify({}, allow_all_origin=True)
# end if
assert isinstance(latest_vote, DBVoteMessage)
latest_values = DBMessage.select_by_sql("""
SELECT DISTINCT ON (m.node) * FROM (
<|code_end|>
, generate the next line using the imports in this file:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context (functions, classes, or occasionally code) from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | SELECT * FROM DBmessage |
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*-
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
VERSION = "0.0.1"
__version__ = VERSION
assert INIT == INIT # to prevent the unused import warning. Is used in SQL statement.
app = Flask(__name__)
debug = DebuggedApplication(app, console_path="/console/")
API_V1 = ""
API_V2 = "/api/v2"
@app.route(API_V1+"/dump", methods=['POST', 'GET', 'PUT'])
@app.route(API_V1+"/dump/", methods=['POST', 'GET', 'PUT'])
@orm.db_session
def dump_to_db():
try:
logger.info("Incoming: {}".format(request.get_json(force=True)))
msg = to_db(request.get_json(force=True))
if msg:
db.commit()
logger.info("Added {}: {id}".format(msg, id=msg.id))
return "ok: {}".format(msg)
else:
<|code_end|>
using the current file's imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and any relevant context from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | return "fail: None" |
Next line prediction: <|code_start|> db.commit()
logger.info("Added {}: {id}".format(msg, id=msg.id))
return "ok: {}".format(msg)
else:
return "fail: None"
# end if
except Exception as e:
logger.exception("lel")
raise
# end def
@app.route(API_V1+"/get_value")
@app.route(API_V1+"/get_value/")
@orm.db_session
def get_value():
"""
Gets latest value they decided on, and the most recent measured value of each node.
Only considers events in the last 10 seconds.
> {"summary": 3.456, "1": 2.345, "2": 3.456, "3": 4.567, "4": 5.678}}
:return:
"""
latest_vote = orm.select(m for m in DBVoteMessage if m.date > orm.raw_sql("NOW() - '10 seconds'::INTERVAL")).order_by(orm.desc(DBVoteMessage.date)).first()
if not latest_vote:
return jsonify({}, allow_all_origin=True)
# end if
assert isinstance(latest_vote, DBVoteMessage)
latest_values = DBMessage.select_by_sql("""
<|code_end|>
. Use current file imports:
(from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication)
and context including class names, function names, or small code snippets from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | SELECT DISTINCT ON (m.node) * FROM ( |
Based on the snippet: <|code_start|>
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
VERSION = "0.0.1"
__version__ = VERSION
assert INIT == INIT # to prevent the unused import warning. Is used in SQL statement.
app = Flask(__name__)
debug = DebuggedApplication(app, console_path="/console/")
API_V1 = ""
API_V2 = "/api/v2"
@app.route(API_V1+"/dump", methods=['POST', 'GET', 'PUT'])
@app.route(API_V1+"/dump/", methods=['POST', 'GET', 'PUT'])
@orm.db_session
def dump_to_db():
try:
logger.info("Incoming: {}".format(request.get_json(force=True)))
msg = to_db(request.get_json(force=True))
if msg:
db.commit()
logger.info("Added {}: {id}".format(msg, id=msg.id))
return "ok: {}".format(msg)
else:
return "fail: None"
# end if
except Exception as e:
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context (classes, functions, sometimes code) from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | logger.exception("lel") |
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*-
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
VERSION = "0.0.1"
__version__ = VERSION
<|code_end|>
using the current file's imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and any relevant context from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | assert INIT == INIT # to prevent the unused import warning. Is used in SQL statement. |
Based on the snippet: <|code_start|> for msg in latest_values:
assert isinstance(msg, DBInitMessage)
data[str(msg.node)] = msg.value
# end for
return jsonify(data, allow_all_origin=True)
# end def
@app.route(API_V2+"/get_value")
@app.route(API_V2+"/get_value/")
@orm.db_session
def get_value_v2():
"""
Gets latest value they decided on, and the most recent measured value of each node.
Only considers events in the last 10 seconds.
{
"summary": None,
"leader": 1, # done later via observing latest LeaderChange events.
"nodes": []
}
:return:
"""
latest_vote = orm.select(m for m in DBVoteMessage if m.date > orm.raw_sql("NOW() - '10 seconds'::INTERVAL")).order_by(orm.desc(DBVoteMessage.date)).first()
latest_values = DBMessage.select_by_sql("""
SELECT DISTINCT ON (m.node) * FROM (
SELECT * FROM DBmessage
WHERE type = $INIT
AND date >= NOW() - '10 seconds'::INTERVAL
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context (classes, functions, sometimes code) from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | ) as m ORDER BY m.node, m.date DESC |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
VERSION = "0.0.1"
__version__ = VERSION
assert INIT == INIT # to prevent the unused import warning. Is used in SQL statement.
app = Flask(__name__)
debug = DebuggedApplication(app, console_path="/console/")
API_V1 = ""
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context (classes, functions, sometimes code) from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
. Output only the next line. | API_V2 = "/api/v2" |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
__author__ = 'luckydonald'
logger = logging.getLogger(__name__)
VERSION = "0.0.1"
<|code_end|>
with the help of current file imports:
from datetime import datetime
from DictObject import DictObject
from flask import Flask, request
from luckydonaldUtils.logger import logging
from pony import orm
from .enums import JSON_TYPES
from .utils import jsonify
from .database import to_db, db, DBVoteMessage, DBMessage, DBInitMessage, DBPrevoteMessage, DBProposeMessage, DBAcknowledge, \
MSG_TYPE_CLASS_MAP
from node.enums import INIT # noqa # pylint: disable=unused-import
from node.messages import Message # noqa # pylint: disable=unused-import
from werkzeug.debug import DebuggedApplication
and context from other files:
# Path: code/api/enums.py
# JSON_TYPES = {
# INIT: "init",
# PROPOSE: "propose",
# PREVOTE: "prevote",
# VOTE: "vote",
# }
#
# Path: code/api/utils.py
# def jsonify(data, allow_all_origin=False):
# from flask import Response, jsonify as json_ify
# res = json_ify(data)
# assert isinstance(res, Response)
# origin = request.environ.get('HTTP_ORIGIN')
# if not origin:
# origin = request.environ.get('ORIGIN')
# # end if
# if allow_all_origin:
# res.headers["Access-Control-Allow-Origin"] = '*'
# elif origin and origin in ORIGIN_LIST:
# res.headers["Access-Control-Allow-Origin"] = origin
# # end if
# return res
#
# Path: code/api/database.py
# VALUE_TYPE = float
# MSG_TYPE_TYPE = int
# NODE_TYPE = int
# SEQUENCE_TYPE = int
# MSG_TYPE_CLASS_MAP = {
# INIT: DBInitMessage,
# PROPOSE: DBProposeMessage,
# PREVOTE: DBPrevoteMessage,
# VOTE: DBVoteMessage,
# # ...
# ACKNOWLEDGE: DBAcknowledge,
# }
# class DBMessage(db.Entity):
# class DBInitMessage(DBMessage):
# class DBProposeMessage(DBMessage):
# class DBPrevoteMessage(DBMessage):
# class DBVoteMessage(DBMessage):
# class DBAcknowledge(DBMessage):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(cls, msg):
# def from_db(self):
# def to_db(msg):
, which may contain function names, class names, or code. Output only the next line. | __version__ = VERSION |
Given the code snippet: <|code_start|>
logger = logging.getLogger('sync_point')
sync_points = datastore.Datastore('SyncPoint',
'key', 'satisfied', 'stack_key')
KEY_SEPARATOR = ':'
def _dump_list(items, separator=', '):
return separator.join(map(str, items))
def make_key(*components):
assert len(components) >= 2
return _dump_list(components, KEY_SEPARATOR)
def create(key, stack_key):
sync_points.create_with_key(key, satisfied={}, stack_key=stack_key)
def sync(key, propagate, target, predecessors, new_data):
sync_point = sync_points.read(key)
satisfied = dict(sync_point.satisfied)
satisfied.update(new_data)
<|code_end|>
, generate the next line using the imports in this file:
import functools
import logging
from .framework import datastore
and context (functions, classes, or occasionally code) from other files:
# Path: converge/framework/datastore.py
# class Datastore(object):
# class NotFound(KeyError):
# def __new__(cls, *args):
# def __init__(self, name, *fields):
# def clear_all():
# def clear(self):
# def find(self, **kwargs):
# def create_with_key(self, key, **kwargs):
# def create(self, **kwargs):
# def read(self, key):
# def update(self, key, **kwargs):
# def delete(self, key):
. Output only the next line. | waiting = predecessors - set(satisfied) |
Given the following code snippet before the placeholder: <|code_start|>
class RealityStore(object):
def __init__(self):
self.store = datastore.Datastore('PhysicalResource', 'uuid',
<|code_end|>
, predict the next line using imports from the current file:
from .framework import datastore
import copy
import uuid
and context including class names, function names, and sometimes code from other files:
# Path: converge/framework/datastore.py
# class Datastore(object):
# class NotFound(KeyError):
# def __new__(cls, *args):
# def __init__(self, name, *fields):
# def clear_all():
# def clear(self):
# def find(self, **kwargs):
# def create_with_key(self, key, **kwargs):
# def create(self, **kwargs):
# def read(self, key):
# def update(self, key, **kwargs):
# def delete(self, key):
. Output only the next line. | 'logical_name', 'properties') |
Here is a snippet: <|code_start|>
def with_scenarios(TestCase):
loader = unittest.defaultTestLoader
def create_test_func(generic_test, params):
@functools.wraps(generic_test)
def test_func(testcase, *args, **kwargs):
for key, value in params.items():
setattr(testcase, key, value)
return generic_test(testcase, *args, **kwargs)
return test_func
for test_name in loader.getTestCaseNames(TestCase):
base_test = getattr(TestCase, test_name)
for scenario in getattr(TestCase, 'scenarios', []):
name, parameters = scenario
test_func = create_test_func(base_test, parameters)
setattr(TestCase, '%s(%s)' % (test_name, name), test_func)
delattr(TestCase, test_name)
TestCase.scenarios = None
return TestCase
@with_scenarios
<|code_end|>
. Write the next line using the current file imports:
import functools
import logging
import unittest
import converge
import converge.processes
from converge.framework import datastore
from converge.framework import scenario
and context from other files:
# Path: converge/framework/datastore.py
# class Datastore(object):
# class NotFound(KeyError):
# def __new__(cls, *args):
# def __init__(self, name, *fields):
# def clear_all():
# def clear(self):
# def find(self, **kwargs):
# def create_with_key(self, key, **kwargs):
# def create(self, **kwargs):
# def read(self, key):
# def update(self, key, **kwargs):
# def delete(self, key):
#
# Path: converge/framework/scenario.py
# def list_all(directory):
# def load_all(cls, directory):
# def __init__(self, name, path):
# def __call__(self, _event_loop, **global_env):
# class Scenario(object):
, which may include functions, classes, or code. Output only the next line. | class ScenarioTest(unittest.TestCase): |
Using the snippet: <|code_start|># Tally range-style addresses on a per address file level.
def findRanges(buildings):
def isQueens(housenumber):
return bool(re.search(r'(\w+-\w+)', housenumber))
onlyRanges = 0 ## Buildings with only range house numbers
<|code_end|>
, determine the next line of code. You have imports:
from sys import argv
from glob import glob
from merge import merge
from pprint import pprint
import re
and context (class names, function names, or code) available:
# Path: merge.py
# def merge(buildingIn, addressIn, mergedOut):
# addresses = []
#
# with collection(addressIn, "r") as input:
# for address in input:
# shape = asShape(address['geometry'])
# shape.original = address
# addresses.append(shape)
#
# # Load and index all buildings.
# buildings = []
# buildingShapes = []
# buildingIdx = index.Index()
# with collection(buildingIn, "r") as input:
# for building in input:
# shape = asShape(building['geometry'])
# building['properties']['addresses'] = []
# buildings.append(building)
# buildingShapes.append(shape)
# buildingIdx.add(len(buildings) - 1, shape.bounds)
#
# # Map addresses to buildings.
# for address in addresses:
# for i in buildingIdx.intersection(address.bounds):
# if buildingShapes[i].contains(address):
# buildings[i]['properties']['addresses'].append(
# address.original)
#
# with open(mergedOut, 'w') as outFile:
# outFile.writelines(json.dumps(buildings, indent=4))
# print 'Exported ' + mergedOut
. Output only the next line. | onlyRangesQueens = 0 ## Buildings with only range house numbers that are queens style |
Predict the next line after this snippet: <|code_start|> if dbf_file:
self.dbf_file = dbf_file
else:
dbf_file = self.dbf_file
self.dbf_length = None
self.row_count = 0
self.last_row_count = 0
self.progress_last_timestamp = 0
self.open_dbf_start_time = time.time()
self.window.set_title("%s: %s" % (os.path.basename(dbf_file), self.main_title))
self.progress_window_show()
print datetime.today(), 'detaching model'
self.list_view.set_model()
print datetime.today(), 'model detached'
read_dbf = ReadDbf(self)
read_dbf.start()
def progress_window_show(self):
self.progress_window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.progress_window.set_title('Reading DBF file...')
self.progress_window.set_border_width(10)
self.progress_window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_SPLASHSCREEN)
self.progress_window.set_modal(True)
self.progress_window.set_position(gtk.WIN_POS_CENTER_ALWAYS)
self.progress_window.set_property("skip-taskbar-hint", True)
<|code_end|>
using the current file's imports:
import threading
import thread
import pygtk
import gtk
import gobject
import dbf
import os.path
import time
import os
import string
from datetime import datetime
from readdbf import ReadDbf
and any relevant context from other files:
# Path: readdbf.py
# class ReadDbf(threading.Thread):
# finished = None
# failed = None
# caller = None
#
# def __init__(self, caller):
# super(ReadDbf, self).__init__()
# self.caller = caller
#
# def run(self):
# caller = self.caller
# statusbar_context_id = caller.statusbar.get_context_id('ReadDbf Thread')
#
# if caller.scrolled_window:
# print datetime.today(), "destroying old visualization"
# gobject.idle_add(caller.scrolled_window.destroy)
# print datetime.today(), "old visualization destroyed"
#
# print datetime.today(), "opening dbf file"
# gobject.idle_add(caller.statusbar.push, statusbar_context_id, 'opening %s' % caller.dbf_file)
# try:
# caller.dbf_table = dbf.Table(caller.dbf_file, read_only = True)
# except:
# self.finished = true;
# self.failed = true;
# return
#
# print datetime.today(), "dbf file opened"
#
# caller.dbf_length = len(caller.dbf_table)
#
# print datetime.today(), "retrieving fields"
# fields = dbf.get_fields(caller.dbf_table)
# print datetime.today(), "fields retrieved"
#
# print datetime.today(), "creating new visualization"
#
# store_param= [str] * len(fields)
# store_param.insert(0, int)
#
# store = gtk.ListStore(*store_param)
# caller.list_view = gtk.TreeView(store)
# caller.list_view.set_grid_lines(gtk.TREE_VIEW_GRID_LINES_HORIZONTAL)
#
# caller.scrolled_window = gtk.ScrolledWindow()
# caller.scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
# caller.scrolled_window.add(caller.list_view)
# caller.content_box.pack_start(caller.scrolled_window, True, True, 0)
#
# i = 0
# print datetime.today(), "populating list view"
# column = gtk.TreeViewColumn("No.")
# caller.list_view.append_column(column)
# cell = gtk.CellRendererText()
# cell.set_alignment(1, 0)
# column.pack_start(cell, True)
# column.add_attribute(cell, 'text', i)
#
# print datetime.today(), "creating columns for fields"
# for field_name in fields:
# i += 1
# column = gtk.TreeViewColumn(field_name)
# caller.list_view.append_column(column)
# cell = gtk.CellRendererText()
# column.pack_start(cell, True)
# column.add_attribute(cell, 'text', i)
# column.set_resizable(True)
#
# print datetime.today(), "retrieving old model"
# model = caller.list_view.get_model()
# print datetime.today(), "unset model"
# caller.list_view.set_model()
#
# caller.row_count = 0
# print datetime.today(), "iterating table"
# for row in caller.dbf_table:
# caller.row_count += 1
# try:
# data = list(row)
# data.insert(0, caller.row_count)
# model.append(data)
# except:
# print datetime.today(), 'error detected (64b68)'
# print row
# #time.sleep(1)
#
# print datetime.today(), "setting list view model"
# caller.list_view.set_model(model)
# print datetime.today(), "queue to show list view"
# gobject.idle_add(caller.list_view.show)
# print datetime.today(), "queue to show scrolled window"
# gobject.idle_add(caller.scrolled_window.show)
# print datetime.today(), "queue to destroy progress window"
# gobject.idle_add(caller.progress_window.destroy)
#
# print datetime.today(), "removing progress pulse"
# gobject.source_remove(caller.progress_timeout_source_id)
#
# gobject.idle_add(caller.statusbar.push, statusbar_context_id, "%s (%d rows)" % (os.path.basename(caller.dbf_file), caller.dbf_length))
# gobject.idle_add(caller.table_info_menu_item.set_sensitive, True)
#
# print datetime.today(), 'finished on %f seconds' % (time.time() - caller.open_dbf_start_time)
#
# self.finished = True
. Output only the next line. | self.progress_window.set_transient_for(self.window) |
Given the code snippet: <|code_start|># noinspection PyUnusedLocal
timezone = pytz.timezone('Europe/Helsinki')
def describe_fmi_api():
daily_4_days = load_xml('./tests/fmiapi/testdata/daily_4_days.xml')
daily_12_days = load_xml('./tests/fmiapi/testdata/daily_12_days.xml')
html_apikey_error = load_txt('./tests/fmiapi/testdata/invalid_apikey_error.html')
<|code_end|>
, generate the next line using the imports in this file:
from fmiapi.fmiapi import FMIApi
from datetime import datetime
from fmiapi.fmierrors import InvalidApikeyException, QueryLimitException, NoDataException
from tests.testUtils import *
from tests.fmiapi.commonmocks import *
from unittest import mock
from lxml import etree
from tests.fmiapi.testdata.expected_data import *
import pytz
import pytest
import copy
and context (functions, classes, or occasionally code) from other files:
# Path: fmiapi/fmiapi.py
# class FMIApi:
# """
# Provides a simple interface to interact with FMI API by providing basic functions to get
# data from FMI's open data service.
# """
#
# def __init__(self, api_key=''):
# self._api_key = api_key
# self._request_handler = FMIRequestHandler(self._api_key)
# self._PATH_TO_STATIONS_CSV = "data/stations.csv"
# self._PATH_TO_QUERY_METADATA = "data/supported_queries.json"
# self._stations = self._load_station_metadata()
# self._supported_queries = self._load_supported_queries_metadata()
# self._parser = FMIxmlParser()
#
# def set_apikey(self, api_key):
# self._api_key = api_key
# self._request_handler = FMIRequestHandler(self._api_key)
#
# def get_apikey(self):
# return self._api_key
#
# def get_data(self, params, callback_function=None, change_to_parsing=None):
# if params["storedquery_id"] == "fmi::observations::weather::daily::multipointcoverage":
# # Special logic for daily observations
# params['endtime'] += datetime.timedelta(days=1) # add one day to end time to get final day into result too
#
# data = self._request_handler.request(params, max_timespan=params['max_hours_range'],
# progress_callback=callback_function)
# # Notify ui that moving to parsing phase
# if change_to_parsing is not None:
# change_to_parsing()
#
# try:
# return self._parser.parse(data, progress_callback=callback_function)
# except NoDataException:
# # Augment date data to exception and raise it again
# raise NoDataException(starttime=params['starttime'], endtime=params['endtime'])
#
# def _load_station_metadata(self):
# """ FMI apparently didn't provide an API-endpoint to get list of all the stations. For now, we load the
# required station information from CSV-file. Change to a api-endpoint if one becomes (or is already?) available.
# """
# stations = []
# with open(self._PATH_TO_STATIONS_CSV, "r", encoding="utf8") as file:
# reader = csv.DictReader(file, ["Name", "FMISID", "LPNN", "WMO", "lat", "lon", "Altitude", "Group", "Since"],
# delimiter=";")
# for row in reader:
# stations.append(row)
# return stations
#
# def _load_supported_queries_metadata(self):
# with open(self._PATH_TO_QUERY_METADATA, "r", encoding="utf8") as file:
# queries = json.load(file)
# return queries
#
# def get_stations(self):
# return self._stations
#
# def get_supported_queries(self):
# return self._supported_queries
#
# def get_catalogue_of_station(self, fmisid):
# # Add extra metadata for each dataset which are required for queries and translations
# # in short data which is not provided by catalogue service. See supported_queries.json
# datasets = fmicatalogservice.get_station_metadata(fmisid)
# augmented = []
# for ds in datasets:
# for sq in self._supported_queries:
# if re.search(sq['id'], ds['identifier']):
# augmented.append({**ds, **sq})
# break
#
# return augmented
#
# def get_index_of_station(self, place_name):
# for i in range(0, len(self._stations)):
# if self._stations[i]["Name"] == place_name:
# return i
# return -1
#
# Path: fmiapi/fmierrors.py
# class InvalidApikeyException(Exception):
#
# def __init__(self):
# self.message = "ERROR in data-retrieving. Your API-key is invalid."
# self.error_code = "APIKEY"
#
# def __str__(self):
# return self.message
#
# class QueryLimitException(Exception):
#
# def __init__(self, wait_time=None, wait_unit=None):
# self.message = "ERROR in data-retrieving. Query limit exceeded. Please wait."
# self.error_code = "QUERYLIMIT"
# self.wait_unit = wait_unit
# self.wait_time = wait_time
#
# def __str__(self):
# return self.message
#
# class NoDataException(Exception):
#
# def __init__(self, starttime=None, endtime=None):
# self.message = "ERROR in data-retrieving. Did not find any data in range {} - {}".format(starttime, endtime)
# self.error_code = "NODATA"
#
# def __str__(self):
# return self.message
. Output only the next line. | html_apikey_missing_error = load_txt('./tests/fmiapi/testdata/apikey_missing.html') |
Using the snippet: <|code_start|>
def describe_fmi_catalog_service():
lammi_catalog_metadata = load_xml('./tests/fmiapi/testdata/lammi_catalog_metadata.xml')
search_exception_response = load_xml('./tests/fmiapi/testdata/search_exception_response.xml')
@mock.patch('http.client.HTTPConnection', spec=True)
def should_do_request_to_catalog_service_with_provided_query_params_and_return_data(mock_httpconn):
mock_connection = mock_httpconn.return_value
mock_connection.getresponse.return_value = MockResponse(200, etree.tostring(lammi_catalog_metadata))
result = get_station_metadata('1234')
assert_equal(1, mock_connection.getresponse.call_count)
for i, record in enumerate(EXPECTED_LAMMI_CATALOG_METADATA):
assert_equal(result[i]['latitude'], record['latitude'])
assert_equal(result[i]['longitude'], record['longitude'])
assert_equal(result[i]['link'], record['link'])
assert_equal(result[i]['identifier'], record['identifier'])
assert_equal(result[i]['title_fi'], record['title_fi'])
assert result[i]['starttime'] == record['starttime']
assert result[i]['endtime'] == record['endtime']
<|code_end|>
, determine the next line of code. You have imports:
from fmiapi.fmicatalogservice import get_station_metadata
from tests.fmiapi.testdata.expected_data import EXPECTED_LAMMI_CATALOG_METADATA
from tests.testUtils import *
from tests.fmiapi.commonmocks import *
from unittest import mock
from lxml import etree
from fmiapi.fmierrors import RequestException, NoDataSetsException
import pytest
and context (class names, function names, or code) available:
# Path: fmiapi/fmicatalogservice.py
# def get_station_metadata(fmisid):
# result = _parse_data(_retrieve_metadata_by_fmisid(fmisid))
# if len(result) == 0:
# raise NoDataSetsException(fmisid)
# return result
#
# Path: fmiapi/fmierrors.py
# class RequestException(Exception):
#
# def __init__(self, text, error_code, html=""):
# self.error_code = 0
# self.message = text
# self.error_code = error_code
# self.html = html
#
# def __str__(self):
# if self.html != "":
# return self.html
# else:
# return self.message
#
# class NoDataSetsException(Exception):
#
# def __init__(self, fmisid):
# self.message = "No data sets found for {}".format(fmisid)
# self.error_code = "NODATASETS"
#
# def __str__(self):
# return self.message
. Output only the next line. | def describe_fmi_returns_search_exception(): |
Based on the snippet: <|code_start|>
def describe_fmi_catalog_service():
lammi_catalog_metadata = load_xml('./tests/fmiapi/testdata/lammi_catalog_metadata.xml')
search_exception_response = load_xml('./tests/fmiapi/testdata/search_exception_response.xml')
@mock.patch('http.client.HTTPConnection', spec=True)
def should_do_request_to_catalog_service_with_provided_query_params_and_return_data(mock_httpconn):
mock_connection = mock_httpconn.return_value
mock_connection.getresponse.return_value = MockResponse(200, etree.tostring(lammi_catalog_metadata))
result = get_station_metadata('1234')
assert_equal(1, mock_connection.getresponse.call_count)
for i, record in enumerate(EXPECTED_LAMMI_CATALOG_METADATA):
assert_equal(result[i]['latitude'], record['latitude'])
assert_equal(result[i]['longitude'], record['longitude'])
assert_equal(result[i]['link'], record['link'])
assert_equal(result[i]['identifier'], record['identifier'])
assert_equal(result[i]['title_fi'], record['title_fi'])
<|code_end|>
, predict the immediate next line with the help of imports:
from fmiapi.fmicatalogservice import get_station_metadata
from tests.fmiapi.testdata.expected_data import EXPECTED_LAMMI_CATALOG_METADATA
from tests.testUtils import *
from tests.fmiapi.commonmocks import *
from unittest import mock
from lxml import etree
from fmiapi.fmierrors import RequestException, NoDataSetsException
import pytest
and context (classes, functions, sometimes code) from other files:
# Path: fmiapi/fmicatalogservice.py
# def get_station_metadata(fmisid):
# result = _parse_data(_retrieve_metadata_by_fmisid(fmisid))
# if len(result) == 0:
# raise NoDataSetsException(fmisid)
# return result
#
# Path: fmiapi/fmierrors.py
# class RequestException(Exception):
#
# def __init__(self, text, error_code, html=""):
# self.error_code = 0
# self.message = text
# self.error_code = error_code
# self.html = html
#
# def __str__(self):
# if self.html != "":
# return self.html
# else:
# return self.message
#
# class NoDataSetsException(Exception):
#
# def __init__(self, fmisid):
# self.message = "No data sets found for {}".format(fmisid)
# self.error_code = "NODATASETS"
#
# def __str__(self):
# return self.message
. Output only the next line. | assert result[i]['starttime'] == record['starttime'] |
Predict the next line after this snippet: <|code_start|>
class Settings(QSettings):
def __init__(self):
super().__init__("fmidownloader", "fmidownloader")
def load_qsettings(self, app):
<|code_end|>
using the current file's imports:
from PyQt5.QtCore import QSettings
from gui.messages import Messages
and any relevant context from other files:
# Path: gui/messages.py
# class Messages:
# """ To me this seems a bit like hack. This class has functions to get translated strings to be shown in UI. The
# reason for them being in functions is that apparently Qt translate will not work in multiple contexts if translations
# were saved in regular variables. With functions it seems to be ok. Better solutions are welcome. """
#
# @staticmethod
# def no_datasets_found():
# return QCoreApplication.translate("nodatasets_error",
# "Saatavilla olevia aineistoja ei löytynyt valitulle asemalle.")
#
# @staticmethod
# def fmicatalogue_error():
# return QCoreApplication.translate("fmicatalogue_error", "Saatavilla olevan aineiston listaus ei onnistunut FMI katalogipalvelusta.\nYritä myöhemmin uudestaan tai kokeile toista asemaa.")
#
# @staticmethod
# def unknown_error():
# return QCoreApplication.translate("unknown_error", "Tuntematon virhe: ")
#
# @staticmethod
# def date_not_found_error():
# return QCoreApplication.translate("datenotfound_error", "Määritettyä ajanjaksoa ei löytynyt.\nTodennäköisesti ilmatieteenlaitoksella ei ole dataa tälle ajanjaksolle.\nKokeile "
# "pitempää ajanjaksoa, esim. yhtä vuotta tai myöhäisempää aloituspäivämäärää.")
#
# @staticmethod
# def end_date_warning():
# return QCoreApplication.translate("enddate_warning", "Lopetus päivämäärä ei saa edeltää aloitus päivämäärää")
#
# @staticmethod
# def start_end_date_warning():
# return QCoreApplication.translate("startenddate_warning","Aloitus ja lopetuspäivämäärät eivät saa olla samoja")
#
# @staticmethod
# def set_apikey_message():
# return QCoreApplication.translate("setapikeymessage", "Tunnisteavainta ei ole maaritetty. Aseta se valikossa Tiedosto->Aseta tunnisteavain")
#
# @staticmethod
# def set_apikey_dialog():
# return QCoreApplication.translate("setapikeyinstruction", "Käyttääksesi sovellusta tarvitset ilmatieteenlaitoksen avoimen datan tunnisteavaimen.\nMene osoitteeseen http://ilmatieteenlaitos.fi/avoin-data saadaksesi lisätietoa avaimen hankkimisesta.\n\n"
# "Kun olet rekisteröitynyt ja saanut tekstimuotoisen tunnisteavaimen, kopioi se tähän:")
#
# @staticmethod
# def save_weatherdata_csv():
# return QCoreApplication.translate("save_weather_data", "Tallenna säädata csv-muodossa:")
#
# @staticmethod
# def downloading_weatherdata():
# return QCoreApplication.translate("downloading_weatherdata","Ladataan säädataa...")
#
# @staticmethod
# def parsing_weatherdata():
# return QCoreApplication.translate("processing_weatherdata", "Käsitellään säädataa...")
#
# @staticmethod
# def weatherstation_error():
# return QCoreApplication.translate("weatherstationnotfound_error", "Määritettyä sääasemaa ei löydetty.\nIlmatieteenlaitoksen palvelussa on häiriö tai "
# "mikäli ongelma toistuu muillakin kohteilla, saattaa tämä ohjelma vaatia päivitystä. Katso tiedot yhteydenotosta Tiedosto->Tietoa valikosta.\n\nVirheen kuvaus:\n")
#
# @staticmethod
# def request_failed_error():
# return QCoreApplication.translate("requestfailed_error", "Datapyyntö ei onnistunut.\nOletko asettanut vaadittavan tunnisteavaimen tai onko se virheellinen?\n\nIlmatieteenlaitos vaatii rekisteröitymistä palveluun "
# "ennen sen käyttöä. Katso lisätietoa valikosta Tiedosto->Aseta tunnisteavain.")
#
# @staticmethod
# def query_limit_error():
# return QCoreApplication.translate("querylimit_error",
# "Ilmatieteenlaitoksen latausraja ylitetty.\nOlet tehnyt liikaa datapyyntöjä lyhyessä ajassa. Jatkaaksesi lataamista, odota {} sekuntia ennen seuraavaa datapyyntöä.")
#
# @staticmethod
# def failed_to_get_version():
# return QCoreApplication.translate("version_retrieve_error", "Päivitysten haku epäonnistui.")
. Output only the next line. | self._load_lang_settings(app) |
Next line prediction: <|code_start|>timezone = pytz.timezone('Europe/Helsinki')
def describe_fmi_request_handler():
fmi_handler = FMIRequestHandler('apikey')
_DAILY_REQUEST_MAX_RANGE_HOURS = 8928
_REALTIME_REQUEST_MAX_RANGE_HOURS = 168
@mock.patch('fmiapi.fmirequesthandler.FMIRequest', spec=True)
def should_get_year_in_one_request(mock_fmirequest):
query = {'request': 'getFeature',
'storedquery_id': 'fmi::observations::weather::daily::multipointcoverage',
'fmisid': '1234',
'starttime': datetime(2010, 1, 1, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone),
'endtime': datetime(2011, 1, 5, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone)
}
expected = {'starttime': datetime(2010, 1, 1, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone),
'endtime': datetime(2011, 1, 5, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone),
'fmisid': '1234',
'request': 'getFeature',
'storedquery_id': 'fmi::observations::weather::daily::multipointcoverage'
}
mock_instance = mock_fmirequest.return_value
mock_instance.get.return_value = 'data'
handler = FMIRequestHandler('apikey')
result = handler.request(query, max_timespan=_DAILY_REQUEST_MAX_RANGE_HOURS, progress_callback=None)
<|code_end|>
. Use current file imports:
(from fmiapi.fmirequesthandler import FMIRequestHandler
from datetime import datetime
from tests.testUtils import *
from unittest import mock
from unittest.mock import call
from fmiapi.fmierrors import RequestException
import pytz
import pytest)
and context including class names, function names, or small code snippets from other files:
# Path: fmiapi/fmirequesthandler.py
# class FMIRequestHandler:
# """
# This class takes a data request and splits it to multiple http-requests if required and
# does the request by using FMIRequest class.
# """
#
# def __init__(self, api_key):
# self._api_key = api_key
# self._FMI_request = FMIRequest(self._api_key)
# self._callbackFunction = None
#
# def request(self, params, max_timespan, progress_callback=None):
# requests = self._divide_to_multiple_requests(params, max_timespan)
# return self._execute_requests(requests, progress_callback)
#
# def _execute_requests(self, requests, progress_callback):
# all_requests = len(requests)
# responses = []
# for i, r in enumerate(requests):
# try:
# responses.append(self._do_request(r))
# if progress_callback is not None:
# progress_callback(i, all_requests)
# except RequestException as e:
# # If result is 400, hope that the next request in batch will work. Raise other errors normally.
# # Handles case where beginning of a multipart request won't contain data
# # FIXME: Could be done in a way where after new lowerlimit is found, a new batch of requests is calculated instead of doing
# # FIXME: bunch of useless requests.
# print('Exception on request', e)
# if e.error_code != 400:
# raise e
# if progress_callback is not None:
# progress_callback(i, all_requests)
#
# return responses
#
# def _do_request(self, request):
# return self._FMI_request.get(request)
#
# @staticmethod
# def _divide_to_multiple_requests(params, max_timespan):
# requests = []
# done = False
# i = 0
# while not done:
# request_params = copy.copy(params)
# request_params["starttime"] += datetime.timedelta(hours=max_timespan)*i
# request_params["endtime"] = request_params["starttime"] + datetime.timedelta(hours=max_timespan)
#
# # This additional minute to starting time is to prevent requests from fetching same time twice in
# # the splitting point. Otherwise previous request's last time will be fetched as first in the next.
# # FMI's service recognizes minutes as smallest significate time step so seconds or milliseconds could not
# # be used.
# if i > 0:
# request_params["starttime"] += datetime.timedelta(minutes=1)
#
# requests.append(request_params)
#
# if request_params["endtime"] > params["endtime"]:
# done = True
# request_params["endtime"] = params["endtime"]
# i += 1
# return requests
#
# Path: fmiapi/fmierrors.py
# class RequestException(Exception):
#
# def __init__(self, text, error_code, html=""):
# self.error_code = 0
# self.message = text
# self.error_code = error_code
# self.html = html
#
# def __str__(self):
# if self.html != "":
# return self.html
# else:
# return self.message
. Output only the next line. | mock_instance.get.assert_has_calls([call(expected)]) |
Next line prediction: <|code_start|> handler = FMIRequestHandler('apikey')
result = handler.request(query, max_timespan=_DAILY_REQUEST_MAX_RANGE_HOURS, progress_callback=None)
mock_instance.get.assert_has_calls([call(expected)])
assert_equal(1, mock_instance.get.call_count)
assert_equal(1, len(result))
@mock.patch('fmiapi.fmirequesthandler.FMIRequest', spec=True)
def should_call_fmirequest_in_two_parts_for_372_day_time_span(mock_fmirequest):
query = create_daily_query(datetime(2010, 1, 1, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone),
datetime(2011, 1, 23, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone))
expected = [create_daily_query(datetime(2010, 1, 1, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone),
datetime(2011, 1, 8, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone)),
create_daily_query(datetime(2011, 1, 8, hour=0, minute=2, second=0, microsecond=0, tzinfo=timezone),
datetime(2011, 1, 23, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone))]
expected_calls = [call(expected[0]), call(expected[1])]
mock_instance = mock_fmirequest.return_value
mock_instance.get.return_value = 'data'
handler = FMIRequestHandler('apikey')
result = handler.request(query, max_timespan=_DAILY_REQUEST_MAX_RANGE_HOURS, progress_callback=None)
mock_instance.get.assert_has_calls(expected_calls)
assert_equal(2, mock_instance.get.call_count)
assert_equal(2, len(result))
@mock.patch('fmiapi.fmirequesthandler.FMIRequest', spec=True)
def should_return_available_data_if_first_part_of_request_does_not_exist(mock_fmirequest):
<|code_end|>
. Use current file imports:
(from fmiapi.fmirequesthandler import FMIRequestHandler
from datetime import datetime
from tests.testUtils import *
from unittest import mock
from unittest.mock import call
from fmiapi.fmierrors import RequestException
import pytz
import pytest)
and context including class names, function names, or small code snippets from other files:
# Path: fmiapi/fmirequesthandler.py
# class FMIRequestHandler:
# """
# This class takes a data request and splits it to multiple http-requests if required and
# does the request by using FMIRequest class.
# """
#
# def __init__(self, api_key):
# self._api_key = api_key
# self._FMI_request = FMIRequest(self._api_key)
# self._callbackFunction = None
#
# def request(self, params, max_timespan, progress_callback=None):
# requests = self._divide_to_multiple_requests(params, max_timespan)
# return self._execute_requests(requests, progress_callback)
#
# def _execute_requests(self, requests, progress_callback):
# all_requests = len(requests)
# responses = []
# for i, r in enumerate(requests):
# try:
# responses.append(self._do_request(r))
# if progress_callback is not None:
# progress_callback(i, all_requests)
# except RequestException as e:
# # If result is 400, hope that the next request in batch will work. Raise other errors normally.
# # Handles case where beginning of a multipart request won't contain data
# # FIXME: Could be done in a way where after new lowerlimit is found, a new batch of requests is calculated instead of doing
# # FIXME: bunch of useless requests.
# print('Exception on request', e)
# if e.error_code != 400:
# raise e
# if progress_callback is not None:
# progress_callback(i, all_requests)
#
# return responses
#
# def _do_request(self, request):
# return self._FMI_request.get(request)
#
# @staticmethod
# def _divide_to_multiple_requests(params, max_timespan):
# requests = []
# done = False
# i = 0
# while not done:
# request_params = copy.copy(params)
# request_params["starttime"] += datetime.timedelta(hours=max_timespan)*i
# request_params["endtime"] = request_params["starttime"] + datetime.timedelta(hours=max_timespan)
#
# # This additional minute to starting time is to prevent requests from fetching same time twice in
# # the splitting point. Otherwise previous request's last time will be fetched as first in the next.
# # FMI's service recognizes minutes as smallest significate time step so seconds or milliseconds could not
# # be used.
# if i > 0:
# request_params["starttime"] += datetime.timedelta(minutes=1)
#
# requests.append(request_params)
#
# if request_params["endtime"] > params["endtime"]:
# done = True
# request_params["endtime"] = params["endtime"]
# i += 1
# return requests
#
# Path: fmiapi/fmierrors.py
# class RequestException(Exception):
#
# def __init__(self, text, error_code, html=""):
# self.error_code = 0
# self.message = text
# self.error_code = error_code
# self.html = html
#
# def __str__(self):
# if self.html != "":
# return self.html
# else:
# return self.message
. Output only the next line. | query = create_daily_query(datetime(2010, 1, 1, hour=0, minute=1, second=0, microsecond=0, tzinfo=timezone), |
Next line prediction: <|code_start|> self.client_manager = self.cs
self.app.client_manager.tackerclient = self.client_manager
@ddt.ddt
class TestVnfLcmVersions(TestVnfLcm):
def setUp(self):
super(TestVnfLcmVersions, self).setUp()
self.vnflcm_versions = vnflcm_versions.VnfLcmVersions(
self.app, self.app_args, cmd_name='vnflcm versions')
def _versions_response(self, major_version=None):
if major_version is None:
return {"uriPrefix": "/vnflcm",
"apiVersions": [{"version": "1.3.0",
"isDeprecated": False},
{"version": "2.0.0",
"isDeprecated": False}]}
elif major_version == "1":
return {"uriPrefix": "/vnflcm/v1",
"apiVersions": [{"version": "1.3.0",
"isDeprecated": False}]}
elif major_version == "2":
return {"uriPrefix": "/vnflcm/v2",
"apiVersions": [{"version": "2.0.0",
"isDeprecated": False}]}
def test_invalid_major_version(self):
parser = self.vnflcm_versions.get_parser('vnflcm versions')
<|code_end|>
. Use current file imports:
(import os
import ddt
from unittest import mock
from tackerclient.common import exceptions
from tackerclient.osc.common.vnflcm import vnflcm_versions
from tackerclient.tests.unit.osc import base
from tackerclient.tests.unit.osc.v1.fixture_data import client)
and context including class names, function names, or small code snippets from other files:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
#
# Path: tackerclient/osc/common/vnflcm/vnflcm_versions.py
# SUPPORTED_VERSIONS = [1, 2]
# class VnfLcmVersions(command.ShowOne):
# def get_parser(self, prog_name):
# def take_action(self, parsed_args):
#
# Path: tackerclient/tests/unit/osc/base.py
# class FixturedTestCase(testtools.TestCase):
# class ParserException(Exception):
# def setUp(self):
# def check_parser(self, cmd, args, verify_args):
# def assertNotCalled(self, m, msg=None):
# def assertListItemsEqual(self, expected, actual):
#
# Path: tackerclient/tests/unit/osc/v1/fixture_data/client.py
# IDENTITY_URL = 'http://identityserver:5000/v3'
# TACKER_URL = 'http://nfv-orchestration'
# class ClientFixture(fixtures.Fixture):
# def __init__(self, requests_mock, identity_url=IDENTITY_URL,
# api_version='1'):
# def setUp(self):
# def new_client(self):
. Output only the next line. | parsed_args = parser.parse_args(["--major-version", "3"]) |
Predict the next line for this snippet: <|code_start|># under the License.
class TestVnfLcm(base.FixturedTestCase):
client_fixture_class = client.ClientFixture
def setUp(self):
super(TestVnfLcm, self).setUp()
self.url = client.TACKER_URL
self.header = {'content-type': 'application/json'}
self.app = mock.Mock()
self.app_args = mock.Mock()
self.client_manager = self.cs
self.app.client_manager.tackerclient = self.client_manager
@ddt.ddt
class TestVnfLcmVersions(TestVnfLcm):
def setUp(self):
super(TestVnfLcmVersions, self).setUp()
self.vnflcm_versions = vnflcm_versions.VnfLcmVersions(
self.app, self.app_args, cmd_name='vnflcm versions')
def _versions_response(self, major_version=None):
if major_version is None:
return {"uriPrefix": "/vnflcm",
<|code_end|>
with the help of current file imports:
import os
import ddt
from unittest import mock
from tackerclient.common import exceptions
from tackerclient.osc.common.vnflcm import vnflcm_versions
from tackerclient.tests.unit.osc import base
from tackerclient.tests.unit.osc.v1.fixture_data import client
and context from other files:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
#
# Path: tackerclient/osc/common/vnflcm/vnflcm_versions.py
# SUPPORTED_VERSIONS = [1, 2]
# class VnfLcmVersions(command.ShowOne):
# def get_parser(self, prog_name):
# def take_action(self, parsed_args):
#
# Path: tackerclient/tests/unit/osc/base.py
# class FixturedTestCase(testtools.TestCase):
# class ParserException(Exception):
# def setUp(self):
# def check_parser(self, cmd, args, verify_args):
# def assertNotCalled(self, m, msg=None):
# def assertListItemsEqual(self, expected, actual):
#
# Path: tackerclient/tests/unit/osc/v1/fixture_data/client.py
# IDENTITY_URL = 'http://identityserver:5000/v3'
# TACKER_URL = 'http://nfv-orchestration'
# class ClientFixture(fixtures.Fixture):
# def __init__(self, requests_mock, identity_url=IDENTITY_URL,
# api_version='1'):
# def setUp(self):
# def new_client(self):
, which may contain function names, class names, or code. Output only the next line. | "apiVersions": [{"version": "1.3.0", |
Using the snippet: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestVnfLcm(base.FixturedTestCase):
client_fixture_class = client.ClientFixture
def setUp(self):
super(TestVnfLcm, self).setUp()
self.url = client.TACKER_URL
self.header = {'content-type': 'application/json'}
self.app = mock.Mock()
self.app_args = mock.Mock()
self.client_manager = self.cs
self.app.client_manager.tackerclient = self.client_manager
<|code_end|>
, determine the next line of code. You have imports:
import os
import ddt
from unittest import mock
from tackerclient.common import exceptions
from tackerclient.osc.common.vnflcm import vnflcm_versions
from tackerclient.tests.unit.osc import base
from tackerclient.tests.unit.osc.v1.fixture_data import client
and context (class names, function names, or code) available:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
#
# Path: tackerclient/osc/common/vnflcm/vnflcm_versions.py
# SUPPORTED_VERSIONS = [1, 2]
# class VnfLcmVersions(command.ShowOne):
# def get_parser(self, prog_name):
# def take_action(self, parsed_args):
#
# Path: tackerclient/tests/unit/osc/base.py
# class FixturedTestCase(testtools.TestCase):
# class ParserException(Exception):
# def setUp(self):
# def check_parser(self, cmd, args, verify_args):
# def assertNotCalled(self, m, msg=None):
# def assertListItemsEqual(self, expected, actual):
#
# Path: tackerclient/tests/unit/osc/v1/fixture_data/client.py
# IDENTITY_URL = 'http://identityserver:5000/v3'
# TACKER_URL = 'http://nfv-orchestration'
# class ClientFixture(fixtures.Fixture):
# def __init__(self, requests_mock, identity_url=IDENTITY_URL,
# api_version='1'):
# def setUp(self):
# def new_client(self):
. Output only the next line. | @ddt.ddt |
Given the code snippet: <|code_start|> self.client_manager = self.cs
self.app.client_manager.tackerclient = self.client_manager
@ddt.ddt
class TestVnfLcmVersions(TestVnfLcm):
def setUp(self):
super(TestVnfLcmVersions, self).setUp()
self.vnflcm_versions = vnflcm_versions.VnfLcmVersions(
self.app, self.app_args, cmd_name='vnflcm versions')
def _versions_response(self, major_version=None):
if major_version is None:
return {"uriPrefix": "/vnflcm",
"apiVersions": [{"version": "1.3.0",
"isDeprecated": False},
{"version": "2.0.0",
"isDeprecated": False}]}
elif major_version == "1":
return {"uriPrefix": "/vnflcm/v1",
"apiVersions": [{"version": "1.3.0",
"isDeprecated": False}]}
elif major_version == "2":
return {"uriPrefix": "/vnflcm/v2",
"apiVersions": [{"version": "2.0.0",
"isDeprecated": False}]}
def test_invalid_major_version(self):
parser = self.vnflcm_versions.get_parser('vnflcm versions')
<|code_end|>
, generate the next line using the imports in this file:
import os
import ddt
from unittest import mock
from tackerclient.common import exceptions
from tackerclient.osc.common.vnflcm import vnflcm_versions
from tackerclient.tests.unit.osc import base
from tackerclient.tests.unit.osc.v1.fixture_data import client
and context (functions, classes, or occasionally code) from other files:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
#
# Path: tackerclient/osc/common/vnflcm/vnflcm_versions.py
# SUPPORTED_VERSIONS = [1, 2]
# class VnfLcmVersions(command.ShowOne):
# def get_parser(self, prog_name):
# def take_action(self, parsed_args):
#
# Path: tackerclient/tests/unit/osc/base.py
# class FixturedTestCase(testtools.TestCase):
# class ParserException(Exception):
# def setUp(self):
# def check_parser(self, cmd, args, verify_args):
# def assertNotCalled(self, m, msg=None):
# def assertListItemsEqual(self, expected, actual):
#
# Path: tackerclient/tests/unit/osc/v1/fixture_data/client.py
# IDENTITY_URL = 'http://identityserver:5000/v3'
# TACKER_URL = 'http://nfv-orchestration'
# class ClientFixture(fixtures.Fixture):
# def __init__(self, requests_mock, identity_url=IDENTITY_URL,
# api_version='1'):
# def setUp(self):
# def new_client(self):
. Output only the next line. | parsed_args = parser.parse_args(["--major-version", "3"]) |
Based on the snippet: <|code_start|>
class CLITestV10VmVNFFGDJSON(test_cli10.CLITestV10Base):
_RESOURCE = 'vnffgd'
_RESOURCES = 'vnffgds'
def setUp(self):
plurals = {'vnffgds': 'vnffgd'}
super(CLITestV10VmVNFFGDJSON, self).setUp(plurals=plurals)
@patch("tackerclient.tacker.v1_0.nfvo.vnffgd.open",
side_effect=mock_open(read_data="vnffgd"),
create=True)
def test_create_vnffgd_all_params(self, mo):
cmd = vnffgd.CreateVNFFGD(test_cli10.MyApp(sys.stdout), None)
my_id = 'my-id'
name = 'my-name'
attr_key = 'vnffgd'
attr_val = 'vnffgd'
description = 'vnffgd description'
args = [
name,
'--vnffgd-file', 'vnffgd_file',
'--description', description,
]
position_names = ['name', 'description']
position_values = [name, description]
extra_body = {
'template': {attr_key: attr_val},
<|code_end|>
, predict the immediate next line with the help of imports:
import sys
from tackerclient.tacker.v1_0.nfvo import vnffgd
from tackerclient.tests.unit import test_cli10
from unittest.mock import mock_open
from unittest.mock import patch
and context (classes, functions, sometimes code) from other files:
# Path: tackerclient/tests/unit/test_cli10.py
# API_VERSION = "1.0"
# FORMAT = 'json'
# TOKEN = 'testtoken'
# ENDURL = 'localurl'
# def capture_std_streams():
# def __init__(self):
# def write(self, text):
# def make_string(self):
# def __init__(self, status_code, headers=None, reason=None):
# def __init__(self, _stdout):
# def end_url(path, query=None, format=FORMAT):
# def __init__(self, lhs, client):
# def equals(self, rhs):
# def __str__(self):
# def __repr__(self):
# def __eq__(self, rhs):
# def __ne__(self, rhs):
# def __init__(self, lhs, client):
# def _com_dict(self, lhs, rhs):
# def _com_list(self, lhs, rhs):
# def _com(self, lhs, rhs):
# def equals(self, rhs):
# def __repr__(self):
# def __eq__(self, rhs):
# def __ne__(self, rhs):
# def _find_resourceid(self, client, resource, name_or_id):
# def setUp(self, plurals={}):
# def _test_create_resource(self, resource, cmd,
# name, myid, args,
# position_names, position_values, mock_get,
# tenant_id=None, get_client_called_count=1,
# tags=None, admin_state_up=True, extra_body=None,
# **kwargs):
# def _test_list_columns(self, cmd, resources_collection,
# resources_out, mock_get,
# args=['-f', 'json']):
# def _test_list_resources(self, resources, cmd, detail=False, tags=[],
# fields_1=[], fields_2=[], page_size=None,
# sort_key=[], sort_dir=[], response_contents=None,
# base_args=None, path=None,
# template_source=None):
# def _test_list_sub_resources(self, resources, api_resource, cmd, myid,
# mock_get, detail=False,
# tags=[], fields_1=[], fields_2=[],
# page_size=None, sort_key=[], sort_dir=[],
# response_contents=None, base_args=None,
# path=None):
# def _test_update_resource(self, resource, cmd, myid, args, extrafields,
# mock_get, get_client_called_count=1):
# def _test_show_resource(self, resource, cmd, myid, args, fields=[]):
# def _test_delete_resource(self, resource, cmd, myid, args, mock_get):
# def _test_update_resource_action(self, resource, cmd, myid, action, args,
# body, mock_get, retval=None):
# def test_do_request_unicode(self):
# def test_do_request_error_without_response_body(self):
# def _test_exception_handler_v10(
# self, expected_exception, status_code, expected_msg,
# error_type=None, error_msg=None, error_detail=None,
# error_content=None):
# def test_exception_handler_v10_ip_address_in_use(self):
# def test_exception_handler_v10_tacker_known_error(self):
# def test_exception_handler_v10_tacker_known_error_without_detail(self):
# def test_exception_handler_v10_unknown_error_to_per_code_exception(self):
# def test_exception_handler_v10_tacker_unknown_status_code(self):
# def test_exception_handler_v10_bad_tacker_error(self):
# def test_exception_handler_v10_error_dict_contains_message(self):
# def test_exception_handler_v10_error_dict_not_contain_message(self):
# def test_exception_handler_v10_default_fallback(self):
# def test_exception_handler_v10_tacker_etsi_error(self):
# class FakeStdout(object):
# class MyResp(object):
# class MyApp(object):
# class MyUrlComparator(object):
# class MyComparator(object):
# class CLITestV10Base(testtools.TestCase):
# class ClientV1TestJson(CLITestV10Base):
# class CLITestV10ExceptionHandler(CLITestV10Base):
. Output only the next line. | } |
Here is a snippet: <|code_start|> help=_("Instantiate VNF subsequently after it's creation. "
"Specify instantiate request parameters in a json file."))
return parser
def args2body(self, parsed_args, file_path=None):
body = {}
if file_path:
return jsonfile2body(file_path)
body['vnfdId'] = parsed_args.vnfd_id
if parsed_args.description:
body['vnfInstanceDescription'] = parsed_args.description
if parsed_args.name:
body['vnfInstanceName'] = parsed_args.name
return body
def take_action(self, parsed_args):
client = self.app.client_manager.tackerclient
vnf = client.create_vnf_instance(self.args2body(parsed_args))
if parsed_args.I:
# Instantiate VNF instance.
result = client.instantiate_vnf_instance(
vnf['id'],
self.args2body(parsed_args, file_path=parsed_args.I))
if not result:
print((_('VNF Instance %(id)s is created and instantiation'
<|code_end|>
. Write the next line using the current file imports:
import json
import logging
import os
import time
from osc_lib.command import command
from osc_lib import utils
from tackerclient.common import exceptions
from tackerclient.i18n import _
from tackerclient.osc import sdk_utils
from tackerclient.osc import utils as tacker_osc_utils
and context from other files:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
, which may include functions, classes, or code. Output only the next line. | ' request has been accepted.') % {'id': vnf['id']})) |
Predict the next line for this snippet: <|code_start|># a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
SUPPORTED_VERSIONS = [1, 2]
class VnfLcmVersions(command.ShowOne):
_description = _("Show VnfLcm Api versions")
def get_parser(self, prog_name):
parser = super(VnfLcmVersions, self).get_parser(prog_name)
parser.add_argument(
'--major-version',
metavar="<major-version>",
type=int,
help=_('Show only specify major version.'))
return parser
def take_action(self, parsed_args):
v = None
<|code_end|>
with the help of current file imports:
from osc_lib.command import command
from tackerclient.common import exceptions
from tackerclient.i18n import _
and context from other files:
# Path: tackerclient/common/exceptions.py
# class TackerException(Exception):
# class TackerClientException(TackerException):
# class BadRequest(TackerClientException):
# class Unauthorized(TackerClientException):
# class Forbidden(TackerClientException):
# class NotFound(TackerClientException):
# class Conflict(TackerClientException):
# class InternalServerError(TackerClientException):
# class ServiceUnavailable(TackerClientException):
# class NetworkNotFoundClient(NotFound):
# class PortNotFoundClient(NotFound):
# class StateInvalidClient(BadRequest):
# class NetworkInUseClient(Conflict):
# class PortInUseClient(Conflict):
# class IpAddressInUseClient(Conflict):
# class InvalidIpForNetworkClient(BadRequest):
# class OverQuotaClient(Conflict):
# class IpAddressGenerationFailureClient(Conflict):
# class MacAddressInUseClient(Conflict):
# class ExternalIpAddressExhaustedClient(BadRequest):
# class NoAuthURLProvided(Unauthorized):
# class EndpointNotFound(TackerClientException):
# class EndpointTypeNotFound(TackerClientException):
# class AmbiguousEndpoints(TackerClientException):
# class RequestURITooLong(TackerClientException):
# class ConnectionFailed(TackerClientException):
# class SslCertificateValidationError(TackerClientException):
# class MalformedResponseBody(TackerClientException):
# class InvalidContentType(TackerClientException):
# class InvalidInput(TackerClientException):
# class UnsupportedCommandVersion(TackerClientException):
# class TackerCLIError(TackerException):
# class CommandError(TackerCLIError):
# class UnsupportedVersion(TackerCLIError):
# class TackerClientNoUniqueMatch(TackerCLIError):
# def __init__(self, message=None, **kwargs):
# def __str__(self):
# def __init__(self, message=None, **kwargs):
# def __init__(self, **kwargs):
# HTTP_EXCEPTION_MAP = {
# 400: BadRequest,
# 401: Unauthorized,
# 403: Forbidden,
# 404: NotFound,
# 409: Conflict,
# 500: InternalServerError,
# 503: ServiceUnavailable,
# }
, which may contain function names, class names, or code. Output only the next line. | if parsed_args.major_version: |
Given the code snippet: <|code_start|>
def test_convert_pil_image(image):
converted_image = convert_pil_image(image, 4, 4)
assert converted_image.mode == '1'
<|code_end|>
, generate the next line using the imports in this file:
import pytest
from simple_zpl2.utils import convert_pil_image
and context (functions, classes, or occasionally code) from other files:
# Path: simple_zpl2/utils.py
# def convert_pil_image(image, width, height, compression_type='A'):
# """
# Converts PIL.Image to compression type
#
# :param image: PIL image
# :param width: border to 99999
# :param height: border to 99999
# :param compression_type: * 'A' - ASCII hexadecimal
# * 'B' - binary
# * 'C' - compressed binary
# """
# if compression_type != 'A':
# raise NotImplementedError('Compreesion Type {} not implemented'.format(compression_type))
# image = image.resize((int(width), int(height)))
# image = image.convert("RGBA")
# alpha = image.getchannel("A")
#
# image_without_alpha = Image.new("RGBA", image.size, (255, 255, 255) + (255,))
# image_without_alpha.paste(image, mask=alpha)
# image_without_alpha = image_without_alpha.convert('L')
# image_without_alpha = ImageOps.invert(image_without_alpha)
# image_without_alpha = image_without_alpha.point(lambda x: 255 if x > 5 else 0, '1')
#
# return image_without_alpha
. Output only the next line. | @pytest.mark.parametrize('compression_type', ( |
Using the snippet: <|code_start|> name = "kiwix-desktop"
force_build = True
class Source(GitClone):
git_remote = "https://github.com/kiwix/kiwix-desktop.git"
git_dir = "kiwix-desktop"
class Builder(QMakeBuilder):
dependencies = ["qt", "qtwebengine", "libkiwix", "aria2"]
make_install_target = 'install'
configure_env = None
flatpack_build_options = {
"env": [
"QMAKEPATH=/app/lib"
]
}
@property
def configure_option(self):
if self.buildEnv.platformInfo.name == 'flatpak':
options = [
'QMAKE_INCDIR+=/app/include/QtWebEngine',
'QMAKE_INCDIR+=/app/include/QtWebEngineCore',
'QMAKE_INCDIR+=/app/include/QtWebEngineWidgets'
]
else:
options = ["PREFIX={}".format(self.buildEnv.install_dir)]
if self.buildEnv.platformInfo.static:
options.append('"CONFIG+=static"')
<|code_end|>
, determine the next line of code. You have imports:
from .base import (
Dependency,
GitClone,
QMakeBuilder)
and context (class names, function names, or code) available:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class GitClone(Source):
# base_git_ref = "master"
# force_full_clone = False
#
# @property
# def release_git_ref(self):
# return main_project_versions.get(self.name, "master")
#
# @property
# def source_dir(self):
# if option('make_release'):
# return "{}_release".format(self.git_dir)
# else:
# return self.git_dir
#
# @property
# def git_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# @property
# def git_ref(self):
# if option('make_release'):
# return self.release_git_ref
# else:
# return self.base_git_ref
#
# def _git_init(self, context):
# if option('fast_clone') and self.force_full_clone == False:
# command = "git clone --depth=1 --branch {} {} {}".format(
# self.git_ref, self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# else:
# command = "git clone {} {}".format(self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# command = "git checkout {}".format(self.git_ref)
# run_command(command, self.git_path, context)
#
# def _git_update(self, context):
# command = "git fetch origin {}".format(self.git_ref)
# run_command(command, self.git_path, context)
# try:
# command = "git merge --ff-only origin/{}".format(self.git_ref)
# run_command(command, self.git_path, context)
# except subprocess.CalledProcessError:
# raise WarningMessage("Cannot update, please check log for information")
#
# def prepare(self):
# if not os.path.exists(self.git_path):
# self.command('gitinit', self._git_init)
# else:
# self.command('gitupdate', self._git_update)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class QMakeBuilder(MakeBuilder):
# qmake_target = ""
# flatpak_buildsystem = 'qmake'
#
# @property
# def env_option(self):
# options = ""
# if 'QMAKE_CC' in os.environ:
# options += 'QMAKE_CC={} '.format(os.environ['QMAKE_CC'])
# if 'QMAKE_CXX' in os.environ:
# options += 'QMAKE_CXX={} '.format(os.environ['QMAKE_CXX'])
# return options
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# cross_option = ""
# command = ("{command} {configure_option}"
# " {env_option}"
# " {source_path}"
# " {cross_option}")
# command = command.format(
# command = neutralEnv('qmake_command'),
# configure_option=self.configure_option,
# env_option=self.env_option,
# source_path=self.source_path,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "git archive -o {build_dir}/{name}.tar.gz --prefix={name}/ HEAD"
# command = command.format(
# build_dir = self.build_path,
# name = self.target.full_name()
# )
# run_command(command, self.source_path, context)
. Output only the next line. | return " ".join(options) |
Next line prediction: <|code_start|>
class KiwixDesktop(Dependency):
name = "kiwix-desktop"
force_build = True
class Source(GitClone):
git_remote = "https://github.com/kiwix/kiwix-desktop.git"
git_dir = "kiwix-desktop"
class Builder(QMakeBuilder):
dependencies = ["qt", "qtwebengine", "libkiwix", "aria2"]
<|code_end|>
. Use current file imports:
(from .base import (
Dependency,
GitClone,
QMakeBuilder))
and context including class names, function names, or small code snippets from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class GitClone(Source):
# base_git_ref = "master"
# force_full_clone = False
#
# @property
# def release_git_ref(self):
# return main_project_versions.get(self.name, "master")
#
# @property
# def source_dir(self):
# if option('make_release'):
# return "{}_release".format(self.git_dir)
# else:
# return self.git_dir
#
# @property
# def git_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# @property
# def git_ref(self):
# if option('make_release'):
# return self.release_git_ref
# else:
# return self.base_git_ref
#
# def _git_init(self, context):
# if option('fast_clone') and self.force_full_clone == False:
# command = "git clone --depth=1 --branch {} {} {}".format(
# self.git_ref, self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# else:
# command = "git clone {} {}".format(self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# command = "git checkout {}".format(self.git_ref)
# run_command(command, self.git_path, context)
#
# def _git_update(self, context):
# command = "git fetch origin {}".format(self.git_ref)
# run_command(command, self.git_path, context)
# try:
# command = "git merge --ff-only origin/{}".format(self.git_ref)
# run_command(command, self.git_path, context)
# except subprocess.CalledProcessError:
# raise WarningMessage("Cannot update, please check log for information")
#
# def prepare(self):
# if not os.path.exists(self.git_path):
# self.command('gitinit', self._git_init)
# else:
# self.command('gitupdate', self._git_update)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class QMakeBuilder(MakeBuilder):
# qmake_target = ""
# flatpak_buildsystem = 'qmake'
#
# @property
# def env_option(self):
# options = ""
# if 'QMAKE_CC' in os.environ:
# options += 'QMAKE_CC={} '.format(os.environ['QMAKE_CC'])
# if 'QMAKE_CXX' in os.environ:
# options += 'QMAKE_CXX={} '.format(os.environ['QMAKE_CXX'])
# return options
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# cross_option = ""
# command = ("{command} {configure_option}"
# " {env_option}"
# " {source_path}"
# " {cross_option}")
# command = command.format(
# command = neutralEnv('qmake_command'),
# configure_option=self.configure_option,
# env_option=self.env_option,
# source_path=self.source_path,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "git archive -o {build_dir}/{name}.tar.gz --prefix={name}/ HEAD"
# command = command.format(
# build_dir = self.build_path,
# name = self.target.full_name()
# )
# run_command(command, self.source_path, context)
. Output only the next line. | make_install_target = 'install' |
Given the code snippet: <|code_start|>
class KiwixDesktop(Dependency):
name = "kiwix-desktop"
force_build = True
class Source(GitClone):
git_remote = "https://github.com/kiwix/kiwix-desktop.git"
git_dir = "kiwix-desktop"
class Builder(QMakeBuilder):
dependencies = ["qt", "qtwebengine", "libkiwix", "aria2"]
make_install_target = 'install'
configure_env = None
flatpack_build_options = {
"env": [
"QMAKEPATH=/app/lib"
<|code_end|>
, generate the next line using the imports in this file:
from .base import (
Dependency,
GitClone,
QMakeBuilder)
and context (functions, classes, or occasionally code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class GitClone(Source):
# base_git_ref = "master"
# force_full_clone = False
#
# @property
# def release_git_ref(self):
# return main_project_versions.get(self.name, "master")
#
# @property
# def source_dir(self):
# if option('make_release'):
# return "{}_release".format(self.git_dir)
# else:
# return self.git_dir
#
# @property
# def git_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# @property
# def git_ref(self):
# if option('make_release'):
# return self.release_git_ref
# else:
# return self.base_git_ref
#
# def _git_init(self, context):
# if option('fast_clone') and self.force_full_clone == False:
# command = "git clone --depth=1 --branch {} {} {}".format(
# self.git_ref, self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# else:
# command = "git clone {} {}".format(self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# command = "git checkout {}".format(self.git_ref)
# run_command(command, self.git_path, context)
#
# def _git_update(self, context):
# command = "git fetch origin {}".format(self.git_ref)
# run_command(command, self.git_path, context)
# try:
# command = "git merge --ff-only origin/{}".format(self.git_ref)
# run_command(command, self.git_path, context)
# except subprocess.CalledProcessError:
# raise WarningMessage("Cannot update, please check log for information")
#
# def prepare(self):
# if not os.path.exists(self.git_path):
# self.command('gitinit', self._git_init)
# else:
# self.command('gitupdate', self._git_update)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class QMakeBuilder(MakeBuilder):
# qmake_target = ""
# flatpak_buildsystem = 'qmake'
#
# @property
# def env_option(self):
# options = ""
# if 'QMAKE_CC' in os.environ:
# options += 'QMAKE_CC={} '.format(os.environ['QMAKE_CC'])
# if 'QMAKE_CXX' in os.environ:
# options += 'QMAKE_CXX={} '.format(os.environ['QMAKE_CXX'])
# return options
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# cross_option = ""
# command = ("{command} {configure_option}"
# " {env_option}"
# " {source_path}"
# " {cross_option}")
# command = command.format(
# command = neutralEnv('qmake_command'),
# configure_option=self.configure_option,
# env_option=self.env_option,
# source_path=self.source_path,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "git archive -o {build_dir}/{name}.tar.gz --prefix={name}/ HEAD"
# command = command.format(
# build_dir = self.build_path,
# name = self.target.full_name()
# )
# run_command(command, self.source_path, context)
. Output only the next line. | ] |
Continue the code snippet: <|code_start|>
class NativePlatformInfo(PlatformInfo):
build = 'native'
def get_env(self):
env = super().get_env()
if neutralEnv('distname') == 'fedora':
<|code_end|>
. Use current file imports:
from .base import PlatformInfo
from kiwixbuild.utils import pj
from kiwixbuild._global import option, neutralEnv
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def option(what):
# return getattr(_options, what)
#
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | env['QT_SELECT'] = "5-64" |
Here is a snippet: <|code_start|>
class NativePlatformInfo(PlatformInfo):
build = 'native'
def get_env(self):
<|code_end|>
. Write the next line using the current file imports:
from .base import PlatformInfo
from kiwixbuild.utils import pj
from kiwixbuild._global import option, neutralEnv
and context from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def option(what):
# return getattr(_options, what)
#
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
, which may include functions, classes, or code. Output only the next line. | env = super().get_env() |
Based on the snippet: <|code_start|>
class FlatpakPlatformInfo(PlatformInfo):
name = 'flatpak'
build = 'flatpak'
static = ''
<|code_end|>
, predict the immediate next line with the help of imports:
from .base import PlatformInfo
from kiwixbuild._global import option, neutralEnv
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/_global.py
# def option(what):
# return getattr(_options, what)
#
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | toolchain_names = ['org.kde', 'io.qt.qtwebengine'] |
Based on the snippet: <|code_start|>
class AndroidPlatformInfo(PlatformInfo):
build = 'android'
static = True
toolchain_names = ['android-ndk']
compatible_hosts = ['fedora', 'debian']
<|code_end|>
, predict the immediate next line with the help of imports:
from .base import PlatformInfo, MetaPlatformInfo
from kiwixbuild.utils import pj
from kiwixbuild._global import get_target_step, option
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# class MetaPlatformInfo(PlatformInfo):
# subPlatformNames = []
#
# def add_targets(self, targetName, targets):
# targetDefs = []
# for platformName in self.subPlatformNames:
# platform = self.get_platform(platformName, targets)
# targetDefs += platform.add_targets(targetName, targets)
# return targetDefs
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
#
# def option(what):
# return getattr(_options, what)
. Output only the next line. | def __str__(self): |
Continue the code snippet: <|code_start|>
class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
def __new__(cls, name, sha256, url=None):
if url is None:
url = REMOTE_PREFIX + name
return super().__new__(cls, name, sha256, url)
class Context:
def __init__(self, command_name, log_file, force_native_build):
self.command_name = command_name
self.log_file = log_file
self.force_native_build = force_native_build
self.autoskip_file = None
self.no_skip = False
def try_skip(self, path, extra_name=""):
if self.no_skip:
return
if extra_name:
extra_name = "_{}".format(extra_name)
self.autoskip_file = pj(path, ".{}{}_ok".format(self.command_name, extra_name))
if os.path.exists(self.autoskip_file):
raise SkipCommand()
def _finalise(self):
if self.autoskip_file is not None:
os.makedirs(os.path.dirname(self.autoskip_file), exist_ok=True)
with open(self.autoskip_file, 'w'):
<|code_end|>
. Use current file imports:
import os.path
import hashlib
import tarfile, zipfile
import tempfile
import shutil
import os, stat, sys
import urllib.request
import urllib.error
import ssl
import subprocess
from collections import namedtuple, defaultdict
from kiwixbuild._global import neutralEnv, option
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
. Output only the next line. | pass |
Here is a snippet: <|code_start|> file_path = str(HOME / base_name)
batch_size = 1024 * 1024 * 8
with urlopen(url) as resource, open(file_path, "wb") as file:
while True:
batch = resource.read(batch_size)
if not batch:
break
print(".", end="", flush=True)
file.write(batch)
return file_path
ARCHIVE_NAME_TEMPLATE = "base_deps2_{os}_{platform}_{version}.tar.xz"
if PLATFORM_TARGET == 'flatpak':
base_dep_archive_name = "base_deps2_{}_flatpak.tar.xz".format(OS_NAME)
else:
base_dep_archive_name = ARCHIVE_NAME_TEMPLATE.format(
os=OS_NAME,
platform=PLATFORM_TARGET,
version=base_deps_meta_version,
)
print_message("Getting archive {}", base_dep_archive_name)
try:
local_filename = download_base_archive(base_dep_archive_name)
with tarfile.open(local_filename) as f:
f.extractall(str(HOME))
os.remove(str(local_filename))
except URLError:
print_message("Cannot get archive. Build dependencies")
<|code_end|>
. Write the next line using the current file imports:
import os
import tarfile
from urllib.request import urlopen
from urllib.error import URLError
from kiwixbuild.versions import base_deps_meta_version
from common import (
print_message,
run_kiwix_build,
upload,
make_deps_archive,
HOME,
PLATFORM_TARGET,
OS_NAME,
)
and context from other files:
# Path: kiwixbuild/versions.py
, which may include functions, classes, or code. Output only the next line. | if PLATFORM_TARGET == "android": |
Predict the next line after this snippet: <|code_start|>
class armhf_toolchain(Dependency):
dont_skip = True
neutral = True
name = 'armhf'
class Source(ReleaseDownload):
archive = Remotefile('raspberrypi-tools.tar.gz',
<|code_end|>
using the current file's imports:
from .base import Dependency, ReleaseDownload, NoopBuilder
from kiwixbuild.utils import Remotefile
and any relevant context from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | 'e72b35436f2f23f2f7df322d6c318b9be57b21596b5ff0b8936af4ad94e04f2e') |
Given snippet: <|code_start|>
class zstd(Dependency):
name = 'zstd'
class Source(ReleaseDownload):
archive = Remotefile('zstd-1.5.1.tar.gz',
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .base import (
Dependency,
ReleaseDownload,
MesonBuilder)
from kiwixbuild.utils import Remotefile
and context:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MesonBuilder(Builder):
# configure_option = ""
# test_option = ""
# flatpak_buildsystem = 'meson'
#
# @property
# def build_type(self):
# return 'release' if option('make_release') else 'debug'
#
# @property
# def strip_option(self):
# return '--strip' if option('make_release') else ''
#
# @property
# def library_type(self):
# return 'static' if self.buildEnv.platformInfo.static else 'shared'
#
# def _configure(self, context):
# context.no_skip = False
# context.try_skip(self.build_path)
# if os.path.exists(self.build_path):
# shutil.rmtree(self.build_path)
# os.makedirs(self.build_path)
# configure_option = self.configure_option.format(buildEnv=self.buildEnv)
# cross_option = ""
# if not self.target.force_native_build and self.buildEnv.meson_crossfile:
# cross_option = "--cross-file {}".format(
# self.buildEnv.meson_crossfile)
# command = ("{command} . {build_path}"
# " --buildtype={build_type} {strip_option}"
# " --default-library={library_type}"
# " {configure_option}"
# " --prefix={buildEnv.install_dir}"
# " --libdir={buildEnv.libprefix}"
# " {cross_option}")
# command = command.format(
# command=neutralEnv('meson_command'),
# build_type=self.build_type,
# strip_option=self.strip_option,
# library_type=self.library_type,
# configure_option=configure_option,
# build_path=self.build_path,
# buildEnv=self.buildEnv,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.source_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "{} -v".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _test(self, context):
# context.try_skip(self.build_path)
# if ( self.buildEnv.platformInfo.build == 'android'
# or (self.buildEnv.platformInfo.build != 'native'
# and not self.buildEnv.platformInfo.static)
# ):
# raise SkipCommand()
# command = "{} --verbose {}".format(neutralEnv('mesontest_command'), self.test_option)
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "{} -v install".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "{} -v dist".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
which might include code, classes, or functions. Output only the next line. | 'e28b2f2ed5710ea0d3a1ecac3f6a947a016b972b9dd30242369010e5f53d7002', |
Given the code snippet: <|code_start|>
class ApplePlatformInfo(PlatformInfo):
build = 'iOS'
static = True
compatible_hosts = ['Darwin']
arch = None
<|code_end|>
, generate the next line using the imports in this file:
import subprocess
from kiwixbuild._global import option
from kiwixbuild.utils import pj, xrun_find
from .base import PlatformInfo, MetaPlatformInfo
and context (functions, classes, or occasionally code) from other files:
# Path: kiwixbuild/_global.py
# def option(what):
# return getattr(_options, what)
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# class MetaPlatformInfo(PlatformInfo):
# subPlatformNames = []
#
# def add_targets(self, targetName, targets):
# targetDefs = []
# for platformName in self.subPlatformNames:
# platform = self.get_platform(platformName, targets)
# targetDefs += platform.add_targets(targetName, targets)
# return targetDefs
. Output only the next line. | host = None |
Using the snippet: <|code_start|>def fix_macos_rpath(project):
base_dir, export_files = EXPORT_FILES[project]
for file in filter(lambda f: f.endswith(".dylib"), export_files):
lib = base_dir / file
if lib.is_symlink():
continue
command = ["install_name_tool", "-id", lib.name, str(lib)]
print_message("call {}", " ".join(command))
subprocess.check_call(command, env=os.environ)
def trigger_workflow(repo, workflow="docker.yml", ref="master", inputs=None):
"""triggers a `workflow_dispatch` event to the specified workflow on its repo
repo: {user}/{repo} format
workflow: workflow ID or workflow file name
ref: branch or tag name
inputs: dict of inputs to pass to the workflow"""
print_message(
"triggering workflow `{workflow}` on {repo}@{ref} "
"with inputs={inputs}", workflow=workflow, repo=repo, ref=ref, inputs=inputs)
url = "{base_url}/repos/{repo}/actions/workflows/{workflow}/dispatches".format(
base_url=os.getenv("GITHUB_API_URL", "https://api.github.com"),
repo=repo, workflow=workflow)
resp = requests.post(url, headers={
"Content-Type": "application/json",
"Authorization": "token {token}".format(
<|code_end|>
, determine the next line of code. You have imports:
import os
import tarfile
import zipfile
import subprocess
import re
import shutil
import requests
from os import environ as _environ
from pathlib import Path
from datetime import date
from kiwixbuild.versions import (
main_project_versions,
release_versions,
base_deps_versions,
)
and context (class names, function names, or code) available:
# Path: kiwixbuild/versions.py
. Output only the next line. | token=os.getenv('GITHUB_PAT', '')), |
Continue the code snippet: <|code_start|>
PLATFORM_TARGET = _environ["PLATFORM_TARGET"]
if PLATFORM_TARGET == "native_desktop":
PLATFORM_TARGET = "native_dyn"
DESKTOP = True
else:
<|code_end|>
. Use current file imports:
import os
import tarfile
import zipfile
import subprocess
import re
import shutil
import requests
from os import environ as _environ
from pathlib import Path
from datetime import date
from kiwixbuild.versions import (
main_project_versions,
release_versions,
base_deps_versions,
)
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/versions.py
. Output only the next line. | DESKTOP = False |
Predict the next line for this snippet: <|code_start|> return self._detect_binary('qmake-qt5', 'qmake')
class BuildEnv:
def __init__(self, platformInfo):
build_dir = "BUILD_{}".format(platformInfo.name)
self.platformInfo = platformInfo
self.build_dir = pj(option('working_dir'), build_dir)
self.install_dir = pj(self.build_dir, "INSTALL")
self.toolchain_dir = pj(self.build_dir, "TOOLCHAINS")
self.log_dir = pj(self.build_dir, 'LOGS')
for d in (self.build_dir,
self.install_dir,
self.toolchain_dir,
self.log_dir):
os.makedirs(d, exist_ok=True)
self.libprefix = option('libprefix') or self._detect_libdir()
def clean_intermediate_directories(self):
for subdir in os.listdir(self.build_dir):
subpath = pj(self.build_dir, subdir)
if subpath == self.install_dir:
continue
if os.path.isdir(subpath):
shutil.rmtree(subpath)
else:
os.remove(subpath)
def _is_debianlike(self):
<|code_end|>
with the help of current file imports:
import os, sys, shutil
import subprocess
import platform
import distro
from .utils import pj, download_remote, Defaultdict
from ._global import neutralEnv, option
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
, which may contain function names, class names, or code. Output only the next line. | return os.path.isfile('/etc/debian_version') |
Predict the next line for this snippet: <|code_start|> self.archive_dir,
self.toolchain_dir,
self.log_dir):
os.makedirs(d, exist_ok=True)
self.detect_platform()
self.ninja_command = self._detect_ninja()
if not self.ninja_command:
sys.exit("ERROR: ninja command not found.")
self.meson_command = self._detect_meson()
if not self.meson_command:
sys.exit("ERROR: meson command not found.")
self.qmake_command = self._detect_qmake()
if not self.qmake_command:
print("WARNING: qmake command not found.", file=sys.stderr)
self.mesontest_command = "{} test".format(self.meson_command)
def detect_platform(self):
_platform = platform.system()
self.distname = _platform
if _platform == 'Windows':
print('ERROR: kiwix-build is not intented to run on Windows platform.\n'
'It should probably not work, but well, you still can have a try.')
cont = input('Do you want to continue ? [y/N]')
if cont.lower() != 'y':
sys.exit(0)
if _platform == 'Linux':
self.distname = distro.id()
if self.distname == 'ubuntu':
self.distname = 'debian'
<|code_end|>
with the help of current file imports:
import os, sys, shutil
import subprocess
import platform
import distro
from .utils import pj, download_remote, Defaultdict
from ._global import neutralEnv, option
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
, which may contain function names, class names, or code. Output only the next line. | def download(self, what, where=None): |
Here is a snippet: <|code_start|> self.toolchain_dir,
self.log_dir):
os.makedirs(d, exist_ok=True)
self.detect_platform()
self.ninja_command = self._detect_ninja()
if not self.ninja_command:
sys.exit("ERROR: ninja command not found.")
self.meson_command = self._detect_meson()
if not self.meson_command:
sys.exit("ERROR: meson command not found.")
self.qmake_command = self._detect_qmake()
if not self.qmake_command:
print("WARNING: qmake command not found.", file=sys.stderr)
self.mesontest_command = "{} test".format(self.meson_command)
def detect_platform(self):
_platform = platform.system()
self.distname = _platform
if _platform == 'Windows':
print('ERROR: kiwix-build is not intented to run on Windows platform.\n'
'It should probably not work, but well, you still can have a try.')
cont = input('Do you want to continue ? [y/N]')
if cont.lower() != 'y':
sys.exit(0)
if _platform == 'Linux':
self.distname = distro.id()
if self.distname == 'ubuntu':
self.distname = 'debian'
def download(self, what, where=None):
<|code_end|>
. Write the next line using the current file imports:
import os, sys, shutil
import subprocess
import platform
import distro
from .utils import pj, download_remote, Defaultdict
from ._global import neutralEnv, option
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
, which may include functions, classes, or code. Output only the next line. | where = where or self.archive_dir |
Predict the next line for this snippet: <|code_start|>
class AllBaseDependencies(Dependency):
name = "alldependencies"
Source = NoopSource
class Builder(NoopBuilder):
@classmethod
def get_dependencies(cls, platformInfo, allDeps):
base_deps = ['zlib', 'lzma', 'zstd', 'xapian-core', 'pugixml', 'libcurl', 'icu4c', 'mustache', 'libmicrohttpd', 'zim-testing-suite']
# Add specific dependencies depending of the platform
if platformInfo.build not in ('android', 'iOS'):
# For zimtools
<|code_end|>
with the help of current file imports:
from .base import (
Dependency,
NoopSource,
NoopBuilder)
from kiwixbuild._global import neutralEnv
and context from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class NoopSource(Source):
# def prepare(self):
# pass
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
, which may contain function names, class names, or code. Output only the next line. | base_deps += ['docoptcpp'] |
Predict the next line after this snippet: <|code_start|>
class AllBaseDependencies(Dependency):
name = "alldependencies"
Source = NoopSource
class Builder(NoopBuilder):
@classmethod
<|code_end|>
using the current file's imports:
from .base import (
Dependency,
NoopSource,
NoopBuilder)
from kiwixbuild._global import neutralEnv
and any relevant context from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class NoopSource(Source):
# def prepare(self):
# pass
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | def get_dependencies(cls, platformInfo, allDeps): |
Given snippet: <|code_start|>
class AllBaseDependencies(Dependency):
name = "alldependencies"
Source = NoopSource
class Builder(NoopBuilder):
@classmethod
def get_dependencies(cls, platformInfo, allDeps):
base_deps = ['zlib', 'lzma', 'zstd', 'xapian-core', 'pugixml', 'libcurl', 'icu4c', 'mustache', 'libmicrohttpd', 'zim-testing-suite']
# Add specific dependencies depending of the platform
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .base import (
Dependency,
NoopSource,
NoopBuilder)
from kiwixbuild._global import neutralEnv
and context:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class NoopSource(Source):
# def prepare(self):
# pass
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
which might include code, classes, or functions. Output only the next line. | if platformInfo.build not in ('android', 'iOS'): |
Based on the snippet: <|code_start|>
class AllBaseDependencies(Dependency):
name = "alldependencies"
Source = NoopSource
class Builder(NoopBuilder):
<|code_end|>
, predict the immediate next line with the help of imports:
from .base import (
Dependency,
NoopSource,
NoopBuilder)
from kiwixbuild._global import neutralEnv
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class NoopSource(Source):
# def prepare(self):
# pass
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | @classmethod |
Predict the next line for this snippet: <|code_start|>
class MicroHttpd(Dependency):
name = "libmicrohttpd"
class Source(ReleaseDownload):
archive = Remotefile('libmicrohttpd-0.9.72.tar.gz',
'0ae825f8e0d7f41201fd44a0df1cf454c1cb0bc50fe9d59c26552260264c2ff8',
<|code_end|>
with the help of current file imports:
from .base import (
Dependency,
ReleaseDownload,
MakeBuilder)
from kiwixbuild.utils import Remotefile
and context from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MakeBuilder(Builder):
# configure_option_template = "{dep_options} {static_option} {env_option} --prefix {install_dir} --libdir {libdir}"
# configure_option = ""
# dynamic_configure_option = "--enable-shared --disable-static"
# static_configure_option = "--enable-static --disable-shared"
# make_option = ""
# install_option = ""
# configure_script = "configure"
# configure_env = {
# '_format_CFLAGS' : '{env[CFLAGS]} -O3',
# '_format_CXXFLAGS': '{env[CXXFLAGS]} -O3'
# }
# make_target = ""
# flatpak_buildsystem = None
#
# @property
# def make_install_target(self):
# if self.buildEnv.platformInfo.build == 'iOS':
# return 'install'
# return 'install-strip'
#
# @property
# def all_configure_option(self):
# option = self.configure_option_template.format(
# dep_options=self.configure_option,
# static_option=self.static_configure_option if self.buildEnv.platformInfo.static else self.dynamic_configure_option,
# env_option=self.buildEnv.platformInfo.configure_option if not self.target.force_native_build else "",
# install_dir=self.buildEnv.install_dir,
# libdir=pj(self.buildEnv.install_dir, self.buildEnv.libprefix)
# )
# return option
#
# def set_configure_env(self, env):
# dep_conf_env = self.configure_env
# if not dep_conf_env:
# return
# for k, v in dep_conf_env.items():
# if k.startswith('_format_'):
# v = v.format(buildEnv=self.buildEnv, env=env)
# env[k[8:]] = v
# else:
# env[k] = v
#
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# command = "{configure_script} {configure_option}"
# command = command.format(
# configure_script=pj(self.source_path, self.configure_script),
# configure_option=self.all_configure_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "make -j4 {make_target} {make_option}".format(
# make_target=self.make_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "make {make_install_target} {make_option}".format(
# make_install_target=self.make_install_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# context.try_skip(self.build_path)
# command = "make dist"
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
, which may contain function names, class names, or code. Output only the next line. | 'https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-0.9.72.tar.gz') |
Based on the snippet: <|code_start|>
class MicroHttpd(Dependency):
name = "libmicrohttpd"
class Source(ReleaseDownload):
archive = Remotefile('libmicrohttpd-0.9.72.tar.gz',
<|code_end|>
, predict the immediate next line with the help of imports:
from .base import (
Dependency,
ReleaseDownload,
MakeBuilder)
from kiwixbuild.utils import Remotefile
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MakeBuilder(Builder):
# configure_option_template = "{dep_options} {static_option} {env_option} --prefix {install_dir} --libdir {libdir}"
# configure_option = ""
# dynamic_configure_option = "--enable-shared --disable-static"
# static_configure_option = "--enable-static --disable-shared"
# make_option = ""
# install_option = ""
# configure_script = "configure"
# configure_env = {
# '_format_CFLAGS' : '{env[CFLAGS]} -O3',
# '_format_CXXFLAGS': '{env[CXXFLAGS]} -O3'
# }
# make_target = ""
# flatpak_buildsystem = None
#
# @property
# def make_install_target(self):
# if self.buildEnv.platformInfo.build == 'iOS':
# return 'install'
# return 'install-strip'
#
# @property
# def all_configure_option(self):
# option = self.configure_option_template.format(
# dep_options=self.configure_option,
# static_option=self.static_configure_option if self.buildEnv.platformInfo.static else self.dynamic_configure_option,
# env_option=self.buildEnv.platformInfo.configure_option if not self.target.force_native_build else "",
# install_dir=self.buildEnv.install_dir,
# libdir=pj(self.buildEnv.install_dir, self.buildEnv.libprefix)
# )
# return option
#
# def set_configure_env(self, env):
# dep_conf_env = self.configure_env
# if not dep_conf_env:
# return
# for k, v in dep_conf_env.items():
# if k.startswith('_format_'):
# v = v.format(buildEnv=self.buildEnv, env=env)
# env[k[8:]] = v
# else:
# env[k] = v
#
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# command = "{configure_script} {configure_option}"
# command = command.format(
# configure_script=pj(self.source_path, self.configure_script),
# configure_option=self.all_configure_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "make -j4 {make_target} {make_option}".format(
# make_target=self.make_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "make {make_install_target} {make_option}".format(
# make_install_target=self.make_install_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# context.try_skip(self.build_path)
# command = "make dist"
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | '0ae825f8e0d7f41201fd44a0df1cf454c1cb0bc50fe9d59c26552260264c2ff8', |
Given snippet: <|code_start|>
class MicroHttpd(Dependency):
name = "libmicrohttpd"
class Source(ReleaseDownload):
archive = Remotefile('libmicrohttpd-0.9.72.tar.gz',
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .base import (
Dependency,
ReleaseDownload,
MakeBuilder)
from kiwixbuild.utils import Remotefile
and context:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MakeBuilder(Builder):
# configure_option_template = "{dep_options} {static_option} {env_option} --prefix {install_dir} --libdir {libdir}"
# configure_option = ""
# dynamic_configure_option = "--enable-shared --disable-static"
# static_configure_option = "--enable-static --disable-shared"
# make_option = ""
# install_option = ""
# configure_script = "configure"
# configure_env = {
# '_format_CFLAGS' : '{env[CFLAGS]} -O3',
# '_format_CXXFLAGS': '{env[CXXFLAGS]} -O3'
# }
# make_target = ""
# flatpak_buildsystem = None
#
# @property
# def make_install_target(self):
# if self.buildEnv.platformInfo.build == 'iOS':
# return 'install'
# return 'install-strip'
#
# @property
# def all_configure_option(self):
# option = self.configure_option_template.format(
# dep_options=self.configure_option,
# static_option=self.static_configure_option if self.buildEnv.platformInfo.static else self.dynamic_configure_option,
# env_option=self.buildEnv.platformInfo.configure_option if not self.target.force_native_build else "",
# install_dir=self.buildEnv.install_dir,
# libdir=pj(self.buildEnv.install_dir, self.buildEnv.libprefix)
# )
# return option
#
# def set_configure_env(self, env):
# dep_conf_env = self.configure_env
# if not dep_conf_env:
# return
# for k, v in dep_conf_env.items():
# if k.startswith('_format_'):
# v = v.format(buildEnv=self.buildEnv, env=env)
# env[k[8:]] = v
# else:
# env[k] = v
#
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# command = "{configure_script} {configure_option}"
# command = command.format(
# configure_script=pj(self.source_path, self.configure_script),
# configure_option=self.all_configure_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "make -j4 {make_target} {make_option}".format(
# make_target=self.make_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "make {make_install_target} {make_option}".format(
# make_install_target=self.make_install_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# context.try_skip(self.build_path)
# command = "make dist"
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
which might include code, classes, or functions. Output only the next line. | '0ae825f8e0d7f41201fd44a0df1cf454c1cb0bc50fe9d59c26552260264c2ff8', |
Based on the snippet: <|code_start|>
class docoptcpp(Dependency):
name = 'docoptcpp'
class Source(GitClone):
<|code_end|>
, predict the immediate next line with the help of imports:
from .base import (
Dependency,
GitClone,
CMakeBuilder)
from kiwixbuild.utils import Remotefile
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class GitClone(Source):
# base_git_ref = "master"
# force_full_clone = False
#
# @property
# def release_git_ref(self):
# return main_project_versions.get(self.name, "master")
#
# @property
# def source_dir(self):
# if option('make_release'):
# return "{}_release".format(self.git_dir)
# else:
# return self.git_dir
#
# @property
# def git_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# @property
# def git_ref(self):
# if option('make_release'):
# return self.release_git_ref
# else:
# return self.base_git_ref
#
# def _git_init(self, context):
# if option('fast_clone') and self.force_full_clone == False:
# command = "git clone --depth=1 --branch {} {} {}".format(
# self.git_ref, self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# else:
# command = "git clone {} {}".format(self.git_remote, self.source_dir)
# run_command(command, neutralEnv('source_dir'), context)
# command = "git checkout {}".format(self.git_ref)
# run_command(command, self.git_path, context)
#
# def _git_update(self, context):
# command = "git fetch origin {}".format(self.git_ref)
# run_command(command, self.git_path, context)
# try:
# command = "git merge --ff-only origin/{}".format(self.git_ref)
# run_command(command, self.git_path, context)
# except subprocess.CalledProcessError:
# raise WarningMessage("Cannot update, please check log for information")
#
# def prepare(self):
# if not os.path.exists(self.git_path):
# self.command('gitinit', self._git_init)
# else:
# self.command('gitupdate', self._git_update)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class CMakeBuilder(MakeBuilder):
# flatpak_buildsystem = 'cmake'
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# cross_option = ""
# if not self.target.force_native_build and self.buildEnv.cmake_crossfile:
# cross_option = "-DCMAKE_TOOLCHAIN_FILE={}".format(self.buildEnv.cmake_crossfile)
# command = ("cmake {configure_option}"
# " -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"
# " -DCMAKE_INSTALL_PREFIX={install_dir}"
# " -DCMAKE_INSTALL_LIBDIR={libdir}"
# " {source_path}"
# " {cross_option}")
# command = command.format(
# configure_option=self.configure_option,
# install_dir=self.buildEnv.install_dir,
# libdir=self.buildEnv.libprefix,
# source_path=self.source_path,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def set_flatpak_buildsystem(self, module):
# super().set_flatpak_buildsystem( module)
# module['buildir'] = True
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | git_remote = "https://github.com/docopt/docopt.cpp.git" |
Continue the code snippet: <|code_start|>
class ZimTestingSuite(Dependency):
name = "zim-testing-suite"
dont_skip = True
class Source(ReleaseDownload):
archive = Remotefile('zim-testing-suite-0.3.tar.gz',
<|code_end|>
. Use current file imports:
from .base import (
Dependency,
ReleaseDownload,
NoopBuilder
)
from kiwixbuild.utils import Remotefile
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class NoopBuilder(Builder):
# def build(self):
# pass
#
# def make_dist(self):
# pass
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | 'cd7d1ccc48af3783af9156cb6bf3c18d9a3319a73fdeefe65f0b4cae402d3d66', |
Predict the next line for this snippet: <|code_start|>
class I586PlatformInfo(PlatformInfo):
build = 'i586'
arch_full = 'i586-linux-gnu'
compatible_hosts = ['fedora', 'debian']
def get_cross_config(self):
return {
'binaries': self.binaries,
'exe_wrapper_def': '',
'extra_libs': ['-m32', '-march=i586', '-mno-sse'],
'extra_cflags': ['-m32', '-march=i586', '-mno-sse', '-I{}'.format(pj(self.buildEnv.install_dir, 'include'))],
'host_machine': {
'system': 'linux',
'lsystem': 'linux',
'cpu_family': 'x86',
'cpu': 'i586',
'endian': 'little',
'abi': ''
}
}
@property
def configure_option(self):
<|code_end|>
with the help of current file imports:
import os
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
and context from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
, which may contain function names, class names, or code. Output only the next line. | return '--host={}'.format(self.arch_full) |
Based on the snippet: <|code_start|>
class I586PlatformInfo(PlatformInfo):
build = 'i586'
arch_full = 'i586-linux-gnu'
compatible_hosts = ['fedora', 'debian']
def get_cross_config(self):
return {
'binaries': self.binaries,
'exe_wrapper_def': '',
'extra_libs': ['-m32', '-march=i586', '-mno-sse'],
'extra_cflags': ['-m32', '-march=i586', '-mno-sse', '-I{}'.format(pj(self.buildEnv.install_dir, 'include'))],
'host_machine': {
'system': 'linux',
'lsystem': 'linux',
'cpu_family': 'x86',
'cpu': 'i586',
'endian': 'little',
'abi': ''
<|code_end|>
, predict the immediate next line with the help of imports:
import os
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
and context (classes, functions, sometimes code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
. Output only the next line. | } |
Given the code snippet: <|code_start|>
class Win32PlatformInfo(PlatformInfo):
build = 'win32'
compatible_hosts = ['fedora', 'debian']
arch_full = 'i686-w64-mingw32'
<|code_end|>
, generate the next line using the imports in this file:
import subprocess
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
from kiwixbuild._global import neutralEnv
and context (functions, classes, or occasionally code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | extra_libs = ['-lwinmm', '-lshlwapi', '-lws2_32', '-lssp'] |
Continue the code snippet: <|code_start|>
class Win64PlatformInfo(PlatformInfo):
extra_libs = ['-lmingw32', '-lwinmm', '-lws2_32', '-lshlwapi', '-lrpcrt4', '-lmsvcr100', '-liphlpapi', '-lshell32', '-lkernel32']
build = 'win64'
compatible_hosts = ['fedora', 'debian']
arch_full = 'x86_64-w64-mingw32'
def get_cross_config(self):
return {
'exe_wrapper_def': self.exe_wrapper_def,
'binaries': self.binaries,
'root_path': self.root_path,
'extra_libs': self.extra_libs,
<|code_end|>
. Use current file imports:
import subprocess
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
from kiwixbuild._global import neutralEnv
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | 'extra_cflags': ['-DWIN32'], |
Continue the code snippet: <|code_start|>
class Win64PlatformInfo(PlatformInfo):
extra_libs = ['-lmingw32', '-lwinmm', '-lws2_32', '-lshlwapi', '-lrpcrt4', '-lmsvcr100', '-liphlpapi', '-lshell32', '-lkernel32']
build = 'win64'
compatible_hosts = ['fedora', 'debian']
arch_full = 'x86_64-w64-mingw32'
def get_cross_config(self):
return {
'exe_wrapper_def': self.exe_wrapper_def,
'binaries': self.binaries,
'root_path': self.root_path,
'extra_libs': self.extra_libs,
'extra_cflags': ['-DWIN32'],
'host_machine': {
<|code_end|>
. Use current file imports:
import subprocess
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
from kiwixbuild._global import neutralEnv
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
. Output only the next line. | 'system': 'Windows', |
Given snippet: <|code_start|>
class Win64PlatformInfo(PlatformInfo):
extra_libs = ['-lmingw32', '-lwinmm', '-lws2_32', '-lshlwapi', '-lrpcrt4', '-lmsvcr100', '-liphlpapi', '-lshell32', '-lkernel32']
build = 'win64'
compatible_hosts = ['fedora', 'debian']
arch_full = 'x86_64-w64-mingw32'
def get_cross_config(self):
return {
'exe_wrapper_def': self.exe_wrapper_def,
'binaries': self.binaries,
'root_path': self.root_path,
'extra_libs': self.extra_libs,
'extra_cflags': ['-DWIN32'],
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import subprocess
from .base import PlatformInfo
from kiwixbuild.utils import which, pj
from kiwixbuild._global import neutralEnv
and context:
# Path: kiwixbuild/platforms/base.py
# class PlatformInfo(metaclass=_MetaPlatform):
# all_platforms = {}
# all_running_platforms = {}
# toolchain_names = []
# configure_option = ""
#
# @classmethod
# def get_platform(cls, name, targets=None):
# if name not in cls.all_running_platforms:
# if targets is None:
# print("Should not got there.")
# print(cls.all_running_platforms)
# raise KeyError(name)
# cls.all_running_platforms[name] = cls.all_platforms[name](targets)
# return cls.all_running_platforms[name]
#
# def __init__(self, targets):
# self.all_running_platforms[self.name] = self
# self.buildEnv = BuildEnv(self)
# self.setup_toolchains(targets)
#
# def __str__(self):
# return "{}_{}".format(self.build, 'static' if self.static else 'dyn')
#
# def setup_toolchains(self, targets):
# for tlc_name in self.toolchain_names:
# ToolchainClass = Dependency.all_deps[tlc_name]
# targets[('source', tlc_name)] = ToolchainClass.Source
# plt_name = 'neutral' if ToolchainClass.neutral else self.name
# targets[(plt_name, tlc_name)] = ToolchainClass.Builder
#
# def add_targets(self, targetName, targets):
# if (self.name, targetName) in targets:
# return []
# targetClass = Dependency.all_deps[targetName]
# targets[('source', targetName)] = targetClass.Source
# targets[(self.name, targetName)] = targetClass.Builder
# for dep in targetClass.Builder.get_dependencies(self, False):
# if isinstance(dep, tuple):
# depPlatformName, depName = dep
# else:
# depPlatformName, depName = self.name, dep
# depPlatform = self.get_platform(depPlatformName, targets)
# depPlatform.add_targets(depName, targets)
# return [(self.name, targetName)]
#
# def get_fully_qualified_dep(self, dep):
# if isinstance(dep, tuple):
# return dep
# else:
# return self.name, dep
#
#
# def get_cross_config(self):
# return {}
#
#
# def get_env(self):
# return DefaultEnv()
#
#
# def get_bin_dir(self):
# return []
#
#
# def set_compiler(self, env):
# pass
#
#
# def set_comp_flags(self, env):
# if self.static:
# env['CFLAGS'] = env['CFLAGS'] + ' -fPIC'
# env['CXXFLAGS'] = env['CXXFLAGS'] + ' -fPIC'
#
#
# def _gen_crossfile(self, name, outname=None):
# if outname is None:
# outname = name
# crossfile = pj(self.buildEnv.build_dir, outname)
# template_file = pj(TEMPLATES_DIR, name)
# with open(template_file, 'r') as f:
# template = f.read()
# content = template.format(
# **self.get_cross_config()
# )
# with open(crossfile, 'w') as outfile:
# outfile.write(content)
# return crossfile
#
# def finalize_setup(self):
# self.buildEnv.cross_config = self.get_cross_config()
# self.buildEnv.meson_crossfile = None
# self.buildEnv.cmake_crossfile = None
#
# def clean_intermediate_directories(self):
# self.buildEnv.clean_intermediate_directories()
#
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
which might include code, classes, or functions. Output only the next line. | 'host_machine': { |
Using the snippet: <|code_start|>
class UUID(Dependency):
name = 'uuid'
class Source(ReleaseDownload):
archive = Remotefile('e2fsprogs-libs-1.43.4.tar.gz',
'eed4516325768255c9745e7b82c9d7d0393abce302520a5b2cde693204b0e419',
<|code_end|>
, determine the next line of code. You have imports:
from .base import (
Dependency,
ReleaseDownload,
MakeBuilder
)
from kiwixbuild.utils import Remotefile
and context (class names, function names, or code) available:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MakeBuilder(Builder):
# configure_option_template = "{dep_options} {static_option} {env_option} --prefix {install_dir} --libdir {libdir}"
# configure_option = ""
# dynamic_configure_option = "--enable-shared --disable-static"
# static_configure_option = "--enable-static --disable-shared"
# make_option = ""
# install_option = ""
# configure_script = "configure"
# configure_env = {
# '_format_CFLAGS' : '{env[CFLAGS]} -O3',
# '_format_CXXFLAGS': '{env[CXXFLAGS]} -O3'
# }
# make_target = ""
# flatpak_buildsystem = None
#
# @property
# def make_install_target(self):
# if self.buildEnv.platformInfo.build == 'iOS':
# return 'install'
# return 'install-strip'
#
# @property
# def all_configure_option(self):
# option = self.configure_option_template.format(
# dep_options=self.configure_option,
# static_option=self.static_configure_option if self.buildEnv.platformInfo.static else self.dynamic_configure_option,
# env_option=self.buildEnv.platformInfo.configure_option if not self.target.force_native_build else "",
# install_dir=self.buildEnv.install_dir,
# libdir=pj(self.buildEnv.install_dir, self.buildEnv.libprefix)
# )
# return option
#
# def set_configure_env(self, env):
# dep_conf_env = self.configure_env
# if not dep_conf_env:
# return
# for k, v in dep_conf_env.items():
# if k.startswith('_format_'):
# v = v.format(buildEnv=self.buildEnv, env=env)
# env[k[8:]] = v
# else:
# env[k] = v
#
#
# def _configure(self, context):
# context.try_skip(self.build_path)
# command = "{configure_script} {configure_option}"
# command = command.format(
# configure_script=pj(self.source_path, self.configure_script),
# configure_option=self.all_configure_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# self.set_configure_env(env)
# run_command(command, self.build_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "make -j4 {make_target} {make_option}".format(
# make_target=self.make_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "make {make_install_target} {make_option}".format(
# make_install_target=self.make_install_target,
# make_option=self.make_option
# )
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# context.try_skip(self.build_path)
# command = "make dist"
# env = self.get_env(cross_comp_flags=True, cross_compilers=True, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | 'https://www.kernel.org/pub/linux/kernel/people/tytso/e2fsprogs/v1.43.4/e2fsprogs-libs-1.43.4.tar.gz') |
Next line prediction: <|code_start|>
class Pugixml(Dependency):
name = "pugixml"
class Source(ReleaseDownload):
archive = Remotefile('pugixml-1.2.tar.gz',
<|code_end|>
. Use current file imports:
(from .base import (
Dependency,
ReleaseDownload,
MesonBuilder)
from kiwixbuild.utils import Remotefile)
and context including class names, function names, or small code snippets from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MesonBuilder(Builder):
# configure_option = ""
# test_option = ""
# flatpak_buildsystem = 'meson'
#
# @property
# def build_type(self):
# return 'release' if option('make_release') else 'debug'
#
# @property
# def strip_option(self):
# return '--strip' if option('make_release') else ''
#
# @property
# def library_type(self):
# return 'static' if self.buildEnv.platformInfo.static else 'shared'
#
# def _configure(self, context):
# context.no_skip = False
# context.try_skip(self.build_path)
# if os.path.exists(self.build_path):
# shutil.rmtree(self.build_path)
# os.makedirs(self.build_path)
# configure_option = self.configure_option.format(buildEnv=self.buildEnv)
# cross_option = ""
# if not self.target.force_native_build and self.buildEnv.meson_crossfile:
# cross_option = "--cross-file {}".format(
# self.buildEnv.meson_crossfile)
# command = ("{command} . {build_path}"
# " --buildtype={build_type} {strip_option}"
# " --default-library={library_type}"
# " {configure_option}"
# " --prefix={buildEnv.install_dir}"
# " --libdir={buildEnv.libprefix}"
# " {cross_option}")
# command = command.format(
# command=neutralEnv('meson_command'),
# build_type=self.build_type,
# strip_option=self.strip_option,
# library_type=self.library_type,
# configure_option=configure_option,
# build_path=self.build_path,
# buildEnv=self.buildEnv,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.source_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "{} -v".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _test(self, context):
# context.try_skip(self.build_path)
# if ( self.buildEnv.platformInfo.build == 'android'
# or (self.buildEnv.platformInfo.build != 'native'
# and not self.buildEnv.platformInfo.static)
# ):
# raise SkipCommand()
# command = "{} --verbose {}".format(neutralEnv('mesontest_command'), self.test_option)
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "{} -v install".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "{} -v dist".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | '0f422dad86da0a2e56a37fb2a88376aae6e931f22cc8b956978460c9db06136b') |
Continue the code snippet: <|code_start|>
class Pugixml(Dependency):
name = "pugixml"
class Source(ReleaseDownload):
archive = Remotefile('pugixml-1.2.tar.gz',
<|code_end|>
. Use current file imports:
from .base import (
Dependency,
ReleaseDownload,
MesonBuilder)
from kiwixbuild.utils import Remotefile
and context (classes, functions, or code) from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MesonBuilder(Builder):
# configure_option = ""
# test_option = ""
# flatpak_buildsystem = 'meson'
#
# @property
# def build_type(self):
# return 'release' if option('make_release') else 'debug'
#
# @property
# def strip_option(self):
# return '--strip' if option('make_release') else ''
#
# @property
# def library_type(self):
# return 'static' if self.buildEnv.platformInfo.static else 'shared'
#
# def _configure(self, context):
# context.no_skip = False
# context.try_skip(self.build_path)
# if os.path.exists(self.build_path):
# shutil.rmtree(self.build_path)
# os.makedirs(self.build_path)
# configure_option = self.configure_option.format(buildEnv=self.buildEnv)
# cross_option = ""
# if not self.target.force_native_build and self.buildEnv.meson_crossfile:
# cross_option = "--cross-file {}".format(
# self.buildEnv.meson_crossfile)
# command = ("{command} . {build_path}"
# " --buildtype={build_type} {strip_option}"
# " --default-library={library_type}"
# " {configure_option}"
# " --prefix={buildEnv.install_dir}"
# " --libdir={buildEnv.libprefix}"
# " {cross_option}")
# command = command.format(
# command=neutralEnv('meson_command'),
# build_type=self.build_type,
# strip_option=self.strip_option,
# library_type=self.library_type,
# configure_option=configure_option,
# build_path=self.build_path,
# buildEnv=self.buildEnv,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.source_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "{} -v".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _test(self, context):
# context.try_skip(self.build_path)
# if ( self.buildEnv.platformInfo.build == 'android'
# or (self.buildEnv.platformInfo.build != 'native'
# and not self.buildEnv.platformInfo.static)
# ):
# raise SkipCommand()
# command = "{} --verbose {}".format(neutralEnv('mesontest_command'), self.test_option)
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "{} -v install".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "{} -v dist".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | '0f422dad86da0a2e56a37fb2a88376aae6e931f22cc8b956978460c9db06136b') |
Predict the next line after this snippet: <|code_start|>
class Pugixml(Dependency):
name = "pugixml"
class Source(ReleaseDownload):
archive = Remotefile('pugixml-1.2.tar.gz',
<|code_end|>
using the current file's imports:
from .base import (
Dependency,
ReleaseDownload,
MesonBuilder)
from kiwixbuild.utils import Remotefile
and any relevant context from other files:
# Path: kiwixbuild/dependencies/base.py
# class Dependency(metaclass=_MetaDependency):
# all_deps = {}
# force_build = False
# force_native_build = False
# dont_skip = False
#
# @classmethod
# def version(cls):
# if cls.name in base_deps_versions:
# return base_deps_versions[cls.name]
# elif option('make_release'):
# return main_project_versions.get(cls.name, None)
# return None
#
# @classmethod
# def full_name(cls):
# if cls.version():
# return "{}-{}".format(cls.name, cls.version())
# return cls.name
#
# class ReleaseDownload(Source):
# archive_top_dir = None
#
# @property
# def extract_path(self):
# return pj(neutralEnv('source_dir'), self.source_dir)
#
# def _download(self, context):
# context.try_skip(neutralEnv('archive_dir'), self.full_name)
# neutralEnv('download')(self.archive)
#
# def _extract(self, context):
# context.try_skip(self.extract_path)
# if os.path.exists(self.extract_path):
# shutil.rmtree(self.extract_path)
# extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
# neutralEnv('source_dir'),
# topdir=self.archive_top_dir,
# name=self.source_dir)
#
# def prepare(self):
# self.command('download', self._download)
# self.command('extract', self._extract)
# if hasattr(self, 'patches'):
# self.command('patch', self._patch)
# if hasattr(self, '_post_prepare_script'):
# self.command('post_prepare_script', self._post_prepare_script)
#
# class MesonBuilder(Builder):
# configure_option = ""
# test_option = ""
# flatpak_buildsystem = 'meson'
#
# @property
# def build_type(self):
# return 'release' if option('make_release') else 'debug'
#
# @property
# def strip_option(self):
# return '--strip' if option('make_release') else ''
#
# @property
# def library_type(self):
# return 'static' if self.buildEnv.platformInfo.static else 'shared'
#
# def _configure(self, context):
# context.no_skip = False
# context.try_skip(self.build_path)
# if os.path.exists(self.build_path):
# shutil.rmtree(self.build_path)
# os.makedirs(self.build_path)
# configure_option = self.configure_option.format(buildEnv=self.buildEnv)
# cross_option = ""
# if not self.target.force_native_build and self.buildEnv.meson_crossfile:
# cross_option = "--cross-file {}".format(
# self.buildEnv.meson_crossfile)
# command = ("{command} . {build_path}"
# " --buildtype={build_type} {strip_option}"
# " --default-library={library_type}"
# " {configure_option}"
# " --prefix={buildEnv.install_dir}"
# " --libdir={buildEnv.libprefix}"
# " {cross_option}")
# command = command.format(
# command=neutralEnv('meson_command'),
# build_type=self.build_type,
# strip_option=self.strip_option,
# library_type=self.library_type,
# configure_option=configure_option,
# build_path=self.build_path,
# buildEnv=self.buildEnv,
# cross_option=cross_option
# )
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.source_path, context, env=env)
#
# def _compile(self, context):
# context.try_skip(self.build_path)
# command = "{} -v".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _test(self, context):
# context.try_skip(self.build_path)
# if ( self.buildEnv.platformInfo.build == 'android'
# or (self.buildEnv.platformInfo.build != 'native'
# and not self.buildEnv.platformInfo.static)
# ):
# raise SkipCommand()
# command = "{} --verbose {}".format(neutralEnv('mesontest_command'), self.test_option)
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _install(self, context):
# context.try_skip(self.build_path)
# command = "{} -v install".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# def _make_dist(self, context):
# command = "{} -v dist".format(neutralEnv('ninja_command'))
# env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
# run_command(command, self.build_path, context, env=env)
#
# Path: kiwixbuild/utils.py
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# def __new__(cls, name, sha256, url=None):
# if url is None:
# url = REMOTE_PREFIX + name
# return super().__new__(cls, name, sha256, url)
. Output only the next line. | '0f422dad86da0a2e56a37fb2a88376aae6e931f22cc8b956978460c9db06136b') |
Predict the next line after this snippet: <|code_start|> force_build = False
force_native_build = False
dont_skip = False
@classmethod
def version(cls):
if cls.name in base_deps_versions:
return base_deps_versions[cls.name]
elif option('make_release'):
return main_project_versions.get(cls.name, None)
return None
@classmethod
def full_name(cls):
if cls.version():
return "{}-{}".format(cls.name, cls.version())
return cls.name
class Source:
"""Base Class to the real preparator
A source preparator must install source in the self.source_dir attribute
inside the neutralEnv.source_dir."""
def __init__(self, target):
self.target = target
@property
def name(self):
return self.target.name
<|code_end|>
using the current file's imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and any relevant context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | @property |
Using the snippet: <|code_start|> options = ""
if 'QMAKE_CC' in os.environ:
options += 'QMAKE_CC={} '.format(os.environ['QMAKE_CC'])
if 'QMAKE_CXX' in os.environ:
options += 'QMAKE_CXX={} '.format(os.environ['QMAKE_CXX'])
return options
def _configure(self, context):
context.try_skip(self.build_path)
cross_option = ""
command = ("{command} {configure_option}"
" {env_option}"
" {source_path}"
" {cross_option}")
command = command.format(
command = neutralEnv('qmake_command'),
configure_option=self.configure_option,
env_option=self.env_option,
source_path=self.source_path,
cross_option=cross_option
)
env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True)
self.set_configure_env(env)
run_command(command, self.build_path, context, env=env)
def _make_dist(self, context):
command = "git archive -o {build_dir}/{name}.tar.gz --prefix={name}/ HEAD"
command = command.format(
build_dir = self.build_path,
name = self.target.full_name()
<|code_end|>
, determine the next line of code. You have imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context (class names, function names, or code) available:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | ) |
Here is a snippet: <|code_start|> return base_source_path
@property
def build_path(self):
return pj(self.buildEnv.build_dir, self.target.full_name())
@property
def _log_dir(self):
return self.buildEnv.log_dir
def command(self, name, function, *args):
print(" {} {} : ".format(name, self.name), end="", flush=True)
log = pj(self._log_dir, 'cmd_{}_{}.log'.format(name, self.name))
context = Context(name, log, self.target.force_native_build)
if self.target.force_build:
context.no_skip = True
try:
start_time = time.time()
ret = function(*args, context=context)
context._finalise()
duration = time.time() - start_time
print(colorize("OK"), "({:.1f}s)".format(duration))
return ret
except SkipCommand as e:
print(e)
except WarningMessage as e:
print(e)
except subprocess.CalledProcessError:
print(colorize("ERROR"))
try:
<|code_end|>
. Write the next line using the current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
, which may include functions, classes, or code. Output only the next line. | with open(log, 'r') as f: |
Given snippet: <|code_start|>
@property
def strip_option(self):
return '--strip' if option('make_release') else ''
@property
def library_type(self):
return 'static' if self.buildEnv.platformInfo.static else 'shared'
def _configure(self, context):
context.no_skip = False
context.try_skip(self.build_path)
if os.path.exists(self.build_path):
shutil.rmtree(self.build_path)
os.makedirs(self.build_path)
configure_option = self.configure_option.format(buildEnv=self.buildEnv)
cross_option = ""
if not self.target.force_native_build and self.buildEnv.meson_crossfile:
cross_option = "--cross-file {}".format(
self.buildEnv.meson_crossfile)
command = ("{command} . {build_path}"
" --buildtype={build_type} {strip_option}"
" --default-library={library_type}"
" {configure_option}"
" --prefix={buildEnv.install_dir}"
" --libdir={buildEnv.libprefix}"
" {cross_option}")
command = command.format(
command=neutralEnv('meson_command'),
build_type=self.build_type,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
which might include code, classes, or functions. Output only the next line. | strip_option=self.strip_option, |
Using the snippet: <|code_start|> return pj(neutralEnv('source_dir'), self.source_dir)
@property
def _log_dir(self):
return neutralEnv('log_dir')
def _patch(self, context):
context.try_skip(self.source_path)
for p in self.patches:
with open(pj(SCRIPT_DIR, 'patches', p), 'r') as patch_input:
run_command("patch -p1", self.source_path, context, input=patch_input.read())
def command(self, name, function, *args):
print(" {} {} : ".format(name, self.name), end="", flush=True)
log = pj(self._log_dir, 'cmd_{}_{}.log'.format(name, self.name))
context = Context(name, log, True)
try:
start_time = time.time()
ret = function(*args, context=context)
context._finalise()
duration = time.time() - start_time
print(colorize("OK"), "({:.1f}s)".format(duration))
return ret
except WarningMessage as e:
print(e)
except SkipCommand as e:
print(e)
except subprocess.CalledProcessError:
print(colorize("ERROR"))
try:
<|code_end|>
, determine the next line of code. You have imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context (class names, function names, or code) available:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | with open(log, 'r') as f: |
Predict the next line after this snippet: <|code_start|> force_native_build = False
dont_skip = False
@classmethod
def version(cls):
if cls.name in base_deps_versions:
return base_deps_versions[cls.name]
elif option('make_release'):
return main_project_versions.get(cls.name, None)
return None
@classmethod
def full_name(cls):
if cls.version():
return "{}-{}".format(cls.name, cls.version())
return cls.name
class Source:
"""Base Class to the real preparator
A source preparator must install source in the self.source_dir attribute
inside the neutralEnv.source_dir."""
def __init__(self, target):
self.target = target
@property
def name(self):
return self.target.name
@property
<|code_end|>
using the current file's imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and any relevant context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | def full_name(self): |
Next line prediction: <|code_start|> ):
raise SkipCommand()
command = "{} --verbose {}".format(neutralEnv('mesontest_command'), self.test_option)
env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
run_command(command, self.build_path, context, env=env)
def _install(self, context):
context.try_skip(self.build_path)
command = "{} -v install".format(neutralEnv('ninja_command'))
env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
run_command(command, self.build_path, context, env=env)
def _make_dist(self, context):
command = "{} -v dist".format(neutralEnv('ninja_command'))
env = self.get_env(cross_comp_flags=False, cross_compilers=False, cross_path=True)
run_command(command, self.build_path, context, env=env)
class GradleBuilder(Builder):
gradle_target = "assembleKiwixRelease assembleKiwixDebug"
gradle_option = "-i --no-daemon --build-cache"
def build(self):
self.command('configure', self._configure)
if hasattr(self, '_pre_compile_script'):
self.command('pre_compile_script', self._pre_compile_script)
self.command('compile', self._compile)
def _configure(self, context):
# We don't have a lot to configure by itself
<|code_end|>
. Use current file imports:
(import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step)
and context including class names, function names, or small code snippets from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | context.try_skip(self.build_path) |
Next line prediction: <|code_start|> context.no_skip = True
try:
start_time = time.time()
ret = function(*args, context=context)
context._finalise()
duration = time.time() - start_time
print(colorize("OK"), "({:.1f}s)".format(duration))
return ret
except SkipCommand as e:
print(e)
except WarningMessage as e:
print(e)
except subprocess.CalledProcessError:
print(colorize("ERROR"))
try:
with open(log, 'r') as f:
print(f.read())
except:
pass
raise StopBuild()
except:
print(colorize("ERROR"))
raise
def build(self):
if hasattr(self, '_pre_build_script'):
self.command('pre_build_script', self._pre_build_script)
self.command('configure', self._configure)
self.command('compile', self._compile)
if hasattr(self, '_test'):
<|code_end|>
. Use current file imports:
(import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step)
and context including class names, function names, or small code snippets from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | self.command('test', self._test) |
Next line prediction: <|code_start|> def strip_option(self):
return '--strip' if option('make_release') else ''
@property
def library_type(self):
return 'static' if self.buildEnv.platformInfo.static else 'shared'
def _configure(self, context):
context.no_skip = False
context.try_skip(self.build_path)
if os.path.exists(self.build_path):
shutil.rmtree(self.build_path)
os.makedirs(self.build_path)
configure_option = self.configure_option.format(buildEnv=self.buildEnv)
cross_option = ""
if not self.target.force_native_build and self.buildEnv.meson_crossfile:
cross_option = "--cross-file {}".format(
self.buildEnv.meson_crossfile)
command = ("{command} . {build_path}"
" --buildtype={build_type} {strip_option}"
" --default-library={library_type}"
" {configure_option}"
" --prefix={buildEnv.install_dir}"
" --libdir={buildEnv.libprefix}"
" {cross_option}")
command = command.format(
command=neutralEnv('meson_command'),
build_type=self.build_type,
strip_option=self.strip_option,
library_type=self.library_type,
<|code_end|>
. Use current file imports:
(import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step)
and context including class names, function names, or small code snippets from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
. Output only the next line. | configure_option=configure_option, |
Here is a snippet: <|code_start|> context.no_skip = True
try:
start_time = time.time()
ret = function(*args, context=context)
context._finalise()
duration = time.time() - start_time
print(colorize("OK"), "({:.1f}s)".format(duration))
return ret
except SkipCommand as e:
print(e)
except WarningMessage as e:
print(e)
except subprocess.CalledProcessError:
print(colorize("ERROR"))
try:
with open(log, 'r') as f:
print(f.read())
except:
pass
raise StopBuild()
except:
print(colorize("ERROR"))
raise
def build(self):
if hasattr(self, '_pre_build_script'):
self.command('pre_build_script', self._pre_build_script)
self.command('configure', self._configure)
self.command('compile', self._compile)
if hasattr(self, '_test'):
<|code_end|>
. Write the next line using the current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
, which may include functions, classes, or code. Output only the next line. | self.command('test', self._test) |
Predict the next line for this snippet: <|code_start|> module['buildir'] = True
class QMakeBuilder(MakeBuilder):
qmake_target = ""
flatpak_buildsystem = 'qmake'
@property
def env_option(self):
options = ""
if 'QMAKE_CC' in os.environ:
options += 'QMAKE_CC={} '.format(os.environ['QMAKE_CC'])
if 'QMAKE_CXX' in os.environ:
options += 'QMAKE_CXX={} '.format(os.environ['QMAKE_CXX'])
return options
def _configure(self, context):
context.try_skip(self.build_path)
cross_option = ""
command = ("{command} {configure_option}"
" {env_option}"
" {source_path}"
" {cross_option}")
command = command.format(
command = neutralEnv('qmake_command'),
configure_option=self.configure_option,
env_option=self.env_option,
source_path=self.source_path,
cross_option=cross_option
)
<|code_end|>
with the help of current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context from other files:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
, which may contain function names, class names, or code. Output only the next line. | env = self.get_env(cross_comp_flags=True, cross_compilers=False, cross_path=True) |
Given snippet: <|code_start|>class NoopSource(Source):
def prepare(self):
pass
class ReleaseDownload(Source):
archive_top_dir = None
@property
def extract_path(self):
return pj(neutralEnv('source_dir'), self.source_dir)
def _download(self, context):
context.try_skip(neutralEnv('archive_dir'), self.full_name)
neutralEnv('download')(self.archive)
def _extract(self, context):
context.try_skip(self.extract_path)
if os.path.exists(self.extract_path):
shutil.rmtree(self.extract_path)
extract_archive(pj(neutralEnv('archive_dir'), self.archive.name),
neutralEnv('source_dir'),
topdir=self.archive_top_dir,
name=self.source_dir)
def prepare(self):
self.command('download', self._download)
self.command('extract', self._extract)
if hasattr(self, 'patches'):
self.command('patch', self._patch)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
which might include code, classes, or functions. Output only the next line. | if hasattr(self, '_post_prepare_script'): |
Given snippet: <|code_start|>class _MetaDependency(type):
def __new__(cls, name, bases, dct):
_class = type.__new__(cls, name, bases, dct)
if name != 'Dependency':
dep_name = dct['name']
Dependency.all_deps[dep_name] = _class
return _class
class Dependency(metaclass=_MetaDependency):
all_deps = {}
force_build = False
force_native_build = False
dont_skip = False
@classmethod
def version(cls):
if cls.name in base_deps_versions:
return base_deps_versions[cls.name]
elif option('make_release'):
return main_project_versions.get(cls.name, None)
return None
@classmethod
def full_name(cls):
if cls.version():
return "{}-{}".format(cls.name, cls.version())
return cls.name
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import subprocess
import os
import shutil
import time
from kiwixbuild.utils import pj, Context, SkipCommand, WarningMessage, extract_archive, Defaultdict, StopBuild, run_command, colorize
from kiwixbuild.versions import main_project_versions, base_deps_versions
from kiwixbuild._global import neutralEnv, option, get_target_step
and context:
# Path: kiwixbuild/utils.py
# COLORS = {
# 'OK': '\033[92m',
# 'WARNING': '\033[93m',
# 'NEEDED': '\033[93m',
# 'SKIP': '\033[34m',
# 'ERROR': '\033[91m',
# '': '\033[0m',
# }
# REMOTE_PREFIX = 'http://mirror.download.kiwix.org/dev/'
# def which(name):
# def xrun_find(name):
# def __getattr__(self, name):
# def DefaultEnv():
# def remove_duplicates(iterable, key_function=None):
# def get_sha256(path):
# def colorize(text, color=None):
# def print_progress(progress):
# def add_execution_right(file_path):
# def copy_tree(src, dst, post_copy_function=None):
# def download_remote(what, where):
# def __init__(self, msg=""):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __new__(cls, name, sha256, url=None):
# def __init__(self, command_name, log_file, force_native_build):
# def try_skip(self, path, extra_name=""):
# def _finalise(self):
# def extract_archive(archive_path, dest_dir, topdir=None, name=None):
# def run_command(command, cwd, context, *, env=None, input=None):
# class Defaultdict(defaultdict):
# class BaseCommandResult(Exception):
# class SkipCommand(BaseCommandResult):
# class WarningMessage(BaseCommandResult):
# class StopBuild(BaseCommandResult):
# class Remotefile(namedtuple('Remotefile', ('name', 'sha256', 'url'))):
# class Context:
#
# Path: kiwixbuild/versions.py
#
# Path: kiwixbuild/_global.py
# def neutralEnv(what):
# return getattr(_neutralEnv, what)
#
# def option(what):
# return getattr(_options, what)
#
# def get_target_step(key, default_context=None):
# if isinstance(key, tuple):
# context, target = key
# else:
# context, target = default_context, key
# return _target_steps[(context, target)]
which might include code, classes, or functions. Output only the next line. | class Source: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.