Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Given the code snippet: <|code_start|># under the License.
_POSTGRESQL = 'postgresql'
_MYSQL = 'mysql'
_ORM = 'orm'
_REPO_DRIVERS_MAP = {
_POSTGRESQL: 'monasca_notification.common.repositories.'
'postgres.pgsql_repo:PostgresqlRepo',
_MYSQL: 'monasca_notification.common.repositories.'
'mysql.mysql_repo:MysqlRepo',
_ORM: 'monasca_notification.common.repositories.'
'orm.orm_repo:OrmRepo'
}
_ACCEPTABLE_DRIVER_KEYS = set(list(_REPO_DRIVERS_MAP.keys()) +
list(_REPO_DRIVERS_MAP.values()))
_DEFAULT_DB_HOST = '127.0.0.1'
_DEFAULT_DB_USER = 'notification'
_DEFAULT_DB_PASSWORD = 'password' # nosec bandit B105
_DEFAULT_DB_NAME = 'mon'
_DEFAULT_POSTGRESQL_PORT = 5432
_DEFAULT_MYSQL_PORT = 3306
db_group = cfg.OptGroup('database',
title='Database Options',
help='Driver configuration for database connectivity.')
db_opts = [
<|code_end|>
, generate the next line using the imports in this file:
from oslo_config import cfg
from monasca_notification.conf import types
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/conf/types.py
# LOG = log.getLogger(__name__)
# class Plugin(types.String):
# class PluginOpt(cfg.Opt):
# class HostAddressPortType(types.HostAddress):
# def __init__(self, ignore_missing=False, choices=None, plugin_map=None):
# def __call__(self, value):
# def _get_actual_target(self, value):
# def __init__(self, name, choices=None, plugin_map=None, **kwargs):
# def __init__(self, version=None):
# def __call__(self, value):
# def _validate_port(port=80):
# def _validate_addr(self, addr):
. Output only the next line. | types.PluginOpt(name='repo_driver', choices=_ACCEPTABLE_DRIVER_KEYS, |
Next line prediction: <|code_start|>Link to Grafana: {grafana_url}
With dimensions:
{metric_dimensions}'''
EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
alarm_id: {alarm_id}
Lifecycle state: {lifecycle_state}
Link: {link}
Link to Grafana: {grafana_url}
With dimensions:
{metric_dimensions}'''
EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
Alarm_id: {alarm_id}
Lifecycle state: {lifecycle_state}
Link: {link}
Link to Grafana: {grafana_url}
With dimensions
{metric_dimensions}'''
<|code_end|>
. Use current file imports:
(import email.header
import email.mime.text
import email.utils
import smtplib
import time
from debtcollector import removals
from oslo_config import cfg
from monasca_notification.plugins import abstract_notifier)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/plugins/abstract_notifier.py
# class AbstractNotifier(object, metaclass=abc.ABCMeta):
# def __init__(self):
# def statsd_name(self):
# def config(self, config):
# def send_notification(self, notification):
. Output only the next line. | class EmailNotifier(abstract_notifier.AbstractNotifier): |
Given the code snippet: <|code_start|># (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
class NotificationProcessor(object):
def __init__(self):
self.statsd = get_statsd_client()
notifiers.init(self.statsd)
notifiers.load_plugins()
notifiers.config()
<|code_end|>
, generate the next line using the imports in this file:
from oslo_log import log as logging
from monasca_notification.common.repositories import exceptions as exc
from monasca_notification.common.utils import get_db_repo
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.types import notifiers
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/common/utils.py
# def get_db_repo():
# repo_driver = CONF.database.repo_driver
# LOG.debug('Enabling the %s RDB repository', repo_driver)
# return repo_driver(CONF)
#
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
. Output only the next line. | self._db_repo = get_db_repo() |
Using the snippet: <|code_start|># (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
class NotificationProcessor(object):
def __init__(self):
<|code_end|>
, determine the next line of code. You have imports:
from oslo_log import log as logging
from monasca_notification.common.repositories import exceptions as exc
from monasca_notification.common.utils import get_db_repo
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.types import notifiers
and context (class names, function names, or code) available:
# Path: monasca_notification/common/utils.py
# def get_db_repo():
# repo_driver = CONF.database.repo_driver
# LOG.debug('Enabling the %s RDB repository', repo_driver)
# return repo_driver(CONF)
#
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
. Output only the next line. | self.statsd = get_statsd_client() |
Using the snippet: <|code_start|># (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
class NotificationProcessor(object):
def __init__(self):
self.statsd = get_statsd_client()
<|code_end|>
, determine the next line of code. You have imports:
from oslo_log import log as logging
from monasca_notification.common.repositories import exceptions as exc
from monasca_notification.common.utils import get_db_repo
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.types import notifiers
and context (class names, function names, or code) available:
# Path: monasca_notification/common/utils.py
# def get_db_repo():
# repo_driver = CONF.database.repo_driver
# LOG.debug('Enabling the %s RDB repository', repo_driver)
# return repo_driver(CONF)
#
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
. Output only the next line. | notifiers.init(self.statsd) |
Given the code snippet: <|code_start|># Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestStatsdConnection(base.BaseTestCase):
extra_dimensions = {'foo': 'bar'}
base_name = 'monasca'
def test_statsd_default_connection(self):
with patch(
'monasca_notification.common.utils.monascastatsd.Client') as c:
<|code_end|>
, generate the next line using the imports in this file:
from unittest.mock import patch
from monasca_notification.common import utils
from tests import base
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/common/utils.py
# LOG = log.getLogger(__name__)
# CONF = cfg.CONF
# NOTIFICATION_DIMENSIONS = {'service': 'monitoring',
# 'component': 'monasca-notification'}
# def get_db_repo():
# def construct_notification_object(db_repo, notification_json):
# def grab_stored_notification_method(db_repo, notification_id):
# def get_statsd_client(dimensions=None):
# def _set_connection(self, connection, host, port):
# def __init__(self, host='localhost', port=8125, max_buffer_size=50):
# def connect(self, host, port):
# def _send_to_server(self, packet):
# def get_keystone_session():
# def get_auth_token():
# class OfflineClient(monascastatsd.Client):
# class OfflineConnection(monascastatsd.Connection):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | utils.get_statsd_client() |
Continue the code snippet: <|code_start|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the notification class."""
def test_json():
"""Test the to_json method to verify it behaves as expected.
"""
ts = 1429029121239
alarm = {'alarmId': 'alarmId',
'alarmName': 'alarmName',
'timestamp': ts,
'stateChangeReason': 'stateChangeReason',
'newState': 'newState',
'severity': 'LOW',
"link": "some-link",
"lifecycleState": "OPEN",
'tenantId': 'tenantId',
'metrics': 'cpu_util'}
<|code_end|>
. Use current file imports:
import json
from monasca_notification import notification
and context (classes, functions, or code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
. Output only the next line. | test_notification = notification.Notification(1, 'ntype', 'name', |
Next line prediction: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
_DEFAULT_URL = '127.0.0.1:9092'
_DEFAULT_GROUP = 'monasca-notification'
_DEFAULT_ALARM_TOPIC = 'alarm-state-transitions'
_DEFAULT_NOTIFICATION_TOPIC = 'alarm-notifications'
_DEFAULT_RETRY_TOPIC = 'retry-notifications'
_DEFAULT_PERIODIC_TOPICS = {
'60': '60-seconds-notifications'
}
_DEFAULT_MAX_OFFSET_LAG = 600
kafka_group = cfg.OptGroup('kafka',
title='Kafka Options',
help='Options under this group allow to configure '
'valid connection or Kafka queue.')
kafka_opts = [
<|code_end|>
. Use current file imports:
(from oslo_config import cfg
from monasca_notification.conf import types)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/conf/types.py
# LOG = log.getLogger(__name__)
# class Plugin(types.String):
# class PluginOpt(cfg.Opt):
# class HostAddressPortType(types.HostAddress):
# def __init__(self, ignore_missing=False, choices=None, plugin_map=None):
# def __call__(self, value):
# def _get_actual_target(self, value):
# def __init__(self, name, choices=None, plugin_map=None, **kwargs):
# def __init__(self, version=None):
# def __call__(self, value):
# def _validate_port(port=80):
# def _validate_addr(self, addr):
. Output only the next line. | cfg.ListOpt(name='url', item_type=types.HostAddressPortType(), |
Predict the next line for this snippet: <|code_start|># limitations under the License.
CONF = cfg.CONF
"""
notification.address = https://hipchat.hpcloud.net/v2/room/<room_id>/notification?auth_token=432432
How to get access token?
1) Login to Hipchat with the user account which is used for notification
2) Go to this page. https://hipchat.hpcloud.net/account/api (Replace your hipchat server name)
3) You can see option to "Create token". Use the capability "SendNotification"
How to get the Room ID?
1) Login to Hipchat with the user account which is used for notification
2) Go to this page. https://hipchat.hpcloud.net/account/api (Replace your hipchat server name)
3) Click on the Rooms tab
4) Click on any Room of your choice.
5) Room ID is the API ID field
"""
SEVERITY_COLORS = {"low": 'green',
'medium': 'gray',
'high': 'yellow',
'critical': 'red'}
<|code_end|>
with the help of current file imports:
import requests
import simplejson as json
import urllib
from debtcollector import removals
from oslo_config import cfg
from monasca_notification.plugins import abstract_notifier
and context from other files:
# Path: monasca_notification/plugins/abstract_notifier.py
# class AbstractNotifier(object, metaclass=abc.ABCMeta):
# def __init__(self):
# def statsd_name(self):
# def config(self, config):
# def send_notification(self, notification):
, which may contain function names, class names, or code. Output only the next line. | class HipChatNotifier(abstract_notifier.AbstractNotifier): |
Continue the code snippet: <|code_start|> def _start_processor(self, notifications, mock_log, mock_smtp, mock_statsd, mock_pymsql):
"""Start the processor with the proper mocks
"""
# Since the log runs in another thread I can mock it directly,
# instead change the methods to put to a queue
mock_log.warn = self.trap.append
mock_log.error = self.trap.append
mock_smtp.SMTP = self._smtpStub
np.NotificationProcessor.insert_configured_plugins = mock.Mock()
processor = np.NotificationProcessor()
processor.send(notifications)
def _smtpStub(self, *arg, **kwargs):
return smtpStub(self.trap)
def email_setup(self, metric):
alarm_dict = {"tenantId": "0",
"alarmId": "0",
"alarmName": "test Alarm",
"oldState": "OK",
"newState": "ALARM",
"severity": "LOW",
"link": "some-link",
"lifecycleState": "OPEN",
"stateChangeReason": "I am alarming!",
"timestamp": time.time(),
"metrics": metric}
<|code_end|>
. Use current file imports:
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.processors import notification_processor as np
from tests import base
and context (classes, functions, or code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | notification = m_notification.Notification( |
Next line prediction: <|code_start|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests NotificationProcessor"""
class smtpStub(object):
def __init__(self, log_queue):
self.queue = log_queue
def sendmail(self, from_addr, to_addr, msg):
self.queue.put("%s %s %s" % (from_addr, to_addr, msg))
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
class TestNotificationProcessor(base.BaseTestCase):
def setUp(self):
super(TestNotificationProcessor, self).setUp()
self.trap = []
<|code_end|>
. Use current file imports:
(import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.processors import notification_processor as np
from tests import base)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | email_notifier.register_opts(base.config.CONF) |
Here is a snippet: <|code_start|>
email_notifier.register_opts(base.config.CONF)
self.conf_default(group='email_notifier', server='my.smtp.server',
port=25, user=None, password=None,
timeout=60, from_addr='hpcs.mon@hp.com')
self.conf_default(group='mysql', ssl=None, host='localhost',
port='3306', user='mysql_user', db='dbname',
passwd='mysql_passwd')
self.conf_default(group='statsd', host='localhost', port=8125)
self.conf_default(group='notification_types', enabled=[])
# ------------------------------------------------------------------------
# Test helper functions
# ------------------------------------------------------------------------
@mock.patch('pymysql.connect')
@mock.patch('monasca_notification.common.utils.monascastatsd')
@mock.patch('monasca_notification.plugins.email_notifier.smtplib')
@mock.patch('monasca_notification.processors.notification_processor.notifiers.log')
def _start_processor(self, notifications, mock_log, mock_smtp, mock_statsd, mock_pymsql):
"""Start the processor with the proper mocks
"""
# Since the log runs in another thread I can mock it directly,
# instead change the methods to put to a queue
mock_log.warn = self.trap.append
mock_log.error = self.trap.append
mock_smtp.SMTP = self._smtpStub
<|code_end|>
. Write the next line using the current file imports:
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.processors import notification_processor as np
from tests import base
and context from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
, which may include functions, classes, or code. Output only the next line. | np.NotificationProcessor.insert_configured_plugins = mock.Mock() |
Here is a snippet: <|code_start|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests NotificationProcessor"""
class smtpStub(object):
def __init__(self, log_queue):
self.queue = log_queue
def sendmail(self, from_addr, to_addr, msg):
self.queue.put("%s %s %s" % (from_addr, to_addr, msg))
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
<|code_end|>
. Write the next line using the current file imports:
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.processors import notification_processor as np
from tests import base
and context from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
, which may include functions, classes, or code. Output only the next line. | class TestNotificationProcessor(base.BaseTestCase): |
Continue the code snippet: <|code_start|> def setUp(self):
super(TestSlack, self).setUp(
slack_notifier.register_opts
)
self.conf_default(group='slack_notifier', timeout=50,
ca_certs='/etc/ssl/certs/ca-bundle.crt',
proxy='http://yourid:password@proxyserver:8080',
insecure=False)
self._trap = queue.Queue()
mock_log = mock.Mock()
mock_log.info = self._trap.put
mock_log.warn = self._trap.put
mock_log.error = self._trap.put
mock_log.exception = self._trap.put
self._slk = slack_notifier.SlackNotifier(mock_log)
slack_notifier.SlackNotifier._raw_data_url_caches = []
@mock.patch('monasca_notification.plugins.slack_notifier.requests')
def _notify(self, response_list, mock_requests):
mock_requests.post = mock.Mock(side_effect=response_list)
metric = []
metric_data = {'dimensions': {'hostname': 'foo1', 'service': 'bar1'}}
metric.append(metric_data)
alarm_dict = alarm(metric)
<|code_end|>
. Use current file imports:
import json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import slack_notifier
from tests import base
and context (classes, functions, or code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/slack_notifier.py
# CONF = cfg.CONF
# MAX_CACHE_SIZE = 100
# RESPONSE_OK = 'ok'
# class SlackNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def _build_slack_message(self, notification):
# def _render_message_template(self, params):
# def _check_response(self, result):
# def _send_message(self, request_options):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | notification = m_notification.Notification(0, 'slack', 'slack notification', |
Given the code snippet: <|code_start|> return {'old_state': 'OK',
'alarm_description': 'test Alarm description',
'message': 'I am alarming!',
'alarm_definition_id': 0,
'alarm_name': 'test Alarm',
'tenant_id': '0',
'metrics': [
{'dimensions': {
'hostname': 'foo1',
'service': 'bar1'}}
],
'alarm_id': '0',
'state': 'ALARM',
'alarm_timestamp': 1429023453}
class RequestsResponse(object):
def __init__(self, status, text, headers):
self.status_code = status
self.text = text
self.headers = headers
def json(self):
return json.loads(self.text)
class TestSlack(base.PluginTestCase):
def setUp(self):
super(TestSlack, self).setUp(
<|code_end|>
, generate the next line using the imports in this file:
import json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import slack_notifier
from tests import base
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/slack_notifier.py
# CONF = cfg.CONF
# MAX_CACHE_SIZE = 100
# RESPONSE_OK = 'ok'
# class SlackNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def _build_slack_message(self, notification):
# def _render_message_template(self, params):
# def _check_response(self, result):
# def _send_message(self, request_options):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | slack_notifier.register_opts |
Using the snippet: <|code_start|> 'metrics': metrics}
def slack_text():
return {'old_state': 'OK',
'alarm_description': 'test Alarm description',
'message': 'I am alarming!',
'alarm_definition_id': 0,
'alarm_name': 'test Alarm',
'tenant_id': '0',
'metrics': [
{'dimensions': {
'hostname': 'foo1',
'service': 'bar1'}}
],
'alarm_id': '0',
'state': 'ALARM',
'alarm_timestamp': 1429023453}
class RequestsResponse(object):
def __init__(self, status, text, headers):
self.status_code = status
self.text = text
self.headers = headers
def json(self):
return json.loads(self.text)
<|code_end|>
, determine the next line of code. You have imports:
import json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import slack_notifier
from tests import base
and context (class names, function names, or code) available:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/slack_notifier.py
# CONF = cfg.CONF
# MAX_CACHE_SIZE = 100
# RESPONSE_OK = 'ok'
# class SlackNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def _build_slack_message(self, notification):
# def _render_message_template(self, params):
# def _check_response(self, result):
# def _send_message(self, request_options):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | class TestSlack(base.PluginTestCase): |
Using the snippet: <|code_start|># (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
# Copyright 2017 Fujitsu LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
CONF = cfg.CONF
class NotificationEngine(object):
def __init__(self):
<|code_end|>
, determine the next line of code. You have imports:
import time
from oslo_config import cfg
from oslo_log import log as logging
from monasca_common.kafka import client_factory
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.processors import alarm_processor as ap
from monasca_notification.processors import notification_processor as np
and context (class names, function names, or code) available:
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/processors/alarm_processor.py
# CONF = cfg.CONF
# class AlarmProcessor(object):
# def __init__(self):
# def _parse_alarm(alarm_data):
# def _alarm_is_valid(self, alarm):
# def _build_notification(self, alarm):
# def to_notification(self, raw_alarm):
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
. Output only the next line. | self._statsd = get_statsd_client() |
Given the code snippet: <|code_start|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
CONF = cfg.CONF
class NotificationEngine(object):
def __init__(self):
self._statsd = get_statsd_client()
self._consumer = client_factory.get_kafka_consumer(
CONF.kafka.url,
CONF.kafka.group,
CONF.kafka.alarm_topic,
CONF.zookeeper.url,
CONF.zookeeper.notification_path,
CONF.kafka.legacy_kafka_client_enabled)
self._producer = client_factory.get_kafka_producer(
CONF.kafka.url,
CONF.kafka.legacy_kafka_client_enabled)
<|code_end|>
, generate the next line using the imports in this file:
import time
from oslo_config import cfg
from oslo_log import log as logging
from monasca_common.kafka import client_factory
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.processors import alarm_processor as ap
from monasca_notification.processors import notification_processor as np
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/processors/alarm_processor.py
# CONF = cfg.CONF
# class AlarmProcessor(object):
# def __init__(self):
# def _parse_alarm(alarm_data):
# def _alarm_is_valid(self, alarm):
# def _build_notification(self, alarm):
# def to_notification(self, raw_alarm):
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
. Output only the next line. | self._alarms = ap.AlarmProcessor() |
Continue the code snippet: <|code_start|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger(__name__)
CONF = cfg.CONF
class NotificationEngine(object):
def __init__(self):
self._statsd = get_statsd_client()
self._consumer = client_factory.get_kafka_consumer(
CONF.kafka.url,
CONF.kafka.group,
CONF.kafka.alarm_topic,
CONF.zookeeper.url,
CONF.zookeeper.notification_path,
CONF.kafka.legacy_kafka_client_enabled)
self._producer = client_factory.get_kafka_producer(
CONF.kafka.url,
CONF.kafka.legacy_kafka_client_enabled)
self._alarms = ap.AlarmProcessor()
<|code_end|>
. Use current file imports:
import time
from oslo_config import cfg
from oslo_log import log as logging
from monasca_common.kafka import client_factory
from monasca_notification.common.utils import get_statsd_client
from monasca_notification.processors import alarm_processor as ap
from monasca_notification.processors import notification_processor as np
and context (classes, functions, or code) from other files:
# Path: monasca_notification/common/utils.py
# def get_statsd_client(dimensions=None):
# local_dims = dimensions.copy() if dimensions else {}
# local_dims.update(NOTIFICATION_DIMENSIONS)
# if CONF.statsd.enable:
# LOG.debug("Establishing connection with statsd on {0}:{1}"
# .format(CONF.statsd.host, CONF.statsd.port))
# client = monascastatsd.Client(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# else:
# LOG.warn("StatsD monitoring disabled. Overriding monascastatsd.Client to use it offline")
# client = OfflineClient(name='monasca',
# host=CONF.statsd.host,
# port=CONF.statsd.port,
# dimensions=local_dims)
# return client
#
# Path: monasca_notification/processors/alarm_processor.py
# CONF = cfg.CONF
# class AlarmProcessor(object):
# def __init__(self):
# def _parse_alarm(alarm_data):
# def _alarm_is_valid(self, alarm):
# def _build_notification(self, alarm):
# def to_notification(self, raw_alarm):
#
# Path: monasca_notification/processors/notification_processor.py
# class NotificationProcessor(object):
# def __init__(self):
# def _remaining_plugin_types(self):
# def insert_configured_plugins(self):
# def send(self, notifications):
. Output only the next line. | self._notifier = np.NotificationProcessor() |
Using the snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOG = log.getLogger(__name__)
CONF = cfg.CONF
NOTIFICATION_DIMENSIONS = {'service': 'monitoring',
'component': 'monasca-notification'}
def get_db_repo():
repo_driver = CONF.database.repo_driver
LOG.debug('Enabling the %s RDB repository', repo_driver)
return repo_driver(CONF)
def construct_notification_object(db_repo, notification_json):
try:
<|code_end|>
, determine the next line of code. You have imports:
import monascastatsd
from keystoneauth1 import exceptions as kaexception
from keystoneauth1 import loading as kaloading
from oslo_config import cfg
from oslo_log import log
from monasca_notification.common.repositories import exceptions
from monasca_notification.notification import Notification
and context (class names, function names, or code) available:
# Path: monasca_notification/notification.py
# class Notification(object):
# """An abstract base class used to define the notification interface
# and common functions
# """
# __slots__ = (
# 'address',
# 'alarm_id',
# 'alarm_name',
# 'alarm_timestamp',
# 'id',
# 'message',
# 'name',
# 'notification_timestamp',
# 'state',
# 'severity',
# 'link',
# 'lifecycle_state',
# 'tenant_id',
# 'type',
# 'metrics',
# 'retry_count',
# 'raw_alarm',
# 'period',
# )
#
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# """Setup the notification object
# id - The notification id
# type - The notification type
# name - Name used in sending
# address - where to send the notification
# period - period of sending the notification
# retry_count - number of times we've tried to send
# alarm - info that caused the notification
# notifications that come after this one to remain uncommitted.
# Note that data may include six.text_type strings.
# """
# self.id = id
# self.address = address
# self.name = name
# self.type = type
# self.retry_count = retry_count
#
# self.raw_alarm = alarm
#
# self.alarm_id = alarm['alarmId']
# self.alarm_name = alarm['alarmName']
# # The event timestamp is in milliseconds
# self.alarm_timestamp = int(alarm['timestamp'] / 1000.0)
# self.message = alarm['stateChangeReason']
# self.state = alarm['newState']
# self.severity = alarm['severity']
# self.link = alarm['link']
# self.lifecycle_state = alarm['lifecycleState']
# self.tenant_id = alarm['tenantId']
# self.metrics = alarm['metrics']
#
# # to be updated on actual notification send time
# self.notification_timestamp = None
#
# self.period = period
#
# def __eq__(self, other):
# if not isinstance(other, Notification):
# return False
#
# for attrib in self.__slots__:
# if not getattr(self, attrib) == getattr(other, attrib):
# return False
#
# return True
#
# def __ne__(self, other):
# return not self.__eq__(other)
#
# def to_json(self):
# """Return json representation
# """
# notification_fields = [
# 'id',
# 'type',
# 'name',
# 'address',
# 'retry_count',
# 'raw_alarm',
# 'alarm_id',
# 'alarm_name',
# 'alarm_timestamp',
# 'message',
# 'notification_timestamp',
# 'state',
# 'severity',
# 'link',
# 'lifecycle_state',
# 'tenant_id',
# 'period',
# ]
# notification_data = {name: getattr(self, name)
# for name in notification_fields}
# return json.dumps(notification_data)
. Output only the next line. | notification = Notification(notification_json['id'], |
Continue the code snippet: <|code_start|> self.trap.put(data)
self.trap.put(headers)
r = requestsResponse(200)
return r
def _http_post_404(self, url, data, headers, **kwargs):
r = requestsResponse(404)
return r
def _http_post_exception(self, url, data, headers, **kwargs):
self.trap.put("timeout %s" % kwargs["timeout"])
raise requests.exceptions.Timeout
@mock.patch('monasca_notification.plugins.webhook_notifier.requests')
def notify(self, http_func, mock_requests):
mock_log = mock.MagicMock()
mock_log.warn = self.trap.put
mock_log.error = self.trap.put
mock_log.exception = self.trap.put
mock_requests.post = http_func
webhook = webhook_notifier.WebhookNotifier(mock_log)
metric = []
metric_data = {'dimensions': {'hostname': 'foo1', 'service': 'bar1'}}
metric.append(metric_data)
alarm_dict = alarm(metric)
<|code_end|>
. Use current file imports:
import requests
import simplejson as json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import webhook_notifier
from tests import base
and context (classes, functions, or code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | notification = m_notification.Notification(0, 'webhook', 'webhook notification', |
Given the code snippet: <|code_start|># limitations under the License.
def alarm(metrics):
return {"tenantId": "0",
"alarmId": "0",
"alarmDefinitionId": 0,
"alarmName": "test Alarm",
"alarmDescription": "test Alarm description",
"oldState": "OK",
"newState": "ALARM",
"severity": "LOW",
"link": "some-link",
"lifecycleState": "OPEN",
"stateChangeReason": "I am alarming!",
"timestamp": 1429023453632,
"metrics": metrics}
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
class TestWebhook(base.PluginTestCase):
def setUp(self):
<|code_end|>
, generate the next line using the imports in this file:
import requests
import simplejson as json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import webhook_notifier
from tests import base
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | super(TestWebhook, self).setUp(webhook_notifier.register_opts) |
Next line prediction: <|code_start|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def alarm(metrics):
return {"tenantId": "0",
"alarmId": "0",
"alarmDefinitionId": 0,
"alarmName": "test Alarm",
"alarmDescription": "test Alarm description",
"oldState": "OK",
"newState": "ALARM",
"severity": "LOW",
"link": "some-link",
"lifecycleState": "OPEN",
"stateChangeReason": "I am alarming!",
"timestamp": 1429023453632,
"metrics": metrics}
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
<|code_end|>
. Use current file imports:
(import requests
import simplejson as json
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import webhook_notifier
from tests import base)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | class TestWebhook(base.PluginTestCase): |
Based on the snippet: <|code_start|># Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
_DEFAULT_URL = '127.0.0.1:2181'
_DEFAULT_NOTIFICATION_PATH = '/notification/alarms'
_DEFAULT_RETRY_PATH = '/notification/retry'
_DEFAULT_PERIODIC_PATH = {
'60': '/notification/60_seconds'
}
zookeeper_group = cfg.OptGroup('zookeeper',
title='Zookeeper Options',
help='Options under this group allow to '
'configure settings for zookeeper '
'handling.')
zookeeper_opts = [
<|code_end|>
, predict the immediate next line with the help of imports:
from oslo_config import cfg
from monasca_notification.conf import types
and context (classes, functions, sometimes code) from other files:
# Path: monasca_notification/conf/types.py
# LOG = log.getLogger(__name__)
# class Plugin(types.String):
# class PluginOpt(cfg.Opt):
# class HostAddressPortType(types.HostAddress):
# def __init__(self, ignore_missing=False, choices=None, plugin_map=None):
# def __call__(self, value):
# def _get_actual_target(self, value):
# def __init__(self, name, choices=None, plugin_map=None, **kwargs):
# def __init__(self, version=None):
# def __call__(self, value):
# def _validate_port(port=80):
# def _validate_addr(self, addr):
. Output only the next line. | cfg.ListOpt(name='url', item_type=types.HostAddressPortType(), |
Given the code snippet: <|code_start|> self.assertEqual(data['client_url'], '')
details = dict(data['details'])
self.assertEqual(details['alarm_id'], '0')
self.assertEqual(details['alarm_name'], 'test Alarm')
self.assertEqual(details['current'], 'ALARM')
self.assertEqual(details['message'], 'I am alarming!')
def pagerduty_http_error(self, log_msg, http_response):
self.assertRegex(log_msg, "Error with pagerduty request.")
self.assertRegex(log_msg, "key=<ABCDEF>")
self.assertRegex(log_msg, "response=%s" % http_response)
@mock.patch('monasca_notification.plugins.pagerduty_notifier.requests')
def notify(self, http_func, mock_requests):
mock_log = mock.MagicMock()
mock_log.warn = self.trap.put
mock_log.error = self.trap.put
mock_log.exception = self.trap.put
mock_requests.post = http_func
pagerduty = pagerduty_notifier.PagerdutyNotifier(mock_log)
metric = []
metric_data = {'dimensions': {'hostname': 'foo1', 'service': 'bar1'}}
metric.append(metric_data)
alarm_dict = alarm(metric)
<|code_end|>
, generate the next line using the imports in this file:
import json
import requests
import time
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import pagerduty_notifier
from tests import base
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | notification = m_notification.Notification(0, |
Using the snippet: <|code_start|># implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def alarm(metrics):
return {"tenantId": "0",
"alarmId": "0",
"alarmName": "test Alarm",
"oldState": "OK",
"newState": "ALARM",
"severity": "LOW",
"link": "some-link",
"lifecycleState": "OPEN",
"stateChangeReason": "I am alarming!",
"timestamp": time.time(),
"metrics": metrics}
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
class TestPagerduty(base.PluginTestCase):
def setUp(self):
super(TestPagerduty, self).setUp(
<|code_end|>
, determine the next line of code. You have imports:
import json
import requests
import time
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import pagerduty_notifier
from tests import base
and context (class names, function names, or code) available:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | pagerduty_notifier.register_opts |
Predict the next line after this snippet: <|code_start|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def alarm(metrics):
return {"tenantId": "0",
"alarmId": "0",
"alarmName": "test Alarm",
"oldState": "OK",
"newState": "ALARM",
"severity": "LOW",
"link": "some-link",
"lifecycleState": "OPEN",
"stateChangeReason": "I am alarming!",
"timestamp": time.time(),
"metrics": metrics}
class requestsResponse(object):
def __init__(self, status):
self.status_code = status
<|code_end|>
using the current file's imports:
import json
import requests
import time
import queue
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import pagerduty_notifier
from tests import base
and any relevant context from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | class TestPagerduty(base.PluginTestCase): |
Given the code snippet: <|code_start|> self.assertEqual(len(notifications), 6)
self.assertItemsEqual(notifications,
['EMAIL', 'PAGERDUTY', 'WEBHOOK',
'HIPCHAT', 'SLACK', 'JIRA'])
@mock.patch('monasca_notification.plugins.email_notifier')
@mock.patch('monasca_notification.plugins.email_notifier.smtplib')
@mock.patch('monasca_notification.types.notifiers.log')
@mock.patch('monasca_notification.types.notifiers.importutils')
def test_send_notification_exception(self, mock_im, mock_log,
mock_smtp, mock_email):
self.conf_override(
group='notification_types',
enabled=[
'monasca_notification.plugins.email_notifier:EmailNotifier'
]
)
mock_log.warn = self.trap.append
mock_log.error = self.trap.append
mock_log.exception = self.trap.append
mock_email.EmailNotifier = self._sendExceptionStub
mock_im.import_class.return_value = mock_email.EmailNotifier
notifiers.init(self.statsd)
notifiers.load_plugins()
notifiers.config()
notifications = [
<|code_end|>
, generate the next line using the imports in this file:
import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base
and context (functions, classes, or occasionally code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | m_notification.Notification(0, 'email', 'email notification', |
Given snippet: <|code_start|> def start(self, key):
key = key + "_start"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
def stop(self, key):
key = key + "_stop"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
class StatsdCounter(object):
def __init__(self):
self.counter = 0
def increment(self, val):
self.counter += val
class TestInterface(base.BaseTestCase):
def setUp(self):
super(TestInterface, self).setUp()
self.trap = []
self.statsd = Statsd()
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base
and context:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
which might include code, classes, or functions. Output only the next line. | email_notifier.register_opts(base.config.CONF) |
Next line prediction: <|code_start|> if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
def stop(self, key):
key = key + "_stop"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
class StatsdCounter(object):
def __init__(self):
self.counter = 0
def increment(self, val):
self.counter += val
class TestInterface(base.BaseTestCase):
def setUp(self):
super(TestInterface, self).setUp()
self.trap = []
self.statsd = Statsd()
email_notifier.register_opts(base.config.CONF)
webhook_notifier.register_opts(base.config.CONF)
<|code_end|>
. Use current file imports:
(import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | pagerduty_notifier.register_opts(base.config.CONF) |
Using the snippet: <|code_start|> key = key + "_start"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
def stop(self, key):
key = key + "_stop"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
class StatsdCounter(object):
def __init__(self):
self.counter = 0
def increment(self, val):
self.counter += val
class TestInterface(base.BaseTestCase):
def setUp(self):
super(TestInterface, self).setUp()
self.trap = []
self.statsd = Statsd()
email_notifier.register_opts(base.config.CONF)
<|code_end|>
, determine the next line of code. You have imports:
import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base
and context (class names, function names, or code) available:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | webhook_notifier.register_opts(base.config.CONF) |
Based on the snippet: <|code_start|> self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
class StatsdCounter(object):
def __init__(self):
self.counter = 0
def increment(self, val):
self.counter += val
class TestInterface(base.BaseTestCase):
def setUp(self):
super(TestInterface, self).setUp()
self.trap = []
self.statsd = Statsd()
email_notifier.register_opts(base.config.CONF)
webhook_notifier.register_opts(base.config.CONF)
pagerduty_notifier.register_opts(base.config.CONF)
self.conf_override(group='email_notifier', server='my.smtp.server',
port=25, user=None, password=None,
timeout=60, from_addr='hpcs.mon@hp.com')
def tearDown(self):
super(TestInterface, self).tearDown()
<|code_end|>
, predict the immediate next line with the help of imports:
import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base
and context (classes, functions, sometimes code) from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
. Output only the next line. | notifiers.possible_notifiers = [] |
Here is a snippet: <|code_start|>
@contextlib.contextmanager
def time(self, key):
self.start(key)
yield
self.stop(key)
def start(self, key):
key = key + "_start"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
def stop(self, key):
key = key + "_stop"
if key in self.timer_calls:
self.timer_calls[key] += 1
else:
self.timer_calls[key] = 1
class StatsdCounter(object):
def __init__(self):
self.counter = 0
def increment(self, val):
self.counter += val
<|code_end|>
. Write the next line using the current file imports:
import contextlib
import time
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import email_notifier
from monasca_notification.plugins import pagerduty_notifier
from monasca_notification.plugins import webhook_notifier
from monasca_notification.types import notifiers
from tests import base
and context from other files:
# Path: monasca_notification/notification.py
# class Notification(object):
# def __init__(self, id, type, name, address, period, retry_count, alarm):
# def __eq__(self, other):
# def __ne__(self, other):
# def to_json(self):
#
# Path: monasca_notification/plugins/email_notifier.py
# CONF = cfg.CONF
# EMAIL_SINGLE_HOST_BASE = u'''On host "{hostname}" for target "{target_host}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_MULTIPLE_HOST_BASE = u'''On host "{hostname}" {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions:
# {metric_dimensions}'''
# EMAIL_NO_HOST_BASE = u'''On multiple hosts {message}
#
# Alarm "{alarm_name}" transitioned to the {state} state at {timestamp} UTC
# Alarm_id: {alarm_id}
#
# Lifecycle state: {lifecycle_state}
# Link: {link}
# Link to Grafana: {grafana_url}
#
# With dimensions
# {metric_dimensions}'''
# class EmailNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config=None):
# def statsd_name(self):
# def send_notification(self, notification):
# def _sendmail(self, notification, msg):
# def _email_error(self, notification):
# def _smtp_connect(self):
# def _create_msg(self, hostname, notification, targethost=None):
# def _get_link_url(self, metric, timestamp_ms):
# def _format_dimensions(notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/pagerduty_notifier.py
# CONF = cfg.CONF
# VALID_HTTP_CODES = [200, 201, 204]
# class PagerdutyNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/plugins/webhook_notifier.py
# CONF = cfg.CONF
# class WebhookNotifier(abstract_notifier.AbstractNotifier):
# def __init__(self, log):
# def config(self, config_dict):
# def statsd_name(self):
# def send_notification(self, notification):
# def register_opts(conf):
# def list_opts():
#
# Path: monasca_notification/types/notifiers.py
# CONF = cfg.CONF
# def init(statsd_obj):
# def load_plugins():
# def enabled_notifications():
# def config():
# def send_notifications(notifications):
# def send_single_notification(notification):
#
# Path: tests/base.py
# class DisableStatsdFixture(fixtures.Fixture):
# class ConfigFixture(oo_cfg.Config):
# class BaseTestCase(oslotest_base.BaseTestCase):
# class PluginTestCase(BaseTestCase):
# def setUp(self):
# def __init__(self):
# def setUp(self):
# def _clean_config_loaded_flag():
# def setUp(self):
# def conf_override(**kw):
# def conf_default(**kw):
# def setUp(self, register_opts=None):
, which may include functions, classes, or code. Output only the next line. | class TestInterface(base.BaseTestCase): |
Predict the next line after this snippet: <|code_start|> removal_version='3.0.0'
)
def config(self, config_dict):
pass
@property
def statsd_name(self):
return 'sent_webhook_count'
def send_notification(self, notification):
"""Send the notification via webhook
Posts on the given url
"""
body = {'alarm_id': notification.alarm_id,
'alarm_definition_id': notification.raw_alarm['alarmDefinitionId'],
'alarm_name': notification.alarm_name,
'alarm_description': notification.raw_alarm['alarmDescription'],
'alarm_timestamp': notification.alarm_timestamp,
'state': notification.state,
'old_state': notification.raw_alarm['oldState'],
'message': notification.message,
'tenant_id': notification.tenant_id,
'metrics': notification.metrics}
headers = {'content-type': 'application/json'}
# Checks if keystone authentication is enabled and adds authentication
# token to the request headers
if CONF.keystone.auth_required:
<|code_end|>
using the current file's imports:
import requests
import simplejson as json
from debtcollector import removals
from oslo_config import cfg
from monasca_notification.common import utils
from monasca_notification.plugins import abstract_notifier
and any relevant context from other files:
# Path: monasca_notification/common/utils.py
# LOG = log.getLogger(__name__)
# CONF = cfg.CONF
# NOTIFICATION_DIMENSIONS = {'service': 'monitoring',
# 'component': 'monasca-notification'}
# def get_db_repo():
# def construct_notification_object(db_repo, notification_json):
# def grab_stored_notification_method(db_repo, notification_id):
# def get_statsd_client(dimensions=None):
# def _set_connection(self, connection, host, port):
# def __init__(self, host='localhost', port=8125, max_buffer_size=50):
# def connect(self, host, port):
# def _send_to_server(self, packet):
# def get_keystone_session():
# def get_auth_token():
# class OfflineClient(monascastatsd.Client):
# class OfflineConnection(monascastatsd.Connection):
#
# Path: monasca_notification/plugins/abstract_notifier.py
# class AbstractNotifier(object, metaclass=abc.ABCMeta):
# def __init__(self):
# def statsd_name(self):
# def config(self, config):
# def send_notification(self, notification):
. Output only the next line. | auth_token = utils.get_auth_token() |
Here is a snippet: <|code_start|># (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Notification Engine
This engine reads alarms from Kafka and then notifies the customer using their configured
notification method.
"""
LOG = log.getLogger(__name__)
<|code_end|>
. Write the next line using the current file imports:
import multiprocessing
import os
import signal
import sys
import time
import warnings
from oslo_log import log
from monasca_notification import config
from monasca_notification import notification_engine
from monasca_notification import periodic_engine
from monasca_notification import retry_engine
and context from other files:
# Path: monasca_notification/config.py
# LOG = log.getLogger(__name__)
# CONF = conf.CONF
# _CONF_LOADED = False
# _CONF_LOADED = True
# def parse_args(argv):
# def _get_config_files():
#
# Path: monasca_notification/notification_engine.py
# CONF = cfg.CONF
# class NotificationEngine(object):
# def __init__(self):
# def _add_periodic_notifications(self, notifications):
# def run(self):
#
# Path: monasca_notification/periodic_engine.py
# CONF = cfg.CONF
# class PeriodicEngine(object):
# def __init__(self, period):
# def _keep_sending(self, alarm_id, original_state, period):
# def run(self):
#
# Path: monasca_notification/retry_engine.py
# CONF = cfg.CONF
# class RetryEngine(object):
# def __init__(self):
# def run(self):
, which may include functions, classes, or code. Output only the next line. | CONF = config.CONF |
Based on the snippet: <|code_start|>
# Kill everything, that didn't already die
for child in multiprocessing.active_children():
LOG.debug('Killing pid %s' % child.pid)
try:
os.kill(child.pid, signal.SIGKILL)
except Exception: # nosec
# There is really nothing to do if the kill fails, so just go on.
# The # nosec keeps bandit from reporting this as a security issue
pass
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(signum)
def start_process(process_type, *args):
LOG.info("start process: {}".format(process_type))
p = process_type(*args)
p.run()
def main(argv=None):
warnings.simplefilter('always')
config.parse_args(argv=argv)
for proc in range(0, CONF.notification_processor.number):
processors.append(multiprocessing.Process(
target=start_process,
<|code_end|>
, predict the immediate next line with the help of imports:
import multiprocessing
import os
import signal
import sys
import time
import warnings
from oslo_log import log
from monasca_notification import config
from monasca_notification import notification_engine
from monasca_notification import periodic_engine
from monasca_notification import retry_engine
and context (classes, functions, sometimes code) from other files:
# Path: monasca_notification/config.py
# LOG = log.getLogger(__name__)
# CONF = conf.CONF
# _CONF_LOADED = False
# _CONF_LOADED = True
# def parse_args(argv):
# def _get_config_files():
#
# Path: monasca_notification/notification_engine.py
# CONF = cfg.CONF
# class NotificationEngine(object):
# def __init__(self):
# def _add_periodic_notifications(self, notifications):
# def run(self):
#
# Path: monasca_notification/periodic_engine.py
# CONF = cfg.CONF
# class PeriodicEngine(object):
# def __init__(self, period):
# def _keep_sending(self, alarm_id, original_state, period):
# def run(self):
#
# Path: monasca_notification/retry_engine.py
# CONF = cfg.CONF
# class RetryEngine(object):
# def __init__(self):
# def run(self):
. Output only the next line. | args=(notification_engine.NotificationEngine,)) |
Next line prediction: <|code_start|> if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(signum)
def start_process(process_type, *args):
LOG.info("start process: {}".format(process_type))
p = process_type(*args)
p.run()
def main(argv=None):
warnings.simplefilter('always')
config.parse_args(argv=argv)
for proc in range(0, CONF.notification_processor.number):
processors.append(multiprocessing.Process(
target=start_process,
args=(notification_engine.NotificationEngine,))
)
processors.append(multiprocessing.Process(
target=start_process,
args=(retry_engine.RetryEngine,))
)
for notification_period in CONF.kafka.periodic.keys():
processors.append(multiprocessing.Process(
target=start_process,
<|code_end|>
. Use current file imports:
(import multiprocessing
import os
import signal
import sys
import time
import warnings
from oslo_log import log
from monasca_notification import config
from monasca_notification import notification_engine
from monasca_notification import periodic_engine
from monasca_notification import retry_engine)
and context including class names, function names, or small code snippets from other files:
# Path: monasca_notification/config.py
# LOG = log.getLogger(__name__)
# CONF = conf.CONF
# _CONF_LOADED = False
# _CONF_LOADED = True
# def parse_args(argv):
# def _get_config_files():
#
# Path: monasca_notification/notification_engine.py
# CONF = cfg.CONF
# class NotificationEngine(object):
# def __init__(self):
# def _add_periodic_notifications(self, notifications):
# def run(self):
#
# Path: monasca_notification/periodic_engine.py
# CONF = cfg.CONF
# class PeriodicEngine(object):
# def __init__(self, period):
# def _keep_sending(self, alarm_id, original_state, period):
# def run(self):
#
# Path: monasca_notification/retry_engine.py
# CONF = cfg.CONF
# class RetryEngine(object):
# def __init__(self):
# def run(self):
. Output only the next line. | args=(periodic_engine.PeriodicEngine, int(notification_period))) |
Given snippet: <|code_start|> os.kill(child.pid, signal.SIGKILL)
except Exception: # nosec
# There is really nothing to do if the kill fails, so just go on.
# The # nosec keeps bandit from reporting this as a security issue
pass
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(signum)
def start_process(process_type, *args):
LOG.info("start process: {}".format(process_type))
p = process_type(*args)
p.run()
def main(argv=None):
warnings.simplefilter('always')
config.parse_args(argv=argv)
for proc in range(0, CONF.notification_processor.number):
processors.append(multiprocessing.Process(
target=start_process,
args=(notification_engine.NotificationEngine,))
)
processors.append(multiprocessing.Process(
target=start_process,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import multiprocessing
import os
import signal
import sys
import time
import warnings
from oslo_log import log
from monasca_notification import config
from monasca_notification import notification_engine
from monasca_notification import periodic_engine
from monasca_notification import retry_engine
and context:
# Path: monasca_notification/config.py
# LOG = log.getLogger(__name__)
# CONF = conf.CONF
# _CONF_LOADED = False
# _CONF_LOADED = True
# def parse_args(argv):
# def _get_config_files():
#
# Path: monasca_notification/notification_engine.py
# CONF = cfg.CONF
# class NotificationEngine(object):
# def __init__(self):
# def _add_periodic_notifications(self, notifications):
# def run(self):
#
# Path: monasca_notification/periodic_engine.py
# CONF = cfg.CONF
# class PeriodicEngine(object):
# def __init__(self, period):
# def _keep_sending(self, alarm_id, original_state, period):
# def run(self):
#
# Path: monasca_notification/retry_engine.py
# CONF = cfg.CONF
# class RetryEngine(object):
# def __init__(self):
# def run(self):
which might include code, classes, or functions. Output only the next line. | args=(retry_engine.RetryEngine,)) |
Next line prediction: <|code_start|> self.logger = logger or setup_logger('cloudify_agent.api.factory')
@staticmethod
def _find_implementation(process_management):
"""
Locates the proper daemon implementation for the specific
process management system. For this to work, all implementations
need to be imported at this time.
see api/pm/__init__.py
:param process_management: The process management type.
:raise DaemonNotImplementedError: if no implementation could be found.
"""
daemons = []
def _find_daemons(daemon_superclass):
daemons.append(daemon_superclass)
subclasses = daemon_superclass.__subclasses__()
if subclasses:
for subclass in subclasses:
_find_daemons(subclass)
_find_daemons(Daemon)
for daemon in daemons:
if daemon.PROCESS_MANAGEMENT == process_management:
return daemon
<|code_end|>
. Use current file imports:
(import os
import json
from cloudify.utils import setup_logger
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.api.pm.base import Daemon)
and context including class names, function names, or small code snippets from other files:
# Path: cloudify_agent/api/exceptions.py
# class DaemonException(BaseException):
# class DaemonStartupTimeout(DaemonException):
# class DaemonShutdownTimeout(DaemonException):
# class DaemonStillRunningException(DaemonException):
# class DaemonError(BaseException):
# class DaemonConfigurationError(DaemonError):
# class DaemonPropertiesError(DaemonError):
# class DaemonMissingMandatoryPropertyError(DaemonPropertiesError):
# class DaemonNotConfiguredError(DaemonError):
# class DaemonNotFoundError(DaemonError):
# class DaemonAlreadyExistsError(DaemonError):
# class DaemonNotImplementedError(DaemonError):
# def __init__(self, timeout, name):
# def __str__(self):
# def __init__(self, timeout, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, param):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, process_management):
# def __str__(self):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
. Output only the next line. | raise exceptions.DaemonNotImplementedError(process_management) |
Using the snippet: <|code_start|> """
Factory class for manipulating various daemon instances.
"""
def __init__(self, username=None, storage=None, logger=None):
"""
:param username: the username the daemons are registered under.
if no username if passed, the currently logged user
will be used. this setting is used for computing
the storage directory, hence, if `storage` is passed,
the username will be ignored.
:param storage: the storage directory where daemons are stored.
if no directory is passed, it will computed using the
`utils.get_storage_directory` function.
:param logger: a logger to be used to log various subsequent
operations.
"""
######################################################################
# `username` and `storage` are arguments because the default home
# directory may change depending on how the daemon process is
# executed. For example if running in a Windows Service, the home
# directory changes. This means that we must the ability to specify
# exactly where the storage directory is, and not let the code
# auto-detect it in any scenario.
#####################################################################
self.username = username
<|code_end|>
, determine the next line of code. You have imports:
import os
import json
from cloudify.utils import setup_logger
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.api.pm.base import Daemon
and context (class names, function names, or code) available:
# Path: cloudify_agent/api/exceptions.py
# class DaemonException(BaseException):
# class DaemonStartupTimeout(DaemonException):
# class DaemonShutdownTimeout(DaemonException):
# class DaemonStillRunningException(DaemonException):
# class DaemonError(BaseException):
# class DaemonConfigurationError(DaemonError):
# class DaemonPropertiesError(DaemonError):
# class DaemonMissingMandatoryPropertyError(DaemonPropertiesError):
# class DaemonNotConfiguredError(DaemonError):
# class DaemonNotFoundError(DaemonError):
# class DaemonAlreadyExistsError(DaemonError):
# class DaemonNotImplementedError(DaemonError):
# def __init__(self, timeout, name):
# def __str__(self):
# def __init__(self, timeout, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, param):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, name):
# def __str__(self):
# def __init__(self, process_management):
# def __str__(self):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
. Output only the next line. | self.storage = storage or utils.internal.get_storage_directory( |
Based on the snippet: <|code_start|>
logger = setup_logger(
'cloudify-agent.tests.daemon',
logger_level=logging.DEBUG)
def _is_agent_alive(name, timeout=10):
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
import time
from cloudify.amqp_client import get_client
from cloudify.utils import setup_logger
from cloudify_agent.api import utils as agent_utils
and context (classes, functions, sometimes code) from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
. Output only the next line. | return agent_utils.is_agent_alive( |
Here is a snippet: <|code_start|> daemon.stop()
daemon.delete()
assert not os.path.exists(daemon.config_path)
pytest.raises(
CommandExecutionException,
nssm_daemon.runner.run,
'sc getdisplayname {0}'.format(daemon.name))
@pytest.mark.only_rabbit
@pytest.mark.only_nt
def test_status(nssm_daemon):
daemon = nssm_daemon.create_daemon()
daemon.create()
daemon.configure()
assert not daemon.status()
daemon.start()
# on windows, the daemon.start completes and returns fast enough
# that the service state is still SERVICE_START_PENDING
for retry in range(5):
if daemon.status():
break
time.sleep(1)
else:
pytest.fail('Daemon failed to start')
@pytest.mark.only_rabbit
@pytest.mark.only_nt
def test_create(nssm_daemon):
<|code_end|>
. Write the next line using the current file imports:
import os
import time
import pytest
from cloudify.exceptions import CommandExecutionException
from cloudify_agent.tests.api.pm import shared
and context from other files:
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
, which may include functions, classes, or code. Output only the next line. | shared._test_create(nssm_daemon) |
Predict the next line after this snippet: <|code_start|> assert os.path.exists(daemon.script_path)
assert os.path.exists(daemon.config_path)
@pytest.mark.only_rabbit
@pytest.mark.only_posix
def test_delete(detach_daemon):
daemon = detach_daemon.create_daemon()
daemon.create()
daemon.configure()
daemon.start()
daemon.stop()
daemon.delete()
assert not os.path.exists(daemon.script_path)
assert not os.path.exists(daemon.config_path)
assert not os.path.exists(daemon.pid_file)
@pytest.mark.only_rabbit
@pytest.mark.only_posix
def test_cron_respawn(detach_daemon):
daemon = detach_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
crontab = detach_daemon.runner.run('crontab -l').std_out
assert daemon.cron_respawn_path in crontab
<|code_end|>
using the current file's imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and any relevant context from other files:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
. Output only the next line. | wait_for_daemon_alive(daemon.queue) |
Continue the code snippet: <|code_start|>@pytest.mark.only_rabbit
@pytest.mark.only_posix
def test_delete(detach_daemon):
daemon = detach_daemon.create_daemon()
daemon.create()
daemon.configure()
daemon.start()
daemon.stop()
daemon.delete()
assert not os.path.exists(daemon.script_path)
assert not os.path.exists(daemon.config_path)
assert not os.path.exists(daemon.pid_file)
@pytest.mark.only_rabbit
@pytest.mark.only_posix
def test_cron_respawn(detach_daemon):
daemon = detach_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
crontab = detach_daemon.runner.run('crontab -l').std_out
assert daemon.cron_respawn_path in crontab
wait_for_daemon_alive(daemon.queue)
# lets kill the process
detach_daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'")
<|code_end|>
. Use current file imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and context (classes, functions, or code) from other files:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
. Output only the next line. | wait_for_daemon_dead(daemon.queue) |
Given snippet: <|code_start|>def test_cron_respawn(detach_daemon):
daemon = detach_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
crontab = detach_daemon.runner.run('crontab -l').std_out
assert daemon.cron_respawn_path in crontab
wait_for_daemon_alive(daemon.queue)
# lets kill the process
detach_daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'")
wait_for_daemon_dead(daemon.queue)
# check it was respawned
# mocking cron - respawn it using the cron respawn script
detach_daemon.runner.run(daemon.cron_respawn_path)
wait_for_daemon_alive(daemon.queue)
daemon.stop()
wait_for_daemon_dead(daemon.queue)
crontab = detach_daemon.runner.run('crontab -l').std_out
assert daemon.cron_respawn_path not in crontab
@pytest.mark.only_posix
def test_create(detach_daemon):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and context:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
which might include code, classes, or functions. Output only the next line. | shared._test_create(detach_daemon) |
Given snippet: <|code_start|> else:
return winrm.Session(
target=winrm_url,
auth=(self.session_config['user'],
self.session_config['password']),
transport=self.session_config['transport'])
def run(self, command, raise_on_failure=True, execution_env=None,
powershell=False):
"""
:param command: The command to execute.
:param raise_on_failure: by default, this will raise an exception
if the command fails. You can use
raise_on_failure=False to just log the
error and not raise an exception.
:param execution_env: environment variables to be applied before
running the command
:param powershell: Determines where to run command as a powershell
script.
:return a response object with information about the execution
:rtype WinRMCommandExecutionResponse.
"""
if execution_env is None:
execution_env = {}
remote_env_file = None
if execution_env:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import winrm
import ntpath
from cloudify.exceptions import CommandExecutionException
from cloudify.exceptions import CommandExecutionError
from cloudify.utils import CommandExecutionResponse
from cloudify.utils import setup_logger
from cloudify_agent.installer import utils
from cloudify_agent.api import utils as api_utils
from cloudify_rest_client.utils import is_kerberos_env
from functools import reduce
and context:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
which might include code, classes, or functions. Output only the next line. | env_file = utils.env_to_file(execution_env, posix=False) |
Predict the next line for this snippet: <|code_start|> return self.run(
'@powershell -Command "[System.IO.Path]::GetTempPath()"'
).std_out.strip()
def new_dir(self, path):
"""
Creates the path as a new directory.
:param path: The directory path to create.
:return a response object with information about the execution
:rtype WinRMCommandExecutionResponse.
"""
return self.run('mkdir \"{0}\" -Force'.format(path), powershell=True)
def machine_distribution(self):
"""
Retrieves the distribution information of the host.
:return: dictionary of the platform distribution as returned from
'platform.dist()'
"""
response = self.python(
imports_line='import platform, json',
command='json.dumps(platform.dist())'
)
<|code_end|>
with the help of current file imports:
import winrm
import ntpath
from cloudify.exceptions import CommandExecutionException
from cloudify.exceptions import CommandExecutionError
from cloudify.utils import CommandExecutionResponse
from cloudify.utils import setup_logger
from cloudify_agent.installer import utils
from cloudify_agent.api import utils as api_utils
from cloudify_rest_client.utils import is_kerberos_env
from functools import reduce
and context from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
, which may contain function names, class names, or code. Output only the next line. | return api_utils.json_loads(response) |
Given the following code snippet before the placeholder: <|code_start|> """
return os.environ[cls.CLOUDIFY_DAEMON_USER_KEY]
@classmethod
def get_storage_directory(cls, username=None):
"""
Retrieve path to the directory where all daemon
registered under a specific username will be stored.
If no `username` is provided, username under which current daemon
was installed will be used.
:param username: the user
"""
if cls.CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY in os.environ:
return cls.get_daemon_storage_dir()
if os.name == 'nt':
return appdirs.site_data_dir('cloudify-agent', 'Cloudify')
if username is None and cls.CLOUDIFY_DAEMON_USER_KEY in os.environ:
username = cls.get_daemon_user()
return os.path.join(get_home_dir(username), '.cfy-agent')
@staticmethod
def generate_agent_name():
"""Generates a unique name with a pre-defined prefix
"""
<|code_end|>
, predict the next line using imports from the current file:
import uuid
import json
import copy
import tempfile
import os
import errno
import getpass
import pkgutil
import appdirs
import pkg_resources
import cloudify_agent
import pwd
from jinja2 import Template
from cloudify._compat import urlquote, PY2
from cloudify.cluster import CloudifyClusterClient
from cloudify.workflows import tasks as workflows_tasks
from cloudify.utils import setup_logger, get_exec_tempdir, ipv6_url_compat
from cloudify.constants import (SECURED_PROTOCOL,
BROKER_PORT_SSL,
BROKER_PORT_NO_SSL)
from cloudify.utils import is_agent_alive # noqa
from cloudify_agent.api import defaults
and context including class names, function names, and sometimes code from other files:
# Path: cloudify_agent/api/defaults.py
# START_TIMEOUT = 60
# START_INTERVAL = 1
# STOP_TIMEOUT = 60
# STOP_INTERVAL = 1
# INTERNAL_REST_PORT = 53333
# MIN_WORKERS = 0
# MAX_WORKERS = 5
# BROKER_URL = 'amqp://{username}:{password}@{host}:{port}/{vhost}'
# DELETE_AMQP_QUEUE_BEFORE_START = False
# DAEMON_FORCE_DELETE = False
# CLOUDIFY_AGENT_PREFIX = 'cfy-agent'
# LOG_LEVEL = 'info'
# LOG_FILE_SIZE = 5 * 1024 * 1024
# LOG_BACKUPS = 7
# SSL_CERTS_TARGET_DIR = 'cloudify/ssl'
# AGENT_SSL_CERT_FILENAME = 'cloudify_internal_cert.pem'
# DEFAULT_TENANT_NAME = 'default_tenant'
. Output only the next line. | return '{0}-{1}'.format(defaults.CLOUDIFY_AGENT_PREFIX, uuid.uuid4()) |
Given the code snippet: <|code_start|> '[%(name)s] %(message)s',
logger_level=logging.INFO)
def get_logger():
return _logger
def show_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
ver = get_agent_version()
logger = get_logger()
logger.info('Cloudify Agent {0}'.format(ver))
ctx.exit()
@cfy.group()
@click.option('--debug', default=False, is_flag=True)
@click.option('--version', is_flag=True, callback=show_version,
expose_value=False, is_eager=True, help='Show version and exit')
def main(debug):
if debug:
# configure global logger level
global _logger
_logger.setLevel(logging.DEBUG)
# configure api loggers so that there logging level does not rely
# on imports from the shell modules
<|code_end|>
, generate the next line using the imports in this file:
import logging
import click
from cloudify.utils import setup_logger
from cloudify_agent.api.utils import (
get_agent_version,
logger as api_utils_logger
)
from cloudify_agent.shell.commands import daemons
from cloudify_agent.shell.commands import configure
from cloudify_agent.shell.commands import cfy
and context (functions, classes, or occasionally code) from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/commands/daemons.py
# class _ExpandUserPath(click.Path):
# def convert(self, value, param, ctx):
# def create(**params):
# def configure(name, user=None):
# def start(name, interval, timeout, no_delete_amqp_queue, user=None):
# def stop(name, interval, timeout):
# def restart(name):
# def delete(name):
# def inspect(name):
# def ls():
# def status(name):
# def _load_daemon(name, user=None):
# def _save_daemon(daemon):
# def _parse_custom_options(options):
#
# Path: cloudify_agent/shell/commands/configure.py
# @cfy.command()
# @click.option('--disable-requiretty',
# help='Disables the requiretty directive in the sudoers file.',
# is_flag=True)
# @click.option('--relocated-env',
# help='Indication that this virtualenv was relocated. '
# 'If this option is passed, an auto-correction '
# 'to the virtualenv shabang entries '
# 'will be performed',
# is_flag=True)
# @click.option('--no-sudo',
# help='Indication whether sudo should be used when applying '
# ' disable-requiretty part',
# is_flag=True)
# @handle_failures
# def configure(disable_requiretty, relocated_env, no_sudo):
#
# """
# Configures global agent properties.
# """
#
# if disable_requiretty:
# click.echo('Disabling requiretty directive in sudoers file')
# _disable_requiretty(no_sudo)
# click.echo('Successfully disabled requiretty for cfy-agent')
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
. Output only the next line. | api_utils_logger.setLevel(logging.DEBUG) |
Next line prediction: <|code_start|>
@cfy.group()
@click.option('--debug', default=False, is_flag=True)
@click.option('--version', is_flag=True, callback=show_version,
expose_value=False, is_eager=True, help='Show version and exit')
def main(debug):
if debug:
# configure global logger level
global _logger
_logger.setLevel(logging.DEBUG)
# configure api loggers so that there logging level does not rely
# on imports from the shell modules
api_utils_logger.setLevel(logging.DEBUG)
@cfy.group(name='daemons')
def daemon_sub_command():
pass
@cfy.group(name='plugins')
def plugins_sub_command():
pass
main.add_command(configure.configure)
<|code_end|>
. Use current file imports:
(import logging
import click
from cloudify.utils import setup_logger
from cloudify_agent.api.utils import (
get_agent_version,
logger as api_utils_logger
)
from cloudify_agent.shell.commands import daemons
from cloudify_agent.shell.commands import configure
from cloudify_agent.shell.commands import cfy)
and context including class names, function names, or small code snippets from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/commands/daemons.py
# class _ExpandUserPath(click.Path):
# def convert(self, value, param, ctx):
# def create(**params):
# def configure(name, user=None):
# def start(name, interval, timeout, no_delete_amqp_queue, user=None):
# def stop(name, interval, timeout):
# def restart(name):
# def delete(name):
# def inspect(name):
# def ls():
# def status(name):
# def _load_daemon(name, user=None):
# def _save_daemon(daemon):
# def _parse_custom_options(options):
#
# Path: cloudify_agent/shell/commands/configure.py
# @cfy.command()
# @click.option('--disable-requiretty',
# help='Disables the requiretty directive in the sudoers file.',
# is_flag=True)
# @click.option('--relocated-env',
# help='Indication that this virtualenv was relocated. '
# 'If this option is passed, an auto-correction '
# 'to the virtualenv shabang entries '
# 'will be performed',
# is_flag=True)
# @click.option('--no-sudo',
# help='Indication whether sudo should be used when applying '
# ' disable-requiretty part',
# is_flag=True)
# @handle_failures
# def configure(disable_requiretty, relocated_env, no_sudo):
#
# """
# Configures global agent properties.
# """
#
# if disable_requiretty:
# click.echo('Disabling requiretty directive in sudoers file')
# _disable_requiretty(no_sudo)
# click.echo('Successfully disabled requiretty for cfy-agent')
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
. Output only the next line. | daemon_sub_command.add_command(daemons.create) |
Next line prediction: <|code_start|> logger.info('Cloudify Agent {0}'.format(ver))
ctx.exit()
@cfy.group()
@click.option('--debug', default=False, is_flag=True)
@click.option('--version', is_flag=True, callback=show_version,
expose_value=False, is_eager=True, help='Show version and exit')
def main(debug):
if debug:
# configure global logger level
global _logger
_logger.setLevel(logging.DEBUG)
# configure api loggers so that there logging level does not rely
# on imports from the shell modules
api_utils_logger.setLevel(logging.DEBUG)
@cfy.group(name='daemons')
def daemon_sub_command():
pass
@cfy.group(name='plugins')
def plugins_sub_command():
pass
<|code_end|>
. Use current file imports:
(import logging
import click
from cloudify.utils import setup_logger
from cloudify_agent.api.utils import (
get_agent_version,
logger as api_utils_logger
)
from cloudify_agent.shell.commands import daemons
from cloudify_agent.shell.commands import configure
from cloudify_agent.shell.commands import cfy)
and context including class names, function names, or small code snippets from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/commands/daemons.py
# class _ExpandUserPath(click.Path):
# def convert(self, value, param, ctx):
# def create(**params):
# def configure(name, user=None):
# def start(name, interval, timeout, no_delete_amqp_queue, user=None):
# def stop(name, interval, timeout):
# def restart(name):
# def delete(name):
# def inspect(name):
# def ls():
# def status(name):
# def _load_daemon(name, user=None):
# def _save_daemon(daemon):
# def _parse_custom_options(options):
#
# Path: cloudify_agent/shell/commands/configure.py
# @cfy.command()
# @click.option('--disable-requiretty',
# help='Disables the requiretty directive in the sudoers file.',
# is_flag=True)
# @click.option('--relocated-env',
# help='Indication that this virtualenv was relocated. '
# 'If this option is passed, an auto-correction '
# 'to the virtualenv shabang entries '
# 'will be performed',
# is_flag=True)
# @click.option('--no-sudo',
# help='Indication whether sudo should be used when applying '
# ' disable-requiretty part',
# is_flag=True)
# @handle_failures
# def configure(disable_requiretty, relocated_env, no_sudo):
#
# """
# Configures global agent properties.
# """
#
# if disable_requiretty:
# click.echo('Disabling requiretty directive in sudoers file')
# _disable_requiretty(no_sudo)
# click.echo('Successfully disabled requiretty for cfy-agent')
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
. Output only the next line. | main.add_command(configure.configure) |
Given snippet: <|code_start|># * See the License for the specific language governing permissions and
# * limitations under the License.
# adding all of our commands.
_logger = setup_logger('cloudify_agent.shell.main',
logger_format='%(asctime)s [%(levelname)-5s] '
'[%(name)s] %(message)s',
logger_level=logging.INFO)
def get_logger():
return _logger
def show_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
ver = get_agent_version()
logger = get_logger()
logger.info('Cloudify Agent {0}'.format(ver))
ctx.exit()
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import logging
import click
from cloudify.utils import setup_logger
from cloudify_agent.api.utils import (
get_agent_version,
logger as api_utils_logger
)
from cloudify_agent.shell.commands import daemons
from cloudify_agent.shell.commands import configure
from cloudify_agent.shell.commands import cfy
and context:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/commands/daemons.py
# class _ExpandUserPath(click.Path):
# def convert(self, value, param, ctx):
# def create(**params):
# def configure(name, user=None):
# def start(name, interval, timeout, no_delete_amqp_queue, user=None):
# def stop(name, interval, timeout):
# def restart(name):
# def delete(name):
# def inspect(name):
# def ls():
# def status(name):
# def _load_daemon(name, user=None):
# def _save_daemon(daemon):
# def _parse_custom_options(options):
#
# Path: cloudify_agent/shell/commands/configure.py
# @cfy.command()
# @click.option('--disable-requiretty',
# help='Disables the requiretty directive in the sudoers file.',
# is_flag=True)
# @click.option('--relocated-env',
# help='Indication that this virtualenv was relocated. '
# 'If this option is passed, an auto-correction '
# 'to the virtualenv shabang entries '
# 'will be performed',
# is_flag=True)
# @click.option('--no-sudo',
# help='Indication whether sudo should be used when applying '
# ' disable-requiretty part',
# is_flag=True)
# @handle_failures
# def configure(disable_requiretty, relocated_env, no_sudo):
#
# """
# Configures global agent properties.
# """
#
# if disable_requiretty:
# click.echo('Disabling requiretty directive in sudoers file')
# _disable_requiretty(no_sudo)
# click.echo('Successfully disabled requiretty for cfy-agent')
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
which might include code, classes, or functions. Output only the next line. | @cfy.group() |
Predict the next line after this snippet: <|code_start|> password=None,
validate_connection=True,
fabric_env=None,
tmpdir=None):
# logger
self.logger = logger or setup_logger('fabric_runner')
# silence paramiko
logging.getLogger('paramiko.transport').setLevel(logging.WARNING)
# connection details
self.port = port or DEFAULT_REMOTE_EXECUTION_PORT
self.password = password
self.user = user
self.host = host
self.key = key
self.tmpdir = tmpdir
# fabric environment
self.env = self._set_env()
self.env.update(fabric_env or {})
self._connection = None
self._validate_ssh_config()
if validate_connection:
self.validate_connection()
def _validate_ssh_config(self):
if not self.host:
<|code_end|>
using the current file's imports:
import os
import sys
import logging
from fabric import Connection
from paramiko import RSAKey, ECDSAKey, Ed25519Key, SSHException
from cloudify._compat import reraise, StringIO
from cloudify.utils import CommandExecutionResponse
from cloudify.utils import setup_logger
from cloudify.exceptions import CommandExecutionException
from cloudify.exceptions import CommandExecutionError
from cloudify_agent.installer import exceptions
from cloudify_agent.api import utils as api_utils
from cloudify_rest_client.utils import is_kerberos_env
and any relevant context from other files:
# Path: cloudify_agent/installer/exceptions.py
# class AgentInstallerConfigurationError(exceptions.NonRecoverableError):
# def __init__(self, msg):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
. Output only the next line. | raise exceptions.AgentInstallerConfigurationError('Missing host') |
Next line prediction: <|code_start|> stdout = self.run('{0} -c "import sys; {1}; '
'sys.stdout.write(\'{2}{3}{4}\\n\''
'.format({5}))"'
.format(python_bin,
imports_line,
start,
'{0}',
end,
command), **attributes).std_out
result = stdout[stdout.find(start) - 1 + len(end):
stdout.find(end)]
return result
def machine_distribution(self, **attributes):
"""
Retrieves the distribution information of the host.
:param attributes: custom attributes passed directly to
fabric's run command
:return: dictionary of the platform distribution as returned from
'platform.dist()'
"""
response = self.python(
imports_line='import platform, json',
command='json.dumps(platform.dist())', **attributes
)
<|code_end|>
. Use current file imports:
(import os
import sys
import logging
from fabric import Connection
from paramiko import RSAKey, ECDSAKey, Ed25519Key, SSHException
from cloudify._compat import reraise, StringIO
from cloudify.utils import CommandExecutionResponse
from cloudify.utils import setup_logger
from cloudify.exceptions import CommandExecutionException
from cloudify.exceptions import CommandExecutionError
from cloudify_agent.installer import exceptions
from cloudify_agent.api import utils as api_utils
from cloudify_rest_client.utils import is_kerberos_env)
and context including class names, function names, or small code snippets from other files:
# Path: cloudify_agent/installer/exceptions.py
# class AgentInstallerConfigurationError(exceptions.NonRecoverableError):
# def __init__(self, msg):
#
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
. Output only the next line. | return api_utils.json_loads(response) |
Predict the next line for this snippet: <|code_start|>@pytest.mark.only_rabbit
@pytest.mark.only_ci
@pytest.mark.only_posix
def test_configure_start_on_boot(initd_daemon):
daemon = initd_daemon.create_daemon(start_on_boot=True)
daemon.create()
daemon.configure()
@pytest.mark.only_rabbit
@pytest.mark.only_ci
@pytest.mark.only_posix
def test_cron_respawn(initd_daemon):
daemon = initd_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
# initd daemon's cron is for root, so that respawning the daemon can
# use the init system which requires root
crontab = initd_daemon.runner.run('sudo crontab -lu root').std_out
assert daemon.cron_respawn_path in crontab
initd_daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'")
wait_for_daemon_dead(daemon.queue)
# check it was respawned
# mocking cron - respawn it using the cron respawn script
initd_daemon.runner.run(daemon.cron_respawn_path)
<|code_end|>
with the help of current file imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and context from other files:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
, which may contain function names, class names, or code. Output only the next line. | wait_for_daemon_alive(daemon.queue) |
Predict the next line for this snippet: <|code_start|> daemon.delete()
assert not os.path.exists(daemon.script_path)
assert not os.path.exists(daemon.config_path)
@pytest.mark.only_rabbit
@pytest.mark.only_ci
@pytest.mark.only_posix
def test_configure_start_on_boot(initd_daemon):
daemon = initd_daemon.create_daemon(start_on_boot=True)
daemon.create()
daemon.configure()
@pytest.mark.only_rabbit
@pytest.mark.only_ci
@pytest.mark.only_posix
def test_cron_respawn(initd_daemon):
daemon = initd_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
# initd daemon's cron is for root, so that respawning the daemon can
# use the init system which requires root
crontab = initd_daemon.runner.run('sudo crontab -lu root').std_out
assert daemon.cron_respawn_path in crontab
initd_daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'")
<|code_end|>
with the help of current file imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and context from other files:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
, which may contain function names, class names, or code. Output only the next line. | wait_for_daemon_dead(daemon.queue) |
Given snippet: <|code_start|> daemon = initd_daemon.create_daemon(cron_respawn=True,
cron_respawn_delay=1)
daemon.create()
daemon.configure()
daemon.start()
# initd daemon's cron is for root, so that respawning the daemon can
# use the init system which requires root
crontab = initd_daemon.runner.run('sudo crontab -lu root').std_out
assert daemon.cron_respawn_path in crontab
initd_daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'")
wait_for_daemon_dead(daemon.queue)
# check it was respawned
# mocking cron - respawn it using the cron respawn script
initd_daemon.runner.run(daemon.cron_respawn_path)
wait_for_daemon_alive(daemon.queue)
# this should also disable the crontab entry
daemon.stop()
wait_for_daemon_dead(daemon.queue)
crontab = initd_daemon.runner.run('sudo crontab -lu root').std_out
assert daemon.cron_respawn_path not in crontab
@pytest.mark.only_ci
@pytest.mark.only_posix
def test_create(initd_daemon):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import pytest
from cloudify_agent.tests.daemon import (
wait_for_daemon_alive,
wait_for_daemon_dead,
)
from cloudify_agent.tests.api.pm import shared
and context:
# Path: cloudify_agent/tests/daemon.py
# def wait_for_daemon_alive(name, timeout=10):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to start...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to start. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# def wait_for_daemon_dead(name, timeout=20):
# deadline = time.time() + timeout
#
# while time.time() < deadline:
# if not _is_agent_alive(name, timeout=5):
# return
# logger.info('Waiting for daemon {0} to stop...'
# .format(name))
# time.sleep(1)
# raise RuntimeError('Failed waiting for daemon {0} to stop. Waited '
# 'for {1} seconds'.format(name, timeout))
#
# Path: cloudify_agent/tests/api/pm/shared.py
# def patch_get_source():
# def patch_no_managed_plugin():
# def _test_create(daemon_fixture):
# def _test_create_overwrite(daemon_fixture):
# def _test_start(daemon_fixture):
# def _test_start_delete_amqp_queue(daemon_fixture):
# def _test_start_with_error(daemon_fixture):
# def _test_start_short_timeout(daemon_fixture):
# def _test_status(daemon_fixture):
# def _test_stop(daemon_fixture):
# def _test_stop_short_timeout(daemon_fixture):
# def _test_restart(daemon_fixture):
# def _test_two_daemons(daemon_fixture):
# def _test_conf_env_variables(daemon_fixture):
# def _get_env_var(var):
# def _check_env_var(var, expected_value):
# def _test_extra_env(daemon_fixture):
# def _test_execution_env(daemon_fixture):
# def _test_delete_before_stop(daemon_fixture):
# def _test_delete_before_stop_with_force(daemon_fixture):
# def _test_logging(daemon_fixture):
# def log_and_assert(_message, _deployment_id=None):
which might include code, classes, or functions. Output only the next line. | shared._test_create(initd_daemon) |
Using the snippet: <|code_start|>
# these imports may run on a windows box, in which case they may fail. (if
# the pywin32 extensions). The tests wont run anyway because of the decorator,
# so we can just avoid this import.
try:
except ImportError:
FabricRunner = None
FabricCommandExecutionException = None
##############################################################################
# note that this file only tests validation and defaults of the fabric runner.
# it does not test the actual functionality because that requires starting
# a vm. functional tests are executed as local workflow tests in the system
# tests framework
##############################################################################
@pytest.mark.only_posix
def test_default_port():
runner = FabricRunner(
validate_connection=False,
user='user',
host='host',
password='password')
assert runner.port == 22
@pytest.mark.only_posix
def test_no_host():
<|code_end|>
, determine the next line of code. You have imports:
from mock import Mock, patch
from cloudify_agent.installer import exceptions
from cloudify_agent.installer.runners.fabric_runner import FabricRunner
from cloudify_agent.installer.runners.fabric_runner import (
FabricCommandExecutionException,
)
import pytest
and context (class names, function names, or code) available:
# Path: cloudify_agent/installer/exceptions.py
# class AgentInstallerConfigurationError(exceptions.NonRecoverableError):
# def __init__(self, msg):
. Output only the next line. | with pytest.raises(exceptions.AgentInstallerConfigurationError, |
Continue the code snippet: <|code_start|>
try:
except ImportError:
# py2
class AgentPackageGenerator(object):
def __init__(self, file_server):
self.initialized = False
self._fs = file_server
def _initialize(self):
config = RawConfigParser()
config.add_section('install')
config.set('install', 'cloudify_agent_module', get_source_uri())
config.set('install', 'requirements_file',
get_requirements_uri())
config.add_section('system')
config.set('system', 'python_path',
os.path.join(getattr(sys, 'real_prefix', sys.prefix),
'bin', 'python'))
<|code_end|>
. Use current file imports:
import os
import sys
from cloudify_agent.tests.utils import (
create_agent_package,
get_requirements_uri,
get_source_uri,
)
from configparser import RawConfigParser
from ConfigParser import RawConfigParser
and context (classes, functions, or code) from other files:
# Path: cloudify_agent/tests/utils.py
# def create_agent_package(directory, config, package_logger=None):
# if package_logger is None:
# package_logger = logger
# package_logger.info('Changing directory into {0}'.format(directory))
# original = os.getcwd()
# try:
# package_logger.info('Creating Agent Package')
# os.chdir(directory)
# if platform.system() == 'Linux':
# packager.create(config=config,
# config_file=None,
# force=False,
# verbose=False)
# distname, _, distid = platform.dist()
# return '{0}-{1}-agent.tar.gz'.format(distname, distid)
# elif platform.system() == 'Windows':
# create_windows_installer(config, logger)
# return 'cloudify-windows-agent-{}.exe'.format(get_agent_version())
# else:
# raise NonRecoverableError('Platform not supported: {0}'
# .format(platform.system()))
# finally:
# os.chdir(original)
#
# def get_requirements_uri():
# return os.path.join(get_source_uri(), 'dev-requirements.txt')
#
# def get_source_uri():
# return os.path.dirname(os.path.dirname(cloudify_agent.__file__))
. Output only the next line. | package_name = create_agent_package(self._fs.root_path, config) |
Here is a snippet: <|code_start|>
try:
except ImportError:
# py2
class AgentPackageGenerator(object):
def __init__(self, file_server):
self.initialized = False
self._fs = file_server
def _initialize(self):
config = RawConfigParser()
config.add_section('install')
config.set('install', 'cloudify_agent_module', get_source_uri())
config.set('install', 'requirements_file',
<|code_end|>
. Write the next line using the current file imports:
import os
import sys
from cloudify_agent.tests.utils import (
create_agent_package,
get_requirements_uri,
get_source_uri,
)
from configparser import RawConfigParser
from ConfigParser import RawConfigParser
and context from other files:
# Path: cloudify_agent/tests/utils.py
# def create_agent_package(directory, config, package_logger=None):
# if package_logger is None:
# package_logger = logger
# package_logger.info('Changing directory into {0}'.format(directory))
# original = os.getcwd()
# try:
# package_logger.info('Creating Agent Package')
# os.chdir(directory)
# if platform.system() == 'Linux':
# packager.create(config=config,
# config_file=None,
# force=False,
# verbose=False)
# distname, _, distid = platform.dist()
# return '{0}-{1}-agent.tar.gz'.format(distname, distid)
# elif platform.system() == 'Windows':
# create_windows_installer(config, logger)
# return 'cloudify-windows-agent-{}.exe'.format(get_agent_version())
# else:
# raise NonRecoverableError('Platform not supported: {0}'
# .format(platform.system()))
# finally:
# os.chdir(original)
#
# def get_requirements_uri():
# return os.path.join(get_source_uri(), 'dev-requirements.txt')
#
# def get_source_uri():
# return os.path.dirname(os.path.dirname(cloudify_agent.__file__))
, which may include functions, classes, or code. Output only the next line. | get_requirements_uri()) |
Next line prediction: <|code_start|>
try:
except ImportError:
# py2
class AgentPackageGenerator(object):
def __init__(self, file_server):
self.initialized = False
self._fs = file_server
def _initialize(self):
config = RawConfigParser()
config.add_section('install')
<|code_end|>
. Use current file imports:
(import os
import sys
from cloudify_agent.tests.utils import (
create_agent_package,
get_requirements_uri,
get_source_uri,
)
from configparser import RawConfigParser
from ConfigParser import RawConfigParser)
and context including class names, function names, or small code snippets from other files:
# Path: cloudify_agent/tests/utils.py
# def create_agent_package(directory, config, package_logger=None):
# if package_logger is None:
# package_logger = logger
# package_logger.info('Changing directory into {0}'.format(directory))
# original = os.getcwd()
# try:
# package_logger.info('Creating Agent Package')
# os.chdir(directory)
# if platform.system() == 'Linux':
# packager.create(config=config,
# config_file=None,
# force=False,
# verbose=False)
# distname, _, distid = platform.dist()
# return '{0}-{1}-agent.tar.gz'.format(distname, distid)
# elif platform.system() == 'Windows':
# create_windows_installer(config, logger)
# return 'cloudify-windows-agent-{}.exe'.format(get_agent_version())
# else:
# raise NonRecoverableError('Platform not supported: {0}'
# .format(platform.system()))
# finally:
# os.chdir(original)
#
# def get_requirements_uri():
# return os.path.join(get_source_uri(), 'dev-requirements.txt')
#
# def get_source_uri():
# return os.path.dirname(os.path.dirname(cloudify_agent.__file__))
. Output only the next line. | config.set('install', 'cloudify_agent_module', get_source_uri()) |
Based on the snippet: <|code_start|>@click.option('--no-sudo',
help='Indication whether sudo should be used when applying '
' disable-requiretty part',
is_flag=True)
@handle_failures
def configure(disable_requiretty, relocated_env, no_sudo):
"""
Configures global agent properties.
"""
if disable_requiretty:
click.echo('Disabling requiretty directive in sudoers file')
_disable_requiretty(no_sudo)
click.echo('Successfully disabled requiretty for cfy-agent')
def _disable_requiretty(no_sudo):
"""
Disables the requiretty directive in the /etc/sudoers file. This
will enable operations that require sudo permissions to work properly.
This is needed because operations are executed
from within the worker process, which is not a tty process.
"""
runner = LocalCommandRunner(get_logger())
<|code_end|>
, predict the immediate next line with the help of imports:
import click
from cloudify_agent.api import utils
from cloudify.utils import LocalCommandRunner
from cloudify_agent.shell.decorators import handle_failures
from cloudify_agent.shell.commands import cfy
from cloudify_agent.shell.main import get_logger
and context (classes, functions, sometimes code) from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/decorators.py
# def handle_failures(func):
#
# @wraps(func)
# def wrapper(*args, **kwargs):
# try:
# func(*args, **kwargs)
# except BaseException as e:
# tpe, value, tb = sys.exc_info()
#
# if isinstance(e, exceptions.DaemonException):
#
# # convert api exceptions to click exceptions.
# value = click.ClickException(str(e))
#
# if isinstance(e, exceptions.DaemonError):
#
# # convert api errors to cli exceptions
# value = click.ClickException(str(e))
#
# # set the exit_code accordingly. the exit_code property is later
# # read by the click framework to set the exit code of
# # the process.
# value.exit_code = codes.get(tpe, 1)
# reraise(type(value), value, tb)
#
# return wrapper
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
. Output only the next line. | disable_requiretty_script_path = utils.resource_to_tempfile( |
Based on the snippet: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
@cfy.command()
@click.option('--disable-requiretty',
help='Disables the requiretty directive in the sudoers file.',
is_flag=True)
@click.option('--relocated-env',
help='Indication that this virtualenv was relocated. '
'If this option is passed, an auto-correction '
'to the virtualenv shabang entries '
'will be performed',
is_flag=True)
@click.option('--no-sudo',
help='Indication whether sudo should be used when applying '
' disable-requiretty part',
is_flag=True)
<|code_end|>
, predict the immediate next line with the help of imports:
import click
from cloudify_agent.api import utils
from cloudify.utils import LocalCommandRunner
from cloudify_agent.shell.decorators import handle_failures
from cloudify_agent.shell.commands import cfy
from cloudify_agent.shell.main import get_logger
and context (classes, functions, sometimes code) from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/shell/decorators.py
# def handle_failures(func):
#
# @wraps(func)
# def wrapper(*args, **kwargs):
# try:
# func(*args, **kwargs)
# except BaseException as e:
# tpe, value, tb = sys.exc_info()
#
# if isinstance(e, exceptions.DaemonException):
#
# # convert api exceptions to click exceptions.
# value = click.ClickException(str(e))
#
# if isinstance(e, exceptions.DaemonError):
#
# # convert api errors to cli exceptions
# value = click.ClickException(str(e))
#
# # set the exit_code accordingly. the exit_code property is later
# # read by the click framework to set the exit code of
# # the process.
# value.exit_code = codes.get(tpe, 1)
# reraise(type(value), value, tb)
#
# return wrapper
#
# Path: cloudify_agent/shell/commands/cfy.py
# CLICK_CONTEXT_SETTINGS = dict(
# help_option_names=['-h', '--help'],
# token_normalize_func=lambda param: param.lower())
# class CommandMixin(object):
# class AgentCommand(CommandMixin, click.Command):
# class AgentGroup(CommandMixin, click.Group):
# def main(
# self,
# args=None,
# prog_name=None,
# complete_var=None,
# standalone_mode=True,
# **extra
# ):
# def set_locale_env():
# def group(name=None):
# def command(*args, **kwargs):
. Output only the next line. | @handle_failures |
Given the code snippet: <|code_start|>
##############################################################################
# note that this file only tests validation and defaults of the winrm runner.
# it does not test the actual functionality because that requires starting
# a vm. functional tests are executed as local workflow tests in the system
# tests framework
##############################################################################
def test_validate_host():
# Missing host
session_config = {
'user': 'test_user',
'password': 'test_password'
}
with pytest.raises(ValueError, match='.*Invalid host.*'):
<|code_end|>
, generate the next line using the imports in this file:
import pytest
from cloudify_agent.installer.runners import winrm_runner
from cloudify_agent.installer.runners.winrm_runner import split_into_chunks
and context (functions, classes, or occasionally code) from other files:
# Path: cloudify_agent/installer/runners/winrm_runner.py
# DEFAULT_WINRM_PORT = '5985'
# DEFAULT_WINRM_URI = 'wsman'
# DEFAULT_WINRM_PROTOCOL = 'http'
# DEFAULT_TRANSPORT = 'basic'
# def validate(session_config):
# def _validate(prop):
# def __init__(self,
# user,
# password=None,
# protocol=None,
# host=None,
# port=None,
# uri=None,
# transport=None,
# validate_connection=True,
# logger=None,
# tmpdir=None):
# def validate_connection(self):
# def _create_session(self):
# def run(self, command, raise_on_failure=True, execution_env=None,
# powershell=False):
# def _chk(res):
# def ping(self):
# def delete(self, path, ignore_missing=False):
# def mktemp(self):
# def get_temp_dir(self):
# def new_dir(self, path):
# def machine_distribution(self):
# def python(self, imports_line, command):
# def put(self, contents, path):
# def put_file(self, src, dst=None):
# def close(self):
# def run_script(self, script_path):
# def split_into_chunks(contents, max_size=2000, separator='\r\n'):
# def join_lines(lines, line):
# class WinRMRunner(object):
# class WinRMCommandExecutionError(CommandExecutionError):
# class WinRMCommandExecutionException(CommandExecutionException):
# class WinRMCommandExecutionResponse(CommandExecutionResponse):
#
# Path: cloudify_agent/installer/runners/winrm_runner.py
# def split_into_chunks(contents, max_size=2000, separator='\r\n'):
# """Split content into chunks to avoid command line too long error.
#
# Maximum allowed command line length should be 2047 in old windows:
# https://support.microsoft.com/en-us/help/830473/command-prompt-cmd--exe-command-line-string-limitation
#
# :param contents:
# The contents of a file that exceeds the maximum command line length in
# windows.
# :type content: str
# :returns: The same content in chunks that won't exceed the limit
# :rtype: list[str]
#
# """
# def join_lines(lines, line):
# if len(line) > max_size:
# raise ValueError('Line too long (%d characters)' % len(line))
#
# if (
# lines and
# len(lines[-1]) + len(line) + len(separator) <= max_size
# ):
# lines[-1] += '{0}{1}'.format(separator, line)
# else:
# lines.append(line)
# return lines
#
# if contents:
# chunks = reduce(join_lines, contents.splitlines(), [])
# else:
# chunks = ['']
# return chunks
. Output only the next line. | winrm_runner.validate(session_config) |
Continue the code snippet: <|code_start|> # Missing password
session_config = {
'host': 'test_host',
'user': 'test_user'
}
with pytest.raises(ValueError, match='.*Invalid password.*'):
winrm_runner.validate(session_config)
def test_defaults():
runner = winrm_runner.WinRMRunner(
validate_connection=False,
host='test_host',
user='test_user',
password='test_password')
assert runner.session_config['protocol'] == \
winrm_runner.DEFAULT_WINRM_PROTOCOL
assert runner.session_config['uri'] == \
winrm_runner.DEFAULT_WINRM_URI
assert runner.session_config['port'] == \
winrm_runner.DEFAULT_WINRM_PORT
assert runner.session_config['transport'] == \
winrm_runner.DEFAULT_TRANSPORT
def test_empty_string():
"""An empty string is not splitted."""
contents = ''
expected_chunks = ['']
<|code_end|>
. Use current file imports:
import pytest
from cloudify_agent.installer.runners import winrm_runner
from cloudify_agent.installer.runners.winrm_runner import split_into_chunks
and context (classes, functions, or code) from other files:
# Path: cloudify_agent/installer/runners/winrm_runner.py
# DEFAULT_WINRM_PORT = '5985'
# DEFAULT_WINRM_URI = 'wsman'
# DEFAULT_WINRM_PROTOCOL = 'http'
# DEFAULT_TRANSPORT = 'basic'
# def validate(session_config):
# def _validate(prop):
# def __init__(self,
# user,
# password=None,
# protocol=None,
# host=None,
# port=None,
# uri=None,
# transport=None,
# validate_connection=True,
# logger=None,
# tmpdir=None):
# def validate_connection(self):
# def _create_session(self):
# def run(self, command, raise_on_failure=True, execution_env=None,
# powershell=False):
# def _chk(res):
# def ping(self):
# def delete(self, path, ignore_missing=False):
# def mktemp(self):
# def get_temp_dir(self):
# def new_dir(self, path):
# def machine_distribution(self):
# def python(self, imports_line, command):
# def put(self, contents, path):
# def put_file(self, src, dst=None):
# def close(self):
# def run_script(self, script_path):
# def split_into_chunks(contents, max_size=2000, separator='\r\n'):
# def join_lines(lines, line):
# class WinRMRunner(object):
# class WinRMCommandExecutionError(CommandExecutionError):
# class WinRMCommandExecutionException(CommandExecutionException):
# class WinRMCommandExecutionResponse(CommandExecutionResponse):
#
# Path: cloudify_agent/installer/runners/winrm_runner.py
# def split_into_chunks(contents, max_size=2000, separator='\r\n'):
# """Split content into chunks to avoid command line too long error.
#
# Maximum allowed command line length should be 2047 in old windows:
# https://support.microsoft.com/en-us/help/830473/command-prompt-cmd--exe-command-line-string-limitation
#
# :param contents:
# The contents of a file that exceeds the maximum command line length in
# windows.
# :type content: str
# :returns: The same content in chunks that won't exceed the limit
# :rtype: list[str]
#
# """
# def join_lines(lines, line):
# if len(line) > max_size:
# raise ValueError('Line too long (%d characters)' % len(line))
#
# if (
# lines and
# len(lines[-1]) + len(line) + len(separator) <= max_size
# ):
# lines[-1] += '{0}{1}'.format(separator, line)
# else:
# lines.append(line)
# return lines
#
# if contents:
# chunks = reduce(join_lines, contents.splitlines(), [])
# else:
# chunks = ['']
# return chunks
. Output only the next line. | assert split_into_chunks(contents) == expected_chunks |
Continue the code snippet: <|code_start|>########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
"""
This module maintains backwards compatibility with Compute node types
version < 3.3.
"""
@operation
def install(**kwargs):
_fix_winrm_port_for_old_windows_blueprints(kwargs)
<|code_end|>
. Use current file imports:
from cloudify.decorators import operation
from cloudify_agent.installer import operations
and context (classes, functions, or code) from other files:
# Path: cloudify_agent/installer/operations.py
# def create(cloudify_agent, installer, **_):
# def configure(cloudify_agent, installer, **_):
# def start(cloudify_agent, **_):
# def stop(cloudify_agent, installer, **_):
# def delete(cloudify_agent, installer, **_):
# def restart(cloudify_agent, installer, **_):
. Output only the next line. | operations.create(**kwargs) |
Using the snippet: <|code_start|>@pytest.fixture(scope='function')
def detach_daemon(tmp_path, agent_ssl_cert):
daemon = TestDetachedDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'",
exit_on_failure=False)
for _daemon in daemon.daemons:
daemon.factory.delete(_daemon.name)
@pytest.fixture(scope='function')
def initd_daemon(tmp_path, agent_ssl_cert):
daemon = TestInitdDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'",
exit_on_failure=False)
for _daemon in daemon.daemons:
daemon.factory.delete(_daemon.name)
@pytest.fixture(scope='function')
def nssm_daemon(tmp_path, agent_ssl_cert):
daemon = TestNSSMDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
<|code_end|>
, determine the next line of code. You have imports:
import os
import pytest
from cloudify.utils import setup_logger
from cloudify_agent.api import utils
from cloudify_agent.tests.api.pm.daemons import (
TestDetachedDaemon,
TestInitdDaemon,
TestNSSMDaemon,
)
and context (class names, function names, or code) available:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/tests/api/pm/daemons.py
# class TestDetachedDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return DetachedDaemon
#
# class TestInitdDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return InitDDaemon
#
# class TestNSSMDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return NonSuckingServiceManagerDaemon
. Output only the next line. | nssm_path = utils.get_absolute_resource_path( |
Predict the next line for this snippet: <|code_start|>
logger = setup_logger('cloudify_agent.tests.api.pm')
@pytest.fixture(scope='function', autouse=True)
def always_mock_storage_dir(mock_get_storage_dir):
yield
@pytest.fixture(scope='function')
def detach_daemon(tmp_path, agent_ssl_cert):
<|code_end|>
with the help of current file imports:
import os
import pytest
from cloudify.utils import setup_logger
from cloudify_agent.api import utils
from cloudify_agent.tests.api.pm.daemons import (
TestDetachedDaemon,
TestInitdDaemon,
TestNSSMDaemon,
)
and context from other files:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/tests/api/pm/daemons.py
# class TestDetachedDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return DetachedDaemon
#
# class TestInitdDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return InitDDaemon
#
# class TestNSSMDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return NonSuckingServiceManagerDaemon
, which may contain function names, class names, or code. Output only the next line. | daemon = TestDetachedDaemon(tmp_path, logger, agent_ssl_cert) |
Given snippet: <|code_start|>
logger = setup_logger('cloudify_agent.tests.api.pm')
@pytest.fixture(scope='function', autouse=True)
def always_mock_storage_dir(mock_get_storage_dir):
yield
@pytest.fixture(scope='function')
def detach_daemon(tmp_path, agent_ssl_cert):
daemon = TestDetachedDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'",
exit_on_failure=False)
for _daemon in daemon.daemons:
daemon.factory.delete(_daemon.name)
@pytest.fixture(scope='function')
def initd_daemon(tmp_path, agent_ssl_cert):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import pytest
from cloudify.utils import setup_logger
from cloudify_agent.api import utils
from cloudify_agent.tests.api.pm.daemons import (
TestDetachedDaemon,
TestInitdDaemon,
TestNSSMDaemon,
)
and context:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/tests/api/pm/daemons.py
# class TestDetachedDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return DetachedDaemon
#
# class TestInitdDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return InitDDaemon
#
# class TestNSSMDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return NonSuckingServiceManagerDaemon
which might include code, classes, or functions. Output only the next line. | daemon = TestInitdDaemon(tmp_path, logger, agent_ssl_cert) |
Given snippet: <|code_start|>def always_mock_storage_dir(mock_get_storage_dir):
yield
@pytest.fixture(scope='function')
def detach_daemon(tmp_path, agent_ssl_cert):
daemon = TestDetachedDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'",
exit_on_failure=False)
for _daemon in daemon.daemons:
daemon.factory.delete(_daemon.name)
@pytest.fixture(scope='function')
def initd_daemon(tmp_path, agent_ssl_cert):
daemon = TestInitdDaemon(tmp_path, logger, agent_ssl_cert)
yield daemon
daemon.runner.run("pkill -9 -f 'cloudify_agent.worker'",
exit_on_failure=False)
for _daemon in daemon.daemons:
daemon.factory.delete(_daemon.name)
@pytest.fixture(scope='function')
def nssm_daemon(tmp_path, agent_ssl_cert):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import pytest
from cloudify.utils import setup_logger
from cloudify_agent.api import utils
from cloudify_agent.tests.api.pm.daemons import (
TestDetachedDaemon,
TestInitdDaemon,
TestNSSMDaemon,
)
and context:
# Path: cloudify_agent/api/utils.py
# class _Internal(object):
# CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
# CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'
# def get_daemon_storage_dir(cls):
# def get_daemon_user(cls):
# def get_storage_directory(cls, username=None):
# def generate_agent_name():
# def generate_new_agent_name(old_agent_name):
# def daemon_to_dict(daemon):
# def get_broker_url(agent):
# def get_agent_registered(name,
# celery_client,
# timeout=workflows_tasks.INSPECT_TIMEOUT):
# def get_windows_home_dir(username):
# def get_home_dir(username=None):
# def render_template_to_file(template_path, file_path=None, **values):
# def resource_to_tempfile(resource_path, executable=False):
# def get_resource(resource_path):
# def get_absolute_resource_path(resource_path):
# def content_to_file(content, file_path=None, executable=False):
# def env_to_file(env_variables, destination_path=None, posix=True):
# def stringify_values(dictionary):
# def purge_none_values(dictionary):
# def json_load(file_path):
# def json_loads(content):
# def safe_create_dir(path):
# def get_rest_client(rest_host,
# rest_port,
# rest_token,
# rest_tenant,
# ssl_cert_path,
# bypass_maintenance_mode=False):
# def _parse_comma_separated(ctx, param, value):
# def get_manager_file_server_url(hostname, port, scheme=None):
# def get_agent_version():
# def get_windows_basedir():
# def get_linux_basedir():
# def get_agent_basedir(is_windows=False):
#
# Path: cloudify_agent/tests/api/pm/daemons.py
# class TestDetachedDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return DetachedDaemon
#
# class TestInitdDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return InitDDaemon
#
# class TestNSSMDaemon(BaseDaemon):
# @property
# def daemon_cls(self):
# return NonSuckingServiceManagerDaemon
which might include code, classes, or functions. Output only the next line. | daemon = TestNSSMDaemon(tmp_path, logger, agent_ssl_cert) |
Predict the next line for this snippet: <|code_start|>
class AutoscaleApp(AppMixin, TemplateView):
template_name = 'app/index.html'
def get_context_data(self, *args, **kwargs):
context = super(AutoscaleApp, self).get_context_data(*args, **kwargs)
token = self.request.session.get('tsuru_token').split(" ")[-1]
<|code_end|>
with the help of current file imports:
import os
from django.views.generic import TemplateView
from tsuru_autoscale.instance import client
from tsuru_autoscale.wizard import client as wclient
from tsuru_dashboard.apps.views import AppMixin
and context from other files:
# Path: tsuru_autoscale/instance/client.py
# def host():
# def list(token):
# def get(name, token):
# def alarms_by_instance(instance, token):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
, which may contain function names, class names, or code. Output only the next line. | instances = client.list(token).json() or [] |
Given the code snippet: <|code_start|>
class AutoscaleApp(AppMixin, TemplateView):
template_name = 'app/index.html'
def get_context_data(self, *args, **kwargs):
context = super(AutoscaleApp, self).get_context_data(*args, **kwargs)
token = self.request.session.get('tsuru_token').split(" ")[-1]
instances = client.list(token).json() or []
app = context.get('app', {})
pool_name = app.get('pool')
<|code_end|>
, generate the next line using the imports in this file:
import os
from django.views.generic import TemplateView
from tsuru_autoscale.instance import client
from tsuru_autoscale.wizard import client as wclient
from tsuru_dashboard.apps.views import AppMixin
and context (functions, classes, or occasionally code) from other files:
# Path: tsuru_autoscale/instance/client.py
# def host():
# def list(token):
# def get(name, token):
# def alarms_by_instance(instance, token):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
. Output only the next line. | pool_info = wclient.pool_info(pool_name, token) if pool_name else None |
Continue the code snippet: <|code_start|>
def new(request):
form = DataSourceForm(request.POST or None)
if form.is_valid():
token = request.GET.get("TSURU_TOKEN")
<|code_end|>
. Use current file imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.datasource.forms import DataSourceForm
from tsuru_autoscale.datasource import client
and context (classes, functions, or code) from other files:
# Path: tsuru_autoscale/datasource/forms.py
# class DataSourceForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | response = client.new(form.cleaned_data, token) |
Predict the next line for this snippet: <|code_start|>
class ScaleFormTest(TestCase):
def test_required_fields(self):
fields = {
"metric": True,
"operator": True,
"value": True,
"step": True,
"wait": True,
"aggregator": True,
}
<|code_end|>
with the help of current file imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.conf import settings
from tsuru_autoscale.wizard import forms
from tsuru_autoscale.wizard import client
from importlib import import_module
import mock
import httpretty
import os
and context from other files:
# Path: tsuru_autoscale/wizard/forms.py
# OPERATOR_CHOICES = (
# ('>', '>'),
# ('>=', '>='),
# ('<=', '<='),
# ('<', '<'),
# ('!=', '!='),
# )
# AGGREGATOR_CHOICES = (
# ('avg', 'average'),
# ('max', 'maximum'),
# )
# class ScaleForm(forms.Form):
# class ConfigForm(forms.Form):
# def __init__(self, min_wait, *args, **kwargs):
# def clean_value(self):
# def clean_step(self):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
, which may contain function names, class names, or code. Output only the next line. | form = forms.ScaleForm(42) |
Next line prediction: <|code_start|> "wait": True,
"aggregator": True,
}
form = forms.ScaleForm(42)
for field, required in fields.items():
self.assertEqual(form.fields[field].required, required)
self.assertEqual(form.fields['wait'].min_value, 42)
class ConfigFormTest(TestCase):
def test_required_fields(self):
fields = {
"min": True,
}
form = forms.ConfigForm()
for field, required in fields.items():
self.assertEqual(form.fields[field].required, required)
class IndexTestCase(TestCase):
def setUp(self):
settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
engine = import_module(settings.SESSION_ENGINE)
store = engine.SessionStore()
store.save()
self.session = store
<|code_end|>
. Use current file imports:
(from django.test import TestCase
from django.core.urlresolvers import reverse
from django.conf import settings
from tsuru_autoscale.wizard import forms
from tsuru_autoscale.wizard import client
from importlib import import_module
import mock
import httpretty
import os)
and context including class names, function names, or small code snippets from other files:
# Path: tsuru_autoscale/wizard/forms.py
# OPERATOR_CHOICES = (
# ('>', '>'),
# ('>=', '>='),
# ('<=', '<='),
# ('<', '<'),
# ('!=', '!='),
# )
# AGGREGATOR_CHOICES = (
# ('avg', 'average'),
# ('max', 'maximum'),
# )
# class ScaleForm(forms.Form):
# class ConfigForm(forms.Form):
# def __init__(self, min_wait, *args, **kwargs):
# def clean_value(self):
# def clean_step(self):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
. Output only the next line. | self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key |
Using the snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.action.client.list")
@mock.patch("tsuru_autoscale.action.client.remove")
def test_new_post(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("action-remove", args=["name"]))
response = self.client.delete(url)
url = "{}?TSURU_TOKEN=bla".format(reverse("action-list"))
self.assertRedirects(response, url)
remove_mock.assert_called_with("name", "bla")
class NewTestCase(TestCase):
def test_new(self):
url = "{}?TSURU_TOKEN=bla".format(reverse("action-new"))
response = self.client.get(url)
self.assertTemplateUsed(response, "action/new.html")
<|code_end|>
, determine the next line of code. You have imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.action.forms import ActionForm
from tsuru_autoscale.action import client
import httpretty
import mock
import os
and context (class names, function names, or code) available:
# Path: tsuru_autoscale/action/forms.py
# class ActionForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/action/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | self.assertIsInstance(response.context['form'], ActionForm) |
Given the code snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.action.client.list")
@mock.patch("tsuru_autoscale.action.client.remove")
def test_new_post(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("action-remove", args=["name"]))
<|code_end|>
, generate the next line using the imports in this file:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.action.forms import ActionForm
from tsuru_autoscale.action import client
import httpretty
import mock
import os
and context (functions, classes, or occasionally code) from other files:
# Path: tsuru_autoscale/action/forms.py
# class ActionForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/action/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | response = self.client.delete(url) |
Predict the next line after this snippet: <|code_start|>
def new(request):
form = ActionForm(request.POST or None)
if form.is_valid():
token = request.GET.get("TSURU_TOKEN")
<|code_end|>
using the current file's imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.action.forms import ActionForm
from tsuru_autoscale.action import client
and any relevant context from other files:
# Path: tsuru_autoscale/action/forms.py
# class ActionForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/action/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | client.new(form.cleaned_data, token) |
Predict the next line after this snippet: <|code_start|>
def get_or_create_tsuru_instance(instance_name, token):
token = urllib.unquote(token)
token = "bearer {}".format(token)
url = "{}/services/autoscale/instances/{}".format(client.tsuru_host(), instance_name)
headers = {"Authorization": token}
response = requests.get(url, headers=headers)
if response.status_code == 200:
return
app = client.app_info(instance_name, token)
url = "{}/services/autoscale/instances".format(client.tsuru_host(), instance_name)
headers = {"Authorization": token, "Content-Type": "application/x-www-form-urlencoded"}
data = {"service_name": "autoscale", "name": instance_name, "owner": app["teamowner"]}
response = requests.post(url, headers=headers, data=data)
url = "{}/services/{}/instances/{}/{}".format(client.tsuru_host(), "autoscale", instance_name, instance_name)
headers = {"Authorization": token}
response = requests.put(url, headers=headers, data={"noRestart": "true"})
def new(request, instance=None):
token = request.session.get('tsuru_token').split(" ")[-1]
dlist = [(d["Name"], d["Name"]) for d in dclient.list(token).json()]
<|code_end|>
using the current file's imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.wizard import forms
from tsuru_autoscale.wizard import client
from tsuru_autoscale.datasource import client as dclient
import requests
import urllib
and any relevant context from other files:
# Path: tsuru_autoscale/wizard/forms.py
# OPERATOR_CHOICES = (
# ('>', '>'),
# ('>=', '>='),
# ('<=', '<='),
# ('<', '<'),
# ('!=', '!='),
# )
# AGGREGATOR_CHOICES = (
# ('avg', 'average'),
# ('max', 'maximum'),
# )
# class ScaleForm(forms.Form):
# class ConfigForm(forms.Form):
# def __init__(self, min_wait, *args, **kwargs):
# def clean_value(self):
# def clean_step(self):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | scale_up_form = forms.ScaleForm(30, request.POST or None, prefix="scale_up", initial={"operator": ">"}) |
Given the code snippet: <|code_start|>
def get_or_create_tsuru_instance(instance_name, token):
token = urllib.unquote(token)
token = "bearer {}".format(token)
<|code_end|>
, generate the next line using the imports in this file:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.wizard import forms
from tsuru_autoscale.wizard import client
from tsuru_autoscale.datasource import client as dclient
import requests
import urllib
and context (functions, classes, or occasionally code) from other files:
# Path: tsuru_autoscale/wizard/forms.py
# OPERATOR_CHOICES = (
# ('>', '>'),
# ('>=', '>='),
# ('<=', '<='),
# ('<', '<'),
# ('!=', '!='),
# )
# AGGREGATOR_CHOICES = (
# ('avg', 'average'),
# ('max', 'maximum'),
# )
# class ScaleForm(forms.Form):
# class ConfigForm(forms.Form):
# def __init__(self, min_wait, *args, **kwargs):
# def clean_value(self):
# def clean_step(self):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | url = "{}/services/autoscale/instances/{}".format(client.tsuru_host(), instance_name) |
Continue the code snippet: <|code_start|>
def get_or_create_tsuru_instance(instance_name, token):
token = urllib.unquote(token)
token = "bearer {}".format(token)
url = "{}/services/autoscale/instances/{}".format(client.tsuru_host(), instance_name)
headers = {"Authorization": token}
response = requests.get(url, headers=headers)
if response.status_code == 200:
return
app = client.app_info(instance_name, token)
url = "{}/services/autoscale/instances".format(client.tsuru_host(), instance_name)
headers = {"Authorization": token, "Content-Type": "application/x-www-form-urlencoded"}
data = {"service_name": "autoscale", "name": instance_name, "owner": app["teamowner"]}
response = requests.post(url, headers=headers, data=data)
url = "{}/services/{}/instances/{}/{}".format(client.tsuru_host(), "autoscale", instance_name, instance_name)
headers = {"Authorization": token}
response = requests.put(url, headers=headers, data={"noRestart": "true"})
def new(request, instance=None):
token = request.session.get('tsuru_token').split(" ")[-1]
<|code_end|>
. Use current file imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.wizard import forms
from tsuru_autoscale.wizard import client
from tsuru_autoscale.datasource import client as dclient
import requests
import urllib
and context (classes, functions, or code) from other files:
# Path: tsuru_autoscale/wizard/forms.py
# OPERATOR_CHOICES = (
# ('>', '>'),
# ('>=', '>='),
# ('<=', '<='),
# ('<', '<'),
# ('!=', '!='),
# )
# AGGREGATOR_CHOICES = (
# ('avg', 'average'),
# ('max', 'maximum'),
# )
# class ScaleForm(forms.Form):
# class ConfigForm(forms.Form):
# def __init__(self, min_wait, *args, **kwargs):
# def clean_value(self):
# def clean_step(self):
#
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | dlist = [(d["Name"], d["Name"]) for d in dclient.list(token).json()] |
Given the following code snippet before the placeholder: <|code_start|>
class ListTestCase(TestCase):
@mock.patch("tsuru_autoscale.event.client.list")
def test_list(self, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("event-list", args=["alarm_name"]))
<|code_end|>
, predict the next line using imports from the current file:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.event import client
import httpretty
import mock
import os
and context including class names, function names, and sometimes code from other files:
# Path: tsuru_autoscale/event/client.py
# def host():
# def list(alarm_name, token):
. Output only the next line. | response = self.client.get(url) |
Predict the next line after this snippet: <|code_start|>
urlpatterns = [
url(r'^$', views.list, name='alarm-list'),
url(r'^new/$', views.new, name='alarm-new'),
url(r'^(?P<name>[\w\s-]+)/remove/$', views.remove, name='alarm-remove'),
<|code_end|>
using the current file's imports:
from django.conf.urls import url
from tsuru_autoscale.alarm import views
from tsuru_autoscale.event.views import list as event_list
and any relevant context from other files:
# Path: tsuru_autoscale/alarm/views.py
# def new(request):
# def list(request):
# def remove(request, name):
# def get(request, name):
#
# Path: tsuru_autoscale/event/views.py
# def list(request, alarm_name):
# token = request.GET.get("TSURU_TOKEN")
# events = client.list(alarm_name, token).json()
# context = {
# "list": events,
# }
# return render(request, "event/list.html", context)
. Output only the next line. | url(r'^(?P<alarm_name>[\w\s-]+)/event/$', event_list, name='event-list'), |
Given the code snippet: <|code_start|>
class ListTestCase(TestCase):
def setUp(self):
settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
engine = import_module(settings.SESSION_ENGINE)
store = engine.SessionStore()
store.save()
self.session = store
<|code_end|>
, generate the next line using the imports in this file:
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.conf import settings
from tsuru_autoscale.instance import client
from importlib import import_module
import httpretty
import mock
import os
and context (functions, classes, or occasionally code) from other files:
# Path: tsuru_autoscale/instance/client.py
# def host():
# def list(token):
# def get(name, token):
# def alarms_by_instance(instance, token):
. Output only the next line. | self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key |
Based on the snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.datasource.client.list")
@mock.patch("tsuru_autoscale.datasource.client.remove")
def test_remove(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("datasource-remove", args=["name"]))
response = self.client.delete(url)
url = "{}?TSURU_TOKEN=bla".format(reverse("datasource-list"))
self.assertRedirects(response, url)
remove_mock.assert_called_with("name", "bla")
class NewTestCase(TestCase):
def test_new(self):
url = "{}?TSURU_TOKEN=bla".format(reverse("datasource-new"))
response = self.client.get(url)
self.assertTemplateUsed(response, "datasource/new.html")
<|code_end|>
, predict the immediate next line with the help of imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.datasource.forms import DataSourceForm
from tsuru_autoscale.datasource import client
import httpretty
import mock
import os
and context (classes, functions, sometimes code) from other files:
# Path: tsuru_autoscale/datasource/forms.py
# class DataSourceForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
. Output only the next line. | self.assertIsInstance(response.context['form'], DataSourceForm) |
Given snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.datasource.client.list")
@mock.patch("tsuru_autoscale.datasource.client.remove")
def test_remove(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("datasource-remove", args=["name"]))
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.datasource.forms import DataSourceForm
from tsuru_autoscale.datasource import client
import httpretty
import mock
import os
and context:
# Path: tsuru_autoscale/datasource/forms.py
# class DataSourceForm(forms.Form):
# name = forms.CharField()
# url = forms.CharField()
# method = forms.CharField()
# body = forms.CharField(required=False)
# headers = forms.CharField(required=False)
#
# Path: tsuru_autoscale/datasource/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
which might include code, classes, or functions. Output only the next line. | response = self.client.delete(url) |
Predict the next line after this snippet: <|code_start|>
class NativeAutoscale(AppMixin, FormView):
template_name = "native/index.html"
form_class = forms.ScaleForm
def get_form(self):
token = self.request.session.get('tsuru_token').split(" ")[-1]
app_name = self.kwargs.get('app_name')
form = forms.ScaleForm(self.request.POST or None)
<|code_end|>
using the current file's imports:
import json
import requests
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.views.generic import FormView, View
from tsuru_dashboard.apps.views import AppMixin
from tsuru_autoscale.wizard import client as wclient
from tsuru_autoscale.native import forms
and any relevant context from other files:
# Path: tsuru_autoscale/wizard/client.py
# def host():
# def tsuru_host():
# def clean_token(token):
# def app_info(name, token):
# def pool_info(name, token):
# def process_list(instance_name, token):
# def new(data, token):
# def get(name, token):
# def remove(name, token):
# def enable(name, token):
# def disable(name, token):
# def events(name, token):
#
# Path: tsuru_autoscale/native/forms.py
# class ScaleForm(forms.Form):
# def clean(self):
. Output only the next line. | p_list = wclient.process_list(app_name, token) |
Predict the next line for this snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.alarm.client.list")
@mock.patch("tsuru_autoscale.alarm.client.remove")
def test_remove(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("alarm-remove", args=["name"]))
response = self.client.delete(url)
url = "{}?TSURU_TOKEN=bla".format(reverse("alarm-list"))
self.assertRedirects(response, url)
remove_mock.assert_called_with("name", "bla")
class NewTestCase(TestCase):
@mock.patch("tsuru_autoscale.alarm.client.service_instance_list")
@mock.patch("tsuru_autoscale.datasource.client")
@mock.patch("tsuru_autoscale.action.client")
def test_new(self, ds_client_mock, a_client_mock, sil):
url = "{}?TSURU_TOKEN=bla".format(reverse("alarm-new"))
response = self.client.get(url)
self.assertTemplateUsed(response, "alarm/new.html")
<|code_end|>
with the help of current file imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.alarm.forms import AlarmForm
from tsuru_autoscale.alarm import client
import httpretty
import mock
import os
and context from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
, which may contain function names, class names, or code. Output only the next line. | self.assertIsInstance(response.context['form'], AlarmForm) |
Predict the next line after this snippet: <|code_start|>
class RemoveTestCase(TestCase):
@mock.patch("tsuru_autoscale.alarm.client.list")
@mock.patch("tsuru_autoscale.alarm.client.remove")
def test_remove(self, remove_mock, list_mock):
url = "{}?TSURU_TOKEN=bla".format(reverse("alarm-remove", args=["name"]))
<|code_end|>
using the current file's imports:
from django.test import TestCase
from django.core.urlresolvers import reverse
from tsuru_autoscale.alarm.forms import AlarmForm
from tsuru_autoscale.alarm import client
import httpretty
import mock
import os
and any relevant context from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
. Output only the next line. | response = self.client.delete(url) |
Here is a snippet: <|code_start|>
def new(request):
token = request.GET.get("TSURU_TOKEN")
form = AlarmForm(request.POST or None)
<|code_end|>
. Write the next line using the current file imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.alarm.forms import AlarmForm, datasource_list, action_list, service_instance_list
from tsuru_autoscale.alarm import client
and context from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# def datasource_list(token):
# from tsuru_autoscale.datasource import client
# dl = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in dl]
#
# def action_list(token):
# from tsuru_autoscale.action import client
# al = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# def service_instance_list(token):
# from tsuru_autoscale.alarm import client
# al = client.service_instance_list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
, which may include functions, classes, or code. Output only the next line. | form.fields['datasource'].choices = datasource_list(token) |
Here is a snippet: <|code_start|>
def new(request):
token = request.GET.get("TSURU_TOKEN")
form = AlarmForm(request.POST or None)
form.fields['datasource'].choices = datasource_list(token)
<|code_end|>
. Write the next line using the current file imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.alarm.forms import AlarmForm, datasource_list, action_list, service_instance_list
from tsuru_autoscale.alarm import client
and context from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# def datasource_list(token):
# from tsuru_autoscale.datasource import client
# dl = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in dl]
#
# def action_list(token):
# from tsuru_autoscale.action import client
# al = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# def service_instance_list(token):
# from tsuru_autoscale.alarm import client
# al = client.service_instance_list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
, which may include functions, classes, or code. Output only the next line. | form.fields['actions'].choices = action_list(token) |
Here is a snippet: <|code_start|>
def new(request):
token = request.GET.get("TSURU_TOKEN")
form = AlarmForm(request.POST or None)
form.fields['datasource'].choices = datasource_list(token)
form.fields['actions'].choices = action_list(token)
<|code_end|>
. Write the next line using the current file imports:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.alarm.forms import AlarmForm, datasource_list, action_list, service_instance_list
from tsuru_autoscale.alarm import client
and context from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# def datasource_list(token):
# from tsuru_autoscale.datasource import client
# dl = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in dl]
#
# def action_list(token):
# from tsuru_autoscale.action import client
# al = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# def service_instance_list(token):
# from tsuru_autoscale.alarm import client
# al = client.service_instance_list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
, which may include functions, classes, or code. Output only the next line. | form.fields['instance'].choices = service_instance_list(token) |
Given the following code snippet before the placeholder: <|code_start|>
def new(request):
token = request.GET.get("TSURU_TOKEN")
form = AlarmForm(request.POST or None)
form.fields['datasource'].choices = datasource_list(token)
form.fields['actions'].choices = action_list(token)
form.fields['instance'].choices = service_instance_list(token)
if form.is_valid():
<|code_end|>
, predict the next line using imports from the current file:
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.alarm.forms import AlarmForm, datasource_list, action_list, service_instance_list
from tsuru_autoscale.alarm import client
and context including class names, function names, and sometimes code from other files:
# Path: tsuru_autoscale/alarm/forms.py
# class AlarmForm(forms.Form):
# name = forms.CharField()
# expression = forms.CharField()
# enabled = forms.BooleanField(initial=True)
# wait = forms.IntegerField()
# datasource = forms.ChoiceField()
# actions = forms.MultipleChoiceField()
# instance = forms.ChoiceField()
#
# def datasource_list(token):
# from tsuru_autoscale.datasource import client
# dl = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in dl]
#
# def action_list(token):
# from tsuru_autoscale.action import client
# al = client.list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# def service_instance_list(token):
# from tsuru_autoscale.alarm import client
# al = client.service_instance_list(token).json() or []
# return [(ds['Name'], ds['Name']) for ds in al]
#
# Path: tsuru_autoscale/alarm/client.py
# def host():
# def new(data, token):
# def list(token):
# def remove(name, token):
# def get(name, token):
# def service_instance_list(token):
. Output only the next line. | client.new(form.cleaned_data, token) |
Given the following code snippet before the placeholder: <|code_start|>
def list(request, app_name=None):
token = request.session.get('tsuru_token').split(" ")[-1]
instances = client.list(token).json()
context = {
"list": instances,
}
return render(request, "instance/list.html", context)
def get(request, name):
token = request.session.get('tsuru_token').split(" ")[-1]
instance = client.get(name, token).json()
alarms = client.alarms_by_instance(name, token).json() or []
events = []
for alarm in alarms:
<|code_end|>
, predict the next line using imports from the current file:
from django.shortcuts import render
from tsuru_autoscale.instance import client
from tsuru_autoscale.event import client as eclient
and context including class names, function names, and sometimes code from other files:
# Path: tsuru_autoscale/instance/client.py
# def host():
# def list(token):
# def get(name, token):
# def alarms_by_instance(instance, token):
#
# Path: tsuru_autoscale/event/client.py
# def host():
# def list(alarm_name, token):
. Output only the next line. | events.extend(eclient.list(alarm["name"], token).json()) |
Using the snippet: <|code_start|> if x.startswith("("):
x = x[1:].strip()
if x.endswith(")"):
x = x[:-1].strip()
if x.startswith("v"):
x = x[1:].strip()
return x
def up_to_date(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
iversion = get_installed_version(env, cmd, version, args, stdout_flag,
stdout_index)
if not iversion:
return False
else:
return LooseVersion(iversion) >= LooseVersion(version)
def is_version(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
iversion = get_installed_version(env, cmd, version, args, stdout_flag,
stdout_index)
if not iversion:
return False
else:
return LooseVersion(iversion) == LooseVersion(version)
def get_installed_version(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
"""Check if the given command is up to date with the provided version.
"""
<|code_end|>
, determine the next line of code. You have imports:
from distutils.version import LooseVersion
from cloudbio.custom import shared
from cloudbio.fabutils import quiet
and context (class names, function names, or code) available:
# Path: cloudbio/custom/shared.py
# CBL_REPO_ROOT_URL = "https://raw.github.com/chapmanb/cloudbiolinux/master/"
# def chdir(new_dir):
# def safe_makedir(dname):
# def which(program, env=None):
# def is_exe(fpath):
# def _if_not_installed(pname):
# def argcatcher(func):
# def decorator(*args, **kwargs):
# def _all_cbl_paths(env, ext):
# def _executable_not_on_path(pname):
# def _galaxy_tool_install(args):
# def _galaxy_tool_present(args):
# def _if_not_python_lib(library):
# def argcatcher(func):
# def decorator(*args, **kwargs):
# def make_tmp_dir_local(ext, work_dir):
# def _make_tmp_dir(ext=None, work_dir=None):
# def __work_dir():
# def _get_expected_file(url, dir_name=None, safe_tar=False, tar_file_name=None):
# def _safe_dir_name(dir_name, need_dir=True):
# def _remote_fetch(env, url, out_file=None, allow_fail=False, fix_fn=None, samedir=False):
# def _fetch_and_unpack(url, need_dir=True, dir_name=None, revision=None,
# safe_tar=False, tar_file_name=None):
# def _configure_make(env):
# def _ac_configure_make(env):
# def _make_copy(find_cmd=None, premake_cmd=None, do_make=True):
# def _do_work(env):
# def _get_install(url, env, make_command, post_unpack_fn=None, revision=None, dir_name=None,
# safe_tar=False, tar_file_name=None):
# def _apply_patch(env, url):
# def _get_install_local(url, env, make_command, dir_name=None,
# post_unpack_fn=None, safe_tar=False, tar_file_name=None):
# def _symlinked_install_dir(pname, version, env, extra_dir=None):
# def _symlinked_dir_exists(pname, version, env, extra_dir=None):
# def _symlinked_shared_dir(pname, version, env, extra_dir=None):
# def _symlinked_java_version_dir(pname, version, env):
# def _java_install(pname, version, url, env, install_fn=None,
# pre_fetch_fn=None):
# def _python_cmd(env):
# def _pip_cmd(env):
# def _conda_cmd(env):
# def _is_anaconda(env):
# def _python_make(env):
# def _get_installed_file(env, local_file):
# def _get_installed_file_contents(env, local_file):
# def _write_to_file(contents, path, mode):
# def _get_bin_dir(env):
# def _get_include_dir(env):
# def _get_lib_dir(env):
# def _get_install_subdir(env, subdir):
# def _set_default_config(env, install_dir, sym_dir_name="default"):
# def _setup_simple_service(service_name):
# def _render_config_file_template(env, name, defaults={}, overrides={}, default_source=None):
# def _extend_env(env, defaults={}, overrides={}):
# def _setup_conf_file(env, dest, name, defaults={}, overrides={}, default_source=None, mode="0755"):
# def _add_to_profiles(line, profiles=[], use_sudo=True):
# def install_venvburrito():
# def _create_python_virtualenv(env, venv_name, reqs_file=None, reqs_url=None):
# def create():
# def _create_local_python_virtualenv(env, venv_name, reqs_file, reqs_url):
# def _create_global_python_virtualenv(env, venv_name, reqs_file, reqs_url):
# def _get_bitbucket_download_url(revision, default_repo):
# def _read_boolean(env, name, default):
#
# Path: cloudbio/fabutils.py
# def quiet():
# return settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True)
. Output only the next line. | if shared._executable_not_on_path(cmd): |
Given the code snippet: <|code_start|> if x.startswith("v"):
x = x[1:].strip()
return x
def up_to_date(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
iversion = get_installed_version(env, cmd, version, args, stdout_flag,
stdout_index)
if not iversion:
return False
else:
return LooseVersion(iversion) >= LooseVersion(version)
def is_version(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
iversion = get_installed_version(env, cmd, version, args, stdout_flag,
stdout_index)
if not iversion:
return False
else:
return LooseVersion(iversion) == LooseVersion(version)
def get_installed_version(env, cmd, version, args=None, stdout_flag=None,
stdout_index=-1):
"""Check if the given command is up to date with the provided version.
"""
if shared._executable_not_on_path(cmd):
return False
if args:
cmd = cmd + " " + " ".join(args)
<|code_end|>
, generate the next line using the imports in this file:
from distutils.version import LooseVersion
from cloudbio.custom import shared
from cloudbio.fabutils import quiet
and context (functions, classes, or occasionally code) from other files:
# Path: cloudbio/custom/shared.py
# CBL_REPO_ROOT_URL = "https://raw.github.com/chapmanb/cloudbiolinux/master/"
# def chdir(new_dir):
# def safe_makedir(dname):
# def which(program, env=None):
# def is_exe(fpath):
# def _if_not_installed(pname):
# def argcatcher(func):
# def decorator(*args, **kwargs):
# def _all_cbl_paths(env, ext):
# def _executable_not_on_path(pname):
# def _galaxy_tool_install(args):
# def _galaxy_tool_present(args):
# def _if_not_python_lib(library):
# def argcatcher(func):
# def decorator(*args, **kwargs):
# def make_tmp_dir_local(ext, work_dir):
# def _make_tmp_dir(ext=None, work_dir=None):
# def __work_dir():
# def _get_expected_file(url, dir_name=None, safe_tar=False, tar_file_name=None):
# def _safe_dir_name(dir_name, need_dir=True):
# def _remote_fetch(env, url, out_file=None, allow_fail=False, fix_fn=None, samedir=False):
# def _fetch_and_unpack(url, need_dir=True, dir_name=None, revision=None,
# safe_tar=False, tar_file_name=None):
# def _configure_make(env):
# def _ac_configure_make(env):
# def _make_copy(find_cmd=None, premake_cmd=None, do_make=True):
# def _do_work(env):
# def _get_install(url, env, make_command, post_unpack_fn=None, revision=None, dir_name=None,
# safe_tar=False, tar_file_name=None):
# def _apply_patch(env, url):
# def _get_install_local(url, env, make_command, dir_name=None,
# post_unpack_fn=None, safe_tar=False, tar_file_name=None):
# def _symlinked_install_dir(pname, version, env, extra_dir=None):
# def _symlinked_dir_exists(pname, version, env, extra_dir=None):
# def _symlinked_shared_dir(pname, version, env, extra_dir=None):
# def _symlinked_java_version_dir(pname, version, env):
# def _java_install(pname, version, url, env, install_fn=None,
# pre_fetch_fn=None):
# def _python_cmd(env):
# def _pip_cmd(env):
# def _conda_cmd(env):
# def _is_anaconda(env):
# def _python_make(env):
# def _get_installed_file(env, local_file):
# def _get_installed_file_contents(env, local_file):
# def _write_to_file(contents, path, mode):
# def _get_bin_dir(env):
# def _get_include_dir(env):
# def _get_lib_dir(env):
# def _get_install_subdir(env, subdir):
# def _set_default_config(env, install_dir, sym_dir_name="default"):
# def _setup_simple_service(service_name):
# def _render_config_file_template(env, name, defaults={}, overrides={}, default_source=None):
# def _extend_env(env, defaults={}, overrides={}):
# def _setup_conf_file(env, dest, name, defaults={}, overrides={}, default_source=None, mode="0755"):
# def _add_to_profiles(line, profiles=[], use_sudo=True):
# def install_venvburrito():
# def _create_python_virtualenv(env, venv_name, reqs_file=None, reqs_url=None):
# def create():
# def _create_local_python_virtualenv(env, venv_name, reqs_file, reqs_url):
# def _create_global_python_virtualenv(env, venv_name, reqs_file, reqs_url):
# def _get_bitbucket_download_url(revision, default_repo):
# def _read_boolean(env, name, default):
#
# Path: cloudbio/fabutils.py
# def quiet():
# return settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True)
. Output only the next line. | with quiet(): |
Given the code snippet: <|code_start|>"""
Automated installation on debian package systems with apt.
"""
def _apt_packages(to_install=None, pkg_list=None):
"""
Install packages available via apt-get.
Note that ``to_install`` and ``pkg_list`` arguments cannot be used simultaneously.
:type to_install: list
:param to_install: A list of strings (ie, groups) present in the ``main.yaml``
config file that will be used to filter out the specific
packages to be installed.
:type pkg_list: list
:param pkg_list: An explicit list of packages to install. No other files,
flavors are considered.
"""
if "minimal" not in env.flavor.short_name:
env.logger.info("Update the system")
with settings(warn_only=True):
env.safe_sudo("apt-get update")
if to_install is not None:
config_file = get_config_file(env, "packages.yaml")
if "minimal" not in env.flavor.name and "minimal" not in env.flavor.short_name:
env.flavor.apt_upgrade_system(env=env)
<|code_end|>
, generate the next line using the imports in this file:
from fabric.api import *
from fabric.contrib.files import *
from cloudbio.package.shared import _yaml_to_packages
from cloudbio.flavor.config import get_config_file
and context (functions, classes, or occasionally code) from other files:
# Path: cloudbio/package/shared.py
# def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=True, env=None):
# """Read a list of packages from a nested YAML configuration file.
# """
# print("Reading packages from %s" % yaml_file)
# with open(yaml_file) as in_handle:
# full_data = yaml.safe_load(in_handle)
# if full_data is None:
# full_data = {}
# if subs_yaml_file is not None:
# with open(subs_yaml_file) as in_handle:
# subs = yaml.safe_load(in_handle)
# else:
# subs = {}
# # filter the data based on what we have configured to install
# data = [(k, v) for (k, v) in full_data.items()
# if (to_install is None or k in to_install) and k not in ["channels"]]
# data.sort()
# packages = []
# pkg_to_group = dict()
# while len(data) > 0:
# cur_key, cur_info = data.pop(0)
# if cur_info:
# if isinstance(cur_info, (list, tuple)):
# packages.extend(_filter_subs_packages(cur_info, subs, namesort))
# for p in cur_info:
# pkg_to_group[p] = cur_key
# elif isinstance(cur_info, dict):
# for key, val in cur_info.items():
# # if we are okay, propagate with the top level key
# if env and key == 'needs_64bit':
# if env.is_64bit:
# data.insert(0, (cur_key, val))
# elif env and key.startswith(env.distribution):
# if key.endswith(env.dist_name):
# data.insert(0, (cur_key, val))
# else:
# data.insert(0, (cur_key, val))
# else:
# raise ValueError(cur_info)
# return packages, pkg_to_group
. Output only the next line. | (packages, _) = _yaml_to_packages(config_file.base, to_install, config_file.dist) |
Continue the code snippet: <|code_start|>
TODO: Rename versions and document options.
"""
if type(versions) is str:
versions = [versions]
for version_info in versions:
if type(version_info) is str:
_install_tool(env, name, version=version_info, requirement_name=name, tool_install_dir=tool_install_dir)
else:
version = version_info["version"]
bin_dirs = version_info.get("bin_dirs", ["bin"])
env_vars = version_info.get("env_vars", {})
provides = version_info.get("provides", [])
if isinstance(provides, (str, unicode, six.string_types)):
provides = [provides]
for provide_conf in provides[:]:
if isinstance(provide_conf, dict):
provides.remove(provide_conf)
if __check_conditional(provide_conf):
provies.append(provide_conf["name"])
# Some requirements (e.g. blast+) maybe not have valid python
# identifiers as name. Use install_blast to setup but override
# requirement directory name with requirement_name field.
requirement_name = version_info.get("requirement_name", name)
tool_env = _install_tool(env, name, version, bin_dirs=bin_dirs, env_vars=env_vars, requirement_name=requirement_name, tool_install_dir=tool_install_dir)
symlink_versions = version_info.get("symlink_versions", [])
if type(symlink_versions) is str:
symlink_versions = [symlink_versions]
for symlink_version in symlink_versions:
<|code_end|>
. Use current file imports:
import os
import six
import yaml
from string import Template
from cloudbio.custom.bio_general import *
from cloudbio.custom.bio_nextgen import *
from cloudbio.custom.bio_proteomics import *
from cloudbio.custom.shared import _set_default_config, _add_to_profiles
from cloudbio.galaxy.applications import *
from cloudbio.galaxy.r import _install_r_packages
from cloudbio.galaxy.utils import _chown_galaxy, _read_boolean
and context (classes, functions, or code) from other files:
# Path: cloudbio/custom/shared.py
# def _set_default_config(env, install_dir, sym_dir_name="default"):
# """
# Sets up default galaxy config directory symbolic link (if needed). Needed
# when it doesn't exists or when installing a new version of software.
# """
# version = env["tool_version"]
# if env.safe_exists(install_dir):
# install_dir_root = "%s/.." % install_dir
# sym_dir = "%s/%s" % (install_dir_root, sym_dir_name)
# replace_default = False
# if not env.safe_exists(sym_dir):
# replace_default = True
# if not replace_default:
# default_version = env.safe_sudo("basename `readlink -f %s`" % sym_dir)
# if version > default_version: # Bug: Wouldn't work for 1.9 < 1.10
# print("default version %s is older than version %s just installed, replacing..." % (default_version, version))
# replace_default = True
# if replace_default:
# env.safe_sudo("rm -rf %s; ln -f -s %s %s" % (sym_dir, install_dir, sym_dir))
#
# def _add_to_profiles(line, profiles=[], use_sudo=True):
# """
# If it's not already there, append ``line`` to shell profiles files.
# By default, these are ``/etc/profile`` and ``/etc/bash.bashrc`` but can be
# overridden by providing a list of file paths to the ``profiles`` argument.
# """
# if not profiles:
# profiles = ['/etc/bash.bashrc', '/etc/profile']
# for profile in profiles:
# if not env.safe_contains(profile, line):
# env.safe_append(profile, line, use_sudo=use_sudo)
. Output only the next line. | _set_default_config(tool_env, tool_env["system_install"], symlink_version) |
Here is a snippet: <|code_start|>def _tools_conf_path(env):
"""
Load path to galaxy_tools_conf file from env, allowing expansion of $__contrib_dir__.
Default to $__contrib_dir__/flavor/cloudman/tools.yaml.
"""
contrib_dir = os.path.join(env.config_dir, os.pardir, "contrib")
default_tools_conf_path = os.path.join(contrib_dir, "flavor", "cloudman", "tools.yaml")
tools_conf_path = env.get("galaxy_tools_conf", default_tools_conf_path)
## Allow expansion of __config_dir__ in galaxy_tools_conf property.
return Template(tools_conf_path).safe_substitute({"__contrib_dir__": contrib_dir})
def _load_tools_conf(env):
with open(_tools_conf_path(env)) as in_handle:
full_data = yaml.safe_load(in_handle)
return full_data
def _setup_install_dir(env):
"""Sets up install dir and ensures its owned by Galaxy"""
if not env.safe_exists(env.galaxy_tools_dir):
env.safe_sudo("mkdir -p %s" % env.galaxy_tools_dir)
_chown_galaxy(env, env.galaxy_tools_dir)
# Create a general-purpose ``bin`` directory under the galaxy_tools_dir
# and put it on the PATH so users can more easily add custom tools
bin_dir = os.path.join(env.galaxy_tools_dir, 'bin')
if not env.safe_exists(bin_dir):
env.safe_sudo("mkdir -p %s" % bin_dir)
_chown_galaxy(env, bin_dir)
line = "export PATH={0}:$PATH".format(bin_dir)
<|code_end|>
. Write the next line using the current file imports:
import os
import six
import yaml
from string import Template
from cloudbio.custom.bio_general import *
from cloudbio.custom.bio_nextgen import *
from cloudbio.custom.bio_proteomics import *
from cloudbio.custom.shared import _set_default_config, _add_to_profiles
from cloudbio.galaxy.applications import *
from cloudbio.galaxy.r import _install_r_packages
from cloudbio.galaxy.utils import _chown_galaxy, _read_boolean
and context from other files:
# Path: cloudbio/custom/shared.py
# def _set_default_config(env, install_dir, sym_dir_name="default"):
# """
# Sets up default galaxy config directory symbolic link (if needed). Needed
# when it doesn't exists or when installing a new version of software.
# """
# version = env["tool_version"]
# if env.safe_exists(install_dir):
# install_dir_root = "%s/.." % install_dir
# sym_dir = "%s/%s" % (install_dir_root, sym_dir_name)
# replace_default = False
# if not env.safe_exists(sym_dir):
# replace_default = True
# if not replace_default:
# default_version = env.safe_sudo("basename `readlink -f %s`" % sym_dir)
# if version > default_version: # Bug: Wouldn't work for 1.9 < 1.10
# print("default version %s is older than version %s just installed, replacing..." % (default_version, version))
# replace_default = True
# if replace_default:
# env.safe_sudo("rm -rf %s; ln -f -s %s %s" % (sym_dir, install_dir, sym_dir))
#
# def _add_to_profiles(line, profiles=[], use_sudo=True):
# """
# If it's not already there, append ``line`` to shell profiles files.
# By default, these are ``/etc/profile`` and ``/etc/bash.bashrc`` but can be
# overridden by providing a list of file paths to the ``profiles`` argument.
# """
# if not profiles:
# profiles = ['/etc/bash.bashrc', '/etc/profile']
# for profile in profiles:
# if not env.safe_contains(profile, line):
# env.safe_append(profile, line, use_sudo=use_sudo)
, which may include functions, classes, or code. Output only the next line. | _add_to_profiles(line) |
Given snippet: <|code_start|> py_version = ENV_PY_VERSIONS[env_name]
if "deepvariant" in env_packages:
# Ignore /etc/boto.cfg which creates conflicts with conda gsutils
# https://github.com/GoogleCloudPlatform/gsutil/issues/516
exports = "export BOTO_CONFIG=/ignoreglobal && "
else:
exports = ""
if os.path.exists(mamba_bin):
try:
subprocess.check_call("{mamba_bin} install -q -y {env_str} {channels} "
"{py_version} {pkgs_str}".format(**locals()), shell=True)
except subprocess.CalledProcessError:
# Fall back to standard conda install when we have system specific issues
# https://github.com/bcbio/bcbio-nextgen/issues/2871
subprocess.check_call("{exports}{conda_bin} install -q -y {env_str} {channels} "
"{py_version} {pkgs_str}".format(**locals()), shell=True)
conda_pkg_list = json.loads(subprocess.check_output(
"{conda_bin} list --json {env_str} -q".format(**locals()), shell=True))
return conda_pkg_list
def install_in(conda_bin, system_installdir, config_file=None, packages=None):
"""Install packages inside a given anaconda directory.
New approach, local only and not dependent on fabric.
conda_bin could refer to mamba
"""
if config_file is None and packages is None:
packages = []
check_channels = []
else:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import collections
import json
import os
import shutil
import subprocess
import yaml
from cloudbio.package.shared import _yaml_to_packages
from cloudbio.flavor.config import get_config_file
from cloudbio.custom import shared
and context:
# Path: cloudbio/package/shared.py
# def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=True, env=None):
# """Read a list of packages from a nested YAML configuration file.
# """
# print("Reading packages from %s" % yaml_file)
# with open(yaml_file) as in_handle:
# full_data = yaml.safe_load(in_handle)
# if full_data is None:
# full_data = {}
# if subs_yaml_file is not None:
# with open(subs_yaml_file) as in_handle:
# subs = yaml.safe_load(in_handle)
# else:
# subs = {}
# # filter the data based on what we have configured to install
# data = [(k, v) for (k, v) in full_data.items()
# if (to_install is None or k in to_install) and k not in ["channels"]]
# data.sort()
# packages = []
# pkg_to_group = dict()
# while len(data) > 0:
# cur_key, cur_info = data.pop(0)
# if cur_info:
# if isinstance(cur_info, (list, tuple)):
# packages.extend(_filter_subs_packages(cur_info, subs, namesort))
# for p in cur_info:
# pkg_to_group[p] = cur_key
# elif isinstance(cur_info, dict):
# for key, val in cur_info.items():
# # if we are okay, propagate with the top level key
# if env and key == 'needs_64bit':
# if env.is_64bit:
# data.insert(0, (cur_key, val))
# elif env and key.startswith(env.distribution):
# if key.endswith(env.dist_name):
# data.insert(0, (cur_key, val))
# else:
# data.insert(0, (cur_key, val))
# else:
# raise ValueError(cur_info)
# return packages, pkg_to_group
which might include code, classes, or functions. Output only the next line. | (packages, _) = _yaml_to_packages(config_file) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.