Instruction
stringlengths
362
7.83k
output_code
stringlengths
1
945
Given the following code snippet before the placeholder: <|code_start|> rel_ctx = MockRelationshipContext( type='cloudify.relationships.resources.reserve_list_item', target=tar_rel_subject_ctx ) # source src_ctx = MockCloudifyContext( node_id='test_item_123456', node_type='cloudify.nodes.resources.ListItem', source=self, target=tar_rel_subject_ctx, relationships=rel_ctx ) current_ctx.set(src_ctx) return src_ctx def test_create_delete_resources_list(self): ctx = self._mock_resource_list_ctx() # when (create) tasks.create_list(ctx) # then (create) self.assertTrue( RESOURCES_LIST_PROPERTY in ctx.instance.runtime_properties) self.assertTrue( FREE_RESOURCES_LIST_PROPERTY in ctx.instance.runtime_properties) self.assertTrue( <|code_end|> , predict the next line using imports from the current file: import unittest from cloudify.state import current_ctx from cloudify.mocks import ( MockCloudifyContext, MockNodeContext, MockNodeInstanceContext, MockRelationshipContext, MockRelationshipSubjectContext ) from cloudify_resources import tasks from cloudify_resources.constants import ( RESOURCES_LIST_PROPERTY, FREE_RESOURCES_LIST_PROPERTY, RESERVATIONS_PROPERTY, SINGLE_RESERVATION_PROPERTY ) and context including class names, function names, and sometimes code from other files: # Path: cloudify_resources/tasks.py # def _refresh_source_and_target_runtime_props(ctx, **kwargs): # def _update_source_and_target_runtime_props(ctx, **kwargs): # def create_list(ctx, **kwargs): # def create_list_item(ctx, **kwargs): # def delete_list_item(ctx, **kwargs): # def delete_list(ctx, **kwargs): # def reserve_list_item(ctx, **kwargs): # def return_list_item(ctx, **kwargs): # # Path: cloudify_resources/constants.py # RESOURCES_LIST_PROPERTY = 'resource_config' # # FREE_RESOURCES_LIST_PROPERTY = 'free_resources' # # RESERVATIONS_PROPERTY = 'reservations' # # SINGLE_RESERVATION_PROPERTY = 'reservation' . Output only the next line.
RESERVATIONS_PROPERTY in ctx.instance.runtime_properties)
Given snippet: <|code_start|> ctx.instance.runtime_properties[RESERVATIONS_PROPERTY], {} ) # when (delete) tasks.delete_list(ctx) # then (delete) self.assertEquals( ctx.instance.runtime_properties[RESOURCES_LIST_PROPERTY], [] ) self.assertEquals( ctx.instance.runtime_properties[FREE_RESOURCES_LIST_PROPERTY], [] ) self.assertEquals( ctx.instance.runtime_properties[RESERVATIONS_PROPERTY], {} ) def test_create_delete_resources_list_item(self): ctx = self._mock_resource_list_item_ctx() # when (create) tasks.create_list_item(ctx) # then (create) self.assertTrue( <|code_end|> , continue by predicting the next line. Consider current file imports: import unittest from cloudify.state import current_ctx from cloudify.mocks import ( MockCloudifyContext, MockNodeContext, MockNodeInstanceContext, MockRelationshipContext, MockRelationshipSubjectContext ) from cloudify_resources import tasks from cloudify_resources.constants import ( RESOURCES_LIST_PROPERTY, FREE_RESOURCES_LIST_PROPERTY, RESERVATIONS_PROPERTY, SINGLE_RESERVATION_PROPERTY ) and context: # Path: cloudify_resources/tasks.py # def _refresh_source_and_target_runtime_props(ctx, **kwargs): # def _update_source_and_target_runtime_props(ctx, **kwargs): # def create_list(ctx, **kwargs): # def create_list_item(ctx, **kwargs): # def delete_list_item(ctx, **kwargs): # def delete_list(ctx, **kwargs): # def reserve_list_item(ctx, **kwargs): # def return_list_item(ctx, **kwargs): # # Path: cloudify_resources/constants.py # RESOURCES_LIST_PROPERTY = 'resource_config' # # FREE_RESOURCES_LIST_PROPERTY = 'free_resources' # # RESERVATIONS_PROPERTY = 'reservations' # # SINGLE_RESERVATION_PROPERTY = 'reservation' which might include code, classes, or functions. Output only the next line.
SINGLE_RESERVATION_PROPERTY in ctx.instance.runtime_properties)
Predict the next line for this snippet: <|code_start|># http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. DATA_RUNTIME_PROPERTY = 'data' DO_NOT_DELETE_PROPERTY = 'do_not_delete' KEYS_PROPERTY = 'keys' def _get_parameters(properties, kwargs): for k, v in properties.items(): if k not in kwargs: kwargs[k] = v return kwargs @operation(resumable=True) def create(ctx, **kwargs): parameters = _get_parameters(ctx.node.properties, kwargs) <|code_end|> with the help of current file imports: from cloudify.decorators import operation from cloudify.manager import get_rest_client from .sdk import SecretsSDK and context from other files: # Path: cloudify_secrets/sdk.py # class SecretsSDK(object): # # DEFAULT_SEPARATOR = '__' # # @staticmethod # def _try_to_serialize(value): # if isinstance(value, (dict, list, tuple)): # return json.dumps(value) # # return str(value) # # @staticmethod # def _try_to_parse(value): # try: # return json.loads(value) # except ValueError: # return value # # def __init__(self, logger, rest_client, separator=DEFAULT_SEPARATOR, **_): # self._logger = logger # self._rest_client = rest_client # self._separator = separator # # def _handle_variant(self, key, variant=None): # if variant: # return '{0}{1}{2}'.format(key, self._separator, variant) # # return key # # def _write(self, rest_client_method, entries, variant=None): # result = {} # # for key, value in entries.items(): # self._logger.debug( # 'Creating secret "{0}" with value: {1}' # .format(key, value) # ) # # result[key] = rest_client_method( # key=self._handle_variant(key, variant), # value=self._try_to_serialize(value) # ) # # return result # # def create(self, entries, variant=None, **_): # 
return self._write(self._rest_client.secrets.create, entries, variant) # # def update(self, entries, variant=None, **_): # return self._write(self._rest_client.secrets.patch, entries, variant) # # def delete(self, secrets, variant=None, **_): # for key in secrets.keys(): # self._logger.debug( # 'Deleting secret "{0}" ...'.format(key) # ) # # self._rest_client.secrets.delete( # key=self._handle_variant(key, variant) # ) # # def read(self, keys, variant=None, **_): # result = {} # # for key in keys: # self._logger.debug('Reading secret "{0}" ...'.format(key)) # # response = self._rest_client.secrets.get( # key=self._handle_variant(key, variant) # ) # # response['value'] = self._try_to_parse(response['value']) # result[key] = response # # return result , which may contain function names, class names, or code. Output only the next line.
result = SecretsSDK(ctx.logger, get_rest_client(), **parameters).create(
Next line prediction: <|code_start|># See the License for the specific language governing permissions and # limitations under the License. class TestPlugin(unittest.TestCase): def test_execute_mock_sdk(self): _ctx = MockCloudifyContext('node_name', properties={'hosts': ['--fake.cake--', 'test123.test'], 'port': -1, 'ssl': False, 'verify': False, 'params': {'f': 'e'}}) _ctx.instance._runtime_properties = DirtyTrackingDict( {'b': {'c': 'd'}}) __location__ = os.path.realpath( os.path.join(os.getcwd(), os.path.dirname(__file__))) with open(os.path.join(__location__, 'template1.yaml'), 'r') as f: template = f.read() _ctx.get_resource = MagicMock(return_value=template.encode('utf-8')) _ctx.logger.setLevel(logging.DEBUG) current_ctx.set(_ctx) check_mock = MagicMock(return_value={}) with patch( "cloudify_rest.tasks.utility.process", check_mock ): <|code_end|> . Use current file imports: (import unittest import requests_mock import json import os import logging from mock import MagicMock, patch from cloudify.exceptions import RecoverableError, NonRecoverableError from cloudify.mocks import MockCloudifyContext from cloudify.state import current_ctx from cloudify.manager import DirtyTrackingDict from cloudify_rest import tasks) and context including class names, function names, or small code snippets from other files: # Path: cloudify_rest/tasks.py # def _get_params_attributes(ctx, runtime_properties, params_list): # def bunch_execute(templates=None, **kwargs): # def execute(*argc, **kwargs): # def execute_as_relationship(*argc, **kwargs): # def execute_as_workflow(*args, **kwargs): # def _execute_in_retry(template, params, instance_props, node_props, # resource_callback=None, save_path=None, # prerender=False, remove_calls=False, auth=None): # def _execute(params, template_file, retry_count, retry_sleep, ctx, **kwargs): . Output only the next line.
tasks.bunch_execute(templates=[{
Given the code snippet: <|code_start|># Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. REST_CLIENT_EXCEPTION = \ mock.MagicMock(side_effect=CloudifyClientError('Mistake')) class TestBlueprint(DeploymentProxyTestBase): def setUp(self): super(TestBlueprint, self).setUp() self.resource_config = dict() self.resource_config['resource_config'] = {} def test_upload_blueprint_rest_client_error(self): # Tests that upload blueprint fails on rest client error test_name = 'test_upload_blueprint_rest_client_error' archive = 'sample_file.zip' _ctx = self.get_mock_ctx(test_name) _ctx._resources = {'sample_file.zip': 'Sample Blueprint'} current_ctx.set(_ctx) with mock.patch('cloudify.manager.get_rest_client') as mock_client: <|code_end|> , generate the next line using the imports in this file: import mock from cloudify.state import current_ctx from cloudify.exceptions import NonRecoverableError from cloudify_rest_client.exceptions import CloudifyClientError from ..tasks import upload_blueprint from .base import DeploymentProxyTestBase from ..constants import EXTERNAL_RESOURCE from .client_mock import MockCloudifyRestClient and context (functions, classes, or occasionally code) from other files: # Path: cloudify_deployment_proxy/tests/base.py # class DeploymentProxyTestBase(testtools.TestCase): # # def tearDown(self): # current_ctx.clear() # super(DeploymentProxyTestBase, self).tearDown() # # def get_mock_ctx(self, # test_name, # test_properties=DEPLOYMENT_PROXY_PROPS, # node_type=DEPLOYMENT_PROXY_TYPE, # retry_number=0): # # test_node_id = test_name # test_properties = test_properties # # operation = json.loads(json.dumps({ # 'retry_number': retry_number # })) # ctx = MockCloudifyContext( # node_id=test_node_id, # 
deployment_id=test_name, # operation=operation, # properties=test_properties # ) # # ctx.operation._operation_context = json.loads(json.dumps( # {'name': 'some.test'})) # ctx.node.type_hierarchy = \ # json.loads(json.dumps(['cloudify.nodes.Root', node_type])) # try: # ctx.node.type = node_type # except AttributeError: # ctx.logger.error('Failed to set node type attribute.') # # return ctx # # Path: cloudify_deployment_proxy/tests/client_mock.py # class MockCloudifyRestClient(object): # # def __init__(self): # self.blueprints = MockBlueprintsClient() # self.deployments = MockDeploymentsClient() # self.executions = MockExecutionsClient() # self.node_instances = MockNodeInstancesClient() # self.events = MockEventsClient() # self.secrets = MagicMock() # self.plugins = MagicMock() . Output only the next line.
cfy_mock_client = MockCloudifyRestClient()
Based on the snippet: <|code_start|> ('registration_id', registration_id), ]), logging.INFO, device=device, ) if result.get('failure'): log_middleware_information( '{0} | Should remove {1} because {2}', OrderedDict([ ('unique_key', unique_key), ('registration_id', registration_id), ('results', result['results']), ]), logging.WARNING, device=device, ) if (len(result['results']) > 0 and 'error' in result['results'][0] and result['results'][0]['error'] == 'NotRegistered'): log_middleware_information( '{0} | Removed {1}', OrderedDict([ ('unique_key', unique_key), ('device', device), ]), logging.INFO, ) # Remove the unique key from the cache so we can sent # NAK to asterisk. <|code_end|> , predict the immediate next line with the help of imports: from collections import OrderedDict from time import time from urllib.parse import urljoin from apns2.client import APNsClient from apns2.errors import APNsException, BadDeviceToken, DeviceTokenNotForTopic, Unregistered from apns2.payload import Payload from django.conf import settings from gcm.gcm import GCM, GCMAuthenticationException from pyfcm import FCMNotification from pyfcm.errors import AuthenticationError, FCMServerError, InternalPackageError from app.cache import RedisClusterCache from app.utils import log_middleware_information from .models import ANDROID_PLATFORM, APNS_PLATFORM, GCM_PLATFORM import datetime import logging import os and context (classes, functions, sometimes code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/utils.py # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. # """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # GCM_PLATFORM = 'gcm' . Output only the next line.
redis_cache = RedisClusterCache()
Given the following code snippet before the placeholder: <|code_start|> TYPE_CALL = 'call' TYPE_MESSAGE = 'message' def send_call_message(device, unique_key, phonenumber, caller_id, attempt): """ Function to send the call push notification. Args: device (Device): A Device object. unique_key (string): String with the unique_key. phonenumber (string): Phonenumber that is calling. caller_id (string): ID of the caller. attempt (int): The amount of attempts made. """ data = { 'unique_key': unique_key, 'phonenumber': phonenumber, 'caller_id': caller_id, 'attempt': attempt, } if device.app.platform == APNS_PLATFORM: send_apns_message(device, device.app, TYPE_CALL, data) elif device.app.platform == GCM_PLATFORM: send_gcm_message(device, device.app, TYPE_CALL, data) elif device.app.platform == ANDROID_PLATFORM: send_fcm_message(device, device.app, TYPE_CALL, data) else: <|code_end|> , predict the next line using imports from the current file: from collections import OrderedDict from time import time from urllib.parse import urljoin from apns2.client import APNsClient from apns2.errors import APNsException, BadDeviceToken, DeviceTokenNotForTopic, Unregistered from apns2.payload import Payload from django.conf import settings from gcm.gcm import GCM, GCMAuthenticationException from pyfcm import FCMNotification from pyfcm.errors import AuthenticationError, FCMServerError, InternalPackageError from app.cache import RedisClusterCache from app.utils import log_middleware_information from .models import ANDROID_PLATFORM, APNS_PLATFORM, GCM_PLATFORM import datetime import logging import os and context including class names, function names, and sometimes code from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/utils.py # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. # """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # GCM_PLATFORM = 'gcm' . Output only the next line.
log_middleware_information(
Predict the next line for this snippet: <|code_start|> TYPE_CALL = 'call' TYPE_MESSAGE = 'message' def send_call_message(device, unique_key, phonenumber, caller_id, attempt): """ Function to send the call push notification. Args: device (Device): A Device object. unique_key (string): String with the unique_key. phonenumber (string): Phonenumber that is calling. caller_id (string): ID of the caller. attempt (int): The amount of attempts made. """ data = { 'unique_key': unique_key, 'phonenumber': phonenumber, 'caller_id': caller_id, 'attempt': attempt, } if device.app.platform == APNS_PLATFORM: send_apns_message(device, device.app, TYPE_CALL, data) elif device.app.platform == GCM_PLATFORM: send_gcm_message(device, device.app, TYPE_CALL, data) <|code_end|> with the help of current file imports: from collections import OrderedDict from time import time from urllib.parse import urljoin from apns2.client import APNsClient from apns2.errors import APNsException, BadDeviceToken, DeviceTokenNotForTopic, Unregistered from apns2.payload import Payload from django.conf import settings from gcm.gcm import GCM, GCMAuthenticationException from pyfcm import FCMNotification from pyfcm.errors import AuthenticationError, FCMServerError, InternalPackageError from app.cache import RedisClusterCache from app.utils import log_middleware_information from .models import ANDROID_PLATFORM, APNS_PLATFORM, GCM_PLATFORM import datetime import logging import os and context from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/utils.py # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. # """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # GCM_PLATFORM = 'gcm' , which may contain function names, class names, or code. 
Output only the next line.
elif device.app.platform == ANDROID_PLATFORM:
Given the following code snippet before the placeholder: <|code_start|> TYPE_CALL = 'call' TYPE_MESSAGE = 'message' def send_call_message(device, unique_key, phonenumber, caller_id, attempt): """ Function to send the call push notification. Args: device (Device): A Device object. unique_key (string): String with the unique_key. phonenumber (string): Phonenumber that is calling. caller_id (string): ID of the caller. attempt (int): The amount of attempts made. """ data = { 'unique_key': unique_key, 'phonenumber': phonenumber, 'caller_id': caller_id, 'attempt': attempt, } <|code_end|> , predict the next line using imports from the current file: from collections import OrderedDict from time import time from urllib.parse import urljoin from apns2.client import APNsClient from apns2.errors import APNsException, BadDeviceToken, DeviceTokenNotForTopic, Unregistered from apns2.payload import Payload from django.conf import settings from gcm.gcm import GCM, GCMAuthenticationException from pyfcm import FCMNotification from pyfcm.errors import AuthenticationError, FCMServerError, InternalPackageError from app.cache import RedisClusterCache from app.utils import log_middleware_information from .models import ANDROID_PLATFORM, APNS_PLATFORM, GCM_PLATFORM import datetime import logging import os and context including class names, function names, and sometimes code from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/utils.py # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. # """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # GCM_PLATFORM = 'gcm' . Output only the next line.
if device.app.platform == APNS_PLATFORM:
Using the snippet: <|code_start|> TYPE_CALL = 'call' TYPE_MESSAGE = 'message' def send_call_message(device, unique_key, phonenumber, caller_id, attempt): """ Function to send the call push notification. Args: device (Device): A Device object. unique_key (string): String with the unique_key. phonenumber (string): Phonenumber that is calling. caller_id (string): ID of the caller. attempt (int): The amount of attempts made. """ data = { 'unique_key': unique_key, 'phonenumber': phonenumber, 'caller_id': caller_id, 'attempt': attempt, } if device.app.platform == APNS_PLATFORM: send_apns_message(device, device.app, TYPE_CALL, data) <|code_end|> , determine the next line of code. You have imports: from collections import OrderedDict from time import time from urllib.parse import urljoin from apns2.client import APNsClient from apns2.errors import APNsException, BadDeviceToken, DeviceTokenNotForTopic, Unregistered from apns2.payload import Payload from django.conf import settings from gcm.gcm import GCM, GCMAuthenticationException from pyfcm import FCMNotification from pyfcm.errors import AuthenticationError, FCMServerError, InternalPackageError from app.cache import RedisClusterCache from app.utils import log_middleware_information from .models import ANDROID_PLATFORM, APNS_PLATFORM, GCM_PLATFORM import datetime import logging import os and context (class names, function names, or code) available: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/utils.py # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. # """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # GCM_PLATFORM = 'gcm' . Output only the next line.
elif device.app.platform == GCM_PLATFORM:
Using the snippet: <|code_start|> class RegisterDeviceTest(TestCase): def setUp(self): super(RegisterDeviceTest, self).setUp() self.client = APIClient() <|code_end|> , determine the next line of code. You have imports: from ast import literal_eval from datetime import datetime, timedelta from unittest import mock from django.conf import settings from django.core.cache import cache from django.test import TestCase, TransactionTestCase from freezegun import freeze_time from rest_framework.test import APIClient from testfixtures import LogCapture from app.cache import RedisClusterCache from app.models import App, Device, ResponseLog from main.prometheus.consts import ( APP_VERSION_KEY, CALL_SETUP_SUCCESSFUL_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY) from .utils import mocked_send_apns_message, mocked_send_fcm_message, ThreadWithReturn import time and context (class names, function names, or code) available: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. # id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CALL_SETUP_SUCCESSFUL_KEY = 'call_setup_successful' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # Path: api/tests/utils.py # def mocked_send_apns_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED APNS') # print(data.get('attempt', 1)) # # def mocked_send_fcm_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED FCM') # print(data.get('attempt', 1)) # # class ThreadWithReturn(Thread): # def __init__(self, *args, **kwargs): # super(ThreadWithReturn, self).__init__(*args, **kwargs) # # self._return = None # # def run(self): # if self._target: # self._return = self._target(*self._args, **self._kwargs) # # def join(self, *args, **kwargs): # super(ThreadWithReturn, self).join(*args, **kwargs) # # return self._return . Output only the next line.
self.ios_app, created = App.objects.get_or_create(platform='apns', app_id='com.voipgrid.vialer')
Here is a snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { <|code_end|> . Write the next line using the current file imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' , which may include functions, classes, or code. Output only the next line.
APP_VERSION_KEY: json_data.get(APP_VERSION_KEY),
Predict the next line for this snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), <|code_end|> with the help of current file imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' , which may contain function names, class names, or code. Output only the next line.
CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY),
Given the following code snippet before the placeholder: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY), <|code_end|> , predict the next line using imports from the current file: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context including class names, function names, and sometimes code from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' . Output only the next line.
DIRECTION_KEY: json_data.get(DIRECTION_KEY),
Using the snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY), DIRECTION_KEY: json_data.get(DIRECTION_KEY), <|code_end|> , determine the next line of code. You have imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context (class names, function names, or code) available: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' . Output only the next line.
NETWORK_KEY: json_data.get(NETWORK_KEY),
Predict the next line for this snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY), DIRECTION_KEY: json_data.get(DIRECTION_KEY), NETWORK_KEY: json_data.get(NETWORK_KEY), OS_KEY: json_data.get(OS_KEY), OS_VERSION_KEY: json_data.get(OS_VERSION_KEY), } if json_data.get(NETWORK_KEY, '').lower() != 'wifi': <|code_end|> with the help of current file imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' , which may contain function names, class names, or code. Output only the next line.
metrics_dict[NETWORK_OPERATOR_KEY] = json_data.get(NETWORK_OPERATOR_KEY)
Continue the code snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY), DIRECTION_KEY: json_data.get(DIRECTION_KEY), NETWORK_KEY: json_data.get(NETWORK_KEY), <|code_end|> . Use current file imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and context (classes, functions, or code) from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' . Output only the next line.
OS_KEY: json_data.get(OS_KEY),
Predict the next line after this snippet: <|code_start|> def get_metrics_base_data(json_data): """ Function to parse the base metric data from JSON into a new dict. Args: json_data (dict): JSON dict containing the data from the app. Returns: dict: Dict in the format we can store in Redis. """ metrics_dict = { APP_VERSION_KEY: json_data.get(APP_VERSION_KEY), CONNECTION_TYPE_KEY: json_data.get(CONNECTION_TYPE_KEY), DIRECTION_KEY: json_data.get(DIRECTION_KEY), NETWORK_KEY: json_data.get(NETWORK_KEY), OS_KEY: json_data.get(OS_KEY), <|code_end|> using the current file's imports: from main.prometheus.consts import ( APP_VERSION_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, ) and any relevant context from other files: # Path: main/prometheus/consts.py # APP_VERSION_KEY = 'app_version' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' . Output only the next line.
OS_VERSION_KEY: json_data.get(OS_VERSION_KEY),
Using the snippet: <|code_start|>#!/usr/bin/env python os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'main.settings') project_root = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../') sys.path.append(project_root) django.setup() # Middleware health metrics. MYSQL_HEALTH = Gauge('mysql_health', 'See if MySQL is still reachable through the ORM.') <|code_end|> , determine the next line of code. You have imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (class names, function names, or code) available: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
REDIS_CLUSTER_CLIENT = RedisClusterCache()
Given the code snippet: <|code_start|> bool: True if we can read and write to Redis """ try: result = REDIS_CLUSTER_CLIENT.client.execute_command('PING') for key, value in result.items(): if value is False: return False except: return False return True def write_read_orm(): """ Write a ResponseLog object to the database to see if it is up. Returns: bool: True if we can read and write using the ORM. """ random_roundtrip = randint(1, 1000) random_available = random() > 0.5 try: connection.ensure_connection() except: raven_client.captureException() return False try: response_log = ResponseLog.objects.create( <|code_end|> , generate the next line using the imports in this file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (functions, classes, or occasionally code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
platform=GCM_PLATFORM,
Given the following code snippet before the placeholder: <|code_start|> Returns: bool: True if we can read and write to Redis """ try: result = REDIS_CLUSTER_CLIENT.client.execute_command('PING') for key, value in result.items(): if value is False: return False except: return False return True def write_read_orm(): """ Write a ResponseLog object to the database to see if it is up. Returns: bool: True if we can read and write using the ORM. """ random_roundtrip = randint(1, 1000) random_available = random() > 0.5 try: connection.ensure_connection() except: raven_client.captureException() return False try: <|code_end|> , predict the next line using imports from the current file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context including class names, function names, and sometimes code from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
response_log = ResponseLog.objects.create(
Continue the code snippet: <|code_start|> VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL.labels( direction=value_dict[DIRECTION_KEY], os=value_dict[OS_KEY], ).inc() # Trim the list, this means that the values that are outside # of the selected range are deleted. In this case we are keeping # all of the values we did not yet process in the list. REDIS_CLUSTER_CLIENT.client.ltrim(VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY, list_length, -1) def increment_vialer_middleware_incoming_call_metric_counter(): """ Function that increments the vialer_middleware_incoming_call_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange( VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, 0, list_length, ) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL.labels( <|code_end|> . 
Use current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (classes, functions, or code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
action=value_dict[ACTION_KEY],
Continue the code snippet: <|code_start|> platform=GCM_PLATFORM, roundtrip_time=random_roundtrip, available=random_available, ) except DatabaseError: connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( <|code_end|> . Use current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (classes, functions, or code) from other files: # Path: 
app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
app_version=value_dict[APP_VERSION_KEY],
Predict the next line for this snippet: <|code_start|> roundtrip_time=random_roundtrip, available=random_available, ) except DatabaseError: connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], <|code_end|> with the help of current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context from other 
files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' , which may contain function names, class names, or code. Output only the next line.
codec=value_dict[CODEC_KEY],
Continue the code snippet: <|code_start|> available=random_available, ) except DatabaseError: connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], <|code_end|> . Use current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (classes, functions, or code) from other 
files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
connection_type=value_dict[CONNECTION_TYPE_KEY],
Using the snippet: <|code_start|> ) except DatabaseError: connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], <|code_end|> , determine the next line of code. You have imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context 
(class names, function names, or code) available: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 
'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
direction=value_dict[DIRECTION_KEY],
Given the code snippet: <|code_start|> mos=value_dict[MOS_KEY], network=value_dict[NETWORK_KEY], network_operator=value_dict.get(NETWORK_OPERATOR_KEY, ''), os=value_dict[OS_KEY], os_version=value_dict[OS_VERSION_KEY], ).inc() # Trim the list, this means that the values that are outside # of the selected range are deleted. In this case we are keeping # all of the values we did not yet process in the list. REDIS_CLUSTER_CLIENT.client.ltrim(VIALER_CALL_SUCCESS_TOTAL_KEY, list_length, -1) def increment_vialer_call_failure_metric_counter(): """ Function that increments the vialer_call_failure_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_FAILURE_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_FAILURE_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_FAILURE_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], <|code_end|> , generate the next line using the imports in this file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, 
VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (functions, classes, or occasionally code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
failed_reason=value_dict[FAILED_REASON_KEY],
Given snippet: <|code_start|> failed_reason=value_dict[FAILED_REASON_KEY], network=value_dict[NETWORK_KEY], network_operator=value_dict.get(NETWORK_OPERATOR_KEY, ''), os=value_dict[OS_KEY], os_version=value_dict[OS_VERSION_KEY], ).inc() # Trim the list, this means that the values that are outside # of the selected range are deleted. In this case we are keeping # all of the values we did not yet process in the list. REDIS_CLUSTER_CLIENT.client.ltrim(VIALER_CALL_FAILURE_TOTAL_KEY, list_length, -1) def increment_vialer_hangup_reason_metric_counter(): """ Function that increments the vialer_hangup_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_HANGUP_REASON_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_HANGUP_REASON_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_HANGUP_REASON_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], <|code_end|> , continue by predicting the next line. 
Consider current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' which might include code, classes, or functions. Output only the next line.
hangup_reason=value_dict[HANGUP_REASON_KEY],
Continue the code snippet: <|code_start|> except DatabaseError: connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], <|code_end|> . Use current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) 
and context (classes, functions, or code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
mos=value_dict[MOS_KEY],
Predict the next line for this snippet: <|code_start|> connection.close() raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], mos=value_dict[MOS_KEY], <|code_end|> with the help of current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' , which may contain function names, class names, or code. Output only the next line.
network=value_dict[NETWORK_KEY],
Using the snippet: <|code_start|> raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], mos=value_dict[MOS_KEY], network=value_dict[NETWORK_KEY], <|code_end|> , determine the next line of code. You have imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (class names, function names, or code) available: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
network_operator=value_dict.get(NETWORK_OPERATOR_KEY, ''),
Given snippet: <|code_start|> raven_client.captureException() return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], mos=value_dict[MOS_KEY], network=value_dict[NETWORK_KEY], network_operator=value_dict.get(NETWORK_OPERATOR_KEY, ''), <|code_end|> , continue by predicting the next line. 
Consider current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' which might include code, classes, or functions. Output only the next line.
os=value_dict[OS_KEY],
Predict the next line after this snippet: <|code_start|> return False else: if response_log.available == random_available and response_log.roundtrip_time == random_roundtrip: response_log.delete() return True return False def increment_vialer_call_success_metric_counter(): """ Function that increments the vialer_call_success_total counter. """ # Get the length of the list in redis. list_length = REDIS_CLUSTER_CLIENT.client.llen(VIALER_CALL_SUCCESS_TOTAL_KEY) # Get the values from the list in redis. data_list = REDIS_CLUSTER_CLIENT.client.lrange(VIALER_CALL_SUCCESS_TOTAL_KEY, 0, list_length) for value_str in data_list: # Parse the string to a dict. value_dict = literal_eval(value_str) VIALER_CALL_SUCCESS_TOTAL.labels( app_version=value_dict[APP_VERSION_KEY], codec=value_dict[CODEC_KEY], connection_type=value_dict[CONNECTION_TYPE_KEY], direction=value_dict[DIRECTION_KEY], mos=value_dict[MOS_KEY], network=value_dict[NETWORK_KEY], network_operator=value_dict.get(NETWORK_OPERATOR_KEY, ''), os=value_dict[OS_KEY], <|code_end|> using the current file's imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and any relevant context from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
os_version=value_dict[OS_VERSION_KEY],
Given snippet: <|code_start|> django.setup() # Middleware health metrics. MYSQL_HEALTH = Gauge('mysql_health', 'See if MySQL is still reachable through the ORM.') REDIS_CLUSTER_CLIENT = RedisClusterCache() REDIS_HEALTH = Gauge('redis_health', 'See if Redis is still reachable.') DOCKER_TAG = Counter('docker_tag', 'See which docker tag is running.', ['docker_tag']) # Vialer call metrics. VIALER_CALL_SUCCESS_TOTAL = Counter( VIALER_CALL_SUCCESS_TOTAL_KEY, 'The amount of successful calls that were made using the Vialer app', [ 'app_version', 'codec', 'connection_type', 'direction', 'mos', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_CALL_FAILURE_TOTAL = Counter( <|code_end|> , continue by predicting the next line. Consider current file imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' which might include code, classes, or functions. Output only the next line.
VIALER_CALL_FAILURE_TOTAL_KEY,
Next line prediction: <|code_start|>#!/usr/bin/env python os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'main.settings') project_root = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../') sys.path.append(project_root) django.setup() # Middleware health metrics. MYSQL_HEALTH = Gauge('mysql_health', 'See if MySQL is still reachable through the ORM.') REDIS_CLUSTER_CLIENT = RedisClusterCache() REDIS_HEALTH = Gauge('redis_health', 'See if Redis is still reachable.') DOCKER_TAG = Counter('docker_tag', 'See which docker tag is running.', ['docker_tag']) # Vialer call metrics. VIALER_CALL_SUCCESS_TOTAL = Counter( <|code_end|> . Use current file imports: (import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY)) and context including class names, function names, or small code snippets from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_CALL_SUCCESS_TOTAL_KEY,
Predict the next line after this snippet: <|code_start|> 'The amount of successful calls that were made using the Vialer app', [ 'app_version', 'codec', 'connection_type', 'direction', 'mos', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_CALL_FAILURE_TOTAL = Counter( VIALER_CALL_FAILURE_TOTAL_KEY, 'The amount of calls that failed during setup using the Vialer app', [ 'app_version', 'connection_type', 'direction', 'failed_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_HANGUP_REASON_TOTAL = Counter( <|code_end|> using the current file's imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and any relevant context from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. 
# """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_HANGUP_REASON_TOTAL_KEY,
Given the following code snippet before the placeholder: <|code_start|> 'app_version', 'connection_type', 'direction', 'hangup_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL = Counter( VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, 'The amount of failed called due to the device not responding to a push notification', ['direction', 'os', 'failed_reason'], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL = Counter( VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY, 'The amount of push notifications that were successful processed by the app', ['direction', 'os'], ) VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL = Counter( VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, 'The amount of times an incoming call was presented at the middleware', ['action', 'os'], ) VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL = Counter( <|code_end|> , predict the next line using imports from the current file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) 
and context including class names, function names, and sometimes code from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. 
# """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY,
Based on the snippet: <|code_start|>) VIALER_HANGUP_REASON_TOTAL = Counter( VIALER_HANGUP_REASON_TOTAL_KEY, 'The amount of why a call was ended for the Vialer app', [ 'app_version', 'connection_type', 'direction', 'hangup_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL = Counter( VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, 'The amount of failed called due to the device not responding to a push notification', ['direction', 'os', 'failed_reason'], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL = Counter( VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY, 'The amount of push notifications that were successful processed by the app', ['direction', 'os'], ) VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL = Counter( <|code_end|> , predict the immediate next line with the help of imports: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (classes, functions, sometimes code) from other files: # Path: app/cache.py # class 
RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. # """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY,
Given the code snippet: <|code_start|> VIALER_CALL_FAILURE_TOTAL_KEY, 'The amount of calls that failed during setup using the Vialer app', [ 'app_version', 'connection_type', 'direction', 'failed_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_HANGUP_REASON_TOTAL = Counter( VIALER_HANGUP_REASON_TOTAL_KEY, 'The amount of why a call was ended for the Vialer app', [ 'app_version', 'connection_type', 'direction', 'hangup_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL = Counter( <|code_end|> , generate the next line using the imports in this file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (functions, classes, or occasionally code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY,
Given the code snippet: <|code_start|> 'failed_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_HANGUP_REASON_TOTAL = Counter( VIALER_HANGUP_REASON_TOTAL_KEY, 'The amount of why a call was ended for the Vialer app', [ 'app_version', 'connection_type', 'direction', 'hangup_reason', 'network', 'network_operator', 'os', 'os_version', ], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL = Counter( VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, 'The amount of failed called due to the device not responding to a push notification', ['direction', 'os', 'failed_reason'], ) VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL = Counter( <|code_end|> , generate the next line using the imports in this file: import os import sys import django import time from ast import literal_eval from random import randint, random from django.conf import settings from django.db import connection, DatabaseError from prometheus_client import Counter, Gauge, start_http_server from raven.contrib.django.models import client as raven_client from redis import RedisError from rediscluster.exceptions import RedisClusterException from app.cache import RedisClusterCache from app.models import GCM_PLATFORM, ResponseLog from main.prometheus.consts import ( ACTION_KEY, APP_VERSION_KEY, CODEC_KEY, CONNECTION_TYPE_KEY, DIRECTION_KEY, FAILED_REASON_KEY, HANGUP_REASON_KEY, MOS_KEY, NETWORK_KEY, NETWORK_OPERATOR_KEY, OS_KEY, OS_VERSION_KEY, VIALER_CALL_FAILURE_TOTAL_KEY, VIALER_CALL_SUCCESS_TOTAL_KEY, VIALER_HANGUP_REASON_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY, VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY) and context (functions, classes, or occasionally code) from other files: # Path: app/cache.py # class RedisClusterCache(object): # """ # Class used for accessing the redis cluster used for caching. 
# """ # def __init__(self): # self.client = self._create_client() # # def _create_client(self): # """ # Function to connect to the redis cluster and init the client. # """ # server_list = settings.REDIS_SERVER_LIST.replace(' ', '').split(',') # # nodes = [] # for server in server_list: # if ':' not in server: # continue # host, port = server.split(':') # nodes.append({'host': host, 'port': port}) # # return StrictRedisCluster(startup_nodes=nodes, decode_responses=True) # # def get(self, key): # return self.client.get(key) # # def exists(self, key): # return self.client.exists(key) # # def set(self, key, value, timeout=DEFAULT_TIMEOUT): # self.client.set(key, value, timeout) # # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: main/prometheus/consts.py # ACTION_KEY = 'action' # # APP_VERSION_KEY = 'app_version' # # CODEC_KEY = 'codec' # # CONNECTION_TYPE_KEY = 'connection_type' # # DIRECTION_KEY = 'direction' # # FAILED_REASON_KEY = 'failed_reason' # # HANGUP_REASON_KEY = 'hangup_reason' # # MOS_KEY = 'mos' # # NETWORK_KEY = 'network' # # NETWORK_OPERATOR_KEY = 'network_operator' # # OS_KEY = 'os' # # OS_VERSION_KEY = 'os_version' # # VIALER_CALL_FAILURE_TOTAL_KEY = 'vialer_call_failure_total' # # VIALER_CALL_SUCCESS_TOTAL_KEY = 'vialer_call_success_total' # # VIALER_HANGUP_REASON_TOTAL_KEY = 'vialer_hangup_reason_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_FAILED_TOTAL_KEY = 'vialer_middleware_incoming_call_failed_total' # # VIALER_MIDDLEWARE_INCOMING_CALL_SUCCESS_TOTAL_KEY = 'vialer_middleware_incoming_call_success_total' # # VIALER_MIDDLEWARE_PUSH_NOTIFICATION_FAILED_TOTAL_KEY = 'vialer_middleware_push_notification_failed_total' # # 
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY = 'vialer_middleware_push_notification_success_total' . Output only the next line.
VIALER_MIDDLEWARE_PUSH_NOTIFICATION_SUCCESS_TOTAL_KEY,
Given the following code snippet before the placeholder: <|code_start|> class IncomingCallPerformanceTest(TransactionTestCase): def setUp(self): super(IncomingCallPerformanceTest, self).setUp() self.client = APIClient() <|code_end|> , predict the next line using imports from the current file: from datetime import datetime, timedelta from unittest import mock from django.conf import settings from django.test import TransactionTestCase from rest_framework.test import APIClient from app.models import App, Device from .utils import mocked_send_apns_message, ThreadWithReturn import time and context including class names, function names, and sometimes code from other files: # Path: app/models.py # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: api/tests/utils.py # def mocked_send_apns_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED APNS') # print(data.get('attempt', 1)) # # class ThreadWithReturn(Thread): # def __init__(self, *args, **kwargs): # super(ThreadWithReturn, self).__init__(*args, **kwargs) # # self._return = None # # def run(self): # if self._target: # self._return = self._target(*self._args, **self._kwargs) # # def join(self, *args, **kwargs): # super(ThreadWithReturn, self).join(*args, **kwargs) # # return self._return . Output only the next line.
self.ios_app, created = App.objects.get_or_create(platform='apns', app_id='com.voipgrid.vialer')
Given snippet: <|code_start|> class IncomingCallPerformanceTest(TransactionTestCase): def setUp(self): super(IncomingCallPerformanceTest, self).setUp() self.client = APIClient() self.ios_app, created = App.objects.get_or_create(platform='apns', app_id='com.voipgrid.vialer') two_weeks_ago = datetime.now() - timedelta(days=14) <|code_end|> , continue by predicting the next line. Consider current file imports: from datetime import datetime, timedelta from unittest import mock from django.conf import settings from django.test import TransactionTestCase from rest_framework.test import APIClient from app.models import App, Device from .utils import mocked_send_apns_message, ThreadWithReturn import time and context: # Path: app/models.py # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: api/tests/utils.py # def mocked_send_apns_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED APNS') # print(data.get('attempt', 1)) # # class ThreadWithReturn(Thread): # def __init__(self, *args, **kwargs): # super(ThreadWithReturn, self).__init__(*args, **kwargs) # # self._return = None # # def run(self): # if self._target: # self._return = self._target(*self._args, **self._kwargs) # # def join(self, *args, **kwargs): # super(ThreadWithReturn, self).join(*args, **kwargs) # # return self._return which might include code, classes, or functions. Output only the next line.
Device.objects.create(
Next line prediction: <|code_start|> class IncomingCallPerformanceTest(TransactionTestCase): def setUp(self): super(IncomingCallPerformanceTest, self).setUp() self.client = APIClient() self.ios_app, created = App.objects.get_or_create(platform='apns', app_id='com.voipgrid.vialer') two_weeks_ago = datetime.now() - timedelta(days=14) Device.objects.create( name='test device', token='a652aee84bdec6c2859eec89a6e5b1a42c400fba43070f404148f27b502610b6', sip_user_id='123456789', os_version='8.3', client_version='1.0', last_seen=two_weeks_ago, app=self.ios_app, ) <|code_end|> . Use current file imports: (from datetime import datetime, timedelta from unittest import mock from django.conf import settings from django.test import TransactionTestCase from rest_framework.test import APIClient from app.models import App, Device from .utils import mocked_send_apns_message, ThreadWithReturn import time) and context including class names, function names, or small code snippets from other files: # Path: app/models.py # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: api/tests/utils.py # def mocked_send_apns_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED APNS') # print(data.get('attempt', 1)) # # class ThreadWithReturn(Thread): # def __init__(self, *args, **kwargs): # super(ThreadWithReturn, self).__init__(*args, **kwargs) # # self._return = None # # def run(self): # if self._target: # self._return = self._target(*self._args, **self._kwargs) # # def join(self, *args, **kwargs): # super(ThreadWithReturn, self).join(*args, **kwargs) # # return self._return . Output only the next line.
@mock.patch('app.push.send_apns_message', side_effect=mocked_send_apns_message)
Here is a snippet: <|code_start|> class IncomingCallPerformanceTest(TransactionTestCase): def setUp(self): super(IncomingCallPerformanceTest, self).setUp() self.client = APIClient() self.ios_app, created = App.objects.get_or_create(platform='apns', app_id='com.voipgrid.vialer') two_weeks_ago = datetime.now() - timedelta(days=14) Device.objects.create( name='test device', token='a652aee84bdec6c2859eec89a6e5b1a42c400fba43070f404148f27b502610b6', sip_user_id='123456789', os_version='8.3', client_version='1.0', last_seen=two_weeks_ago, app=self.ios_app, ) @mock.patch('app.push.send_apns_message', side_effect=mocked_send_apns_message) def _execute_call(self, *mocks): call_data = { 'sip_user_id': '123456789', 'caller_id': 'Test name', 'phonenumber': '0123456789', 'call_id': 'sduiqayduiryqwuioeryqwer76789', } # Now the device exists, call it again in seperate thread. <|code_end|> . Write the next line using the current file imports: from datetime import datetime, timedelta from unittest import mock from django.conf import settings from django.test import TransactionTestCase from rest_framework.test import APIClient from app.models import App, Device from .utils import mocked_send_apns_message, ThreadWithReturn import time and context from other files: # Path: app/models.py # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. 
# """ # # FIXME: We need this to be backwards compatible for one release. # id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: api/tests/utils.py # def mocked_send_apns_message(device, app, message_type, data=None): # cache.set('attempts', data.get('attempt', 1), 300) # print('WORKED APNS') # print(data.get('attempt', 1)) # # class ThreadWithReturn(Thread): # def __init__(self, *args, **kwargs): # super(ThreadWithReturn, self).__init__(*args, **kwargs) # # self._return = None # # def run(self): # if self._target: # self._return = self._target(*self._args, **self._kwargs) # # def join(self, *args, **kwargs): # super(ThreadWithReturn, self).join(*args, **kwargs) # # return self._return , which may include functions, classes, or code. Output only the next line.
thread = ThreadWithReturn(target=self.client.post, args=('/api/incoming-call/', call_data))
Here is a snippet: <|code_start|> @threaded def task_incoming_call_notify(device, unique_key, phonenumber, caller_id, attempt): """ Threaded task to send a call push notification. """ send_call_message(device, unique_key, phonenumber, caller_id, attempt) @threaded def task_notify_old_token(device, app): """ Threaded task to send a text push notification. """ msg = 'A other device has registered for the same account. You won\'t be reachable on this device' send_text_message(device, app, msg) @threaded def log_to_db(platform, roundtrip_time, available): """ Log the info in a seperate thread to the DB to make sure the log write does not block the api requests. """ <|code_end|> . Write the next line using the current file imports: from .decorators import threaded from .models import ResponseLog from .push import send_call_message, send_text_message and context from other files: # Path: app/decorators.py # def threaded(fn): # """ # Decorator to make a function run in his own thread. # """ # def wrapper(*args, **kwargs): # Thread(target=fn, args=args, kwargs=kwargs).start() # return wrapper # # Path: app/models.py # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/push.py # def send_call_message(device, unique_key, phonenumber, caller_id, attempt): # """ # Function to send the call push notification. # # Args: # device (Device): A Device object. # unique_key (string): String with the unique_key. # phonenumber (string): Phonenumber that is calling. # caller_id (string): ID of the caller. # attempt (int): The amount of attempts made. 
# """ # data = { # 'unique_key': unique_key, # 'phonenumber': phonenumber, # 'caller_id': caller_id, # 'attempt': attempt, # } # if device.app.platform == APNS_PLATFORM: # send_apns_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == GCM_PLATFORM: # send_gcm_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == ANDROID_PLATFORM: # send_fcm_message(device, device.app, TYPE_CALL, data) # else: # log_middleware_information( # '{0} | Trying to sent \'call\' notification to unknown platform:{1} device:{2}', # OrderedDict([ # ('unique_key', unique_key), # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) # # def send_text_message(device, app, message): # """ # Function to send a push notification with a message. # # Args: # device (Device): A Device object. # message (string): The message that needs to be send to the device. # """ # if app.platform == APNS_PLATFORM: # send_apns_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == GCM_PLATFORM: # send_gcm_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == ANDROID_PLATFORM: # send_fcm_message(device, app, TYPE_MESSAGE, {'message': message}) # else: # log_middleware_information( # 'Trying to sent \'message\' notification to unknown platform:{0} device:{1}', # OrderedDict([ # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) , which may include functions, classes, or code. Output only the next line.
ResponseLog.objects.create(
Continue the code snippet: <|code_start|> @threaded def task_incoming_call_notify(device, unique_key, phonenumber, caller_id, attempt): """ Threaded task to send a call push notification. """ <|code_end|> . Use current file imports: from .decorators import threaded from .models import ResponseLog from .push import send_call_message, send_text_message and context (classes, functions, or code) from other files: # Path: app/decorators.py # def threaded(fn): # """ # Decorator to make a function run in his own thread. # """ # def wrapper(*args, **kwargs): # Thread(target=fn, args=args, kwargs=kwargs).start() # return wrapper # # Path: app/models.py # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/push.py # def send_call_message(device, unique_key, phonenumber, caller_id, attempt): # """ # Function to send the call push notification. # # Args: # device (Device): A Device object. # unique_key (string): String with the unique_key. # phonenumber (string): Phonenumber that is calling. # caller_id (string): ID of the caller. # attempt (int): The amount of attempts made. 
# """ # data = { # 'unique_key': unique_key, # 'phonenumber': phonenumber, # 'caller_id': caller_id, # 'attempt': attempt, # } # if device.app.platform == APNS_PLATFORM: # send_apns_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == GCM_PLATFORM: # send_gcm_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == ANDROID_PLATFORM: # send_fcm_message(device, device.app, TYPE_CALL, data) # else: # log_middleware_information( # '{0} | Trying to sent \'call\' notification to unknown platform:{1} device:{2}', # OrderedDict([ # ('unique_key', unique_key), # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) # # def send_text_message(device, app, message): # """ # Function to send a push notification with a message. # # Args: # device (Device): A Device object. # message (string): The message that needs to be send to the device. # """ # if app.platform == APNS_PLATFORM: # send_apns_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == GCM_PLATFORM: # send_gcm_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == ANDROID_PLATFORM: # send_fcm_message(device, app, TYPE_MESSAGE, {'message': message}) # else: # log_middleware_information( # 'Trying to sent \'message\' notification to unknown platform:{0} device:{1}', # OrderedDict([ # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) . Output only the next line.
send_call_message(device, unique_key, phonenumber, caller_id, attempt)
Based on the snippet: <|code_start|> @threaded def task_incoming_call_notify(device, unique_key, phonenumber, caller_id, attempt): """ Threaded task to send a call push notification. """ send_call_message(device, unique_key, phonenumber, caller_id, attempt) @threaded def task_notify_old_token(device, app): """ Threaded task to send a text push notification. """ msg = 'A other device has registered for the same account. You won\'t be reachable on this device' <|code_end|> , predict the immediate next line with the help of imports: from .decorators import threaded from .models import ResponseLog from .push import send_call_message, send_text_message and context (classes, functions, sometimes code) from other files: # Path: app/decorators.py # def threaded(fn): # """ # Decorator to make a function run in his own thread. # """ # def wrapper(*args, **kwargs): # Thread(target=fn, args=args, kwargs=kwargs).start() # return wrapper # # Path: app/models.py # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/push.py # def send_call_message(device, unique_key, phonenumber, caller_id, attempt): # """ # Function to send the call push notification. # # Args: # device (Device): A Device object. # unique_key (string): String with the unique_key. # phonenumber (string): Phonenumber that is calling. # caller_id (string): ID of the caller. # attempt (int): The amount of attempts made. 
# """ # data = { # 'unique_key': unique_key, # 'phonenumber': phonenumber, # 'caller_id': caller_id, # 'attempt': attempt, # } # if device.app.platform == APNS_PLATFORM: # send_apns_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == GCM_PLATFORM: # send_gcm_message(device, device.app, TYPE_CALL, data) # elif device.app.platform == ANDROID_PLATFORM: # send_fcm_message(device, device.app, TYPE_CALL, data) # else: # log_middleware_information( # '{0} | Trying to sent \'call\' notification to unknown platform:{1} device:{2}', # OrderedDict([ # ('unique_key', unique_key), # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) # # def send_text_message(device, app, message): # """ # Function to send a push notification with a message. # # Args: # device (Device): A Device object. # message (string): The message that needs to be send to the device. # """ # if app.platform == APNS_PLATFORM: # send_apns_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == GCM_PLATFORM: # send_gcm_message(device, app, TYPE_MESSAGE, {'message': message}) # elif app.platform == ANDROID_PLATFORM: # send_fcm_message(device, app, TYPE_MESSAGE, {'message': message}) # else: # log_middleware_information( # 'Trying to sent \'message\' notification to unknown platform:{0} device:{1}', # OrderedDict([ # ('platform', device.app.platform), # ('token', device.token), # ]), # logging.WARNING, # device=device, # ) . Output only the next line.
send_text_message(device, app, msg)
Predict the next line after this snippet: <|code_start|> """ name = serializers.CharField(max_length=255, allow_blank=True, required=False) os_version = serializers.CharField(max_length=255, allow_blank=True, required=False) client_version = serializers.CharField(max_length=255, allow_blank=True, required=False) app = serializers.CharField(max_length=255) sandbox = serializers.BooleanField(default=False) remote_logging_id = serializers.CharField(max_length=255, allow_blank=True, required=False) class DeleteDeviceSerializer(TokenSerializer, SipUserIdSerializer): """ Serializer for the device view delete. """ app = serializers.CharField(max_length=255) class CallResponseSerializer(serializers.Serializer): """ Serializer for the call response view. """ unique_key = serializers.CharField(max_length=255) message_start_time = serializers.FloatField() available = serializers.BooleanField(default=True) class IncomingCallSerializer(SipUserIdSerializer): """ Serializer for the incoming call view. """ caller_id = serializers.CharField(max_length=255, default='', allow_blank=True) <|code_end|> using the current file's imports: from rest_framework import serializers from .validators import phone_number_validator, token_validator and any relevant context from other files: # Path: api/validators.py # def phone_number_validator(phone_number): # """ # Function to validate if a phone_number is in the required format. # # Args: # phone_number (string): The APNS or GCM push token. # # Raises: # ValidationError: When the phone_number is not correctly formated. # """ # phone_number_stripped = re.sub(r'[\+\(\)– - x]', '', phone_number) # # if not phone_number_stripped.isdigit(): # raise serializers.ValidationError('Not a valid phone number.') # # def token_validator(token): # """ # Function to validate if a token is in the required format. # # Args: # token (string): The APNS or GCM push token. # # Raises: # ValidationError: When the token is not correctly formated. 
# """ # if ' ' in token: # raise serializers.ValidationError('No whitespace allowed in token.') . Output only the next line.
phonenumber = serializers.CharField(max_length=32, validators=[phone_number_validator])
Given snippet: <|code_start|> serializer = self.serializer(data=data) serializer.is_valid() sandbox = serializer.validated_data['sandbox'] self.assertFalse(sandbox) def test_sandbox_value(self): """ Test if sandbox is set to true when given. """ data = { 'sip_user_id': '123456789', 'token': 'blaat', 'app': 'com.org.name', 'sandbox': 'True', } serializer = self.serializer(data=data) self.assertTrue(serializer.is_valid()) sandbox = serializer.validated_data['sandbox'] self.assertTrue(sandbox) class TestCallResponseSerializer(TestCase): def setUp(self): <|code_end|> , continue by predicting the next line. Consider current file imports: import time from django.test import TestCase from ..serializers import ( CallResponseSerializer, DeviceSerializer, IncomingCallSerializer, SipUserIdSerializer, TokenSerializer, ) and context: # Path: api/serializers.py # class CallResponseSerializer(serializers.Serializer): # """ # Serializer for the call response view. # """ # unique_key = serializers.CharField(max_length=255) # message_start_time = serializers.FloatField() # available = serializers.BooleanField(default=True) # # class DeviceSerializer(TokenSerializer, SipUserIdSerializer): # """ # Serializer for the device view post. # """ # name = serializers.CharField(max_length=255, allow_blank=True, required=False) # os_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # client_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # app = serializers.CharField(max_length=255) # sandbox = serializers.BooleanField(default=False) # remote_logging_id = serializers.CharField(max_length=255, allow_blank=True, required=False) # # class IncomingCallSerializer(SipUserIdSerializer): # """ # Serializer for the incoming call view. 
# """ # caller_id = serializers.CharField(max_length=255, default='', allow_blank=True) # phonenumber = serializers.CharField(max_length=32, validators=[phone_number_validator]) # call_id = serializers.CharField(max_length=255, default=None, allow_blank=True) # # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) # # class TokenSerializer(serializers.Serializer): # """ # Base serializer for the token field. # """ # token = serializers.CharField(max_length=250, validators=[token_validator]) which might include code, classes, or functions. Output only the next line.
self.serializer = CallResponseSerializer
Using the snippet: <|code_start|> class TestSipUserIdSerializer(TestCase): def setUp(self): self.serializer = SipUserIdSerializer def test_required_fields(self): """ Test if the sip_user_id field is required. """ data = {'no_sip_user_id': '123456789'} self.assertFalse(self.serializer(data=data).is_valid()) data = {'sip_user_id': '123456789'} self.assertTrue(self.serializer(data=data).is_valid()) def test_validation(self): """ Test if validation is done right. """ data = {'sip_user_id': '1234567890986'} self.assertFalse(self.serializer(data=data).is_valid()) class TestRegisterDeviceSerializer(TestCase): def setUp(self): <|code_end|> , determine the next line of code. You have imports: import time from django.test import TestCase from ..serializers import ( CallResponseSerializer, DeviceSerializer, IncomingCallSerializer, SipUserIdSerializer, TokenSerializer, ) and context (class names, function names, or code) available: # Path: api/serializers.py # class CallResponseSerializer(serializers.Serializer): # """ # Serializer for the call response view. # """ # unique_key = serializers.CharField(max_length=255) # message_start_time = serializers.FloatField() # available = serializers.BooleanField(default=True) # # class DeviceSerializer(TokenSerializer, SipUserIdSerializer): # """ # Serializer for the device view post. # """ # name = serializers.CharField(max_length=255, allow_blank=True, required=False) # os_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # client_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # app = serializers.CharField(max_length=255) # sandbox = serializers.BooleanField(default=False) # remote_logging_id = serializers.CharField(max_length=255, allow_blank=True, required=False) # # class IncomingCallSerializer(SipUserIdSerializer): # """ # Serializer for the incoming call view. 
# """ # caller_id = serializers.CharField(max_length=255, default='', allow_blank=True) # phonenumber = serializers.CharField(max_length=32, validators=[phone_number_validator]) # call_id = serializers.CharField(max_length=255, default=None, allow_blank=True) # # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) # # class TokenSerializer(serializers.Serializer): # """ # Base serializer for the token field. # """ # token = serializers.CharField(max_length=250, validators=[token_validator]) . Output only the next line.
self.serializer = DeviceSerializer
Here is a snippet: <|code_start|> """ Test if the unique_key and message_start_time fields are required. """ now = time.time() data = { 'no_unique_key': 'aghadgfagsdfjagsdjkfgakjdf', 'message_start_time': now, } self.assertFalse(self.serializer(data=data).is_valid()) data = { 'unique_key': 'aghadgfagsdfjagsdjkfgakjdf', 'no_message_start_time': '871926ahkjgjhkgf', } self.assertFalse(self.serializer(data=data).is_valid()) data = { 'unique_key': 'aghadgfagsdfjagsdjkfgakjdf', 'message_start_time': now, } self.assertTrue(self.serializer(data=data).is_valid()) class TestIncomingCallSerializer(TestCase): def setUp(self): <|code_end|> . Write the next line using the current file imports: import time from django.test import TestCase from ..serializers import ( CallResponseSerializer, DeviceSerializer, IncomingCallSerializer, SipUserIdSerializer, TokenSerializer, ) and context from other files: # Path: api/serializers.py # class CallResponseSerializer(serializers.Serializer): # """ # Serializer for the call response view. # """ # unique_key = serializers.CharField(max_length=255) # message_start_time = serializers.FloatField() # available = serializers.BooleanField(default=True) # # class DeviceSerializer(TokenSerializer, SipUserIdSerializer): # """ # Serializer for the device view post. # """ # name = serializers.CharField(max_length=255, allow_blank=True, required=False) # os_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # client_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # app = serializers.CharField(max_length=255) # sandbox = serializers.BooleanField(default=False) # remote_logging_id = serializers.CharField(max_length=255, allow_blank=True, required=False) # # class IncomingCallSerializer(SipUserIdSerializer): # """ # Serializer for the incoming call view. 
# """ # caller_id = serializers.CharField(max_length=255, default='', allow_blank=True) # phonenumber = serializers.CharField(max_length=32, validators=[phone_number_validator]) # call_id = serializers.CharField(max_length=255, default=None, allow_blank=True) # # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) # # class TokenSerializer(serializers.Serializer): # """ # Base serializer for the token field. # """ # token = serializers.CharField(max_length=250, validators=[token_validator]) , which may include functions, classes, or code. Output only the next line.
self.serializer = IncomingCallSerializer
Predict the next line for this snippet: <|code_start|> class TestTokenSerializer(TestCase): def setUp(self): self.serializer = TokenSerializer def test_required_fields(self): """ Test if the token field is required. """ data = {'no_token': 'blaat'} self.assertFalse(self.serializer(data=data).is_valid()) data = {'token': 'blaat'} self.assertTrue(self.serializer(data=data).is_valid()) def test_validation(self): """ Test if validation is done right. """ data = {'token': 'foo bar'} self.assertFalse(self.serializer(data=data).is_valid()) class TestSipUserIdSerializer(TestCase): def setUp(self): <|code_end|> with the help of current file imports: import time from django.test import TestCase from ..serializers import ( CallResponseSerializer, DeviceSerializer, IncomingCallSerializer, SipUserIdSerializer, TokenSerializer, ) and context from other files: # Path: api/serializers.py # class CallResponseSerializer(serializers.Serializer): # """ # Serializer for the call response view. # """ # unique_key = serializers.CharField(max_length=255) # message_start_time = serializers.FloatField() # available = serializers.BooleanField(default=True) # # class DeviceSerializer(TokenSerializer, SipUserIdSerializer): # """ # Serializer for the device view post. # """ # name = serializers.CharField(max_length=255, allow_blank=True, required=False) # os_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # client_version = serializers.CharField(max_length=255, allow_blank=True, required=False) # app = serializers.CharField(max_length=255) # sandbox = serializers.BooleanField(default=False) # remote_logging_id = serializers.CharField(max_length=255, allow_blank=True, required=False) # # class IncomingCallSerializer(SipUserIdSerializer): # """ # Serializer for the incoming call view. 
# """ # caller_id = serializers.CharField(max_length=255, default='', allow_blank=True) # phonenumber = serializers.CharField(max_length=32, validators=[phone_number_validator]) # call_id = serializers.CharField(max_length=255, default=None, allow_blank=True) # # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) # # class TokenSerializer(serializers.Serializer): # """ # Base serializer for the token field. # """ # token = serializers.CharField(max_length=250, validators=[token_validator]) , which may contain function names, class names, or code. Output only the next line.
self.serializer = SipUserIdSerializer
Here is a snippet: <|code_start|>LOG_NONCE = 'nonce' LOG_USERNAME = 'username' LOG_EMAIL = 'email' LOGENTRIES_HANDLERS = {} django_logger = logging.getLogger('django') def get_metrics(start_date, end_date, platform): """ Function to get a dict with metrics for the given date range and platform. Args: start_date (date): Start date to get metrics for. end_date (date): End date to get metrics for. platform (string): Platform to get metrics for. Returns: Dict containing the metrics. """ def _get_min(query): return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] def _get_max(query): return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] def _get_avg(query): return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] <|code_end|> . Write the next line using the current file imports: import logging from django.db.models import Avg, Max, Min from logentries import LogentriesHandler from .models import ResponseLog and context from other files: # Path: app/models.py # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) , which may include functions, classes, or code. Output only the next line.
base_query = ResponseLog.objects.filter(
Given snippet: <|code_start|> class VoipgridAuthenticationTestCase(TestCase): """ Class to test the VG authentication. """ def setUp(self): """ Setup authentication class. """ super(VoipgridAuthenticationTestCase, self).setUp() <|code_end|> , continue by predicting the next line. Consider current file imports: from django.test import TestCase from rest_framework.exceptions import AuthenticationFailed, PermissionDenied from ..authentication import VoipgridAuthentication from ..exceptions import UnavailableException and context: # Path: api/authentication.py # class VoipgridAuthentication(BaseAuthentication): # """ # Custom authentication. # """ # def _check_status_code(self, status_code): # """ # Function for checking the status code. # # Args: # status_code(int): That status code of a response. # """ # if status_code == 200: # return # elif status_code == 401: # raise AuthenticationFailed(detail=None) # elif status_code == 403: # raise PermissionDenied(detail=None) # else: # # Temporarily unavailable. # log_middleware_information( # 'Unsupported VG response code {0}', # OrderedDict([ # ('status_code', status_code), # ]), # logging.WARNING, # ) # raise UnavailableException(detail=None) # # def authenticate(self, request): # """ # Function for authentication against VoIPGRID api. # """ # if settings.TESTING: # return (AnonymousUser, None) # # # Get auth headers. # auth = get_authorization_header(request) # # if not auth: # # Raises 'Authentication credentials were not provided'. # raise NotAuthenticated(detail=None) # # # Serialize data to check for sip_user_id. # serializer = SipUserIdSerializer(data=request.data) # if not serializer.is_valid(raise_exception=False): # log_middleware_information( # 'BAD REQUEST! Authentication failed due to invalid sip_user_id in data:\n\n{0}', # OrderedDict([ # ('data', request.data), # ]), # logging.INFO, # ) # # This raises a bad request response. # raise ParseError(detail=None) # # # Get sip_user_id. 
# sip_user_id = serializer.validated_data['sip_user_id'] # # # Created new headers with old auth data. # headers = {'Authorization': auth} # # # Get user profile. # response = requests.get(settings.VG_API_USER_URL, headers=headers) # # Check status code. # self._check_status_code(response.status_code) # # # Parse to json. # json_response = response.json() # # # Get app account reference on systemuser. # app_account_url = json_response['app_account'] # # if not app_account_url: # # Has no app account and thus no access to api. # log_middleware_information( # 'No app account for systemuser {0} - {1}', # OrderedDict([ # ('id', json_response['id']), # (LOG_EMAIL, json_response['email']), # ]), # logging.INFO, # device=Device.objects.get(sip_user_id=sip_user_id), # ) # raise PermissionDenied(detail=None) # # # Get url for app account. # app_account_api_url = settings.VG_API_BASE_URL + app_account_url # # # Get app account. # response = requests.get(app_account_api_url, headers=headers) # # Check status code. # self._check_status_code(response.status_code) # # Get account id. # account_id = response.json()['account_id'] # # # Compare account id to sip user id the request is meant for. # if str(sip_user_id) != str(account_id): # # Raise permissions denied. # raise PermissionDenied(detail=None) # # # All good. # return (AnonymousUser, None) # # def authenticate_header(self, request): # return 'Basic' # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' which might include code, classes, or functions. Output only the next line.
self.authentication = VoipgridAuthentication()
Given the following code snippet before the placeholder: <|code_start|> class VoipgridAuthenticationTestCase(TestCase): """ Class to test the VG authentication. """ def setUp(self): """ Setup authentication class. """ super(VoipgridAuthenticationTestCase, self).setUp() self.authentication = VoipgridAuthentication() def test_check_status_code(self): """ Test status codes and exceptions raised. """ # Step 1: Status code 200. self.authentication._check_status_code(200) # Step 2: Status code 401. with self.assertRaises(AuthenticationFailed): self.authentication._check_status_code(401) # Step 3: Status code 403. with self.assertRaises(PermissionDenied): self.authentication._check_status_code(403) # Step 4: Status code other than tested. <|code_end|> , predict the next line using imports from the current file: from django.test import TestCase from rest_framework.exceptions import AuthenticationFailed, PermissionDenied from ..authentication import VoipgridAuthentication from ..exceptions import UnavailableException and context including class names, function names, and sometimes code from other files: # Path: api/authentication.py # class VoipgridAuthentication(BaseAuthentication): # """ # Custom authentication. # """ # def _check_status_code(self, status_code): # """ # Function for checking the status code. # # Args: # status_code(int): That status code of a response. # """ # if status_code == 200: # return # elif status_code == 401: # raise AuthenticationFailed(detail=None) # elif status_code == 403: # raise PermissionDenied(detail=None) # else: # # Temporarily unavailable. # log_middleware_information( # 'Unsupported VG response code {0}', # OrderedDict([ # ('status_code', status_code), # ]), # logging.WARNING, # ) # raise UnavailableException(detail=None) # # def authenticate(self, request): # """ # Function for authentication against VoIPGRID api. # """ # if settings.TESTING: # return (AnonymousUser, None) # # # Get auth headers. 
# auth = get_authorization_header(request) # # if not auth: # # Raises 'Authentication credentials were not provided'. # raise NotAuthenticated(detail=None) # # # Serialize data to check for sip_user_id. # serializer = SipUserIdSerializer(data=request.data) # if not serializer.is_valid(raise_exception=False): # log_middleware_information( # 'BAD REQUEST! Authentication failed due to invalid sip_user_id in data:\n\n{0}', # OrderedDict([ # ('data', request.data), # ]), # logging.INFO, # ) # # This raises a bad request response. # raise ParseError(detail=None) # # # Get sip_user_id. # sip_user_id = serializer.validated_data['sip_user_id'] # # # Created new headers with old auth data. # headers = {'Authorization': auth} # # # Get user profile. # response = requests.get(settings.VG_API_USER_URL, headers=headers) # # Check status code. # self._check_status_code(response.status_code) # # # Parse to json. # json_response = response.json() # # # Get app account reference on systemuser. # app_account_url = json_response['app_account'] # # if not app_account_url: # # Has no app account and thus no access to api. # log_middleware_information( # 'No app account for systemuser {0} - {1}', # OrderedDict([ # ('id', json_response['id']), # (LOG_EMAIL, json_response['email']), # ]), # logging.INFO, # device=Device.objects.get(sip_user_id=sip_user_id), # ) # raise PermissionDenied(detail=None) # # # Get url for app account. # app_account_api_url = settings.VG_API_BASE_URL + app_account_url # # # Get app account. # response = requests.get(app_account_api_url, headers=headers) # # Check status code. # self._check_status_code(response.status_code) # # Get account id. # account_id = response.json()['account_id'] # # # Compare account id to sip user id the request is meant for. # if str(sip_user_id) != str(account_id): # # Raise permissions denied. # raise PermissionDenied(detail=None) # # # All good. 
# return (AnonymousUser, None) # # def authenticate_header(self, request): # return 'Basic' # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' . Output only the next line.
with self.assertRaises(UnavailableException):
Predict the next line for this snippet: <|code_start|> ) # This raises a bad request response. raise ParseError(detail=None) # Get sip_user_id. sip_user_id = serializer.validated_data['sip_user_id'] # Created new headers with old auth data. headers = {'Authorization': auth} # Get user profile. response = requests.get(settings.VG_API_USER_URL, headers=headers) # Check status code. self._check_status_code(response.status_code) # Parse to json. json_response = response.json() # Get app account reference on systemuser. app_account_url = json_response['app_account'] if not app_account_url: # Has no app account and thus no access to api. log_middleware_information( 'No app account for systemuser {0} - {1}', OrderedDict([ ('id', json_response['id']), (LOG_EMAIL, json_response['email']), ]), logging.INFO, <|code_end|> with the help of current file imports: from collections import OrderedDict from django.conf import settings from django.contrib.auth.models import AnonymousUser from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import (AuthenticationFailed, NotAuthenticated, ParseError, PermissionDenied) from app.models import Device from app.utils import LOG_EMAIL, log_middleware_information from .exceptions import UnavailableException from .serializers import SipUserIdSerializer import logging import requests and context from other files: # Path: app/models.py # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: app/utils.py # LOG_EMAIL = 'email' # # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. 
# """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' # # Path: api/serializers.py # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) , which may contain function names, class names, or code. Output only the next line.
device=Device.objects.get(sip_user_id=sip_user_id),
Predict the next line for this snippet: <|code_start|> ('data', request.data), ]), logging.INFO, ) # This raises a bad request response. raise ParseError(detail=None) # Get sip_user_id. sip_user_id = serializer.validated_data['sip_user_id'] # Created new headers with old auth data. headers = {'Authorization': auth} # Get user profile. response = requests.get(settings.VG_API_USER_URL, headers=headers) # Check status code. self._check_status_code(response.status_code) # Parse to json. json_response = response.json() # Get app account reference on systemuser. app_account_url = json_response['app_account'] if not app_account_url: # Has no app account and thus no access to api. log_middleware_information( 'No app account for systemuser {0} - {1}', OrderedDict([ ('id', json_response['id']), <|code_end|> with the help of current file imports: from collections import OrderedDict from django.conf import settings from django.contrib.auth.models import AnonymousUser from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import (AuthenticationFailed, NotAuthenticated, ParseError, PermissionDenied) from app.models import Device from app.utils import LOG_EMAIL, log_middleware_information from .exceptions import UnavailableException from .serializers import SipUserIdSerializer import logging import requests and context from other files: # Path: app/models.py # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: app/utils.py # LOG_EMAIL = 'email' # # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. 
# """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' # # Path: api/serializers.py # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) , which may contain function names, class names, or code. Output only the next line.
(LOG_EMAIL, json_response['email']),
Given the code snippet: <|code_start|> class VoipgridAuthentication(BaseAuthentication): """ Custom authentication. """ def _check_status_code(self, status_code): """ Function for checking the status code. Args: status_code(int): That status code of a response. """ if status_code == 200: return elif status_code == 401: raise AuthenticationFailed(detail=None) elif status_code == 403: raise PermissionDenied(detail=None) else: # Temporarily unavailable. <|code_end|> , generate the next line using the imports in this file: from collections import OrderedDict from django.conf import settings from django.contrib.auth.models import AnonymousUser from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import (AuthenticationFailed, NotAuthenticated, ParseError, PermissionDenied) from app.models import Device from app.utils import LOG_EMAIL, log_middleware_information from .exceptions import UnavailableException from .serializers import SipUserIdSerializer import logging import requests and context (functions, classes, or occasionally code) from other files: # Path: app/models.py # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: app/utils.py # LOG_EMAIL = 'email' # # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. 
# """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' # # Path: api/serializers.py # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) . Output only the next line.
log_middleware_information(
Predict the next line for this snippet: <|code_start|> class VoipgridAuthentication(BaseAuthentication): """ Custom authentication. """ def _check_status_code(self, status_code): """ Function for checking the status code. Args: status_code(int): That status code of a response. """ if status_code == 200: return elif status_code == 401: raise AuthenticationFailed(detail=None) elif status_code == 403: raise PermissionDenied(detail=None) else: # Temporarily unavailable. log_middleware_information( 'Unsupported VG response code {0}', OrderedDict([ ('status_code', status_code), ]), logging.WARNING, ) <|code_end|> with the help of current file imports: from collections import OrderedDict from django.conf import settings from django.contrib.auth.models import AnonymousUser from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import (AuthenticationFailed, NotAuthenticated, ParseError, PermissionDenied) from app.models import Device from app.utils import LOG_EMAIL, log_middleware_information from .exceptions import UnavailableException from .serializers import SipUserIdSerializer import logging import requests and context from other files: # Path: app/models.py # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: app/utils.py # LOG_EMAIL = 'email' # # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. 
# """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' # # Path: api/serializers.py # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) , which may contain function names, class names, or code. Output only the next line.
raise UnavailableException(detail=None)
Continue the code snippet: <|code_start|> elif status_code == 401: raise AuthenticationFailed(detail=None) elif status_code == 403: raise PermissionDenied(detail=None) else: # Temporarily unavailable. log_middleware_information( 'Unsupported VG response code {0}', OrderedDict([ ('status_code', status_code), ]), logging.WARNING, ) raise UnavailableException(detail=None) def authenticate(self, request): """ Function for authentication against VoIPGRID api. """ if settings.TESTING: return (AnonymousUser, None) # Get auth headers. auth = get_authorization_header(request) if not auth: # Raises 'Authentication credentials were not provided'. raise NotAuthenticated(detail=None) # Serialize data to check for sip_user_id. <|code_end|> . Use current file imports: from collections import OrderedDict from django.conf import settings from django.contrib.auth.models import AnonymousUser from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.exceptions import (AuthenticationFailed, NotAuthenticated, ParseError, PermissionDenied) from app.models import Device from app.utils import LOG_EMAIL, log_middleware_information from .exceptions import UnavailableException from .serializers import SipUserIdSerializer import logging import requests and context (classes, functions, or code) from other files: # Path: app/models.py # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # Path: app/utils.py # LOG_EMAIL = 'email' # # def log_middleware_information(log_statement, dict_with_variables, log_level, device=None): # """ # Function that handles the logging for the middleware. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # log_level (int): The level on which to log. # device (Device): The device for which we want to log to Logentries. 
# """ # remote_logging_id = device.remote_logging_id if device and device.remote_logging_id else 'No remote logging ID' # django_log_statement = fill_log_statement(log_statement, dict_with_variables) # django_logger.log(log_level, '{0} - middleware - {1}'.format(remote_logging_id, django_log_statement)) # # if device and device.remote_logging_id: # log_statement = fill_log_statement(log_statement, dict_with_variables, anonymize=True) # logentries_token = device.app.logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # if device.app.partner_logentries_token: # # Log to the Logentries environment of the partner with a different token. # logentries_token = device.app.partner_logentries_token # log_to_logentries(log_statement, log_level, logentries_token, device, remote_logging_id) # # Path: api/exceptions.py # class UnavailableException(APIException): # status_code = HTTP_503_SERVICE_UNAVAILABLE # default_detail = 'Service temporarily unavailable, try again later.' # # Path: api/serializers.py # class SipUserIdSerializer(serializers.Serializer): # """ # Base serializer for the sip_user_id field. # """ # sip_user_id = serializers.IntegerField(max_value=999999999, min_value=int(1e8)) . Output only the next line.
serializer = SipUserIdSerializer(data=request.data)
Predict the next line after this snippet: <|code_start|> return new_urls + original_urls def last_day_of_month(self, any_day): """ Function to return the last day of the month. Args: any_date (date): Date of the month to determine to last day for. Returns: Date object with the last day of the month. """ next_month = any_day.replace(day=28) + datetime.timedelta(days=4) return next_month - datetime.timedelta(days=next_month.day) def view_metrics(self, request, **kwargs): """ View for getting metrics for the roundtrip times. """ month = request.GET.get('month', None) year = request.GET.get('year', None) start_date = datetime.date.today().replace(day=1) if month and year: start_date = datetime.date(int(year), int(month), 1) end_date = self.last_day_of_month(start_date) context = { 'metrics': [ <|code_end|> using the current file's imports: import datetime from django.conf.urls import url from django.contrib import admin from django.shortcuts import render from .models import ANDROID_PLATFORM, APNS_PLATFORM, App, Device, GCM_PLATFORM, ResponseLog from .utils import get_metrics and any relevant context from other files: # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. 
# """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results . Output only the next line.
get_metrics(start_date, end_date, APNS_PLATFORM),
Predict the next line for this snippet: <|code_start|> Returns: Date object with the last day of the month. """ next_month = any_day.replace(day=28) + datetime.timedelta(days=4) return next_month - datetime.timedelta(days=next_month.day) def view_metrics(self, request, **kwargs): """ View for getting metrics for the roundtrip times. """ month = request.GET.get('month', None) year = request.GET.get('year', None) start_date = datetime.date.today().replace(day=1) if month and year: start_date = datetime.date(int(year), int(month), 1) end_date = self.last_day_of_month(start_date) context = { 'metrics': [ get_metrics(start_date, end_date, APNS_PLATFORM), get_metrics(start_date, end_date, GCM_PLATFORM), get_metrics(start_date, end_date, ANDROID_PLATFORM), ], } return render(request, 'app/metrics.html', context=context) <|code_end|> with the help of current file imports: import datetime from django.conf.urls import url from django.contrib import admin from django.shortcuts import render from .models import ANDROID_PLATFORM, APNS_PLATFORM, App, Device, GCM_PLATFORM, ResponseLog from .utils import get_metrics and context from other files: # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. 
# """ # # FIXME: We need this to be backwards compatible for one release. # id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. 
# """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results , which may contain function names, class names, or code. Output only the next line.
admin.site.register(Device, DeviceAdmin)
Predict the next line for this snippet: <|code_start|> def last_day_of_month(self, any_day): """ Function to return the last day of the month. Args: any_date (date): Date of the month to determine to last day for. Returns: Date object with the last day of the month. """ next_month = any_day.replace(day=28) + datetime.timedelta(days=4) return next_month - datetime.timedelta(days=next_month.day) def view_metrics(self, request, **kwargs): """ View for getting metrics for the roundtrip times. """ month = request.GET.get('month', None) year = request.GET.get('year', None) start_date = datetime.date.today().replace(day=1) if month and year: start_date = datetime.date(int(year), int(month), 1) end_date = self.last_day_of_month(start_date) context = { 'metrics': [ get_metrics(start_date, end_date, APNS_PLATFORM), <|code_end|> with the help of current file imports: import datetime from django.conf.urls import url from django.contrib import admin from django.shortcuts import render from .models import ANDROID_PLATFORM, APNS_PLATFORM, App, Device, GCM_PLATFORM, ResponseLog from .utils import get_metrics and context from other files: # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. 
# """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results , which may contain function names, class names, or code. Output only the next line.
get_metrics(start_date, end_date, GCM_PLATFORM),
Predict the next line for this snippet: <|code_start|> return new_urls + original_urls def last_day_of_month(self, any_day): """ Function to return the last day of the month. Args: any_date (date): Date of the month to determine to last day for. Returns: Date object with the last day of the month. """ next_month = any_day.replace(day=28) + datetime.timedelta(days=4) return next_month - datetime.timedelta(days=next_month.day) def view_metrics(self, request, **kwargs): """ View for getting metrics for the roundtrip times. """ month = request.GET.get('month', None) year = request.GET.get('year', None) start_date = datetime.date.today().replace(day=1) if month and year: start_date = datetime.date(int(year), int(month), 1) end_date = self.last_day_of_month(start_date) context = { 'metrics': [ <|code_end|> with the help of current file imports: import datetime from django.conf.urls import url from django.contrib import admin from django.shortcuts import render from .models import ANDROID_PLATFORM, APNS_PLATFORM, App, Device, GCM_PLATFORM, ResponseLog from .utils import get_metrics and context from other files: # Path: app/models.py # ANDROID_PLATFORM = 'android' # # APNS_PLATFORM = 'apns' # # class App(models.Model): # """ # Model that contains information about the supported apps by the middleware. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # app_id = models.CharField(max_length=255) # # push_key = models.CharField(max_length=255) # logentries_token = models.CharField(max_length=255, blank=False, null=False, default='') # partner_logentries_token = models.CharField(max_length=255, blank=True, null=True, default='') # # def __str__(self): # return '{0} for {1}'.format(self.app_id, self.platform) # # class Meta: # unique_together = ('app_id', 'platform') # # class Device(models.Model): # """ # Model for all device who register at the middleware. # """ # # FIXME: We need this to be backwards compatible for one release. 
# id = models.CharField(max_length=255, unique=True) # # name = models.CharField(max_length=255, blank=True, null=True) # sip_user_id = models.CharField(max_length=255, unique=True, primary_key=True) # os_version = models.CharField(max_length=255, blank=True, null=True) # client_version = models.CharField(max_length=255, blank=True, null=True) # token = models.CharField(max_length=250) # sandbox = models.BooleanField(default=False) # last_seen = models.DateTimeField(blank=True, null=True) # app = models.ForeignKey(App) # remote_logging_id = models.CharField(max_length=255, blank=True, null=True) # # def __str__(self): # return '{0} - {1}'.format(self.sip_user_id, self.name) # # # FIXME: We need this to be backwards compatible for one release. # def save(self, *args, **kwargs): # """ # Make sure the id fields stays the same as sip_user_id so we are # backwards compatible. # """ # if self.sip_user_id: # self.id = self.sip_user_id # obj = super(Device, self).save(*args, **kwargs) # return obj # # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. 
# """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results , which may contain function names, class names, or code. Output only the next line.
get_metrics(start_date, end_date, APNS_PLATFORM),
Based on the snippet: <|code_start|> log3 = ResponseLog.objects.create( platform=platform, roundtrip_time=2.5, available=True, ) log3.date = self.first_of_month.replace(day=3) log3.save() # Not available logs. log4 = ResponseLog.objects.create( platform=platform, roundtrip_time=4.0, available=False, ) log4.date = self.first_of_month.replace(day=2) log4.save() log5 = ResponseLog.objects.create( platform=platform, roundtrip_time=6.0, available=False, ) log5.date = self.first_of_month.replace(day=2) log5.save() def test_get_metrics(self): """ Test for getting metrics for 1 platform. """ <|code_end|> , predict the immediate next line with the help of imports: from collections import OrderedDict from django.test import TestCase from ..models import GCM_PLATFORM, ResponseLog from ..utils import fill_log_statement, get_metrics import datetime and context (classes, functions, sometimes code) from other files: # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def fill_log_statement(log_statement, dict_with_variables, anonymize=False): # """ # Function that anonymizes and inserts variables into log statements. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # anonymize (bool): Boolean to check if we want to anonymize. # # Returns: # str: String containing the given variables. 
# """ # anonymize_keys = [ # LOG_SIP_USER_ID, # LOG_CALLER_ID, # LOG_CALL_TO, # LOG_CALL_FROM, # LOG_CONTACT, # LOG_DIGEST_USERNAME, # LOG_NONCE, # LOG_USERNAME, # LOG_EMAIL, # ] # # if anonymize: # for key in dict_with_variables.keys(): # if key in anonymize_keys: # dict_with_variables[key] = key.upper() # # list_with_variables = [dict_with_variables[x] for x in dict_with_variables] # updated_log_statement = log_statement.format(*list_with_variables) # return updated_log_statement # # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. # """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 
'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results . Output only the next line.
self._create_entries(GCM_PLATFORM)
Predict the next line after this snippet: <|code_start|> class GetMetricsTestCase(TestCase): """ Test for the get_metrics utils function. """ def setUp(self): """ Setup start and end date. """ super(GetMetricsTestCase, self).setUp() self.first_of_month = datetime.date.today().replace(day=1) self.end_date = self.first_of_month.replace(day=3) def _create_entries(self, platform): """ Create 4 entries for tests. """ # Available logs. <|code_end|> using the current file's imports: from collections import OrderedDict from django.test import TestCase from ..models import GCM_PLATFORM, ResponseLog from ..utils import fill_log_statement, get_metrics import datetime and any relevant context from other files: # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def fill_log_statement(log_statement, dict_with_variables, anonymize=False): # """ # Function that anonymizes and inserts variables into log statements. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # anonymize (bool): Boolean to check if we want to anonymize. # # Returns: # str: String containing the given variables. 
# """ # anonymize_keys = [ # LOG_SIP_USER_ID, # LOG_CALLER_ID, # LOG_CALL_TO, # LOG_CALL_FROM, # LOG_CONTACT, # LOG_DIGEST_USERNAME, # LOG_NONCE, # LOG_USERNAME, # LOG_EMAIL, # ] # # if anonymize: # for key in dict_with_variables.keys(): # if key in anonymize_keys: # dict_with_variables[key] = key.upper() # # list_with_variables = [dict_with_variables[x] for x in dict_with_variables] # updated_log_statement = log_statement.format(*list_with_variables) # return updated_log_statement # # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. # """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 
'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results . Output only the next line.
log1 = ResponseLog.objects.create(
Using the snippet: <|code_start|> """ self._create_entries(GCM_PLATFORM) metrics = get_metrics(self.first_of_month, self.end_date, GCM_PLATFORM) self.assertEquals(metrics['total_count'], 5) self.assertEquals(metrics['available']['count'], 3) self.assertEquals(metrics['available']['avg'], 2.0) self.assertEquals(metrics['available']['min'], 1.5) self.assertEquals(metrics['available']['max'], 2.5) self.assertEquals(metrics['not_available']['count'], 2) self.assertEquals(metrics['not_available']['avg'], 5.0) self.assertEquals(metrics['not_available']['min'], 4.0) self.assertEquals(metrics['not_available']['max'], 6.0) class LogTestCase(TestCase): """ Test case to check if the logged information is anonymized correctly. """ def test_if_log_statement_is_anonymized(self): """ Test if the data is correctly anonymized when logging. """ cleaned_log_statement = ('Sip user ID: SIP_USER_ID, Caller ID: CALLER_ID, Call to: CALL_TO, ' 'Call from: CALL_FROM, Contact: CONTACT, Digest username: DIGEST_USERNAME, ' 'Nonce: NONCE, Username: USERNAME') <|code_end|> , determine the next line of code. You have imports: from collections import OrderedDict from django.test import TestCase from ..models import GCM_PLATFORM, ResponseLog from ..utils import fill_log_statement, get_metrics import datetime and context (class names, function names, or code) available: # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def fill_log_statement(log_statement, dict_with_variables, anonymize=False): # """ # Function that anonymizes and inserts variables into log statements. # # Args: # log_statement (str): The message to log. 
# dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # anonymize (bool): Boolean to check if we want to anonymize. # # Returns: # str: String containing the given variables. # """ # anonymize_keys = [ # LOG_SIP_USER_ID, # LOG_CALLER_ID, # LOG_CALL_TO, # LOG_CALL_FROM, # LOG_CONTACT, # LOG_DIGEST_USERNAME, # LOG_NONCE, # LOG_USERNAME, # LOG_EMAIL, # ] # # if anonymize: # for key in dict_with_variables.keys(): # if key in anonymize_keys: # dict_with_variables[key] = key.upper() # # list_with_variables = [dict_with_variables[x] for x in dict_with_variables] # updated_log_statement = log_statement.format(*list_with_variables) # return updated_log_statement # # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. 
# """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results . Output only the next line.
log_statement = fill_log_statement(
Based on the snippet: <|code_start|> platform=platform, roundtrip_time=2.5, available=True, ) log3.date = self.first_of_month.replace(day=3) log3.save() # Not available logs. log4 = ResponseLog.objects.create( platform=platform, roundtrip_time=4.0, available=False, ) log4.date = self.first_of_month.replace(day=2) log4.save() log5 = ResponseLog.objects.create( platform=platform, roundtrip_time=6.0, available=False, ) log5.date = self.first_of_month.replace(day=2) log5.save() def test_get_metrics(self): """ Test for getting metrics for 1 platform. """ self._create_entries(GCM_PLATFORM) <|code_end|> , predict the immediate next line with the help of imports: from collections import OrderedDict from django.test import TestCase from ..models import GCM_PLATFORM, ResponseLog from ..utils import fill_log_statement, get_metrics import datetime and context (classes, functions, sometimes code) from other files: # Path: app/models.py # GCM_PLATFORM = 'gcm' # # class ResponseLog(models.Model): # """ # Model for logging info about the device response. # """ # platform = models.CharField(choices=PLATFORM_CHOICES, max_length=10) # roundtrip_time = models.FloatField() # available = models.BooleanField() # date = models.DateTimeField(auto_now_add=True) # # Path: app/utils.py # def fill_log_statement(log_statement, dict_with_variables, anonymize=False): # """ # Function that anonymizes and inserts variables into log statements. # # Args: # log_statement (str): The message to log. # dict_with_variables (OrderedDict): OrderedDict that contains the # variables we want to insert into the logging statement. # anonymize (bool): Boolean to check if we want to anonymize. # # Returns: # str: String containing the given variables. 
# """ # anonymize_keys = [ # LOG_SIP_USER_ID, # LOG_CALLER_ID, # LOG_CALL_TO, # LOG_CALL_FROM, # LOG_CONTACT, # LOG_DIGEST_USERNAME, # LOG_NONCE, # LOG_USERNAME, # LOG_EMAIL, # ] # # if anonymize: # for key in dict_with_variables.keys(): # if key in anonymize_keys: # dict_with_variables[key] = key.upper() # # list_with_variables = [dict_with_variables[x] for x in dict_with_variables] # updated_log_statement = log_statement.format(*list_with_variables) # return updated_log_statement # # def get_metrics(start_date, end_date, platform): # """ # Function to get a dict with metrics for the given date range and platform. # # Args: # start_date (date): Start date to get metrics for. # end_date (date): End date to get metrics for. # platform (string): Platform to get metrics for. # # Returns: # Dict containing the metrics. # """ # def _get_min(query): # return query.aggregate(Min('roundtrip_time'))['roundtrip_time__min'] # # def _get_max(query): # return query.aggregate(Max('roundtrip_time'))['roundtrip_time__max'] # # def _get_avg(query): # return query.aggregate(Avg('roundtrip_time'))['roundtrip_time__avg'] # # base_query = ResponseLog.objects.filter( # platform=platform, date__range=(start_date, end_date)).order_by('roundtrip_time') # total_count = base_query.count() # # percentile = int(total_count * 0.95) # # available_query = base_query.filter(available=True) # available_count = available_query.count() # avg_available = _get_avg(available_query[:percentile]) # min_available = _get_min(available_query[:percentile]) # max_available = _get_max(available_query[:percentile]) # # not_available_query = base_query.filter(available=False) # not_available_count = not_available_query.count() # avg_not_available = _get_avg(not_available_query[:percentile]) # min_not_available = _get_min(not_available_query[:percentile]) # max_not_available = _get_max(not_available_query[:percentile]) # # results = { # 'platform': platform, # 'start_date': start_date, # 'end_date': end_date, # 
'total_count': total_count, # 'available': { # 'count': available_count, # 'avg': avg_available, # 'min': min_available, # 'max': max_available, # }, # 'not_available': { # 'count': not_available_count, # 'avg': avg_not_available, # 'min': min_not_available, # 'max': max_not_available, # }, # } # # return results . Output only the next line.
metrics = get_metrics(self.first_of_month, self.end_date, GCM_PLATFORM)
Here is a snippet: <|code_start|> def silentremove(filename): '''Remove a file. If the file does not exist, stay silent.''' try: os.remove(filename) except OSError as e: if e.errno != errno.ENOENT: raise class CryptoTest(unittest.TestCase): ''' Test crypto functions. Since crypto is essentially just a wrapper around the PyCrypto library, we will not test this module extensively. ''' def test_create_key(self): silentremove('key.pem') <|code_end|> . Write the next line using the current file imports: import unittest import os import errno from nose.tools import assert_equal from tsukiji import crypto as cr and context from other files: # Path: tsukiji/crypto.py # KEYFILE_NAME = 'key.pem' # def create_key(): # def retrieve_key(): # def get_public_bytestring(): # def decrypt_message(message): # def encrypt_message(key_string, message): , which may include functions, classes, or code. Output only the next line.
cr.create_key()
Using the snippet: <|code_start|> now = datetime.datetime.now() next_year = now.replace(year=now.year + 1) last_year = now.replace(year=now.year - 1) class OrderBookTest(unittest.TestCase): def setUp(self): '''Clean the orderbook before every test.''' <|code_end|> , determine the next line of code. You have imports: from nose.tools import assert_equal, assert_is, assert_is_none, assert_is_not_none from tsukiji import orderbook as ob from tsukiji.crypto import get_public_bytestring import unittest import datetime import datetime and context (class names, function names, or code) available: # Path: tsukiji/orderbook.py # def create_ask(price, quantity, timeout): # def create_bid(price, quantity, timeout): # def create_trade(recipient, quantity, trade_id): # def create_confirm(recipient, trade_id): # def create_cancel(recipient, trade_id): # def create_greeting(): # def create_greeting_response(peers): # def create_msg(options=None): # def trade_offer(their_offer, own_offer): # def get_offer(id, message_id): # def remove_offer(id, message_id): # def clean_offers(f): # def func_wrapper(*args, **kwargs): # def get_asks(): # def get_own_asks(): # def get_bids(): # def get_own_bids(): # def match_bid(bid): # def match_incoming_bid(bid): # def match_ask(ask): # def match_incoming_ask(ask): # def lowest_offer(offers): # def highest_offer(offers): # # Path: tsukiji/crypto.py # def get_public_bytestring(): # key = retrieve_key() # return key.publickey().exportKey() . Output only the next line.
ob.message_id = 0
Continue the code snippet: <|code_start|> now = datetime.datetime.now() next_year = now.replace(year=now.year + 1) last_year = now.replace(year=now.year - 1) class OrderBookTest(unittest.TestCase): def setUp(self): '''Clean the orderbook before every test.''' ob.message_id = 0 ob.offers = [] ob.trades = [] <|code_end|> . Use current file imports: from nose.tools import assert_equal, assert_is, assert_is_none, assert_is_not_none from tsukiji import orderbook as ob from tsukiji.crypto import get_public_bytestring import unittest import datetime import datetime and context (classes, functions, or code) from other files: # Path: tsukiji/orderbook.py # def create_ask(price, quantity, timeout): # def create_bid(price, quantity, timeout): # def create_trade(recipient, quantity, trade_id): # def create_confirm(recipient, trade_id): # def create_cancel(recipient, trade_id): # def create_greeting(): # def create_greeting_response(peers): # def create_msg(options=None): # def trade_offer(their_offer, own_offer): # def get_offer(id, message_id): # def remove_offer(id, message_id): # def clean_offers(f): # def func_wrapper(*args, **kwargs): # def get_asks(): # def get_own_asks(): # def get_bids(): # def get_own_bids(): # def match_bid(bid): # def match_incoming_bid(bid): # def match_ask(ask): # def match_incoming_ask(ask): # def lowest_offer(offers): # def highest_offer(offers): # # Path: tsukiji/crypto.py # def get_public_bytestring(): # key = retrieve_key() # return key.publickey().exportKey() . Output only the next line.
self.public_id = get_public_bytestring()
Predict the next line for this snippet: <|code_start|> class CreateCommentResult(Result): def __init__(self, response): super(CreateCommentResult, self).__init__(response) self.comment_id = -1 if self.ok: serialized = response.json() if "id" in serialized: self.comment_id = int(serialized["id"]) class SelectCommentResult(Result): def __init__(self, response): super(SelectCommentResult, self).__init__(response) self.raw_comments = [] self.older = False self.newer = False if self.ok: serialized = response.json() if "comments" in serialized: self.raw_comments = serialized["comments"] self.older = serialized["older"] self.newer = serialized["newer"] def comments(self): <|code_end|> with the help of current file imports: from pykintone.result import Result from pykintone.comment import RecordComment, Mention and context from other files: # Path: pykintone/result.py # class Result(object): # # def __init__(self, response): # self.ok = response.ok # self.message = "" # self.error = None # if not self.ok: # _e = response.json() # self.error = Error(_e["message"], _e["id"], _e["code"]) # self.detail = {} # if "errors" in _e: # self.detail = _e["errors"] # # Path: pykintone/comment.py # class RecordComment(ps.kintoneStructure): # # def __init__(self): # super(RecordComment, self).__init__() # self.comment_id = -1 # self.created_at = None # self.creator = None # self.mentions = [] # # self._property_details.append(ps.PropertyDetail("comment_id", field_name="id")) # self._property_details.append(ps.PropertyDetail("created_at", ps.FieldType.CREATED_TIME, field_name="createdAt")) # self._property_details.append(ps.PropertyDetail("creator", ps.FieldType.CREATOR)) # # class Mention(): # # def __init__(self, code, target_type): # self.code = code # self.target_type = target_type # # @classmethod # def deserialize(cls, mention_dict): # return Mention(mention_dict["code"], mention_dict["type"]) # # def serialize(self): # return { # "code": self.code, # "type": self.target_type # } , 
which may contain function names, class names, or code. Output only the next line.
cs = [RecordComment.deserialize(cd) for cd in self.raw_comments]
Given snippet: <|code_start|> class CreateCommentResult(Result): def __init__(self, response): super(CreateCommentResult, self).__init__(response) self.comment_id = -1 if self.ok: serialized = response.json() if "id" in serialized: self.comment_id = int(serialized["id"]) class SelectCommentResult(Result): def __init__(self, response): super(SelectCommentResult, self).__init__(response) self.raw_comments = [] self.older = False self.newer = False if self.ok: serialized = response.json() if "comments" in serialized: self.raw_comments = serialized["comments"] self.older = serialized["older"] self.newer = serialized["newer"] def comments(self): cs = [RecordComment.deserialize(cd) for cd in self.raw_comments] for c in cs: <|code_end|> , continue by predicting the next line. Consider current file imports: from pykintone.result import Result from pykintone.comment import RecordComment, Mention and context: # Path: pykintone/result.py # class Result(object): # # def __init__(self, response): # self.ok = response.ok # self.message = "" # self.error = None # if not self.ok: # _e = response.json() # self.error = Error(_e["message"], _e["id"], _e["code"]) # self.detail = {} # if "errors" in _e: # self.detail = _e["errors"] # # Path: pykintone/comment.py # class RecordComment(ps.kintoneStructure): # # def __init__(self): # super(RecordComment, self).__init__() # self.comment_id = -1 # self.created_at = None # self.creator = None # self.mentions = [] # # self._property_details.append(ps.PropertyDetail("comment_id", field_name="id")) # self._property_details.append(ps.PropertyDetail("created_at", ps.FieldType.CREATED_TIME, field_name="createdAt")) # self._property_details.append(ps.PropertyDetail("creator", ps.FieldType.CREATOR)) # # class Mention(): # # def __init__(self, code, target_type): # self.code = code # self.target_type = target_type # # @classmethod # def deserialize(cls, mention_dict): # return Mention(mention_dict["code"], mention_dict["type"]) # # def 
serialize(self): # return { # "code": self.code, # "type": self.target_type # } which might include code, classes, or functions. Output only the next line.
c.mentions = [Mention.deserialize(m) for m in c.mentions]
Based on the snippet: <|code_start|> def serialize(f): return f if not isinstance(f, ff.BaseField) else f.serialize() targets = fields if isinstance(fields, (list, tuple)) else [fields] properties = {} for t in targets: st = serialize(t) if "code" in st and "type" in st: properties[st["code"]] = st formatted = {"properties": properties} envelope = self.__pack(formatted, app_id, revision) return envelope def __pack(self, pack, app_id="", revision=-1): _p = pack.copy() _p["app"] = app_id if app_id else self.app_id if revision > -1: _p["revision"] = revision return _p @classmethod def load_properties(cls, properties): fields = [] for k in properties: p = properties[k] field_type = None field_name = p["type"].upper() <|code_end|> , predict the immediate next line with the help of imports: from pykintone.structure import FieldType from pykintone.application_settings.base_administration_api import BaseAdministrationAPI from pykintone.application_settings.form_layout import Layout import pykintone.application_settings.form_field as ff import pykintone.application_settings.setting_result as sr and context (classes, functions, sometimes code) from other files: # Path: pykintone/structure.py # class FieldType(Enum): # DATE = "DATE" # TIME = "TIME" # DATETIME = "DATETIME" # CREATED_TIME = "CREATED_TIME" # UPDATED_TIME = "UPDATED_TIME" # USER_SELECT = "USER_SELECT" # CREATOR = "CREATOR" # MODIFIER = "MODIFIER" # FILE = "FILE" # RECORD_NUMBER = "RECORD_NUMBER" # NUMBER = "NUMBER" # SUBTABLE = "SUBTABLE" # CALC = "CALC" # CATEGORY = "CATEGORY" # CHECK_BOX = "CHECK_BOX" # DROP_DOWN = "DROP_DOWN" # HR = "HR" # LABEL = "LABEL" # LINK = "LINK" # MULTI_LINE_TEXT = "MULTI_LINE_TEXT" # MULTI_SELECT = "MULTI_SELECT" # RADIO_BUTTON = "RADIO_BUTTON" # RICH_TEXT = "RICH_TEXT" # SINGLE_LINE_TEXT = "SINGLE_LINE_TEXT" # SPACER = "SPACER" # STATUS = "STATUS" # STATUS_ASSIGNEE = "STATUS_ASSIGNEE" # ID = "__ID__" # REVISION = "__REVISION__" # TIME_STAMP = "__TIME_STAMP__" # STRUCTURE = 
"__STRUCTURE__" # # Path: pykintone/application_settings/base_administration_api.py # class BaseAdministrationAPI(BaseAPI): # # def __init__(self, account, api_token="", requests_options=(), app_id=""): # super(BaseAdministrationAPI, self).__init__(account, api_token, requests_options, app_id) # self.__test_mode = False # self._commit_revision = -1 # self._cached_changes = False # # def as_test_mode(self): # self.__test_mode = True # return self # # def _request(self, method, url, params_or_data, headers=None, use_api_token=True): # result = super(BaseAdministrationAPI, self)._request(method, url, params_or_data, headers, use_api_token) # if method in ("POST", "PUT", "DELETE"): # self._cached_changes = True # return result # # def __enter__(self): # self._commit_revision = -1 # return self # # def __exit__(self, exc_type, exc_val, exc_tb): # if self._cached_changes and not self.app_id: # raise Exception("There are some changes to be committed, but no application id.") # elif not (self._cached_changes and self.app_id): # return None # # admin = self.__get_admin() # result = None # exit_type = "Commit" # if exc_type is None and not self.__test_mode: # result = admin.commit_settings(self.app_id, self._commit_revision) # else: # exit_type = "Rollback" # result = admin.rollback_settings(self.app_id, self._commit_revision) # # if result.ok: # self._commit_revision = -1 # self._cached_changes = False # else: # raise Exception(exit_type + " failed. 
{0}".format(result.message)) # # def __get_admin(self): # from pykintone.application_settings.administrator import Administrator # admin = Administrator(self.account, self.api_token, self.requests_options, self.app_id) # return admin # # Path: pykintone/application_settings/form_layout.py # class Layout(ps.kintoneStructure): # # def __init__(self): # super(Layout, self).__init__() # self.layout_type = "ROW" # self.code = "" # self.fields = [LayoutField] # self._property_details.append(ps.PropertyDetail("layout_type", field_name="type")) # # @classmethod # def create(cls, fields, layout_type="", code=""): # instance = Layout() # instance.layout_type = layout_type if layout_type else instance.layout_type # instance.code = code if code else instance.code # # if not (isinstance(fields, (list, tuple)) and len(fields) > 0): # raise Exception("Layout fields have to be array, and it must have at least one field.") # # def convert(f): # if isinstance(f, LayoutField): # return f # elif isinstance(f, (list, tuple)): # return LayoutField.create(*f) # elif isinstance(f, dict): # return LayoutField.create(**f) # else: # return LayoutField.create(f) # # fs = [convert(f) for f in fields] # instance.fields = fs # return instance # # def serialize(self): # return self._serialize(lambda name, value, pd: (name, value), ignore_missing=True) # # @classmethod # def deserialize(cls, json_body): # return cls._deserialize(json_body, lambda f: (f, "")) . Output only the next line.
candidates = [e for e in list(FieldType) if e.value == field_name]
Using the snippet: <|code_start|> return r def create(self, comment, mentions=()): """ create comment :param comment: :param mentions: list of pair of code and type("USER", "GROUP", and so on) :return: """ data = { "app": self.app_id, "record": self.record_id, "comment": { "text": comment, } } if len(mentions) > 0: _mentions = [] for m in mentions: if isinstance(m, (list, tuple)): if len(m) == 2: _mentions.append({ "code": m[0], "type": m[1] }) else: raise Exception("mention have to have code and target type. ex.[('user_1', 'USER')]") <|code_end|> , determine the next line of code. You have imports: import json import requests import pykintone.comment_result as cr from pykintone.comment import Mention and context (class names, function names, or code) available: # Path: pykintone/comment.py # class Mention(): # # def __init__(self, code, target_type): # self.code = code # self.target_type = target_type # # @classmethod # def deserialize(cls, mention_dict): # return Mention(mention_dict["code"], mention_dict["type"]) # # def serialize(self): # return { # "code": self.code, # "type": self.target_type # } . Output only the next line.
elif isinstance(m, Mention):
Predict the next line after this snippet: <|code_start|># -*- coding: utf-8 -*- class TestAppModel(kintoneModel): def __init__(self): super(TestAppModel, self).__init__() self.my_key = "" self.stringField = "" self.numberField = 0 self.radio = "" self.checkbox = [] self.dateField = datetime.now() self.time = datetime.now() self.datetimeField = datetime.now() self.user_select = sf.UserSelect() self.created_time = None self.updated_time = None self.creator = None self.modifier = None self.changeLogs = [] self.attachfile = sf.File() <|code_end|> using the current file's imports: import unittest import tests.envs as envs import pykintone import pykintone.structure_field as sf import os from datetime import datetime from pykintone.model import kintoneModel from pykintone.structure import PropertyDetail, FieldType and any relevant context from other files: # Path: pykintone/model.py # class kintoneModel(ps.kintoneStructure): # # def __init__(self): # super(kintoneModel, self).__init__() # self.record_id = -1 # self.revision = -1 # self._property_details.append(ps.PropertyDetail("record_id", field_name="$id")) # self._property_details.append(ps.PropertyDetail("revision", field_name="$revision")) # # @classmethod # def record_to_model(cls, record_json): # def get_value_and_type(f): return f["value"], f["type"] # return cls._deserialize(record_json, get_value_and_type) # # def to_record(self): # def convert_to_key_and_value(field_name, value, property_detail=None): # key = field_name # formatted = { # "value": value # } # if field_name in ["$id", "$revision"]: # if value > -1: # key = field_name[1:] # escape $ # else: # key = None # # return key, formatted # # return self._serialize(convert_to_key_and_value) # # Path: pykintone/structure.py # class PropertyDetail(object): # def __init__(self, name, field_type=None, sub_type=None, unsent=False, field_name=""): # self.name = name # self.field_type = field_type # self.sub_type = sub_type # self.unsent = unsent # 
self.field_name = field_name # # def to_property_name(self, field_name): # if self.field_name == field_name: # return self.name # else: # return field_name # # def to_field_name(self): # if self.field_name: # return self.field_name # else: # return self.name # # class FieldType(Enum): # DATE = "DATE" # TIME = "TIME" # DATETIME = "DATETIME" # CREATED_TIME = "CREATED_TIME" # UPDATED_TIME = "UPDATED_TIME" # USER_SELECT = "USER_SELECT" # CREATOR = "CREATOR" # MODIFIER = "MODIFIER" # FILE = "FILE" # RECORD_NUMBER = "RECORD_NUMBER" # NUMBER = "NUMBER" # SUBTABLE = "SUBTABLE" # CALC = "CALC" # CATEGORY = "CATEGORY" # CHECK_BOX = "CHECK_BOX" # DROP_DOWN = "DROP_DOWN" # HR = "HR" # LABEL = "LABEL" # LINK = "LINK" # MULTI_LINE_TEXT = "MULTI_LINE_TEXT" # MULTI_SELECT = "MULTI_SELECT" # RADIO_BUTTON = "RADIO_BUTTON" # RICH_TEXT = "RICH_TEXT" # SINGLE_LINE_TEXT = "SINGLE_LINE_TEXT" # SPACER = "SPACER" # STATUS = "STATUS" # STATUS_ASSIGNEE = "STATUS_ASSIGNEE" # ID = "__ID__" # REVISION = "__REVISION__" # TIME_STAMP = "__TIME_STAMP__" # STRUCTURE = "__STRUCTURE__" . Output only the next line.
self._property_details.append(PropertyDetail("time", FieldType.TIME))
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*- class TestAppModel(kintoneModel): def __init__(self): super(TestAppModel, self).__init__() self.my_key = "" self.stringField = "" self.numberField = 0 self.radio = "" self.checkbox = [] self.dateField = datetime.now() self.time = datetime.now() self.datetimeField = datetime.now() self.user_select = sf.UserSelect() self.created_time = None self.updated_time = None self.creator = None self.modifier = None self.changeLogs = [] self.attachfile = sf.File() <|code_end|> , predict the next line using imports from the current file: import unittest import tests.envs as envs import pykintone import pykintone.structure_field as sf import os from datetime import datetime from pykintone.model import kintoneModel from pykintone.structure import PropertyDetail, FieldType and context including class names, function names, and sometimes code from other files: # Path: pykintone/model.py # class kintoneModel(ps.kintoneStructure): # # def __init__(self): # super(kintoneModel, self).__init__() # self.record_id = -1 # self.revision = -1 # self._property_details.append(ps.PropertyDetail("record_id", field_name="$id")) # self._property_details.append(ps.PropertyDetail("revision", field_name="$revision")) # # @classmethod # def record_to_model(cls, record_json): # def get_value_and_type(f): return f["value"], f["type"] # return cls._deserialize(record_json, get_value_and_type) # # def to_record(self): # def convert_to_key_and_value(field_name, value, property_detail=None): # key = field_name # formatted = { # "value": value # } # if field_name in ["$id", "$revision"]: # if value > -1: # key = field_name[1:] # escape $ # else: # key = None # # return key, formatted # # return self._serialize(convert_to_key_and_value) # # Path: pykintone/structure.py # class PropertyDetail(object): # def __init__(self, name, field_type=None, sub_type=None, unsent=False, field_name=""): # self.name = name # 
self.field_type = field_type # self.sub_type = sub_type # self.unsent = unsent # self.field_name = field_name # # def to_property_name(self, field_name): # if self.field_name == field_name: # return self.name # else: # return field_name # # def to_field_name(self): # if self.field_name: # return self.field_name # else: # return self.name # # class FieldType(Enum): # DATE = "DATE" # TIME = "TIME" # DATETIME = "DATETIME" # CREATED_TIME = "CREATED_TIME" # UPDATED_TIME = "UPDATED_TIME" # USER_SELECT = "USER_SELECT" # CREATOR = "CREATOR" # MODIFIER = "MODIFIER" # FILE = "FILE" # RECORD_NUMBER = "RECORD_NUMBER" # NUMBER = "NUMBER" # SUBTABLE = "SUBTABLE" # CALC = "CALC" # CATEGORY = "CATEGORY" # CHECK_BOX = "CHECK_BOX" # DROP_DOWN = "DROP_DOWN" # HR = "HR" # LABEL = "LABEL" # LINK = "LINK" # MULTI_LINE_TEXT = "MULTI_LINE_TEXT" # MULTI_SELECT = "MULTI_SELECT" # RADIO_BUTTON = "RADIO_BUTTON" # RICH_TEXT = "RICH_TEXT" # SINGLE_LINE_TEXT = "SINGLE_LINE_TEXT" # SPACER = "SPACER" # STATUS = "STATUS" # STATUS_ASSIGNEE = "STATUS_ASSIGNEE" # ID = "__ID__" # REVISION = "__REVISION__" # TIME_STAMP = "__TIME_STAMP__" # STRUCTURE = "__STRUCTURE__" . Output only the next line.
self._property_details.append(PropertyDetail("time", FieldType.TIME))
Based on the snippet: <|code_start|> if pn in properties: v, t = get_value_and_type(field) initial_value = getattr(instance, pn) value = instance._field_to_property(v, t, pd, initial_value) setattr(instance, pn, value) is_set = True return instance if is_set else None @classmethod def _field_to_property(cls, field_value, field_type=None, property_detail=None, initial_value=None): value = field_value # configure property's field type # from user definition _field_type = None if not property_detail else property_detail.field_type # from type value in field if not _field_type and field_type: f = [e for e in list(FieldType) if e.value == field_type] if len(f) > 0: _field_type = f[0] _field_type = _field_type if _field_type else cls._estimate_type_from_property(initial_value) if not _field_type: pass elif _field_type in (FieldType.ID, FieldType.REVISION, FieldType.RECORD_NUMBER): value = int(value) elif _field_type == FieldType.NUMBER: value = float(value) elif _field_type == FieldType.DATE: <|code_end|> , predict the immediate next line with the help of imports: import re import inspect import pykintone.structure_field as sf from enum import Enum from datetime import datetime from pykintone.account import kintoneService as ks and context (classes, functions, sometimes code) from other files: # Path: pykintone/account.py # class kintoneService(object): # ENCODE = "utf-8" # SELECT_LIMIT = 500 # UPDATE_LIMIT = 100 # # DATE_FORMAT = "%Y-%m-%d" # TIME_FORMAT = "%H:%M" # DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" # from tzlocal import get_localzone # __TIME_ZONE = get_localzone() # # def __init__(self, account): # self.account = account # self.__apps = [] # # def __len__(self): # return len(self.__apps) # # def app(self, app_id="", api_token="", app_name=""): # from pykintone.application import Application # if not app_id: # return self.__apps[0] # else: # existed = [a for a in self.__apps if a.app_id == app_id] # # register if not 
exist # if len(existed) > 0: # return existed[0] # else: # _a = Application(self.account, app_id, api_token, app_name) # self.__apps.append(_a) # return _a # # def administration(self,requests_options=()): # from pykintone.application_settings.administrator import Administrator # return Administrator(self.account, requests_options=requests_options) # # def user_api(self, requests_options=()): # from pykintone.user_api import UserAPI # api = UserAPI(self.account, requests_options) # return api # # @classmethod # def value_to_date(cls, value): # return value if not value else datetime.strptime(value, cls.DATE_FORMAT) # # @classmethod # def value_to_time(cls, value): # return value if not value else datetime.strptime(value, cls.TIME_FORMAT) # # @classmethod # def value_to_datetime(cls, value): # if value: # d = datetime.strptime(value, cls.DATETIME_FORMAT) # return cls._to_local(d) # else: # return None # # @classmethod # def value_to_timestamp(cls, value): # if value: # d = datetime.strptime(value, cls.TIMESTAMP_FORMAT) # return cls._to_local(d) # else: # return None # # @classmethod # def _to_local(cls, d): # utc = d.replace(tzinfo=pytz.utc) # configure timezone (on kintone, time is utc) # local = utc.astimezone(cls.__TIME_ZONE).replace(tzinfo=None) # to local, and to native # return local # # @classmethod # def date_to_value(cls, date): # return date.strftime(cls.DATE_FORMAT) # # @classmethod # def time_to_value(cls, time): # return time.strftime(cls.TIME_FORMAT) # # @classmethod # def datetime_to_value(cls, dt): # local = dt.replace(tzinfo=cls.__TIME_ZONE) # utc = local.astimezone(pytz.utc) # value = utc.strftime(cls.DATETIME_FORMAT) # return value # # @classmethod # def get_default_field_list(cls, as_str=False): # from pykintone.structure import FieldType # fields = [ # FieldType.CATEGORY, # FieldType.STATUS, # FieldType.RECORD_NUMBER, # FieldType.CREATED_TIME, # FieldType.CREATOR, # FieldType.STATUS_ASSIGNEE, # FieldType.UPDATED_TIME, # FieldType.MODIFIER # ] # 
if as_str: # str_fields = [f.value for f in fields] # return str_fields # else: # return fields . Output only the next line.
value = ks.value_to_date(value)
Predict the next line after this snippet: <|code_start|> metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.video.transcoder_v1.types.ListJobsRequest): The initial request object. response (google.cloud.video.transcoder_v1.types.ListJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = services.ListJobsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterator[services.ListJobsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response <|code_end|> using the current file's imports: from typing import ( Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, ) from google.cloud.video.transcoder_v1.types import resources from google.cloud.video.transcoder_v1.types import services and any relevant context from other files: # Path: google/cloud/video/transcoder_v1/types/resources.py # class Job(proto.Message): # class ProcessingState(proto.Enum): # class JobTemplate(proto.Message): # class JobConfig(proto.Message): # class Input(proto.Message): # class Output(proto.Message): # class EditAtom(proto.Message): # class AdBreak(proto.Message): # class ElementaryStream(proto.Message): # class MuxStream(proto.Message): # class Manifest(proto.Message): # class ManifestType(proto.Enum): # class PubsubDestination(proto.Message): # class SpriteSheet(proto.Message): # class Overlay(proto.Message): # class FadeType(proto.Enum): # class NormalizedCoordinate(proto.Message): # class Image(proto.Message): # class 
AnimationStatic(proto.Message): # class AnimationFade(proto.Message): # class AnimationEnd(proto.Message): # class Animation(proto.Message): # class PreprocessingConfig(proto.Message): # class Color(proto.Message): # class Denoise(proto.Message): # class Deblock(proto.Message): # class Audio(proto.Message): # class Crop(proto.Message): # class Pad(proto.Message): # class VideoStream(proto.Message): # class H264CodecSettings(proto.Message): # class H265CodecSettings(proto.Message): # class Vp9CodecSettings(proto.Message): # class AudioStream(proto.Message): # class AudioMapping(proto.Message): # class TextStream(proto.Message): # class TextMapping(proto.Message): # class SegmentSettings(proto.Message): # PROCESSING_STATE_UNSPECIFIED = 0 # PENDING = 1 # RUNNING = 2 # SUCCEEDED = 3 # FAILED = 4 # MANIFEST_TYPE_UNSPECIFIED = 0 # HLS = 1 # DASH = 2 # FADE_TYPE_UNSPECIFIED = 0 # FADE_IN = 1 # FADE_OUT = 2 # # Path: google/cloud/video/transcoder_v1/types/services.py # class CreateJobRequest(proto.Message): # class ListJobsRequest(proto.Message): # class GetJobRequest(proto.Message): # class DeleteJobRequest(proto.Message): # class ListJobsResponse(proto.Message): # class CreateJobTemplateRequest(proto.Message): # class ListJobTemplatesRequest(proto.Message): # class GetJobTemplateRequest(proto.Message): # class DeleteJobTemplateRequest(proto.Message): # class ListJobTemplatesResponse(proto.Message): # def raw_page(self): # def raw_page(self): . Output only the next line.
def __iter__(self) -> Iterator[resources.Job]:
Based on the snippet: <|code_start|># # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class ListJobsPager: """A pager for iterating through ``list_jobs`` requests. This class thinly wraps an initial :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and provides an ``__iter__`` method to iterate through its ``jobs`` field. If there are more pages, the ``__iter__`` method will make additional ``ListJobs`` requests and continue to iterate through the ``jobs`` field on the corresponding responses. All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" def __init__( self, <|code_end|> , predict the immediate next line with the help of imports: from typing import ( Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, ) from google.cloud.video.transcoder_v1.types import resources from google.cloud.video.transcoder_v1.types import services and context (classes, functions, sometimes code) from other files: # Path: google/cloud/video/transcoder_v1/types/resources.py # class Job(proto.Message): # class ProcessingState(proto.Enum): # class JobTemplate(proto.Message): # class JobConfig(proto.Message): # class Input(proto.Message): # class Output(proto.Message): # class EditAtom(proto.Message): # class AdBreak(proto.Message): # class ElementaryStream(proto.Message): # class MuxStream(proto.Message): # class Manifest(proto.Message): # class ManifestType(proto.Enum): # class PubsubDestination(proto.Message): # class SpriteSheet(proto.Message): # class Overlay(proto.Message): # class FadeType(proto.Enum): # class NormalizedCoordinate(proto.Message): # class Image(proto.Message): # class AnimationStatic(proto.Message): # class AnimationFade(proto.Message): # class AnimationEnd(proto.Message): # class Animation(proto.Message): # class PreprocessingConfig(proto.Message): # class Color(proto.Message): # class Denoise(proto.Message): # class Deblock(proto.Message): # class Audio(proto.Message): # class Crop(proto.Message): # class Pad(proto.Message): # class VideoStream(proto.Message): # class H264CodecSettings(proto.Message): # class H265CodecSettings(proto.Message): # class Vp9CodecSettings(proto.Message): # class AudioStream(proto.Message): # class AudioMapping(proto.Message): # class TextStream(proto.Message): # class TextMapping(proto.Message): # class SegmentSettings(proto.Message): # PROCESSING_STATE_UNSPECIFIED = 0 # PENDING = 1 # RUNNING = 2 # SUCCEEDED = 3 # FAILED = 4 # MANIFEST_TYPE_UNSPECIFIED = 0 # HLS = 1 # DASH = 2 # FADE_TYPE_UNSPECIFIED = 0 # FADE_IN = 1 # FADE_OUT = 2 # # Path: 
google/cloud/video/transcoder_v1/types/services.py # class CreateJobRequest(proto.Message): # class ListJobsRequest(proto.Message): # class GetJobRequest(proto.Message): # class DeleteJobRequest(proto.Message): # class ListJobsResponse(proto.Message): # class CreateJobTemplateRequest(proto.Message): # class ListJobTemplatesRequest(proto.Message): # class GetJobTemplateRequest(proto.Message): # class DeleteJobTemplateRequest(proto.Message): # class ListJobTemplatesResponse(proto.Message): # def raw_page(self): # def raw_page(self): . Output only the next line.
method: Callable[..., services.ListJobsResponse],
Continue the code snippet: <|code_start|>__protobuf__ = proto.module( package="google.cloud.video.transcoder.v1", manifest={ "CreateJobRequest", "ListJobsRequest", "GetJobRequest", "DeleteJobRequest", "ListJobsResponse", "CreateJobTemplateRequest", "ListJobTemplatesRequest", "GetJobTemplateRequest", "DeleteJobTemplateRequest", "ListJobTemplatesResponse", }, ) class CreateJobRequest(proto.Message): r"""Request message for ``TranscoderService.CreateJob``. Attributes: parent (str): Required. The parent location to create and process this job. Format: ``projects/{project}/locations/{location}`` job (google.cloud.video.transcoder_v1.types.Job): Required. Parameters for creating transcoding job. """ parent = proto.Field(proto.STRING, number=1,) <|code_end|> . Use current file imports: import proto # type: ignore from google.cloud.video.transcoder_v1.types import resources and context (classes, functions, or code) from other files: # Path: google/cloud/video/transcoder_v1/types/resources.py # class Job(proto.Message): # class ProcessingState(proto.Enum): # class JobTemplate(proto.Message): # class JobConfig(proto.Message): # class Input(proto.Message): # class Output(proto.Message): # class EditAtom(proto.Message): # class AdBreak(proto.Message): # class ElementaryStream(proto.Message): # class MuxStream(proto.Message): # class Manifest(proto.Message): # class ManifestType(proto.Enum): # class PubsubDestination(proto.Message): # class SpriteSheet(proto.Message): # class Overlay(proto.Message): # class FadeType(proto.Enum): # class NormalizedCoordinate(proto.Message): # class Image(proto.Message): # class AnimationStatic(proto.Message): # class AnimationFade(proto.Message): # class AnimationEnd(proto.Message): # class Animation(proto.Message): # class PreprocessingConfig(proto.Message): # class Color(proto.Message): # class Denoise(proto.Message): # class Deblock(proto.Message): # class Audio(proto.Message): # class Crop(proto.Message): # class Pad(proto.Message): # class 
VideoStream(proto.Message): # class H264CodecSettings(proto.Message): # class H265CodecSettings(proto.Message): # class Vp9CodecSettings(proto.Message): # class AudioStream(proto.Message): # class AudioMapping(proto.Message): # class TextStream(proto.Message): # class TextMapping(proto.Message): # class SegmentSettings(proto.Message): # PROCESSING_STATE_UNSPECIFIED = 0 # PENDING = 1 # RUNNING = 2 # SUCCEEDED = 3 # FAILED = 4 # MANIFEST_TYPE_UNSPECIFIED = 0 # HLS = 1 # DASH = 2 # FADE_TYPE_UNSPECIFIED = 0 # FADE_IN = 1 # FADE_OUT = 2 . Output only the next line.
job = proto.Field(proto.MESSAGE, number=2, message=resources.Job,)
Given the code snippet: <|code_start|> self.delete_job: gapic_v1.method.wrap_method( self.delete_job, default_timeout=60.0, client_info=client_info, ), self.create_job_template: gapic_v1.method.wrap_method( self.create_job_template, default_timeout=60.0, client_info=client_info, ), self.list_job_templates: gapic_v1.method.wrap_method( self.list_job_templates, default_timeout=60.0, client_info=client_info, ), self.get_job_template: gapic_v1.method.wrap_method( self.get_job_template, default_timeout=60.0, client_info=client_info, ), self.delete_job_template: gapic_v1.method.wrap_method( self.delete_job_template, default_timeout=60.0, client_info=client_info, ), } def close(self): """Closes resources associated with the transport. .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ raise NotImplementedError() @property def create_job( self, ) -> Callable[ <|code_end|> , generate the next line using the imports in this file: import abc import pkg_resources import google.auth # type: ignore import google.api_core from typing import Awaitable, Callable, Dict, Optional, Sequence, Union from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.video.transcoder_v1.types import resources from google.cloud.video.transcoder_v1.types import services from google.protobuf import empty_pb2 # type: ignore and context (functions, classes, or occasionally code) from other files: # Path: google/cloud/video/transcoder_v1/types/resources.py # class Job(proto.Message): # class ProcessingState(proto.Enum): # class JobTemplate(proto.Message): # class JobConfig(proto.Message): # class Input(proto.Message): # class Output(proto.Message): # class EditAtom(proto.Message): # class 
AdBreak(proto.Message): # class ElementaryStream(proto.Message): # class MuxStream(proto.Message): # class Manifest(proto.Message): # class ManifestType(proto.Enum): # class PubsubDestination(proto.Message): # class SpriteSheet(proto.Message): # class Overlay(proto.Message): # class FadeType(proto.Enum): # class NormalizedCoordinate(proto.Message): # class Image(proto.Message): # class AnimationStatic(proto.Message): # class AnimationFade(proto.Message): # class AnimationEnd(proto.Message): # class Animation(proto.Message): # class PreprocessingConfig(proto.Message): # class Color(proto.Message): # class Denoise(proto.Message): # class Deblock(proto.Message): # class Audio(proto.Message): # class Crop(proto.Message): # class Pad(proto.Message): # class VideoStream(proto.Message): # class H264CodecSettings(proto.Message): # class H265CodecSettings(proto.Message): # class Vp9CodecSettings(proto.Message): # class AudioStream(proto.Message): # class AudioMapping(proto.Message): # class TextStream(proto.Message): # class TextMapping(proto.Message): # class SegmentSettings(proto.Message): # PROCESSING_STATE_UNSPECIFIED = 0 # PENDING = 1 # RUNNING = 2 # SUCCEEDED = 3 # FAILED = 4 # MANIFEST_TYPE_UNSPECIFIED = 0 # HLS = 1 # DASH = 2 # FADE_TYPE_UNSPECIFIED = 0 # FADE_IN = 1 # FADE_OUT = 2 # # Path: google/cloud/video/transcoder_v1/types/services.py # class CreateJobRequest(proto.Message): # class ListJobsRequest(proto.Message): # class GetJobRequest(proto.Message): # class DeleteJobRequest(proto.Message): # class ListJobsResponse(proto.Message): # class CreateJobTemplateRequest(proto.Message): # class ListJobTemplatesRequest(proto.Message): # class GetJobTemplateRequest(proto.Message): # class DeleteJobTemplateRequest(proto.Message): # class ListJobTemplatesResponse(proto.Message): # def raw_page(self): # def raw_page(self): . Output only the next line.
[services.CreateJobRequest], Union[resources.Job, Awaitable[resources.Job]]
Given snippet: <|code_start|> self.delete_job: gapic_v1.method.wrap_method( self.delete_job, default_timeout=60.0, client_info=client_info, ), self.create_job_template: gapic_v1.method.wrap_method( self.create_job_template, default_timeout=60.0, client_info=client_info, ), self.list_job_templates: gapic_v1.method.wrap_method( self.list_job_templates, default_timeout=60.0, client_info=client_info, ), self.get_job_template: gapic_v1.method.wrap_method( self.get_job_template, default_timeout=60.0, client_info=client_info, ), self.delete_job_template: gapic_v1.method.wrap_method( self.delete_job_template, default_timeout=60.0, client_info=client_info, ), } def close(self): """Closes resources associated with the transport. .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ raise NotImplementedError() @property def create_job( self, ) -> Callable[ <|code_end|> , continue by predicting the next line. Consider current file imports: import abc import pkg_resources import google.auth # type: ignore import google.api_core from typing import Awaitable, Callable, Dict, Optional, Sequence, Union from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.video.transcoder_v1.types import resources from google.cloud.video.transcoder_v1.types import services from google.protobuf import empty_pb2 # type: ignore and context: # Path: google/cloud/video/transcoder_v1/types/resources.py # class Job(proto.Message): # class ProcessingState(proto.Enum): # class JobTemplate(proto.Message): # class JobConfig(proto.Message): # class Input(proto.Message): # class Output(proto.Message): # class EditAtom(proto.Message): # class AdBreak(proto.Message): # class ElementaryStream(proto.Message): # 
class MuxStream(proto.Message): # class Manifest(proto.Message): # class ManifestType(proto.Enum): # class PubsubDestination(proto.Message): # class SpriteSheet(proto.Message): # class Overlay(proto.Message): # class FadeType(proto.Enum): # class NormalizedCoordinate(proto.Message): # class Image(proto.Message): # class AnimationStatic(proto.Message): # class AnimationFade(proto.Message): # class AnimationEnd(proto.Message): # class Animation(proto.Message): # class PreprocessingConfig(proto.Message): # class Color(proto.Message): # class Denoise(proto.Message): # class Deblock(proto.Message): # class Audio(proto.Message): # class Crop(proto.Message): # class Pad(proto.Message): # class VideoStream(proto.Message): # class H264CodecSettings(proto.Message): # class H265CodecSettings(proto.Message): # class Vp9CodecSettings(proto.Message): # class AudioStream(proto.Message): # class AudioMapping(proto.Message): # class TextStream(proto.Message): # class TextMapping(proto.Message): # class SegmentSettings(proto.Message): # PROCESSING_STATE_UNSPECIFIED = 0 # PENDING = 1 # RUNNING = 2 # SUCCEEDED = 3 # FAILED = 4 # MANIFEST_TYPE_UNSPECIFIED = 0 # HLS = 1 # DASH = 2 # FADE_TYPE_UNSPECIFIED = 0 # FADE_IN = 1 # FADE_OUT = 2 # # Path: google/cloud/video/transcoder_v1/types/services.py # class CreateJobRequest(proto.Message): # class ListJobsRequest(proto.Message): # class GetJobRequest(proto.Message): # class DeleteJobRequest(proto.Message): # class ListJobsResponse(proto.Message): # class CreateJobTemplateRequest(proto.Message): # class ListJobTemplatesRequest(proto.Message): # class GetJobTemplateRequest(proto.Message): # class DeleteJobTemplateRequest(proto.Message): # class ListJobTemplatesResponse(proto.Message): # def raw_page(self): # def raw_page(self): which might include code, classes, or functions. Output only the next line.
[services.CreateJobRequest], Union[resources.Job, Awaitable[resources.Job]]
Here is a snippet: <|code_start|> def login_required(func): @functools.wraps(func) def wrapper(*args, **kw): act_name = kw.get('activity') act = check_acatvity(act_name) sid = session.get('{}_user'.format(act_name)) if sid: <|code_end|> . Write the next line using the current file imports: import functools from flask import session, flash, redirect, url_for, abort from web.Model.database import Members from web.Model.RegChecks import check_acatvity and context from other files: # Path: web/Model/database.py # class Members(db.Model): # __bind_key__ = 'activity' # sid = db.Column(db.Integer, primary_key=True, autoincrement=True) # name = db.Column(db.Text) # stu_code = db.Column(db.Text) # qq = db.Column(db.Text) # phone = db.Column(db.Text) # team = db.Column(db.Text, default="") # activity = db.Column(db.VARCHAR(10), db.ForeignKey('activities.activity_name')) # # __has_submitted = None # # def __init__(self, name, stu_code, qq, phone, activity): # self.name = name # self.stu_code = stu_code # self.qq = qq # self.phone = phone # self.activity = activity # # def get_id(self): # return self.sid # # def get_act_name(self): # return self.activity # # @property # def team_str(self): # if self.team is None: # return "" # return self.team # # @property # def has_submit(self): # if self.__has_submitted is None: # member_submit_his = UploadHistory.query.filter_by(activity=self.activity, sid=self.sid).first() # self.__has_submitted = member_submit_his is not None # return self.__has_submitted # # def __repr__(self): # return "{0} {1}".format(self.name, self.stu_code) # # Path: web/Model/RegChecks.py # def check_acatvity(name): # act = Activities.query.filter_by(activity_name=name).first() # if not act: # abort(404) # return act , which may include functions, classes, or code. Output only the next line.
member = Members.query.filter_by(sid=str(sid)).first()
Here is a snippet: <|code_start|> def login_required(func): @functools.wraps(func) def wrapper(*args, **kw): act_name = kw.get('activity') <|code_end|> . Write the next line using the current file imports: import functools from flask import session, flash, redirect, url_for, abort from web.Model.database import Members from web.Model.RegChecks import check_acatvity and context from other files: # Path: web/Model/database.py # class Members(db.Model): # __bind_key__ = 'activity' # sid = db.Column(db.Integer, primary_key=True, autoincrement=True) # name = db.Column(db.Text) # stu_code = db.Column(db.Text) # qq = db.Column(db.Text) # phone = db.Column(db.Text) # team = db.Column(db.Text, default="") # activity = db.Column(db.VARCHAR(10), db.ForeignKey('activities.activity_name')) # # __has_submitted = None # # def __init__(self, name, stu_code, qq, phone, activity): # self.name = name # self.stu_code = stu_code # self.qq = qq # self.phone = phone # self.activity = activity # # def get_id(self): # return self.sid # # def get_act_name(self): # return self.activity # # @property # def team_str(self): # if self.team is None: # return "" # return self.team # # @property # def has_submit(self): # if self.__has_submitted is None: # member_submit_his = UploadHistory.query.filter_by(activity=self.activity, sid=self.sid).first() # self.__has_submitted = member_submit_his is not None # return self.__has_submitted # # def __repr__(self): # return "{0} {1}".format(self.name, self.stu_code) # # Path: web/Model/RegChecks.py # def check_acatvity(name): # act = Activities.query.filter_by(activity_name=name).first() # if not act: # abort(404) # return act , which may include functions, classes, or code. Output only the next line.
act = check_acatvity(act_name)
Here is a snippet: <|code_start|> def check_acatvity(name): act = Activities.query.filter_by(activity_name=name).first() if not act: abort(404) return act def check_user_exist(stucode, act_name): <|code_end|> . Write the next line using the current file imports: from flask import abort from web.Model.database import Activities, Members and context from other files: # Path: web/Model/database.py # class Activities(db.Model): # __bind_key__ = 'activity' # activity_name = db.Column(db.VARCHAR(10), primary_key=True, unique=True) # title = db.Column(db.Text) # reg_enable = db.Column(db.Boolean, default=True) # team_enable = db.Column(db.Boolean, default=False) # upload_enable = db.Column(db.Boolean, default=False) # note = db.Column(db.Text, default="") # rank = db.Column(db.Integer, default=0) # hide = db.Column(db.Boolean, default=False) # # def __init__(self, activity_name, title, reg_enable, team_enable, upload_enable, note, rank): # self.activity_name = activity_name # self.title = title # self.team_enable = team_enable # self.upload_enable = upload_enable # self.reg_enable = reg_enable # self.note = note # self.rank = rank # self.hide = False # # def __repr__(self): # return "{0} {1} {2}".format(self.activity_name, self.team_enable, self.upload_enable) # # class Members(db.Model): # __bind_key__ = 'activity' # sid = db.Column(db.Integer, primary_key=True, autoincrement=True) # name = db.Column(db.Text) # stu_code = db.Column(db.Text) # qq = db.Column(db.Text) # phone = db.Column(db.Text) # team = db.Column(db.Text, default="") # activity = db.Column(db.VARCHAR(10), db.ForeignKey('activities.activity_name')) # # __has_submitted = None # # def __init__(self, name, stu_code, qq, phone, activity): # self.name = name # self.stu_code = stu_code # self.qq = qq # self.phone = phone # self.activity = activity # # def get_id(self): # return self.sid # # def get_act_name(self): # return self.activity # # @property # def team_str(self): # if self.team is None: # return "" # 
return self.team # # @property # def has_submit(self): # if self.__has_submitted is None: # member_submit_his = UploadHistory.query.filter_by(activity=self.activity, sid=self.sid).first() # self.__has_submitted = member_submit_his is not None # return self.__has_submitted # # def __repr__(self): # return "{0} {1}".format(self.name, self.stu_code) , which may include functions, classes, or code. Output only the next line.
stu = Members.query.filter_by(stu_code=stucode, activity=act_name).first()
Continue the code snippet: <|code_start|> FileRequired(message='请选择文件'), FileAllowed(['zip', 'rar'], '请使用zip或rar压缩格式提交'), ], description="文件请打包压缩后上传,推荐使用ZIP格式~~") button = SubmitField('提交') class Login(Form): name = StringField('姓名', [validators.required()], description="就是你的名字") stucode = StringField('学号', [validators.required()], description="学号") button = SubmitField('提交') class TeamModify(Form): team = TextAreaField('队员信息', description="组队参加请按照\"姓名 学号\"一人一行填写在文本框内 如: 王尼玛 22150xxxx") button = SubmitField('提交') class ActModify(Form): name = StringField('活动名(网址)', [validators.required()], description="建议英文") title = StringField('活动Title', [validators.required()], description="显示标题") note = TextAreaField('Note', description="显示于活动页下方") rank = IntegerField('排序', [validators.NumberRange(min=0, max=10)], description="0在最上面") reg_enable = BooleanField('开放报名') team_enable = BooleanField('允许组队') upload_enable = BooleanField('开放上传') hide = BooleanField('隐藏显示') button = SubmitField('提交') def getChoices(): <|code_end|> . 
Use current file imports: from flask_wtf import Form from flask_wtf.file import FileField, FileAllowed, FileRequired from wtforms import SubmitField, StringField, validators, TextAreaField, BooleanField, SelectField, IntegerField, PasswordField from .database import Activities and context (classes, functions, or code) from other files: # Path: web/Model/database.py # class Activities(db.Model): # __bind_key__ = 'activity' # activity_name = db.Column(db.VARCHAR(10), primary_key=True, unique=True) # title = db.Column(db.Text) # reg_enable = db.Column(db.Boolean, default=True) # team_enable = db.Column(db.Boolean, default=False) # upload_enable = db.Column(db.Boolean, default=False) # note = db.Column(db.Text, default="") # rank = db.Column(db.Integer, default=0) # hide = db.Column(db.Boolean, default=False) # # def __init__(self, activity_name, title, reg_enable, team_enable, upload_enable, note, rank): # self.activity_name = activity_name # self.title = title # self.team_enable = team_enable # self.upload_enable = upload_enable # self.reg_enable = reg_enable # self.note = note # self.rank = rank # self.hide = False # # def __repr__(self): # return "{0} {1} {2}".format(self.activity_name, self.team_enable, self.upload_enable) . Output only the next line.
return list((o.activity_name, o.activity_name) for o in Activities.query.all())
Using the snippet: <|code_start|> TODO require_setting """ def __init__(self): """ Setting definitions in base_settings are indispensable """ self._callbacks = {} self.add_key_callback('LOGGERS', set_loggers) for i in dir(base_settings): if not i.startswith('_'): self[i] = getattr(base_settings, i) self._module = None def __getattr__(self, key): try: return self[key] except KeyError: raise AttributeError('Has no attribute %s' % key) def __getitem__(self, key): try: return super(Settings, self).__getitem__(key) except KeyError: try: return super(Settings, self).__getitem__(key.lower()) except KeyError: <|code_end|> , determine the next line of code. You have imports: from torext.utils import SingletonMixin from torext.errors import SettingsError from torext.log import set_loggers from torext import base_settings and context (class names, function names, or code) available: # Path: torext/utils.py # class SingletonMixin(object): # """Globally hold one instance class # # Usage: # >>> class SpecObject(SingletonMixin): # ... pass # # >>> ins = SpecObject.instance() # """ # @classmethod # def instance(cls, *args, **kwgs): # """Will be the only instance""" # if not hasattr(cls, "_instance"): # cls._instance = cls(*args, **kwgs) # return cls._instance # # Path: torext/errors.py # class SettingsError(TorextException): # pass # # Path: torext/log.py # def set_loggers(loggers): # for name, config in loggers.items(): # set_logger(name, **config) . Output only the next line.
raise SettingsError('Key "%s" is not defined in settings' % key)
Here is a snippet: <|code_start|> by import torext module, a Settings object will be instanced and stored globally, then it can be involved in any place like this: >>> import torext >>> print torext.settings or >>> from torext import settings >>> print settings getting value from settings is like from a normal dict: >>> settings['DEBUG'] True >>> settings.get('PORT') 8000 >>> settings.get('WTF', None) None notice that you can use lower case word to get or set the value: >>> settings['debug'] is settings.get('DEBUG') True >>> settings['port'] = 8765 >>> settings['PORT'] 8765 TODO require_setting """ def __init__(self): """ Setting definitions in base_settings are indispensable """ self._callbacks = {} <|code_end|> . Write the next line using the current file imports: from torext.utils import SingletonMixin from torext.errors import SettingsError from torext.log import set_loggers from torext import base_settings and context from other files: # Path: torext/utils.py # class SingletonMixin(object): # """Globally hold one instance class # # Usage: # >>> class SpecObject(SingletonMixin): # ... pass # # >>> ins = SpecObject.instance() # """ # @classmethod # def instance(cls, *args, **kwgs): # """Will be the only instance""" # if not hasattr(cls, "_instance"): # cls._instance = cls(*args, **kwgs) # return cls._instance # # Path: torext/errors.py # class SettingsError(TorextException): # pass # # Path: torext/log.py # def set_loggers(loggers): # for name, config in loggers.items(): # set_logger(name, **config) , which may include functions, classes, or code. Output only the next line.
self.add_key_callback('LOGGERS', set_loggers)
Given snippet: <|code_start|>#!/usr/bin/env python # -*- coding: utf-8 -*- class ModuleSearcher(object): def __init__(self, label): assert settings['PROJECT'], 'you must set PROJECT first' self.import_path = settings['PROJECT'] + '.' + label self._handlers = [] def get_handlers(self): module = __import__(self.import_path, fromlist=[settings['PROJECT']]) try: self._handlers = getattr(module, 'handlers') except AttributeError as e: # TODO enhanced traceback <|code_end|> , continue by predicting the next line. Consider current file imports: from torext import settings from torext.errors import URLRouteError from torext.log import app_log and context: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/errors.py # class URLRouteError(TorextException): # """error in router""" # # Path: torext/log.py # def _color(lvl): # def __init__(self, # fmt='%(color)s[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s', # datefmt='%Y-%m-%d %H:%M:%S', # color=False, # tab=' '): # def _format_record(self, record): # def format(self, record): # def __init__(self, *args, **kwgs): # def set_logger(name, # level='INFO', # fmt=None, # datefmt=None, # propagate=1, # remove_handlers=False): # def set_loggers(loggers): # def set_nose_formatter(logging_options): # def test_all(): # FIXED_LEVELNAMES = { # 'DEBUG': 'DEBG', # 'WARNING': 'WARN', # 'ERROR': 'ERRO' # } # HANDLER_TYPES = { # 'stream': BaseStreamHandler, # } # class BaseFormatter(logging.Formatter): # class BaseStreamHandler(logging.StreamHandler): which might include code, classes, or functions. Output only the next line.
raise URLRouteError('Caught error when router was getting handlers from module: %s' % e)
Given snippet: <|code_start|>#!/usr/bin/env python # -*- coding: utf-8 -*- class ModuleSearcher(object): def __init__(self, label): assert settings['PROJECT'], 'you must set PROJECT first' self.import_path = settings['PROJECT'] + '.' + label self._handlers = [] def get_handlers(self): module = __import__(self.import_path, fromlist=[settings['PROJECT']]) try: self._handlers = getattr(module, 'handlers') except AttributeError as e: # TODO enhanced traceback raise URLRouteError('Caught error when router was getting handlers from module: %s' % e) <|code_end|> , continue by predicting the next line. Consider current file imports: from torext import settings from torext.errors import URLRouteError from torext.log import app_log and context: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/errors.py # class URLRouteError(TorextException): # """error in router""" # # Path: torext/log.py # def _color(lvl): # def __init__(self, # fmt='%(color)s[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s', # datefmt='%Y-%m-%d %H:%M:%S', # color=False, # tab=' '): # def _format_record(self, record): # def format(self, record): # def __init__(self, *args, **kwgs): # def set_logger(name, # level='INFO', # fmt=None, # datefmt=None, # propagate=1, # remove_handlers=False): # def set_loggers(loggers): # def set_nose_formatter(logging_options): # def test_all(): # FIXED_LEVELNAMES = { # 'DEBUG': 'DEBG', # 'WARNING': 'WARN', # 'ERROR': 'ERRO' # } # HANDLER_TYPES = { # 'stream': BaseStreamHandler, # } # class BaseFormatter(logging.Formatter): # class BaseStreamHandler(logging.StreamHandler): which might include code, classes, or functions. Output only the next line.
app_log.debug('got handlers from module %s' % self.import_path)
Predict the next line for this snippet: <|code_start|>#!/usr/bin/env python # -*- coding: utf-8 -*- def test_logging(): msgs = [ '中文 utf8', '始める utf8', ] if PY2: msgs += [ u'中文 gbk'.encode('gbk'), u'中文 unicode', u'始める shift_jis'.encode('shift_jis'), u'始める unicode', ] for i in msgs: yield do_logging, i def logging_setup(): <|code_end|> with the help of current file imports: import logging from torext.log import set_logger from nose.tools import with_setup from torext.compat import PY2 and context from other files: # Path: torext/log.py # def set_logger(name, # level='INFO', # fmt=None, # datefmt=None, # propagate=1, # remove_handlers=False): # """ # This function will clear the previous handlers and set only one handler, # which will only be StreamHandler for the logger. # # This function is designed to be able to called multiple times in a context. # # Note that if a logger has no handlers, it will be added a handler automatically when it is used. # """ # logger = logging.getLogger(name) # logger.setLevel(getattr(logging, level)) # logger.propagate = propagate # # if remove_handlers: # logger.handlers = [] # return # # handler = None # for h in logger.handlers: # if isinstance(h, logging.StreamHandler): # # use existing instead of clean and create # handler = h # break # if not handler: # handler = logging.StreamHandler() # logger.addHandler(handler) # # formatter_kwgs = {} # for i in ('fmt', 'datefmt'): # if locals()[i] is not None: # formatter_kwgs[i] = locals()[i] # handler.setFormatter(BaseFormatter(**formatter_kwgs)) # # Path: torext/compat.py # PY2 = sys.version_info.major == 2 , which may contain function names, class names, or code. Output only the next line.
set_logger('')
Using the snippet: <|code_start|>#!/usr/bin/env python # -*- coding: utf-8 -*- def test_logging(): msgs = [ '中文 utf8', '始める utf8', ] <|code_end|> , determine the next line of code. You have imports: import logging from torext.log import set_logger from nose.tools import with_setup from torext.compat import PY2 and context (class names, function names, or code) available: # Path: torext/log.py # def set_logger(name, # level='INFO', # fmt=None, # datefmt=None, # propagate=1, # remove_handlers=False): # """ # This function will clear the previous handlers and set only one handler, # which will only be StreamHandler for the logger. # # This function is designed to be able to called multiple times in a context. # # Note that if a logger has no handlers, it will be added a handler automatically when it is used. # """ # logger = logging.getLogger(name) # logger.setLevel(getattr(logging, level)) # logger.propagate = propagate # # if remove_handlers: # logger.handlers = [] # return # # handler = None # for h in logger.handlers: # if isinstance(h, logging.StreamHandler): # # use existing instead of clean and create # handler = h # break # if not handler: # handler = logging.StreamHandler() # logger.addHandler(handler) # # formatter_kwgs = {} # for i in ('fmt', 'datefmt'): # if locals()[i] is not None: # formatter_kwgs[i] = locals()[i] # handler.setFormatter(BaseFormatter(**formatter_kwgs)) # # Path: torext/compat.py # PY2 = sys.version_info.major == 2 . Output only the next line.
if PY2:
Given snippet: <|code_start|> self.has_varargs = bool(spec.varargs) self.has_kwargs = bool(spec.keywords) if func.__doc__: doc = func.__doc__ if '\n' in doc: doc = ' '.join(i.strip() for i in doc.split('\n')) else: doc = "Command '%s' in manage script" % func.__name__ self.doc = doc def parse_args(self, all_args=None): if all_args is not None: all_args = all_args[:] else: all_args = sys.argv[2:] _kw_pos = [] for loop, i in enumerate(all_args): if i.startswith('--'): _kw_pos.append(loop) if _kw_pos: # Check positions all_args_len = len(all_args) _fixed_kw_pos = _kw_pos + [all_args_len] for loop, i in enumerate(_fixed_kw_pos): if i == all_args_len: continue if _fixed_kw_pos[loop + 1] - i != 2: <|code_end|> , continue by predicting the next line. Consider current file imports: import sys import inspect import functools import time from .errors import CommandArgumentError and context: # Path: torext/errors.py # class CommandArgumentError(TorextException): # pass which might include code, classes, or functions. Output only the next line.
raise CommandArgumentError(
Predict the next line after this snippet: <|code_start|> """ return log in unicode """ self._format_record(record) record_dict = {} for k, v in record.__dict__.items(): if isinstance(k, str): k = decode_(k, 'utf8') if isinstance(v, str): v = decode_(v, 'utf8', 'replace') record_dict[k] = v if 'color' in self.fmt or 'end_color' in self.fmt: record_dict['color'], record_dict['end_color'] = _color(record.levelno) log = self.ufmt % record_dict if record.exc_text: if log[-1:] != '\n': log += '\n' log += decode_(record.exc_text, 'utf8', 'replace') log = log.replace('\n', '\n' + self.tab) return log class BaseStreamHandler(logging.StreamHandler): def __init__(self, *args, **kwgs): <|code_end|> using the current file's imports: import sys import logging import curses from torext.utils import split_kwargs from torext.compat import unicode_ as u_, decode_ from nose.plugins.logcapture import MyMemoryHandler and any relevant context from other files: # Path: torext/utils.py # def split_kwargs(kwgs_tuple, kwgs): # _kwgs = {} # for i in kwgs_tuple: # if i in kwgs: # _kwgs[i] = kwgs.pop(i) # return _kwgs # # Path: torext/compat.py # def unicode_(s, *args): # return unicode(s, *args) # # def decode_(s, *args): # return s.decode(*args) . Output only the next line.
_kwgs = split_kwargs(
Predict the next line after this snippet: <|code_start|>#!/usr/bin/env python # -*- coding: utf-8 -*- try: except ImportError: MyMemoryHandler = None root_logger = logging.getLogger() app_log = logging.getLogger('torext.app') request_log = logging.getLogger('torext.request') # borrow from tornado.options._LogFormatter.__init__ def _color(lvl): try: except ImportError: curses = None color = False if curses and sys.stderr.isatty(): try: curses.setupterm() if curses.tigetnum("colors") > 0: color = True except: pass if not color: <|code_end|> using the current file's imports: import sys import logging import curses from torext.utils import split_kwargs from torext.compat import unicode_ as u_, decode_ from nose.plugins.logcapture import MyMemoryHandler and any relevant context from other files: # Path: torext/utils.py # def split_kwargs(kwgs_tuple, kwgs): # _kwgs = {} # for i in kwgs_tuple: # if i in kwgs: # _kwgs[i] = kwgs.pop(i) # return _kwgs # # Path: torext/compat.py # def unicode_(s, *args): # return unicode(s, *args) # # def decode_(s, *args): # return s.decode(*args) . Output only the next line.
return u_(''), u_('')
Given the code snippet: <|code_start|> # when oauth-parameters is generated, `all_args` contain `args` and `post_args` url = "http://api.twitter.com/1" + path + ".json" if access_token: all_args = {} all_args.update(args) all_args.update(post_args or {}) method = "POST" if post_args is not None else "GET" oauth = self._oauth_request_parameters( url, access_token, all_args, method=method) args.update(oauth) if args: url += "?" + urlencode(args) callback = self.async_callback(self._on_twitter_request, callback) http = httpclient.AsyncHTTPClient() if post_args is not None: http.fetch(url, method="POST", body=urlencode(post_args), callback=callback) else: http.fetch(url, callback=callback) def _on_twitter_request(self, callback, response): if response.error: logging.warning("Error response %s fetching %s", response.error, response.request.url) callback(None) return callback(escape.json_decode(response.body)) def _oauth_consumer_token(self): return dict( <|code_end|> , generate the next line using the imports in this file: import logging import hashlib import requests import time from tornado import escape from tornado import httpclient from tornado import gen from tornado.escape import json_decode from tornado.httputil import url_concat from tornado.auth import OAuthMixin, OAuth2Mixin from tornado.util import bytes_type from torext import settings from torext.compat import urlencode, quote, urljoin and context (functions, classes, or occasionally code) from other files: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/compat.py # PY3 = sys.version_info.major == 3 # PY2 = sys.version_info.major == 2 # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): . Output only the next line.
key=settings['TWITTER']['consumer_key'],
Here is a snippet: <|code_start|> _OAUTH_REQUEST_TOKEN_URL = "http://api.twitter.com/oauth/request_token" _OAUTH_ACCESS_TOKEN_URL = "http://api.twitter.com/oauth/access_token" _OAUTH_AUTHORIZE_URL = "http://api.twitter.com/oauth/authorize" _OAUTH_AUTHENTICATE_URL = "http://api.twitter.com/oauth/authenticate" def authenticate_redirect(self): http = httpclient.AsyncHTTPClient() http.fetch(self._oauth_request_token_url(), self.async_callback( self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None)) def twitter_request(self, path, callback, access_token=None, post_args=None, **args): # Add the OAuth resource request signature if we have credentials # NOTE varibles:: # :url used to send request, and bear encoded `args`. # :args keyword-arguments that additionaly added to oauth parameters, # lay on `url`. # :post_args use to judge request method, must be passed as post-data # :all_args as every argument in request take activity # when oauth-parameters is generated, `all_args` contain `args` and `post_args` url = "http://api.twitter.com/1" + path + ".json" if access_token: all_args = {} all_args.update(args) all_args.update(post_args or {}) method = "POST" if post_args is not None else "GET" oauth = self._oauth_request_parameters( url, access_token, all_args, method=method) args.update(oauth) if args: <|code_end|> . Write the next line using the current file imports: import logging import hashlib import requests import time from tornado import escape from tornado import httpclient from tornado import gen from tornado.escape import json_decode from tornado.httputil import url_concat from tornado.auth import OAuthMixin, OAuth2Mixin from tornado.util import bytes_type from torext import settings from torext.compat import urlencode, quote, urljoin and context from other files: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/compat.py # PY3 = sys.version_info.major == 3 # PY2 = sys.version_info.major == 2 # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): , which may include functions, classes, or code. Output only the next line.
url += "?" + urlencode(args)
Here is a snippet: <|code_start|> def _oauth_consumer_token(self): return dict(key=settings.networks['weibo']['consumer_key'], secret=settings.networks['weibo']['consumer_secret']) def _oauth_get_user(self, access_token, callback): callback = self.async_callback(self._parse_user_response, callback) self.weibo_request( "/users/show", access_token=access_token, callback=callback, user_id=access_token["user_id"]) def _parse_user_response(self, callback, user): if user: user["username"] = user["screen_name"] callback(user) class DoubanOAuthMixin(OAuthMixin): _OAUTH_REQUEST_TOKEN_URL = "http://www.douban.com/service/auth/request_token" _OAUTH_ACCESS_TOKEN_URL = "http://www.douban.com/service/auth/access_token" _OAUTH_AUTHORIZE_URL = "http://www.douban.com/service/auth/authorize" _OAUTH_API_DOMAIN = "api.douban.com" _OAUTH_VERSION = "1.0" def douban_request(self, path, callback, access_token=None, post_args=None, **args): # due to some special string like ``@`` may appear in url, # and they are required to be quoted before generated to be oauth parameters, # (unfortunately tornado don't voluntarily do that) # we forwardly quote the url before it is handled. <|code_end|> . Write the next line using the current file imports: import logging import hashlib import requests import time from tornado import escape from tornado import httpclient from tornado import gen from tornado.escape import json_decode from tornado.httputil import url_concat from tornado.auth import OAuthMixin, OAuth2Mixin from tornado.util import bytes_type from torext import settings from torext.compat import urlencode, quote, urljoin and context from other files: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/compat.py # PY3 = sys.version_info.major == 3 # PY2 = sys.version_info.major == 2 # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): , which may include functions, classes, or code. Output only the next line.
url = quote("http://" + self._OAUTH_API_DOMAIN + path, ':/')
Using the snippet: <|code_start|> if response.error and not response.body: logging.warning("Error response %s fetching %s", response.error, response.request.url) callback(None) return callback(response) return def _oauth_consumer_token(self): return dict(key=settings.networks['renren']['consumer_key'], secret=settings.networks['renren']['consumer_secret']) ########## # others # ########## class FacebookAuthMixin(object): def authenticate_redirect(self, callback_uri=None, cancel_uri=None, extended_permissions=None): """Authenticates/installs this app for the current user.""" self.require_setting("facebook_api_key", "Facebook Connect") callback_uri = callback_uri or self.request.uri args = { "api_key": self.settings["facebook_api_key"], "v": "1.0", "fbconnect": "true", "display": "page", <|code_end|> , determine the next line of code. You have imports: import logging import hashlib import requests import time from tornado import escape from tornado import httpclient from tornado import gen from tornado.escape import json_decode from tornado.httputil import url_concat from tornado.auth import OAuthMixin, OAuth2Mixin from tornado.util import bytes_type from torext import settings from torext.compat import urlencode, quote, urljoin and context (class names, function names, or code) available: # Path: torext/make_settings.py # class Settings(dict, SingletonMixin): # def __init__(self): # def __getattr__(self, key): # def __getitem__(self, key): # def __setitem__(self, key, value): # def __str__(self): # def add_key_callback(self, key, callback): # # Path: torext/compat.py # PY3 = sys.version_info.major == 3 # PY2 = sys.version_info.major == 2 # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): # def unicode_(s, *args): # def decode_(s, *args): # def bytes_(s): # def str_(s): . Output only the next line.
"next": urljoin(self.request.full_url(), callback_uri),