Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Given the code snippet: <|code_start|> "type": "Pig",
"input_datasource": {
"type": "swift",
"source": "sahara_tests/scenario/defaults/"
"edp-examples/edp-pig/"
"top-todoers/data/input"
},
"output_datasource": {
"type": "hdfs",
"destination": "/user/hadoop/edp-output"
},
"main_lib": {
"type": "swift",
"source": "sahara_tests/scenario/defaults/"
"edp-examples/edp-pig/"
"top-todoers/example.pig"
}
}
]
}
}
@mock.patch('sahara_tests.scenario.utils.read_scenario_config')
def test_generate_config_feature(self, m_readscenarioconfig):
"""Check the generate_config method when features are specified."""
m_readscenarioconfig.return_value = self._cluster_config
# "template_variables" can be empty because read_scenario_config,
# which is its only user, is a mock variable.
# "files" is used to loop over the read keys, so at least one fake
# file name is needed.
<|code_end|>
, generate the next line using the imports in this file:
import os
import testtools
from unittest import mock
from sahara_tests.scenario import utils
and context (functions, classes, or occasionally code) from other files:
# Path: sahara_tests/scenario/utils.py
# SCENARIO_RESOURCES_DIR = pkg.resource_filename(version.version_info.package,
# 'scenario')
# TEST_TEMPLATE_DIR = os.path.join(SCENARIO_RESOURCES_DIR, 'defaults/')
# DEFAULT_TEMPLATE_VARS = [os.path.join(TEST_TEMPLATE_DIR,
# 'credentials.yaml.mako'),
# os.path.join(TEST_TEMPLATE_DIR,
# 'edp.yaml.mako')]
# TEST_TEMPLATE_PATH = os.path.join(SCENARIO_RESOURCES_DIR,
# 'testcase.py.mako')
# DEFAULT_STESTR_CONF = os.path.join(SCENARIO_RESOURCES_DIR, 'stestr.conf')
# def rand_name(name=''):
# def run_tests(concurrency, test_dir_path):
# def create_testcase_file(testcases, credentials, network, report,
# use_api_v2=False):
# def get_templates_variables(files, variable_file, verbose_run, scenario_args,
# auth_values):
# def generate_config(files, template_variables, auth_values, verbose_run,
# features_list=None):
# def get_default_templates(plugin, version, release, scenario_arguments,
# features=None):
# def get_auth_values(cloud_config, args):
# def _merge_dicts_sections(dict_with_section, dict_for_merge, section):
# def is_template_file(config_file):
# def read_template_variables(variable_file=None, verbose=False,
# scenario_args=None, auth_values=None):
# def read_scenario_config(scenario_config, template_vars=None,
# verbose=False):
. Output only the next line. | result = utils.generate_config(['dummyfile.yaml'], None, |
Predict the next line after this snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
<|code_end|>
using the current file's imports:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and any relevant context from other files:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | cluster_templates.SaharaClusterTemplateCLITest, |
Using the snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
cluster_templates.SaharaClusterTemplateCLITest,
clusters.SaharaClusterCLITest,
<|code_end|>
, determine the next line of code. You have imports:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and context (class names, function names, or code) available:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | plugins.SaharaPluginCLITest, |
Given the code snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
cluster_templates.SaharaClusterTemplateCLITest,
clusters.SaharaClusterCLITest,
plugins.SaharaPluginCLITest,
<|code_end|>
, generate the next line using the imports in this file:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and context (functions, classes, or occasionally code) from other files:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | job_binaries.SaharaJobBinaryCLITest, |
Given the code snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
cluster_templates.SaharaClusterTemplateCLITest,
clusters.SaharaClusterCLITest,
plugins.SaharaPluginCLITest,
job_binaries.SaharaJobBinaryCLITest,
<|code_end|>
, generate the next line using the imports in this file:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and context (functions, classes, or occasionally code) from other files:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | jobs.SaharaJobCLITest, |
Continue the code snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
cluster_templates.SaharaClusterTemplateCLITest,
clusters.SaharaClusterCLITest,
plugins.SaharaPluginCLITest,
job_binaries.SaharaJobBinaryCLITest,
jobs.SaharaJobCLITest,
<|code_end|>
. Use current file imports:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and context (classes, functions, or code) from other files:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | job_templates.SaharaJobTemplateCLITest, |
Predict the next line after this snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
node_group_templates.SaharaNodeGroupCLITest,
cluster_templates.SaharaClusterTemplateCLITest,
clusters.SaharaClusterCLITest,
plugins.SaharaPluginCLITest,
job_binaries.SaharaJobBinaryCLITest,
jobs.SaharaJobCLITest,
job_templates.SaharaJobTemplateCLITest,
<|code_end|>
using the current file's imports:
import testtools
from tempest import config
from tempest.lib import decorators
from sahara_tempest_plugin.tests.cli import clusters
from sahara_tempest_plugin.tests.cli import cluster_templates
from sahara_tempest_plugin.tests.cli import images
from sahara_tempest_plugin.tests.cli import node_group_templates
from sahara_tempest_plugin.tests.cli import plugins
from sahara_tempest_plugin.tests.cli import job_binaries
from sahara_tempest_plugin.tests.cli import jobs
from sahara_tempest_plugin.tests.cli import job_templates
from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
and any relevant context from other files:
# Path: sahara_tempest_plugin/tests/cli/cluster_templates.py
# class SaharaClusterTemplateCLITest(base.ClientTestBase):
# def openstack_cluster_template_list(self):
# def openstack_cluster_template_create(self, ng_master, ng_worker):
# def openstack_cluster_template_show(self, cluster_template_name):
# def openstack_cluster_template_update(self, cluster_template_name):
# def openstack_cluster_template_delete(self, cluster_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/images.py
# class SaharaImageCLITest(base.ClientTestBase):
# def openstack_image_list(self):
# def openstack_image_register(self, name_to_register, username):
# def openstack_image_show(self, image_name):
# def openstack_image_tags_add(self, image_name):
# def openstack_image_tags_set(self, image_name):
# def openstack_image_tags_remove(self, image_name):
# def openstack_image_unregister(self, image_name):
# def negative_unregister_not_existing_image(self, image_name):
#
# Path: sahara_tempest_plugin/tests/cli/plugins.py
# class SaharaPluginCLITest(base.ClientTestBase):
# def openstack_plugin_list(self):
# def openstack_plugin_show(self):
# def openstack_plugin_configs_get(self):
# def openstack_plugin_update(self):
# def _update_with_json_file(self, update_dict):
#
# Path: sahara_tempest_plugin/tests/cli/job_binaries.py
# class SaharaJobBinaryCLITest(base.ClientTestBase):
# def openstack_job_binary_list(self):
# def openstack_job_binary_create(self, job_internal=True):
# def openstack_job_binary_download(self, job_binary_name,
# original_file=None):
# def openstack_job_binary_show(self, job_binary_name):
# def openstack_job_binary_update(self, job_binary_name, flag=None):
# def openstack_job_binary_delete(self, job_binary_name):
# def negative_delete_removed_job_binary(self, job_binary_name):
# def negative_try_to_update_protected_jb(self, job_binary_name):
# def filter_job_binaries_in_list(self):
#
# Path: sahara_tempest_plugin/tests/cli/jobs.py
# DELETE_RES = '''\
# job "%s" deletion has been started.
# '''
# class SaharaJobCLITest(base.ClientTestBase):
# def openstack_job_list(self):
# def openstack_job_execute(self, cluster_name, job_template_name, input,
# output):
# def openstack_job_show(self, job_id):
# def openstack_job_update(self, job_id):
# def openstack_job_delete(self, job_id):
#
# Path: sahara_tempest_plugin/tests/cli/job_templates.py
# class SaharaJobTemplateCLITest(base.ClientTestBase):
# def openstack_job_template_list(self):
# def openstack_job_template_create(self, job_binary_name):
# def openstack_job_template_show(self, job_template_name):
# def openstack_job_template_update(self, job_template_name):
# def openstack_job_template_delete(self, job_template_name):
#
# Path: sahara_tempest_plugin/tests/cli/data_sources.py
# class SaharaDataSourceCLITest(base.ClientTestBase):
# def openstack_data_source_list(self):
# def openstack_data_source_create(self):
# def openstack_data_source_show(self, data_source_name):
# def openstack_data_source_update(self, data_source_name):
# def openstack_data_source_delete(self, data_source_name):
. Output only the next line. | data_sources.SaharaDataSourceCLITest, |
Predict the next line after this snippet: <|code_start|> project_name = os.environ.get('OS_PROJECT_NAME', '')
os.environ['OS_USERNAME'] = 'demo_env'
os.environ['OS_PASSWORD'] = 'demopwd_env'
os.environ['OS_AUTH_URL'] = 'http://localhost:5000/v2.0'
os.environ['OS_PROJECT_NAME'] = 'project_env'
sys.argv = ['sahara_tests/scenario/runner.py',
'-V',
'sahara_tests/unit/scenario/templatevars_complete.ini',
'-p', 'spark', '-v', '1.3.1', '--release', 'liberty',
'--count', '4']
runner.main()
expected = {
'os_username': 'demo_env',
'os_password': 'demopwd_env',
'os_tenant': 'project_env',
'os_auth_url': 'http://localhost:5000/v2.0'
}
self.assertTrue(self._isDictContainSubset(
expected, mock_validate.call_args_list[0][0][0]['credentials']))
os.environ['OS_USERNAME'] = username
os.environ['OS_PASSWORD'] = password
os.environ['OS_AUTHURL'] = auth_url
os.environ['OS_PROJECT_NAME'] = project_name
@mock.patch('sahara_tests.scenario.validation.validate')
@mock.patch('subprocess.Popen',
return_value=_create_subprocess_communicate_mock())
@mock.patch('sys.exit', return_value=None)
def test_credentials_clouds(self, mock_sys, mock_sub, mock_validate):
<|code_end|>
using the current file's imports:
import os
import sys
import pkg_resources as pkg
import testtools
from jsonschema import exceptions
from unittest import mock
from sahara_tests import version
from sahara_tests.scenario import runner
and any relevant context from other files:
# Path: sahara_tests/version.py
. Output only the next line. | unit_dir = pkg.resource_filename(version.version_info.package, |
Predict the next line for this snippet: <|code_start|> '%Y%m%d_%H%M%S')
test_info['status'] = CHECK_FAILED_STATUS
test_info['traceback'] = traceback.format_exception(
*sys.exc_info())
if exit_with_error:
raise
finally:
test_time = timeutils.utcnow() - started_at
test_info['duration'] = test_time.seconds
return wrapper
return decorator
class BaseTestCase(base.BaseTestCase):
@classmethod
def setUpClass(cls):
super(BaseTestCase, cls).setUpClass()
cls.network = None
cls.credentials = None
cls.testcase = None
cls._results = []
cls.report = False
cls.results_dir = '.'
cls.default_templ_dir = '.'
cls.use_api_v2 = False
def setUp(self):
super(BaseTestCase, self).setUp()
self._init_clients()
<|code_end|>
with the help of current file imports:
import functools
import logging
import os
import sys
import time
import traceback
import fixtures
import prettytable
import six
from oslo_utils import timeutils
from tempest.lib import base
from tempest.lib.common import ssh as connection
from tempest.lib import exceptions as exc
from sahara_tests.scenario import clients
from sahara_tests.scenario import timeouts
from sahara_tests.scenario import utils
from sahara_tests.utils import crypto as ssh
from sahara_tests.utils import url as utils_url
and context from other files:
# Path: sahara_tests/scenario/timeouts.py
# class Defaults(object):
# def __init__(self, config):
# def init_defaults(cls, config):
#
# Path: sahara_tests/scenario/utils.py
# SCENARIO_RESOURCES_DIR = pkg.resource_filename(version.version_info.package,
# 'scenario')
# TEST_TEMPLATE_DIR = os.path.join(SCENARIO_RESOURCES_DIR, 'defaults/')
# DEFAULT_TEMPLATE_VARS = [os.path.join(TEST_TEMPLATE_DIR,
# 'credentials.yaml.mako'),
# os.path.join(TEST_TEMPLATE_DIR,
# 'edp.yaml.mako')]
# TEST_TEMPLATE_PATH = os.path.join(SCENARIO_RESOURCES_DIR,
# 'testcase.py.mako')
# DEFAULT_STESTR_CONF = os.path.join(SCENARIO_RESOURCES_DIR, 'stestr.conf')
# def rand_name(name=''):
# def run_tests(concurrency, test_dir_path):
# def create_testcase_file(testcases, credentials, network, report,
# use_api_v2=False):
# def get_templates_variables(files, variable_file, verbose_run, scenario_args,
# auth_values):
# def generate_config(files, template_variables, auth_values, verbose_run,
# features_list=None):
# def get_default_templates(plugin, version, release, scenario_arguments,
# features=None):
# def get_auth_values(cloud_config, args):
# def _merge_dicts_sections(dict_with_section, dict_for_merge, section):
# def is_template_file(config_file):
# def read_template_variables(variable_file=None, verbose=False,
# scenario_args=None, auth_values=None):
# def read_scenario_config(scenario_config, template_vars=None,
# verbose=False):
#
# Path: sahara_tests/utils/url.py
# def url_schema_remover(url):
, which may contain function names, class names, or code. Output only the next line. | timeouts.Defaults.init_defaults(self.testcase) |
Next line prediction: <|code_start|> return [job_id, input_id, output_id, configs]
@track_result("Check EDP jobs", False)
def check_run_jobs(self):
batching = self.testcase.get('edp_batching',
len(self.testcase['edp_jobs_flow']))
batching_size = batching
jobs = self.testcase.get('edp_jobs_flow', [])
pre_exec = []
for job in jobs:
pre_exec.append(self._prepare_job_running(job))
batching -= 1
if not batching:
self._job_batching(pre_exec)
pre_exec = []
batching = batching_size
self.check_verification(self.cluster_id)
def _job_batching(self, pre_exec):
job_exec_ids = []
for job_exec in pre_exec:
job_exec_ids.append(self._run_job(*job_exec))
self._poll_jobs_status(job_exec_ids)
def _create_datasources(self, job):
def create(ds, name):
credential_vars = {}
source = ds.get('source', None)
<|code_end|>
. Use current file imports:
(import functools
import logging
import os
import sys
import time
import traceback
import fixtures
import prettytable
import six
from oslo_utils import timeutils
from tempest.lib import base
from tempest.lib.common import ssh as connection
from tempest.lib import exceptions as exc
from sahara_tests.scenario import clients
from sahara_tests.scenario import timeouts
from sahara_tests.scenario import utils
from sahara_tests.utils import crypto as ssh
from sahara_tests.utils import url as utils_url)
and context including class names, function names, or small code snippets from other files:
# Path: sahara_tests/scenario/timeouts.py
# class Defaults(object):
# def __init__(self, config):
# def init_defaults(cls, config):
#
# Path: sahara_tests/scenario/utils.py
# SCENARIO_RESOURCES_DIR = pkg.resource_filename(version.version_info.package,
# 'scenario')
# TEST_TEMPLATE_DIR = os.path.join(SCENARIO_RESOURCES_DIR, 'defaults/')
# DEFAULT_TEMPLATE_VARS = [os.path.join(TEST_TEMPLATE_DIR,
# 'credentials.yaml.mako'),
# os.path.join(TEST_TEMPLATE_DIR,
# 'edp.yaml.mako')]
# TEST_TEMPLATE_PATH = os.path.join(SCENARIO_RESOURCES_DIR,
# 'testcase.py.mako')
# DEFAULT_STESTR_CONF = os.path.join(SCENARIO_RESOURCES_DIR, 'stestr.conf')
# def rand_name(name=''):
# def run_tests(concurrency, test_dir_path):
# def create_testcase_file(testcases, credentials, network, report,
# use_api_v2=False):
# def get_templates_variables(files, variable_file, verbose_run, scenario_args,
# auth_values):
# def generate_config(files, template_variables, auth_values, verbose_run,
# features_list=None):
# def get_default_templates(plugin, version, release, scenario_arguments,
# features=None):
# def get_auth_values(cloud_config, args):
# def _merge_dicts_sections(dict_with_section, dict_for_merge, section):
# def is_template_file(config_file):
# def read_template_variables(variable_file=None, verbose=False,
# scenario_args=None, auth_values=None):
# def read_scenario_config(scenario_config, template_vars=None,
# verbose=False):
#
# Path: sahara_tests/utils/url.py
# def url_schema_remover(url):
. Output only the next line. | destination = None if source else utils.rand_name( |
Using the snippet: <|code_start|> self._job_batching(pre_exec)
pre_exec = []
batching = batching_size
self.check_verification(self.cluster_id)
def _job_batching(self, pre_exec):
job_exec_ids = []
for job_exec in pre_exec:
job_exec_ids.append(self._run_job(*job_exec))
self._poll_jobs_status(job_exec_ids)
def _create_datasources(self, job):
def create(ds, name):
credential_vars = {}
source = ds.get('source', None)
destination = None if source else utils.rand_name(
ds['destination'])
if ds['type'] == 'swift':
url = self._create_swift_data(source, destination)
credential_vars = {
'credential_user': self.credentials['os_username'],
'credential_pass': self.credentials['os_password']
}
elif ds['type'] == 's3':
url = self._create_s3_data(source, destination)
credential_vars = {
's3_credentials': {
'accesskey': self.credentials['s3_accesskey'],
'secretkey': self.credentials['s3_secretkey'],
<|code_end|>
, determine the next line of code. You have imports:
import functools
import logging
import os
import sys
import time
import traceback
import fixtures
import prettytable
import six
from oslo_utils import timeutils
from tempest.lib import base
from tempest.lib.common import ssh as connection
from tempest.lib import exceptions as exc
from sahara_tests.scenario import clients
from sahara_tests.scenario import timeouts
from sahara_tests.scenario import utils
from sahara_tests.utils import crypto as ssh
from sahara_tests.utils import url as utils_url
and context (class names, function names, or code) available:
# Path: sahara_tests/scenario/timeouts.py
# class Defaults(object):
# def __init__(self, config):
# def init_defaults(cls, config):
#
# Path: sahara_tests/scenario/utils.py
# SCENARIO_RESOURCES_DIR = pkg.resource_filename(version.version_info.package,
# 'scenario')
# TEST_TEMPLATE_DIR = os.path.join(SCENARIO_RESOURCES_DIR, 'defaults/')
# DEFAULT_TEMPLATE_VARS = [os.path.join(TEST_TEMPLATE_DIR,
# 'credentials.yaml.mako'),
# os.path.join(TEST_TEMPLATE_DIR,
# 'edp.yaml.mako')]
# TEST_TEMPLATE_PATH = os.path.join(SCENARIO_RESOURCES_DIR,
# 'testcase.py.mako')
# DEFAULT_STESTR_CONF = os.path.join(SCENARIO_RESOURCES_DIR, 'stestr.conf')
# def rand_name(name=''):
# def run_tests(concurrency, test_dir_path):
# def create_testcase_file(testcases, credentials, network, report,
# use_api_v2=False):
# def get_templates_variables(files, variable_file, verbose_run, scenario_args,
# auth_values):
# def generate_config(files, template_variables, auth_values, verbose_run,
# features_list=None):
# def get_default_templates(plugin, version, release, scenario_arguments,
# features=None):
# def get_auth_values(cloud_config, args):
# def _merge_dicts_sections(dict_with_section, dict_for_merge, section):
# def is_template_file(config_file):
# def read_template_variables(variable_file=None, verbose=False,
# scenario_args=None, auth_values=None):
# def read_scenario_config(scenario_config, template_vars=None,
# verbose=False):
#
# Path: sahara_tests/utils/url.py
# def url_schema_remover(url):
. Output only the next line. | 'endpoint': utils_url.url_schema_remover( |
Here is a snippet: <|code_start|> return ([0, ] + throughputs, forecasts)
def _make_recent_history_table(self, throughputs):
recent_history = []
for i in range(0, len(self.filter_summaries)):
summary = self.filter_summaries[i]
recent_history.append(
[summary.created_on, throughputs[i], summary.complete, summary.total, summary.pct_complete]
)
return recent_history
def get_context_data(self, filter_id, **kwargs):
self.find_summaries_or_404(filter_id)
throughputs, forecasts = self._generate_forecast()
recent_history = self._make_recent_history_table(throughputs)
context = dict(
filter_id=filter_id,
recent_history=recent_history,
forecasts=forecasts,
start_date=self.start_date,
end_date=self.latest_summary.created_on
)
return context
class Refresh(View):
def get(self, request, *args, **kwargs):
cache.set('dashboard_data', [])
<|code_end|>
. Write the next line using the current file imports:
import datetime
from dateutil.relativedelta import relativedelta
from django.core.cache import cache
from django.http import JsonResponse, Http404
from django.shortcuts import redirect
from django.views.generic import View, TemplateView
from .jobs import generate_dashboard
from .services import summaries, predictions
and context from other files:
# Path: dashboard/jobs.py
# @job
# def generate_dashboard():
# logger = logging.getLogger("dashboard.jobs.generate_dashboard")
# logger.info("Start")
# sheet_id = settings.GOOGLE_SPREADSHEET_ID
# data = sheets.load_sheet(sheet_id, settings.GOOGLE_SPREADSHEET_AUTH_FILE)
# for row in data:
# row.xtras = _add_target_date(row.xtras, row.xtras.get('_target_date'))
# if row.xtras.get('_jira_filter'):
# row.xtras = _add_current_jira_summary(row.xtras, row.xtras['_jira_filter'], logger)
# if row.xtras.get('jira_summary'):
# row.xtras = _add_week_ago_summary(row.xtras, row.xtras['jira_summary'], logger)
# row.xtras = _add_forecasts(row.xtras, row.xtras['jira_summary'], logger)
# cache.set('dashboard_data', data, None)
# cache.set('dashboard_data_updated', datetime.datetime.now(get_default_timezone()), None)
# logger.info("End")
# return True
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
, which may include functions, classes, or code. Output only the next line. | generate_dashboard.delay() |
Predict the next line for this snippet: <|code_start|>
class HealthCheck(View):
def get(self, request, *args, **kwargs):
last_access = cache.get('health_access', None)
content = {
'health': 'ok',
'last_access': last_access,
}
cache.set('health_access', datetime.datetime.now(), 60)
return JsonResponse(content)
class Dashboard(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **kwargs):
data = cache.get('dashboard_data', [])
<|code_end|>
with the help of current file imports:
import datetime
from dateutil.relativedelta import relativedelta
from django.core.cache import cache
from django.http import JsonResponse, Http404
from django.shortcuts import redirect
from django.views.generic import View, TemplateView
from .jobs import generate_dashboard
from .services import summaries, predictions
and context from other files:
# Path: dashboard/jobs.py
# @job
# def generate_dashboard():
# logger = logging.getLogger("dashboard.jobs.generate_dashboard")
# logger.info("Start")
# sheet_id = settings.GOOGLE_SPREADSHEET_ID
# data = sheets.load_sheet(sheet_id, settings.GOOGLE_SPREADSHEET_AUTH_FILE)
# for row in data:
# row.xtras = _add_target_date(row.xtras, row.xtras.get('_target_date'))
# if row.xtras.get('_jira_filter'):
# row.xtras = _add_current_jira_summary(row.xtras, row.xtras['_jira_filter'], logger)
# if row.xtras.get('jira_summary'):
# row.xtras = _add_week_ago_summary(row.xtras, row.xtras['jira_summary'], logger)
# row.xtras = _add_forecasts(row.xtras, row.xtras['jira_summary'], logger)
# cache.set('dashboard_data', data, None)
# cache.set('dashboard_data_updated', datetime.datetime.now(get_default_timezone()), None)
# logger.info("End")
# return True
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
, which may contain function names, class names, or code. Output only the next line. | updated = summaries.latest_update() |
Next line prediction: <|code_start|>class Forecast(TemplateView):
template_name = "forecast.html"
def find_summaries_or_404(self, filter_id):
self.filter_summaries = summaries.for_date_range(filter_id, self.start_date)
if not self.filter_summaries:
raise Http404("No filter with id: {}".format(filter_id))
self.latest_summary = self.filter_summaries.last()
@property
def days_ago(self):
try:
days_ago = int(self.request.GET.get('days_ago', 30))
except ValueError:
days_ago = 30
return days_ago
@property
def start_date(self):
return datetime.date.today() - relativedelta(days=self.days_ago - 1)
@property
def scope(self):
try:
scope = int(self.request.GET.get('scope', ''))
except (TypeError, ValueError):
scope = self.latest_summary.incomplete
return scope
def _generate_forecast(self):
<|code_end|>
. Use current file imports:
(import datetime
from dateutil.relativedelta import relativedelta
from django.core.cache import cache
from django.http import JsonResponse, Http404
from django.shortcuts import redirect
from django.views.generic import View, TemplateView
from .jobs import generate_dashboard
from .services import summaries, predictions)
and context including class names, function names, or small code snippets from other files:
# Path: dashboard/jobs.py
# @job
# def generate_dashboard():
# logger = logging.getLogger("dashboard.jobs.generate_dashboard")
# logger.info("Start")
# sheet_id = settings.GOOGLE_SPREADSHEET_ID
# data = sheets.load_sheet(sheet_id, settings.GOOGLE_SPREADSHEET_AUTH_FILE)
# for row in data:
# row.xtras = _add_target_date(row.xtras, row.xtras.get('_target_date'))
# if row.xtras.get('_jira_filter'):
# row.xtras = _add_current_jira_summary(row.xtras, row.xtras['_jira_filter'], logger)
# if row.xtras.get('jira_summary'):
# row.xtras = _add_week_ago_summary(row.xtras, row.xtras['jira_summary'], logger)
# row.xtras = _add_forecasts(row.xtras, row.xtras['jira_summary'], logger)
# cache.set('dashboard_data', data, None)
# cache.set('dashboard_data_updated', datetime.datetime.now(get_default_timezone()), None)
# logger.info("End")
# return True
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
. Output only the next line. | throughputs = predictions.throughput_history(self.filter_summaries) |
Here is a snippet: <|code_start|># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-16 13:33
from __future__ import unicode_literals
def migrate_fill_updated_at(apps, schema_editor):
"""Fill the updated_at column if it's not already."""
ProjectSummary = apps.get_model('dashboard', 'ProjectSummary')
for summary in ProjectSummary.objects.all():
<|code_end|>
. Write the next line using the current file imports:
from django.db import migrations
from ..services.summaries import fill_updated_at
and context from other files:
# Path: dashboard/services/summaries.py
# def fill_updated_at(summary):
# """Set updated_at to created_on at 11:59 pm UTC"""
# summary.updated_at = timezone.now().replace(
# year=summary.created_on.year,
# month=summary.created_on.month,
# day=summary.created_on.day,
# hour=23,
# minute=59,
# second=59,
# )
# return summary
, which may include functions, classes, or code. Output only the next line. | summary = fill_updated_at(summary) |
Given the code snippet: <|code_start|>
urlpatterns = [
url(r'^$', Dashboard.as_view(), name="dashboard"),
url(r'^forecast/(?P<filter_id>[0-9]+)/$', Forecast.as_view(), name="forecast"),
url(r'^refresh/$', Refresh.as_view(), name="refresh"),
<|code_end|>
, generate the next line using the imports in this file:
from django.conf.urls import url
from .views import HealthCheck, Dashboard, Refresh, Forecast
and context (functions, classes, or occasionally code) from other files:
# Path: dashboard/views.py
# class HealthCheck(View):
# def get(self, request, *args, **kwargs):
# last_access = cache.get('health_access', None)
# content = {
# 'health': 'ok',
# 'last_access': last_access,
# }
# cache.set('health_access', datetime.datetime.now(), 60)
# return JsonResponse(content)
#
# class Dashboard(TemplateView):
# template_name = "dashboard.html"
#
# def get_context_data(self, **kwargs):
# data = cache.get('dashboard_data', [])
# updated = summaries.latest_update()
# context = dict(data=data, updated=updated)
# return context
#
# class Refresh(View):
# def get(self, request, *args, **kwargs):
# cache.set('dashboard_data', [])
# generate_dashboard.delay()
# return redirect('dashboard')
#
# class Forecast(TemplateView):
# template_name = "forecast.html"
#
# def find_summaries_or_404(self, filter_id):
# self.filter_summaries = summaries.for_date_range(filter_id, self.start_date)
# if not self.filter_summaries:
# raise Http404("No filter with id: {}".format(filter_id))
# self.latest_summary = self.filter_summaries.last()
#
# @property
# def days_ago(self):
# try:
# days_ago = int(self.request.GET.get('days_ago', 30))
# except ValueError:
# days_ago = 30
# return days_ago
#
# @property
# def start_date(self):
# return datetime.date.today() - relativedelta(days=self.days_ago - 1)
#
# @property
# def scope(self):
# try:
# scope = int(self.request.GET.get('scope', ''))
# except (TypeError, ValueError):
# scope = self.latest_summary.incomplete
# return scope
#
# def _generate_forecast(self):
# throughputs = predictions.throughput_history(self.filter_summaries)
#
# try:
# forecast = predictions.forecast(throughputs, self.scope)
# forecast = [self.latest_summary.created_on + relativedelta(days=int(f)) for f in forecast]
# forecasts = {self.days_ago: {'percentiles': forecast, 'scope': self.scope, 'actual_scope': self.latest_summary.incomplete}}
# except ValueError:
# forecasts = {}
# return ([0, ] + throughputs, forecasts)
#
# def _make_recent_history_table(self, throughputs):
# recent_history = []
# for i in range(0, len(self.filter_summaries)):
# summary = self.filter_summaries[i]
# recent_history.append(
# [summary.created_on, throughputs[i], summary.complete, summary.total, summary.pct_complete]
# )
# return recent_history
#
# def get_context_data(self, filter_id, **kwargs):
# self.find_summaries_or_404(filter_id)
#
# throughputs, forecasts = self._generate_forecast()
# recent_history = self._make_recent_history_table(throughputs)
#
# context = dict(
# filter_id=filter_id,
# recent_history=recent_history,
# forecasts=forecasts,
# start_date=self.start_date,
# end_date=self.latest_summary.created_on
# )
# return context
. Output only the next line. | url(r'^health/$', HealthCheck.as_view(), name="health"), |
Given snippet: <|code_start|>
urlpatterns = [
url(r'^$', Dashboard.as_view(), name="dashboard"),
url(r'^forecast/(?P<filter_id>[0-9]+)/$', Forecast.as_view(), name="forecast"),
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.conf.urls import url
from .views import HealthCheck, Dashboard, Refresh, Forecast
and context:
# Path: dashboard/views.py
# class HealthCheck(View):
# def get(self, request, *args, **kwargs):
# last_access = cache.get('health_access', None)
# content = {
# 'health': 'ok',
# 'last_access': last_access,
# }
# cache.set('health_access', datetime.datetime.now(), 60)
# return JsonResponse(content)
#
# class Dashboard(TemplateView):
# template_name = "dashboard.html"
#
# def get_context_data(self, **kwargs):
# data = cache.get('dashboard_data', [])
# updated = summaries.latest_update()
# context = dict(data=data, updated=updated)
# return context
#
# class Refresh(View):
# def get(self, request, *args, **kwargs):
# cache.set('dashboard_data', [])
# generate_dashboard.delay()
# return redirect('dashboard')
#
# class Forecast(TemplateView):
# template_name = "forecast.html"
#
# def find_summaries_or_404(self, filter_id):
# self.filter_summaries = summaries.for_date_range(filter_id, self.start_date)
# if not self.filter_summaries:
# raise Http404("No filter with id: {}".format(filter_id))
# self.latest_summary = self.filter_summaries.last()
#
# @property
# def days_ago(self):
# try:
# days_ago = int(self.request.GET.get('days_ago', 30))
# except ValueError:
# days_ago = 30
# return days_ago
#
# @property
# def start_date(self):
# return datetime.date.today() - relativedelta(days=self.days_ago - 1)
#
# @property
# def scope(self):
# try:
# scope = int(self.request.GET.get('scope', ''))
# except (TypeError, ValueError):
# scope = self.latest_summary.incomplete
# return scope
#
# def _generate_forecast(self):
# throughputs = predictions.throughput_history(self.filter_summaries)
#
# try:
# forecast = predictions.forecast(throughputs, self.scope)
# forecast = [self.latest_summary.created_on + relativedelta(days=int(f)) for f in forecast]
# forecasts = {self.days_ago: {'percentiles': forecast, 'scope': self.scope, 'actual_scope': self.latest_summary.incomplete}}
# except ValueError:
# forecasts = {}
# return ([0, ] + throughputs, forecasts)
#
# def _make_recent_history_table(self, throughputs):
# recent_history = []
# for i in range(0, len(self.filter_summaries)):
# summary = self.filter_summaries[i]
# recent_history.append(
# [summary.created_on, throughputs[i], summary.complete, summary.total, summary.pct_complete]
# )
# return recent_history
#
# def get_context_data(self, filter_id, **kwargs):
# self.find_summaries_or_404(filter_id)
#
# throughputs, forecasts = self._generate_forecast()
# recent_history = self._make_recent_history_table(throughputs)
#
# context = dict(
# filter_id=filter_id,
# recent_history=recent_history,
# forecasts=forecasts,
# start_date=self.start_date,
# end_date=self.latest_summary.created_on
# )
# return context
which might include code, classes, or functions. Output only the next line. | url(r'^refresh/$', Refresh.as_view(), name="refresh"), |
Based on the snippet: <|code_start|> return xtras
def _add_forecasts(xtras, summary, logger):
a_month_ago = summary.created_on - relativedelta(days=30)
try:
forecasts = predictions.for_project(filter_id=summary.filter_id, backlog_size=summary.incomplete, start_date=a_month_ago)
except ValueError as e:
forecasts = []
logger.warn("Filter {} predictions error: {}".format(summary.filter_id, str(e)))
xtras['predictions'] = [summary.created_on + relativedelta(days=int(f)) for f in forecasts]
logger.debug("Filter {} predictions created".format(summary.filter_id))
return xtras
def _add_target_date(xtras, target_date_string):
xtras['target_date'] = ""
try:
xtras['target_date'] = parse(target_date_string)
except (ValueError, AttributeError):
pass
return xtras
@job
def generate_dashboard():
logger = logging.getLogger("dashboard.jobs.generate_dashboard")
logger.info("Start")
sheet_id = settings.GOOGLE_SPREADSHEET_ID
<|code_end|>
, predict the immediate next line with the help of imports:
import datetime
import logging
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse
from django.conf import settings
from django.core.cache import cache
from django.utils.timezone import get_default_timezone
from django_rq import job
from .services import sheets, jira, summaries, predictions
and context (classes, functions, sometimes code) from other files:
# Path: dashboard/services/sheets.py
# class Row(OrderedDict):
# def __init__(self, *args, **kwargs):
# def load_sheet(sheet_id, sheet_auth_file=""):
# def _load_via_api(sheet_id, sheet_auth_file):
# def _load_via_csv(sheet_id):
# def parse_csv(csv_text):
#
# Path: dashboard/services/jira.py
# LOGGER = logging.getLogger("dashboard.services.jira")
# def query_url(filter_id, jira_url=None):
# def fetch_query_results(filter_id, requests=requests, logger=LOGGER):
# def summarize_results(results):
# def summarize_query(filter_id):
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
. Output only the next line. | data = sheets.load_sheet(sheet_id, settings.GOOGLE_SPREADSHEET_AUTH_FILE) |
Here is a snippet: <|code_start|>
def _add_current_jira_summary(xtras, jira_filter_id, logger):
summary_data = jira.summarize_query(jira_filter_id)
logger.debug("Filter {} summarized".format(jira_filter_id))
if summary_data.get('errors', []):
xtras['jira_summary_errors'] = summary_data['errors']
logger.warn("Filter {} summary error".format(summary_data['errors']))
elif summary_data:
<|code_end|>
. Write the next line using the current file imports:
import datetime
import logging
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse
from django.conf import settings
from django.core.cache import cache
from django.utils.timezone import get_default_timezone
from django_rq import job
from .services import sheets, jira, summaries, predictions
and context from other files:
# Path: dashboard/services/sheets.py
# class Row(OrderedDict):
# def __init__(self, *args, **kwargs):
# def load_sheet(sheet_id, sheet_auth_file=""):
# def _load_via_api(sheet_id, sheet_auth_file):
# def _load_via_csv(sheet_id):
# def parse_csv(csv_text):
#
# Path: dashboard/services/jira.py
# LOGGER = logging.getLogger("dashboard.services.jira")
# def query_url(filter_id, jira_url=None):
# def fetch_query_results(filter_id, requests=requests, logger=LOGGER):
# def summarize_results(results):
# def summarize_query(filter_id):
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
, which may include functions, classes, or code. Output only the next line. | p = summaries.create( |
Predict the next line for this snippet: <|code_start|> summary_data = jira.summarize_query(jira_filter_id)
logger.debug("Filter {} summarized".format(jira_filter_id))
if summary_data.get('errors', []):
xtras['jira_summary_errors'] = summary_data['errors']
logger.warn("Filter {} summary error".format(summary_data['errors']))
elif summary_data:
p = summaries.create(
filter_id=int(xtras['_jira_filter']),
incomplete=summary_data['incomplete'],
complete=summary_data['complete'],
total=summary_data['total'],
created_on=datetime.date.today(),
)
summaries.store(p)
xtras['jira_summary'] = p
logger.debug("Filter {} summary stored".format(p.filter_id))
return xtras
def _add_week_ago_summary(xtras, current_summary, logger):
week_ago = current_summary.created_on - relativedelta(days=7)
week_ago_summary = summaries.for_date(filter_id=current_summary.filter_id, date=week_ago)
xtras['week_ago_summary'] = week_ago_summary
logger.info("Filter {} week_ago summary retrieved".format(current_summary.filter_id))
return xtras
def _add_forecasts(xtras, summary, logger):
a_month_ago = summary.created_on - relativedelta(days=30)
try:
<|code_end|>
with the help of current file imports:
import datetime
import logging
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse
from django.conf import settings
from django.core.cache import cache
from django.utils.timezone import get_default_timezone
from django_rq import job
from .services import sheets, jira, summaries, predictions
and context from other files:
# Path: dashboard/services/sheets.py
# class Row(OrderedDict):
# def __init__(self, *args, **kwargs):
# def load_sheet(sheet_id, sheet_auth_file=""):
# def _load_via_api(sheet_id, sheet_auth_file):
# def _load_via_csv(sheet_id):
# def parse_csv(csv_text):
#
# Path: dashboard/services/jira.py
# LOGGER = logging.getLogger("dashboard.services.jira")
# def query_url(filter_id, jira_url=None):
# def fetch_query_results(filter_id, requests=requests, logger=LOGGER):
# def summarize_results(results):
# def summarize_query(filter_id):
#
# Path: dashboard/services/summaries.py
# SAVED = "saved"
# UPDATED = "updated"
# def fill_updated_at(summary):
# def create(filter_id, complete, incomplete, total, created_on=None):
# def store(summary_obj):
# def for_date(filter_id, date):
# def for_date_range(filter_id, start_date, end_date=None):
# def latest_update():
#
# Path: dashboard/services/predictions.py
# def throughput_history(summaries):
# def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
# def for_project(filter_id, backlog_size, start_date):
, which may contain function names, class names, or code. Output only the next line. | forecasts = predictions.for_project(filter_id=summary.filter_id, backlog_size=summary.incomplete, start_date=a_month_ago) |
Using the snippet: <|code_start|>"""Classes and functions for manipulating and storing summaries."""
SAVED = "saved"
UPDATED = "updated"
def fill_updated_at(summary):
"""Set updated_at to created_on at 11:59 pm UTC"""
summary.updated_at = timezone.now().replace(
year=summary.created_on.year,
month=summary.created_on.month,
day=summary.created_on.day,
hour=23,
minute=59,
second=59,
)
return summary
def create(filter_id, complete, incomplete, total, created_on=None):
"""Create ProjectSummary instances."""
if created_on is None:
created_on = datetime.date.today()
<|code_end|>
, determine the next line of code. You have imports:
import datetime
from django.utils import timezone
from ..models import ProjectSummary
and context (class names, function names, or code) available:
# Path: dashboard/models.py
# class ProjectSummary(models.Model):
# """Summary data about a JIRA filter for a particular date."""
#
# filter_id = models.IntegerField()
# incomplete = models.IntegerField()
# complete = models.IntegerField()
# total = models.IntegerField()
# created_on = models.DateField()
# updated_at = models.DateTimeField(null=False)
#
# class Meta:
# verbose_name = "project summary"
# verbose_name_plural = "project summaries"
# unique_together = (("filter_id", "created_on"))
# get_latest_by = "updated_at"
#
# def __repr__(self):
# return "<ProjectSummary {} filter: {} created_on: {}>".format(self.id, self.filter_id, self.created_on)
#
# @property
# def pct_complete(self):
# """How complete is the project.
#
# Returns:
# float: Percentage of the project that is complete.
# """
# if self.total == 0:
# return float(0)
# return self.complete / float(self.total)
. Output only the next line. | p = ProjectSummary( |
Next line prediction: <|code_start|> if throughput < 0:
throughput = 0
history.append(throughput)
return history
def forecast(throughputs, backlog_size, num_simulations=10000, seed=None):
"""Monte Carlo forecast given the provided backlog and throughput histories.
Args:
throughputs (List[int]): Number of items completed per period
backlog_size (int): How many items remain incomplete
num_simulations (int: 10000): How many simulations should be run
seed (None): Provide a seed for the random number generator
Returns:
List[int]: The 50th, 80th, and 90th percentile # of periods remaining in the project
"""
results = Forecaster().forecast(throughputs, backlog_size, num_simulations=num_simulations, seed=seed)
return [results.percentile(50), results.percentile(80), results.percentile(90)]
def for_project(filter_id, backlog_size, start_date):
"""Forecast a project
Args:
filter_id (int): The filter_id for the project in question.
backlog_size (int): How many items remain incomplete
start_date (Date): How far back should we look for team history to simulate with
Returns:
List[int]: The 50th, 80th, and 90th percentile # of periods remaining in the project
"""
logger.debug("for_project: {}, {}, {}".format(filter_id, backlog_size, start_date))
<|code_end|>
. Use current file imports:
(import logging
from better.lib import Forecaster
from .summaries import for_date_range)
and context including class names, function names, or small code snippets from other files:
# Path: dashboard/services/summaries.py
# def for_date_range(filter_id, start_date, end_date=None):
# """Find all the summaries for a given filter_id and a date range.
# Args:
# filter_id (int): The filter_id for the project in question.
# start_date (Date): The start of the range you want summaries for
# end_date (Date[today]): The end of the range you want summaries for, default: today
# Returns:
# List[ProjectSummary]: 0 or more project summaries for that filter in the range
# """
# if end_date is None:
# end_date = datetime.date.today()
# return ProjectSummary.objects.filter(
# filter_id=filter_id,
# created_on__gte=start_date,
# created_on__lte=end_date,
# ).order_by('created_on')
. Output only the next line. | summaries = for_date_range(filter_id, start_date) |
Next line prediction: <|code_start|>
class CommunityPKProxy(RecordPKProxy):
"""Resolver proxy for a Record entity using the UUID."""
def get_need(self):
"""Return the user need of the community's owner."""
# TODO this may become difficult once there's multiple levels
# of membership (owner, manager, curator, ...)
# -> which needs should be generated? None, and let the
# user create the set of required needs from the
# resolved entity? or keep the 'owner' need?
comid = str(self._parse_ref_dict_id(self._ref_dict))
return CommunityNeed(comid)
class CommunityResolver(RecordResolver):
"""Community entity resolver.
The entity resolver enables Invenio-Requests to understand communities as
receiver and topic of a request.
"""
type_id = 'community'
"""Type identifier for this resolver."""
def __init__(self):
"""Initialize the default record resolver."""
super().__init__(
<|code_end|>
. Use current file imports:
(from invenio_records_resources.references.resolvers.records import \
RecordPKProxy, RecordResolver
from .records.api import Community
from .services.permissions import CommunityNeed)
and context including class names, function names, or small code snippets from other files:
# Path: invenio_communities/communities/records/api.py
# class Community(Record):
# """Community API."""
#
# pid = PIDField('id', provider=CommunitiesIdProvider, create=False)
# schema = ConstantField(
# '$schema', 'local://communities/communities-v1.0.0.json')
#
# model_cls = models.CommunityMetadata
#
# index = IndexField(
# "communities-communities-v1.0.0",
# search_alias="communities"
# )
#
# access = CommunityAccessField()
#
# bucket_id = ModelField(dump=False)
# bucket = ModelField(dump=False)
# files = FilesField(
# store=False,
# file_cls=CommunityFile,
# # Don't delete, we'll manage in the service
# delete=False,
# )
#
# Path: invenio_communities/communities/services/permissions.py
# class IfRestrictedBase(Generator):
# class IfRestricted(IfRestrictedBase):
# class IfPolicyClosed(IfRestrictedBase):
# class CommunityOwners(Generator):
# class CommunityPermissionPolicy(BasePermissionPolicy):
# def __init__(self, field_getter,
# field_name, then_value, else_value, then_, else_):
# def generators(self, record):
# def needs(self, record=None, **kwargs):
# def excludes(self, record=None, **kwargs):
# def make_query(self, generators, **kwargs):
# def query_filter(self, **kwargs):
# def __init__(self, field, then_, else_):
# def __init__(self, field, then_, else_):
# def needs(self, record=None, **kwargs):
# def query_filter(self, identity=None, **kwargs):
. Output only the next line. | Community, type_key=self.type_id, proxy_cls=CommunityPKProxy) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# Copyright (C) 2021 CERN.
# Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Entity resolver for the requests module.
Entity resolvers are considered part of the service-layer. The resolver
is registered in Invenio-Requests via the "invenio_requests.entity_resolvers"
entry point.
"""
class CommunityPKProxy(RecordPKProxy):
"""Resolver proxy for a Record entity using the UUID."""
def get_need(self):
"""Return the user need of the community's owner."""
# TODO this may become difficult once there's multiple levels
# of membership (owner, manager, curator, ...)
# -> which needs should be generated? None, and let the
# user create the set of required needs from the
# resolved entity? or keep the 'owner' need?
comid = str(self._parse_ref_dict_id(self._ref_dict))
<|code_end|>
, predict the immediate next line with the help of imports:
from invenio_records_resources.references.resolvers.records import \
RecordPKProxy, RecordResolver
from .records.api import Community
from .services.permissions import CommunityNeed
and context (classes, functions, sometimes code) from other files:
# Path: invenio_communities/communities/records/api.py
# class Community(Record):
# """Community API."""
#
# pid = PIDField('id', provider=CommunitiesIdProvider, create=False)
# schema = ConstantField(
# '$schema', 'local://communities/communities-v1.0.0.json')
#
# model_cls = models.CommunityMetadata
#
# index = IndexField(
# "communities-communities-v1.0.0",
# search_alias="communities"
# )
#
# access = CommunityAccessField()
#
# bucket_id = ModelField(dump=False)
# bucket = ModelField(dump=False)
# files = FilesField(
# store=False,
# file_cls=CommunityFile,
# # Don't delete, we'll manage in the service
# delete=False,
# )
#
# Path: invenio_communities/communities/services/permissions.py
# class IfRestrictedBase(Generator):
# class IfRestricted(IfRestrictedBase):
# class IfPolicyClosed(IfRestrictedBase):
# class CommunityOwners(Generator):
# class CommunityPermissionPolicy(BasePermissionPolicy):
# def __init__(self, field_getter,
# field_name, then_value, else_value, then_, else_):
# def generators(self, record):
# def needs(self, record=None, **kwargs):
# def excludes(self, record=None, **kwargs):
# def make_query(self, generators, **kwargs):
# def query_filter(self, **kwargs):
# def __init__(self, field, then_, else_):
# def __init__(self, field, then_, else_):
# def needs(self, record=None, **kwargs):
# def query_filter(self, identity=None, **kwargs):
. Output only the next line. | return CommunityNeed(comid) |
Predict the next line for this snippet: <|code_start|> def edit(self, identity, draft=None, record=None, **kwargs):
"""Update draft metadata."""
draft.access = record.access
def new_version(self, identity, draft=None, record=None, **kwargs):
"""Update draft metadata."""
draft.access = record.access
# TODO: Move to Invenio-Records-Resources (and make reusable). Duplicated from
# Invenio-RDM-Records.
class CommunityAccessComponent(AccessComponent):
"""Service component for access integration."""
def _populate_access_and_validate(self, identity, data, record, **kwargs):
"""Populate and validate the community's access field."""
if record is not None and "access" in data:
# populate the record's access field with the data already
# validated by marshmallow
record.setdefault('access', {})
record['access'].update(data.get("access", {}))
record.access.refresh_from_dict(record.get("access"))
def _init_owners(self, identity, record, **kwargs):
"""If the record has no owners yet, add the current user."""
# if the given identity is that of a user, we add the
# corresponding user to the owners (record.access.owned_by)
is_sys_id = system_process in identity.provides
if not record.access.owned_by and not is_sys_id:
record.access.owned_by.add({"user": identity.id})
<|code_end|>
with the help of current file imports:
import re
from invenio_access.permissions import system_process
from invenio_pidstore.errors import PIDAlreadyExists
from invenio_records_resources.services.records.components import \
ServiceComponent
from marshmallow.exceptions import ValidationError
from ...utils import on_membership_change
and context from other files:
# Path: invenio_communities/utils.py
# def on_membership_change(identity=None):
# """Handler called when a membership is changed."""
# if identity is not None:
# current_cache.delete(identity_cache_key(identity))
, which may contain function names, class names, or code. Output only the next line. | on_membership_change(identity=identity) |
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*-
#
# Copyright (C) 2021 CERN.
#
# Invenio-Communities is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
class CommunityRelationMixin:
"""Model mixin to define a relationship between a communities and records.
Usage:
.. code-block:: python
class CommunityRecordM2M(db.Model, CommunityRelationMixin):
__record_model__ = MyParentRecord
"""
__record_model__ = None
__request_model__ = None
@declared_attr
def community_id(cls):
"""Foreign key to the related communithy."""
return db.Column(
UUIDType,
<|code_end|>
, predict the next line using imports from the current file:
from invenio_db import db
from invenio_requests.records.models import RequestMetadata
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy_utils.types import UUIDType
from ...communities.records.models import CommunityMetadata
and context including class names, function names, and sometimes code from other files:
# Path: invenio_communities/communities/records/models.py
# class CommunityMetadata(db.Model, RecordMetadataBase):
# """Represent a community."""
#
# __tablename__ = 'communities_metadata'
#
# # Enables SQLAlchemy-Continuum versioning
# __versioned__ = {}
#
# bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
# bucket = db.relationship(Bucket)
. Output only the next line. | db.ForeignKey(CommunityMetadata.id, ondelete="CASCADE"), |
Based on the snippet: <|code_start|> return
# Cache keys
#
# The cache of communities must be invalidated on:
# 1) on creation of a community (likely this one is modelled via membership
# in the future).
# 2) add/remove/change of membership
#
# We construct the cache key for each membership entity (e.g. user,
# role, system role). This way, once a membership is added/removed/updated
# we can cache the list of associated communities for this entity.
# Once a user logs in, we get the cache for each of the membership
# entities and combine it into a single list.
# Currently, only users are supported (no roles or system roles)
cache_key = identity_cache_key(identity)
communities = current_cache.get(cache_key)
if communities is None:
try:
communities = []
for c in search_communities(service, identity):
communities.append(str(c.uuid))
current_cache.set(cache_key, communities, timeout=24*3600)
except PermissionDeniedError:
communities = []
# Add community needs to identity
for c_id in communities:
<|code_end|>
, predict the immediate next line with the help of imports:
from elasticsearch_dsl import Q
from invenio_cache import current_cache
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities.services.permissions import CommunityNeed
and context (classes, functions, sometimes code) from other files:
# Path: invenio_communities/communities/services/permissions.py
# class IfRestrictedBase(Generator):
# class IfRestricted(IfRestrictedBase):
# class IfPolicyClosed(IfRestrictedBase):
# class CommunityOwners(Generator):
# class CommunityPermissionPolicy(BasePermissionPolicy):
# def __init__(self, field_getter,
# field_name, then_value, else_value, then_, else_):
# def generators(self, record):
# def needs(self, record=None, **kwargs):
# def excludes(self, record=None, **kwargs):
# def make_query(self, generators, **kwargs):
# def query_filter(self, **kwargs):
# def __init__(self, field, then_, else_):
# def __init__(self, field, then_, else_):
# def needs(self, record=None, **kwargs):
# def query_filter(self, identity=None, **kwargs):
. Output only the next line. | identity.provides.add(CommunityNeed(c_id)) |
Predict the next line for this snippet: <|code_start|>
@pytest.fixture()
def headers():
"""Default headers for making requests."""
return {
'content-type': 'application/json',
'accept': 'application/json',
}
@pytest.fixture()
def client_with_login(client, users):
"""Log in a user to the client."""
user = users[0]
login_user(user, remember=True)
login_user_via_session(client, email=user.email)
return client
@pytest.fixture(scope="function")
def create_many_records(app, client_with_login, minimal_community, headers):
"""Multiple community created and posted to test search functionality."""
client = client_with_login
community_types = ['organization', 'event', 'topic', 'project']
N = 4
for (type_,ind) in itertools.product(community_types, list(range(N))):
minimal_community['id'] = f'comm_{type_}_{ind}'
minimal_community['metadata']['type'] = type_
client.post( '/communities', headers=headers, json=minimal_community)
<|code_end|>
with the help of current file imports:
import itertools
import pytest
from flask_security import login_user
from invenio_accounts.testutils import create_test_user, login_user_via_session
from invenio_communities.communities.records.api import Community
and context from other files:
# Path: invenio_communities/communities/records/api.py
# class Community(Record):
# """Community API."""
#
# pid = PIDField('id', provider=CommunitiesIdProvider, create=False)
# schema = ConstantField(
# '$schema', 'local://communities/communities-v1.0.0.json')
#
# model_cls = models.CommunityMetadata
#
# index = IndexField(
# "communities-communities-v1.0.0",
# search_alias="communities"
# )
#
# access = CommunityAccessField()
#
# bucket_id = ModelField(dump=False)
# bucket = ModelField(dump=False)
# files = FilesField(
# store=False,
# file_cls=CommunityFile,
# # Don't delete, we'll manage in the service
# delete=False,
# )
, which may contain function names, class names, or code. Output only the next line. | Community.index.refresh() |
Given the code snippet: <|code_start|> {'text': 'Engineering', 'value': 'engineering'},
{'text': 'Technology', 'value': 'technology'},
{'text': 'History and Archaeology', 'value': 'history_and_archaeology'},
{'text': 'Information and Computing Sciences', 'value': 'information_and_computing_sciences'},
{'text': 'Language, Communication and Culture', 'value': 'language_communication_and_culture'},
{'text': 'Law and Legal Studies', 'value': 'law_and_legal_studies'},
{'text': 'Mathematical Sciences', 'value': 'mathematical_sciences'},
{'text': 'Medical and Health Sciences', 'value': 'medical_and_health_sciences'},
{'text': 'Philosophy and Religious Studies', 'value': 'philosophy_and_religious_studies'},
{'text': 'Physical Sciences', 'value': 'physical_sciences'},
{'text': 'Psychology and Cognitive Sciences', 'value': 'psychology_and_cognitive_sciences'},
{'text': 'Studies in Creative Arts and Writing', 'value': 'studies_in_creative_arts_and_writing'},
{'text': 'Studies in Human Society', 'value': 'studies_in_human_society'},
]
COMMUNITIES_ROUTES = {
'frontpage': '/communities',
'search': '/communities/search',
'new': '/communities/new',
'details': '/communities/<pid_value>',
'settings': '/communities/<pid_value>/settings',
'settings_privileges': '/communities/<pid_value>/settings/privileges',
}
"""Communities ui endpoints."""
COMMUNITIES_ENABLED = True
"""Config to enable/disable communities blueprints."""
COMMUNITIES_FACETS = {
'type': {
<|code_end|>
, generate the next line using the imports in this file:
from invenio_communities.communities.services import facets
and context (functions, classes, or occasionally code) from other files:
# Path: invenio_communities/communities/services/facets.py
. Output only the next line. | 'facet': facets.type, |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2021 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Community access system field."""
class CommunityAccess:
"""Access management per community."""
VISIBILITY_LEVELS = ('public', 'restricted')
MEMBER_POLICY_LEVELS = ('open', 'closed')
RECORD_POLICY_LEVELS = ('open', 'closed', 'restricted')
<|code_end|>
, determine the next line of code. You have imports:
from invenio_records.systemfields import SystemField
from .owners import Owners
and context (class names, function names, or code) available:
# Path: invenio_communities/communities/records/systemfields/owners.py
# class Owners(list):
# """A list of owners for a record."""
#
# owner_cls = Owner
#
# def __init__(self, owners=None, owner_cls=None):
# """Create a new list of owners."""
# self.owner_cls = owner_cls or self.owner_cls
# for owner in owners or []:
# self.add(owner)
#
# def add(self, owner):
# """Alias for self.append(owner)."""
# self.append(owner)
#
# def append(self, owner):
# """Add the specified owner to the list of owners.
#
# :param owner: The record's owner (either a dict, User or Owner).
# """
# if not isinstance(owner, self.owner_cls):
# owner = self.owner_cls(owner)
#
# if owner not in self:
# super().append(owner)
#
# def extend(self, owners):
# """Add all new items from the specified owners to this list."""
# for owner in owners:
# self.add(owner)
#
# def remove(self, owner):
# """Remove the specified owner from the list of owners.
#
# :param owner: The record's owner (either a dict, User or Owner).
# """
# if not isinstance(owner, self.owner_cls):
# owner = self.owner_cls(owner)
#
# super().remove(owner)
#
# def dump(self):
# """Dump the owners as a list of owner dictionaries."""
# return [owner.dump() for owner in self]
. Output only the next line. | owners_cls = Owners |
Predict the next line after this snippet: <|code_start|>
class CommunitiesRelationManager:
"""Manager for a record's community relations."""
def __init__(self, m2m_model_cls, record_id, data):
self._m2m_model_cls = m2m_model_cls
self._record_id = record_id
self._default_id = None
self._communities_ids = set()
self._communities_cache = {}
self.from_dict(data)
#
# Helpers
#
def _to_id(self, val):
"""Get the community id"""
if isinstance(val, str):
return val
elif isinstance(val, Record):
return str(val.id)
return None
def _lookup_community(self, community_id):
"""Retrieve a community by id.
Caches the community.
"""
if community_id not in self._communities_cache:
<|code_end|>
using the current file's imports:
from invenio_db import db
from invenio_records.api import Record
from invenio_communities.communities.records.api import Community
and any relevant context from other files:
# Path: invenio_communities/communities/records/api.py
# class Community(Record):
# """Community API."""
#
# pid = PIDField('id', provider=CommunitiesIdProvider, create=False)
# schema = ConstantField(
# '$schema', 'local://communities/communities-v1.0.0.json')
#
# model_cls = models.CommunityMetadata
#
# index = IndexField(
# "communities-communities-v1.0.0",
# search_alias="communities"
# )
#
# access = CommunityAccessField()
#
# bucket_id = ModelField(dump=False)
# bucket = ModelField(dump=False)
# files = FilesField(
# store=False,
# file_cls=CommunityFile,
# # Don't delete, we'll manage in the service
# delete=False,
# )
. Output only the next line. | c = Community.get_record(community_id) |
Next line prediction: <|code_start|># Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Invitations Actions."""
class AcceptAction(RequestAction):
"""Accept action."""
status_from = ['open']
status_to = 'accepted'
def can_execute(self, identity):
"""Check if the accept action can be executed."""
# TODO
return True
def execute(self, identity, uow):
"""Accept entity into community."""
# community = self.request.topic.resolve()
# entity = self.request.receiver.resolve()
member_data = {
**self.request.receiver.reference_dict,
**self.request.topic.reference_dict,
# TODO: add role
}
<|code_end|>
. Use current file imports:
(from invenio_requests.customizations import RequestAction
from invenio_communities.proxies import current_communities)
and context including class names, function names, or small code snippets from other files:
# Path: invenio_communities/proxies.py
. Output only the next line. | current_communities.service.members.create( |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
#
# Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Members Service Config."""
class MemberLink(Link):
"""Link variables setter for RequestEvent links."""
@staticmethod
def vars(record, vars):
"""Variables for the URI template."""
# TODO: Revise for human-readable community id and different entities.
vars.update({
"community_id": record.community_id,
"entity": "user",
"id": record.user_id,
})
class MemberServiceConfig(RecordServiceConfig):
"""Member Service Config."""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from invenio_records_resources.services import Link, RecordServiceConfig
from ..records import Member
from .permissions import CommunityMembersPermissionPolicy
from .schema import MemberSchema
and context:
# Path: invenio_communities/members/records/api.py
# class Member(Record):
# """A Request Event."""
#
# model_cls = MemberModel
#
# # Systemfields
#
# metadata = None
#
# index = IndexField(
# "communitymembers-members-v1.0.0", search_alias="communitymembers"
# )
# """The ES index used."""
#
# community_id = ModelField("community_id")
# """The data-layer UUID of the community."""
#
# user_id = ModelField("user_id")
# """The data-layer id of the user (or None)."""
#
# role = ModelField("role")
# """The role of the entity."""
#
# # TODO: add visibility
# # TODO: add group
#
# @classmethod
# def get_record(cls, id_, with_deleted=True):
# """Return Member."""
#
# with db.session.no_autoflush:
# query = cls.model_cls.query.filter_by(**id_)
# if not with_deleted:
# query = query.filter(cls.model_cls.is_deleted != True)
# obj = query.one()
# return cls(obj.data, model=obj)
#
# Path: invenio_communities/members/services/permissions.py
# class CommunityMembersPermissionPolicy(BasePermissionPolicy):
# """Permissions for Community Members CRUD operations."""
#
# # TODO #384
# can_read = [AnyUser()]
#
# can_search = [AnyUser(), SystemProcess()]
#
# Path: invenio_communities/members/services/schema.py
# class MemberSchema(BaseRecordSchema):
# """Schema for a community member."""
#
# # input and output
# community = SanitizedUnicode(required=True)
# user = fields.Integer()
# role = fields.String(
# required=True,
# validate=validate.OneOf(ROLE_TYPES)
# )
# # TODO: add visibility
# # visibility = SanitizedUnicode(
# # required=True, validate=_not_blank(max=100))
# # TODO: add group
# # group = SanitizedUnicode()
#
# # output only
# # TODO: add name
# # name = SanitizedUnicode(dump_only=True)
#
# @validates_schema
# def validate_member_entity(self, data, **kwargs):
# """Check that at least one member entity is passed."""
# valid_entities = ['user', 'group']
#
# # TODO?: Could maybe use something like ?
# # validate.OneOf(valid_entities)
# if not any(e in data for e in valid_entities):
# raise ValidationError(
# _("There must be one of {}".format(valid_entities))
# )
#
# # community is coming in from the API
# # community_id is coming in from the Record when dumping
which might include code, classes, or functions. Output only the next line. | record_cls = Member |
Given snippet: <|code_start|># Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Members Service Config."""
class MemberLink(Link):
"""Link variables setter for RequestEvent links."""
@staticmethod
def vars(record, vars):
"""Variables for the URI template."""
# TODO: Revise for human-readable community id and different entities.
vars.update({
"community_id": record.community_id,
"entity": "user",
"id": record.user_id,
})
class MemberServiceConfig(RecordServiceConfig):
"""Member Service Config."""
record_cls = Member
schema = MemberSchema
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from invenio_records_resources.services import Link, RecordServiceConfig
from ..records import Member
from .permissions import CommunityMembersPermissionPolicy
from .schema import MemberSchema
and context:
# Path: invenio_communities/members/records/api.py
# class Member(Record):
# """A Request Event."""
#
# model_cls = MemberModel
#
# # Systemfields
#
# metadata = None
#
# index = IndexField(
# "communitymembers-members-v1.0.0", search_alias="communitymembers"
# )
# """The ES index used."""
#
# community_id = ModelField("community_id")
# """The data-layer UUID of the community."""
#
# user_id = ModelField("user_id")
# """The data-layer id of the user (or None)."""
#
# role = ModelField("role")
# """The role of the entity."""
#
# # TODO: add visibility
# # TODO: add group
#
# @classmethod
# def get_record(cls, id_, with_deleted=True):
# """Return Member."""
#
# with db.session.no_autoflush:
# query = cls.model_cls.query.filter_by(**id_)
# if not with_deleted:
# query = query.filter(cls.model_cls.is_deleted != True)
# obj = query.one()
# return cls(obj.data, model=obj)
#
# Path: invenio_communities/members/services/permissions.py
# class CommunityMembersPermissionPolicy(BasePermissionPolicy):
# """Permissions for Community Members CRUD operations."""
#
# # TODO #384
# can_read = [AnyUser()]
#
# can_search = [AnyUser(), SystemProcess()]
#
# Path: invenio_communities/members/services/schema.py
# class MemberSchema(BaseRecordSchema):
# """Schema for a community member."""
#
# # input and output
# community = SanitizedUnicode(required=True)
# user = fields.Integer()
# role = fields.String(
# required=True,
# validate=validate.OneOf(ROLE_TYPES)
# )
# # TODO: add visibility
# # visibility = SanitizedUnicode(
# # required=True, validate=_not_blank(max=100))
# # TODO: add group
# # group = SanitizedUnicode()
#
# # output only
# # TODO: add name
# # name = SanitizedUnicode(dump_only=True)
#
# @validates_schema
# def validate_member_entity(self, data, **kwargs):
# """Check that at least one member entity is passed."""
# valid_entities = ['user', 'group']
#
# # TODO?: Could maybe use something like ?
# # validate.OneOf(valid_entities)
# if not any(e in data for e in valid_entities):
# raise ValidationError(
# _("There must be one of {}".format(valid_entities))
# )
#
# # community is coming in from the API
# # community_id is coming in from the Record when dumping
which might include code, classes, or functions. Output only the next line. | permission_policy_cls = CommunityMembersPermissionPolicy |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Members Service Config."""
class MemberLink(Link):
"""Link variables setter for RequestEvent links."""
@staticmethod
def vars(record, vars):
"""Variables for the URI template."""
# TODO: Revise for human-readable community id and different entities.
vars.update({
"community_id": record.community_id,
"entity": "user",
"id": record.user_id,
})
class MemberServiceConfig(RecordServiceConfig):
"""Member Service Config."""
record_cls = Member
<|code_end|>
, predict the immediate next line with the help of imports:
from invenio_records_resources.services import Link, RecordServiceConfig
from ..records import Member
from .permissions import CommunityMembersPermissionPolicy
from .schema import MemberSchema
and context (classes, functions, sometimes code) from other files:
# Path: invenio_communities/members/records/api.py
# class Member(Record):
# """A Request Event."""
#
# model_cls = MemberModel
#
# # Systemfields
#
# metadata = None
#
# index = IndexField(
# "communitymembers-members-v1.0.0", search_alias="communitymembers"
# )
# """The ES index used."""
#
# community_id = ModelField("community_id")
# """The data-layer UUID of the community."""
#
# user_id = ModelField("user_id")
# """The data-layer id of the user (or None)."""
#
# role = ModelField("role")
# """The role of the entity."""
#
# # TODO: add visibility
# # TODO: add group
#
# @classmethod
# def get_record(cls, id_, with_deleted=True):
# """Return Member."""
#
# with db.session.no_autoflush:
# query = cls.model_cls.query.filter_by(**id_)
# if not with_deleted:
# query = query.filter(cls.model_cls.is_deleted != True)
# obj = query.one()
# return cls(obj.data, model=obj)
#
# Path: invenio_communities/members/services/permissions.py
# class CommunityMembersPermissionPolicy(BasePermissionPolicy):
# """Permissions for Community Members CRUD operations."""
#
# # TODO #384
# can_read = [AnyUser()]
#
# can_search = [AnyUser(), SystemProcess()]
#
# Path: invenio_communities/members/services/schema.py
# class MemberSchema(BaseRecordSchema):
# """Schema for a community member."""
#
# # input and output
# community = SanitizedUnicode(required=True)
# user = fields.Integer()
# role = fields.String(
# required=True,
# validate=validate.OneOf(ROLE_TYPES)
# )
# # TODO: add visibility
# # visibility = SanitizedUnicode(
# # required=True, validate=_not_blank(max=100))
# # TODO: add group
# # group = SanitizedUnicode()
#
# # output only
# # TODO: add name
# # name = SanitizedUnicode(dump_only=True)
#
# @validates_schema
# def validate_member_entity(self, data, **kwargs):
# """Check that at least one member entity is passed."""
# valid_entities = ['user', 'group']
#
# # TODO?: Could maybe use something like ?
# # validate.OneOf(valid_entities)
# if not any(e in data for e in valid_entities):
# raise ValidationError(
# _("There must be one of {}".format(valid_entities))
# )
#
# # community is coming in from the API
# # community_id is coming in from the Record when dumping
. Output only the next line. | schema = MemberSchema |
Here is a snippet: <|code_start|> )
@login_required
def communities_new():
"""Communities creation page."""
return render_template(
"invenio_communities/new.html",
form_config=dict(
access=dict(
visibilty=[
{
'text': 'Public',
'value': 'public',
'icon': 'group',
'helpText': 'Your community is publicly accessible ' \
'and shows up in search results.'
},
{
'text': 'Restricted',
'value': 'restricted',
'icon': 'lock',
'helpText': 'Your community is restricted to users ' \
'with access.'
}
]
),
SITE_UI_URL=current_app.config["SITE_UI_URL"]
),
)
<|code_end|>
. Write the next line using the current file imports:
from flask import current_app, render_template
from flask_login import login_required
from .decorators import pass_community, pass_community_logo, \
require_community_owner
and context from other files:
# Path: invenio_communities/views/decorators.py
# def pass_community(f):
# """Decorate to retrieve the community record using the community service.
# """
# @wraps(f)
# def view(**kwargs):
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# kwargs['community'] = community
# return f(**kwargs)
# return view
#
# def pass_community_logo(f):
# """Decorate a view to pass a community logo using the files service."""
# @wraps(f)
# def view(**kwargs):
# """."""
# try:
# pid_value = kwargs['pid_value']
# files = service().read_logo(
# id_=pid_value, identity=g.identity
# )
# kwargs['logo'] = files
# except FileNotFoundError:
# kwargs['logo'] = None
#
# return f(**kwargs)
# return view
#
# def require_community_owner(f):
# """Decorate a view to require community owner for accessing the view."""
# @wraps(f)
# def view(**kwargs):
# """."""
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# if community.has_permissions_to(["update"])["can_update"]:
# return f(**kwargs)
# else:
# raise PermissionDeniedError()
#
# return view
, which may include functions, classes, or code. Output only the next line. | @pass_community |
Here is a snippet: <|code_start|>
@login_required
def communities_new():
"""Communities creation page."""
return render_template(
"invenio_communities/new.html",
form_config=dict(
access=dict(
visibilty=[
{
'text': 'Public',
'value': 'public',
'icon': 'group',
'helpText': 'Your community is publicly accessible ' \
'and shows up in search results.'
},
{
'text': 'Restricted',
'value': 'restricted',
'icon': 'lock',
'helpText': 'Your community is restricted to users ' \
'with access.'
}
]
),
SITE_UI_URL=current_app.config["SITE_UI_URL"]
),
)
@pass_community
<|code_end|>
. Write the next line using the current file imports:
from flask import current_app, render_template
from flask_login import login_required
from .decorators import pass_community, pass_community_logo, \
require_community_owner
and context from other files:
# Path: invenio_communities/views/decorators.py
# def pass_community(f):
# """Decorate to retrieve the community record using the community service.
# """
# @wraps(f)
# def view(**kwargs):
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# kwargs['community'] = community
# return f(**kwargs)
# return view
#
# def pass_community_logo(f):
# """Decorate a view to pass a community logo using the files service."""
# @wraps(f)
# def view(**kwargs):
# """."""
# try:
# pid_value = kwargs['pid_value']
# files = service().read_logo(
# id_=pid_value, identity=g.identity
# )
# kwargs['logo'] = files
# except FileNotFoundError:
# kwargs['logo'] = None
#
# return f(**kwargs)
# return view
#
# def require_community_owner(f):
# """Decorate a view to require community owner for accessing the view."""
# @wraps(f)
# def view(**kwargs):
# """."""
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# if community.has_permissions_to(["update"])["can_update"]:
# return f(**kwargs)
# else:
# raise PermissionDeniedError()
#
# return view
, which may include functions, classes, or code. Output only the next line. | @pass_community_logo |
Given snippet: <|code_start|> }
]
),
SITE_UI_URL=current_app.config["SITE_UI_URL"]
),
)
@pass_community
@pass_community_logo
def communities_detail(community=None, logo=None, pid_value=None):
"""Community detail page."""
return render_template(
"invenio_communities/details/index.html",
community=community.to_dict(), # TODO: use serializer
logo=logo.to_dict() if logo else None,
# TODO: inject this from a vocabulary in the community
types={
"organization": "Organization",
"event": "Event",
"topic": "Topic",
"project": "Project"
},
# Pass permissions so we can disable partially UI components
# e.g Settings tab
permissions=community.has_permissions_to(['update']),
active_menu_tab="search"
)
@pass_community
@pass_community_logo
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from flask import current_app, render_template
from flask_login import login_required
from .decorators import pass_community, pass_community_logo, \
require_community_owner
and context:
# Path: invenio_communities/views/decorators.py
# def pass_community(f):
# """Decorate to retrieve the community record using the community service.
# """
# @wraps(f)
# def view(**kwargs):
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# kwargs['community'] = community
# return f(**kwargs)
# return view
#
# def pass_community_logo(f):
# """Decorate a view to pass a community logo using the files service."""
# @wraps(f)
# def view(**kwargs):
# """."""
# try:
# pid_value = kwargs['pid_value']
# files = service().read_logo(
# id_=pid_value, identity=g.identity
# )
# kwargs['logo'] = files
# except FileNotFoundError:
# kwargs['logo'] = None
#
# return f(**kwargs)
# return view
#
# def require_community_owner(f):
# """Decorate a view to require community owner for accessing the view."""
# @wraps(f)
# def view(**kwargs):
# """."""
# pid_value = kwargs['pid_value']
# community = service().read(
# id_=pid_value, identity=g.identity
# )
# if community.has_permissions_to(["update"])["can_update"]:
# return f(**kwargs)
# else:
# raise PermissionDeniedError()
#
# return view
which might include code, classes, or functions. Output only the next line. | @require_community_owner |
Predict the next line for this snippet: <|code_start|>"""Example of a record draft API."""
class MockRecord(RecordBase):
"""Example parent record."""
# Configuration
model_cls = MockRecordMetadata
# System fields
schema = ConstantField('$schema', 'local://mocks/mock-v1.0.0.json')
index = IndexField('mocks-mock-v1.0.0', search_alias='mocks')
<|code_end|>
with the help of current file imports:
from invenio_records.systemfields import ConstantField
from invenio_records_resources.records import Record as RecordBase
from invenio_records_resources.records.systemfields import IndexField
from invenio_communities.records.records.systemfields import CommunitiesField
from .models import MockRecordMetadata, MockRecordCommunity
and context from other files:
# Path: invenio_communities/records/records/systemfields/communities/field.py
# class CommunitiesField(SystemField):
# """Communites system field for managing relations to communities."""
#
# def __init__(self, m2m_model_cls, key='communities', context_cls=None,
# manager_cls=None):
# self._m2m_model_cls = m2m_model_cls
# self._context_cls = context_cls or CommunitiesFieldContext
# self._manager_cls = manager_cls or CommunitiesRelationManager
# super().__init__(key=key)
#
# #
# # Life-cycle hooks
# #
# def pre_commit(self, record):
# """Commit the communities field."""
# manager = self.obj(record)
# self.set_dictkey(record, manager.to_dict())
#
# #
# # Helpers
# #
# def obj(self, record):
# """Get or crate the communities manager."""
# # Check cache
# obj = self._get_cache(record)
# if obj is not None:
# return obj
#
# data = self.get_dictkey(record)
# # Create manager
# obj = self._manager_cls(self._m2m_model_cls, record.id, data)
# self._set_cache(record, obj)
# return obj
#
# # Data descriptor methods (i.e. attribute access)
# # __set__() not defined on purpose
# def __get__(self, record, owner=None):
# """Get the persistent identifier."""
# if record is None:
# return self._context_cls(self, owner)
# return self.obj(record)
#
# Path: tests/records/mock_module/models.py
# class MockRecordMetadata(db.Model, RecordMetadataBase):
# """A baisc record."""
#
# __tablename__ = 'mock_metadata'
#
# class MockRecordCommunity(db.Model, CommunityRelationMixin):
# """Relationship between record and community."""
#
# __tablename__ = 'mock_community'
# __record_model__ = MockRecordMetadata
# # __request_model__ = RequestMetadata
, which may contain function names, class names, or code. Output only the next line. | communities = CommunitiesField(MockRecordCommunity) |
Here is a snippet: <|code_start|>"""Example of a record draft API."""
class MockRecord(RecordBase):
"""Example parent record."""
# Configuration
model_cls = MockRecordMetadata
# System fields
schema = ConstantField('$schema', 'local://mocks/mock-v1.0.0.json')
index = IndexField('mocks-mock-v1.0.0', search_alias='mocks')
<|code_end|>
. Write the next line using the current file imports:
from invenio_records.systemfields import ConstantField
from invenio_records_resources.records import Record as RecordBase
from invenio_records_resources.records.systemfields import IndexField
from invenio_communities.records.records.systemfields import CommunitiesField
from .models import MockRecordMetadata, MockRecordCommunity
and context from other files:
# Path: invenio_communities/records/records/systemfields/communities/field.py
# class CommunitiesField(SystemField):
# """Communites system field for managing relations to communities."""
#
# def __init__(self, m2m_model_cls, key='communities', context_cls=None,
# manager_cls=None):
# self._m2m_model_cls = m2m_model_cls
# self._context_cls = context_cls or CommunitiesFieldContext
# self._manager_cls = manager_cls or CommunitiesRelationManager
# super().__init__(key=key)
#
# #
# # Life-cycle hooks
# #
# def pre_commit(self, record):
# """Commit the communities field."""
# manager = self.obj(record)
# self.set_dictkey(record, manager.to_dict())
#
# #
# # Helpers
# #
# def obj(self, record):
# """Get or crate the communities manager."""
# # Check cache
# obj = self._get_cache(record)
# if obj is not None:
# return obj
#
# data = self.get_dictkey(record)
# # Create manager
# obj = self._manager_cls(self._m2m_model_cls, record.id, data)
# self._set_cache(record, obj)
# return obj
#
# # Data descriptor methods (i.e. attribute access)
# # __set__() not defined on purpose
# def __get__(self, record, owner=None):
# """Get the persistent identifier."""
# if record is None:
# return self._context_cls(self, owner)
# return self.obj(record)
#
# Path: tests/records/mock_module/models.py
# class MockRecordMetadata(db.Model, RecordMetadataBase):
# """A baisc record."""
#
# __tablename__ = 'mock_metadata'
#
# class MockRecordCommunity(db.Model, CommunityRelationMixin):
# """Relationship between record and community."""
#
# __tablename__ = 'mock_community'
# __record_model__ = MockRecordMetadata
# # __request_model__ = RequestMetadata
, which may include functions, classes, or code. Output only the next line. | communities = CommunitiesField(MockRecordCommunity) |
Predict the next line for this snippet: <|code_start|># under the terms of the MIT License; see LICENSE file for more details.
"""Command-line tools for demo module."""
@click.group()
def communities():
"""Invenio communities commands."""
@communities.command('demo')
@with_appcontext
def demo():
"""Create 100 fake communities for demo purposes."""
click.secho('Creating demo communities...', fg='green')
faker = Faker()
for _ in range(100):
fake_data = create_fake_community(faker)
create_demo_community.delay(fake_data)
click.secho('Created communities!', fg='green')
@communities.command("rebuild-index")
@with_appcontext
def rebuild_index():
click.secho("Reindexing communities...", fg="green")
<|code_end|>
with the help of current file imports:
import click
from faker import Faker
from flask.cli import with_appcontext
from invenio_access.permissions import system_identity
from invenio_communities.proxies import current_communities
from .fixtures.demo import create_fake_community
from .fixtures.tasks import create_demo_community
and context from other files:
# Path: invenio_communities/proxies.py
#
# Path: invenio_communities/fixtures/demo.py
# def create_fake_community(faker):
# """Create fake communities for demo purposes."""
# data_to_use = {
# "access": {
# "visibility": random.choice(["public", "restricted"]),
# "member_policy": random.choice(["open", "closed"]),
# "record_policy": random.choice(["open", "closed", "restricted"])
# },
# "id": faker.unique.domain_word(),
# "metadata": {
# "title": faker.sentence(nb_words=5, variable_nb_words=True),
# "description": faker.text(max_nb_chars=2000),
# "type": random.choice(
# ["organization", "event", "topic", "project"]),
# "curation_policy": faker.text(max_nb_chars=2000),
# "page": faker.text(max_nb_chars=2000),
# "website": "https://" + faker.domain_name(), # fake.url()
# "organizations": [
# { "name": "CERN" }
# ],
# },
# }
#
# return json.loads(json.dumps(data_to_use))
#
# Path: invenio_communities/fixtures/tasks.py
# @shared_task
# def create_demo_community(data):
# """Create a demo community."""
# service = current_communities.service
# try:
# service.create(data=data, identity=system_identity)
# except PIDAlreadyExists:
# pass
, which may contain function names, class names, or code. Output only the next line. | communities_service = current_communities.service |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2021 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Command-line tools for demo module."""
@click.group()
def communities():
"""Invenio communities commands."""
@communities.command('demo')
@with_appcontext
def demo():
"""Create 100 fake communities for demo purposes."""
click.secho('Creating demo communities...', fg='green')
faker = Faker()
for _ in range(100):
<|code_end|>
with the help of current file imports:
import click
from faker import Faker
from flask.cli import with_appcontext
from invenio_access.permissions import system_identity
from invenio_communities.proxies import current_communities
from .fixtures.demo import create_fake_community
from .fixtures.tasks import create_demo_community
and context from other files:
# Path: invenio_communities/proxies.py
#
# Path: invenio_communities/fixtures/demo.py
# def create_fake_community(faker):
# """Create fake communities for demo purposes."""
# data_to_use = {
# "access": {
# "visibility": random.choice(["public", "restricted"]),
# "member_policy": random.choice(["open", "closed"]),
# "record_policy": random.choice(["open", "closed", "restricted"])
# },
# "id": faker.unique.domain_word(),
# "metadata": {
# "title": faker.sentence(nb_words=5, variable_nb_words=True),
# "description": faker.text(max_nb_chars=2000),
# "type": random.choice(
# ["organization", "event", "topic", "project"]),
# "curation_policy": faker.text(max_nb_chars=2000),
# "page": faker.text(max_nb_chars=2000),
# "website": "https://" + faker.domain_name(), # fake.url()
# "organizations": [
# { "name": "CERN" }
# ],
# },
# }
#
# return json.loads(json.dumps(data_to_use))
#
# Path: invenio_communities/fixtures/tasks.py
# @shared_task
# def create_demo_community(data):
# """Create a demo community."""
# service = current_communities.service
# try:
# service.create(data=data, identity=system_identity)
# except PIDAlreadyExists:
# pass
, which may contain function names, class names, or code. Output only the next line. | fake_data = create_fake_community(faker) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2021 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Command-line tools for demo module."""
@click.group()
def communities():
"""Invenio communities commands."""
@communities.command('demo')
@with_appcontext
def demo():
"""Create 100 fake communities for demo purposes."""
click.secho('Creating demo communities...', fg='green')
faker = Faker()
for _ in range(100):
fake_data = create_fake_community(faker)
<|code_end|>
, predict the immediate next line with the help of imports:
import click
from faker import Faker
from flask.cli import with_appcontext
from invenio_access.permissions import system_identity
from invenio_communities.proxies import current_communities
from .fixtures.demo import create_fake_community
from .fixtures.tasks import create_demo_community
and context (classes, functions, sometimes code) from other files:
# Path: invenio_communities/proxies.py
#
# Path: invenio_communities/fixtures/demo.py
# def create_fake_community(faker):
# """Create fake communities for demo purposes."""
# data_to_use = {
# "access": {
# "visibility": random.choice(["public", "restricted"]),
# "member_policy": random.choice(["open", "closed"]),
# "record_policy": random.choice(["open", "closed", "restricted"])
# },
# "id": faker.unique.domain_word(),
# "metadata": {
# "title": faker.sentence(nb_words=5, variable_nb_words=True),
# "description": faker.text(max_nb_chars=2000),
# "type": random.choice(
# ["organization", "event", "topic", "project"]),
# "curation_policy": faker.text(max_nb_chars=2000),
# "page": faker.text(max_nb_chars=2000),
# "website": "https://" + faker.domain_name(), # fake.url()
# "organizations": [
# { "name": "CERN" }
# ],
# },
# }
#
# return json.loads(json.dumps(data_to_use))
#
# Path: invenio_communities/fixtures/tasks.py
# @shared_task
# def create_demo_community(data):
# """Create a demo community."""
# service = current_communities.service
# try:
# service.create(data=data, identity=system_identity)
# except PIDAlreadyExists:
# pass
. Output only the next line. | create_demo_community.delay(fake_data) |
Next line prediction: <|code_start|> """Register blueprint routes on app."""
routes = app.config.get("COMMUNITIES_ROUTES")
blueprint = Blueprint(
"invenio_communities",
__name__,
template_folder="../templates",
static_folder='../static'
)
# control blueprint endpoints registration
if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
view_func=communities_frontpage,
)
blueprint.add_url_rule(
routes["search"],
view_func=communities_search,
)
blueprint.add_url_rule(
routes["new"],
view_func=communities_new,
)
blueprint.add_url_rule(
routes["details"],
<|code_end|>
. Use current file imports:
(from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges)
and context including class names, function names, or small code snippets from other files:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
. Output only the next line. | view_func=communities_detail, |
Given snippet: <|code_start|> return render_template("invenio_communities/tombstone.html"), 410
def record_permission_denied_error(error):
"""Handle permission denier error on record views."""
if not current_user.is_authenticated:
# trigger the flask-login unauthorized handler
return current_app.login_manager.unauthorized()
return render_template(current_app.config['THEME_403_TEMPLATE']), 403
#
# Registration
#
def create_ui_blueprint(app):
"""Register blueprint routes on app."""
routes = app.config.get("COMMUNITIES_ROUTES")
blueprint = Blueprint(
"invenio_communities",
__name__,
template_folder="../templates",
static_folder='../static'
)
# control blueprint endpoints registration
if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges
and context:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
which might include code, classes, or functions. Output only the next line. | view_func=communities_frontpage, |
Given the code snippet: <|code_start|>
#
# Registration
#
def create_ui_blueprint(app):
"""Register blueprint routes on app."""
routes = app.config.get("COMMUNITIES_ROUTES")
blueprint = Blueprint(
"invenio_communities",
__name__,
template_folder="../templates",
static_folder='../static'
)
# control blueprint endpoints registration
if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
view_func=communities_frontpage,
)
blueprint.add_url_rule(
routes["search"],
view_func=communities_search,
)
blueprint.add_url_rule(
routes["new"],
<|code_end|>
, generate the next line using the imports in this file:
from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges
and context (functions, classes, or occasionally code) from other files:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
. Output only the next line. | view_func=communities_new, |
Given the following code snippet before the placeholder: <|code_start|> if not current_user.is_authenticated:
# trigger the flask-login unauthorized handler
return current_app.login_manager.unauthorized()
return render_template(current_app.config['THEME_403_TEMPLATE']), 403
#
# Registration
#
def create_ui_blueprint(app):
"""Register blueprint routes on app."""
routes = app.config.get("COMMUNITIES_ROUTES")
blueprint = Blueprint(
"invenio_communities",
__name__,
template_folder="../templates",
static_folder='../static'
)
# control blueprint endpoints registration
if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
view_func=communities_frontpage,
)
blueprint.add_url_rule(
routes["search"],
<|code_end|>
, predict the next line using imports from the current file:
from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges
and context including class names, function names, and sometimes code from other files:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
. Output only the next line. | view_func=communities_search, |
Predict the next line after this snippet: <|code_start|> template_folder="../templates",
static_folder='../static'
)
# control blueprint endpoints registration
if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
view_func=communities_frontpage,
)
blueprint.add_url_rule(
routes["search"],
view_func=communities_search,
)
blueprint.add_url_rule(
routes["new"],
view_func=communities_new,
)
blueprint.add_url_rule(
routes["details"],
view_func=communities_detail,
)
# Settings tab routes
blueprint.add_url_rule(
routes["settings"],
<|code_end|>
using the current file's imports:
from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges
and any relevant context from other files:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
. Output only the next line. | view_func=communities_settings, |
Given the following code snippet before the placeholder: <|code_start|> if app.config["COMMUNITIES_ENABLED"]:
# Communities URL rules
blueprint.add_url_rule(
routes["frontpage"],
view_func=communities_frontpage,
)
blueprint.add_url_rule(
routes["search"],
view_func=communities_search,
)
blueprint.add_url_rule(
routes["new"],
view_func=communities_new,
)
blueprint.add_url_rule(
routes["details"],
view_func=communities_detail,
)
# Settings tab routes
blueprint.add_url_rule(
routes["settings"],
view_func=communities_settings,
)
blueprint.add_url_rule(
routes["settings_privileges"],
<|code_end|>
, predict the next line using imports from the current file:
from flask import Blueprint, current_app, render_template
from flask_login import current_user
from flask_menu import current_menu
from invenio_pidstore.errors import PIDDeletedError, PIDDoesNotExistError
from invenio_records_resources.services.errors import PermissionDeniedError
from .communities import communities_detail, communities_frontpage, \
communities_new, communities_search, communities_settings, \
communities_settings_privileges
and context including class names, function names, and sometimes code from other files:
# Path: invenio_communities/views/communities.py
# @pass_community
# @pass_community_logo
# def communities_detail(community=None, logo=None, pid_value=None):
# """Community detail page."""
# return render_template(
# "invenio_communities/details/index.html",
# community=community.to_dict(), # TODO: use serializer
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="search"
# )
#
# def communities_frontpage():
# """Communities index page."""
# return render_template(
# "invenio_communities/frontpage.html",
# )
#
# @login_required
# def communities_new():
# """Communities creation page."""
# return render_template(
# "invenio_communities/new.html",
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# SITE_UI_URL=current_app.config["SITE_UI_URL"]
# ),
# )
#
# def communities_search():
# """Communities search page."""
# return render_template(
# "invenio_communities/search.html",
# )
#
# @pass_community
# @pass_community_logo
# @require_community_owner
# def communities_settings(community=None, logo=None, pid_value=None):
# """Community settings/profile page."""
# return render_template(
# "invenio_communities/details/settings/profile.html",
# community=community.to_dict(), # TODO: use serializer,
# logo=logo.to_dict() if logo else None,
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings"
# )
#
# @require_community_owner
# @pass_community
# @pass_community_logo
# def communities_settings_privileges(community=None, logo=None, pid_value=None):
# """Community settings/privileges page."""
# return render_template(
# "invenio_communities/details/settings/privileges.html",
# community=community.to_dict(), # TODO: use serializer,
# form_config=dict(
# access=dict(
# visibilty=[
# {
# 'text': 'Public',
# 'value': 'public',
# 'icon': 'group',
# 'helpText': 'Your community is publicly accessible ' \
# 'and shows up in search results.'
# },
# {
# 'text': 'Restricted',
# 'value': 'restricted',
# 'icon': 'lock',
# 'helpText': 'Your community is restricted to users ' \
# 'with access.'
# }
# ]
# ),
# ),
# # TODO: inject this from a vocabulary in the community
# types={
# "organization": "Organization",
# "event": "Event",
# "topic": "Topic",
# "project": "Project"
# },
# # Pass permissions so we can disable partially UI components
# # e.g Settings tab
# permissions=community.has_permissions_to(['update']),
# active_menu_tab="settings",
# logo=logo.to_dict() if logo else None
# )
. Output only the next line. | view_func=communities_settings_privileges, |
Using the snippet: <|code_start|># Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Invitation request types."""
# Actions
class AcceptAction(RequestAction):
"""Accept action."""
status_from = ['open']
status_to = 'accepted'
def can_execute(self, identity):
"""Check if the accept action can be executed."""
# TODO
return True
def execute(self, identity, uow):
"""Accept entity into community."""
member_data = {
**self.request.receiver.reference_dict,
**self.request.topic.reference_dict,
"role": self.request["payload"]["role"]
}
<|code_end|>
, determine the next line of code. You have imports:
from flask_babelex import lazy_gettext as _
from invenio_requests.customizations import RequestAction, RequestState, \
BaseRequestType
from ...proxies import current_communities
from .schemas import MemberInvitationPayloadSchema
and context (class names, function names, or code) available:
# Path: invenio_communities/proxies.py
#
# Path: invenio_communities/invitations/services/schemas.py
# class MemberInvitationPayloadSchema(RequestSchema):
# """Community Member Invitation Schema."""
#
# role = fields.String(required=True)
#
# @validates("role")
# def validate_role(self, value):
# """Validate role."""
# return validate.OneOf(ROLE_TYPES)
. Output only the next line. | current_communities.service.members.create( |
Using the snippet: <|code_start|>
super().execute(identity, uow)
# Request types
class CommunityMemberInvitation(BaseRequestType):
"""Community member invitation request type."""
type_id = 'community-member-invitation'
name = _('Community Member Invitation')
available_statuses = {
"open": RequestState.OPEN,
"cancelled": RequestState.CLOSED,
"declined": RequestState.CLOSED,
"accepted": RequestState.CLOSED,
"expired": RequestState.CLOSED,
}
default_status = "open"
available_actions = {
"accept": AcceptAction,
# "cancel": CancelAction,
# "decline": DeclineAction,
# "expire": ExpireAction,
}
creator_can_be_none = False
topic_can_be_none = False
allowed_creator_ref_types = ["community"]
allowed_receiver_ref_types = ["user"]
allowed_topic_ref_types = ["community"]
<|code_end|>
, determine the next line of code. You have imports:
from flask_babelex import lazy_gettext as _
from invenio_requests.customizations import RequestAction, RequestState, \
BaseRequestType
from ...proxies import current_communities
from .schemas import MemberInvitationPayloadSchema
and context (class names, function names, or code) available:
# Path: invenio_communities/proxies.py
#
# Path: invenio_communities/invitations/services/schemas.py
# class MemberInvitationPayloadSchema(RequestSchema):
# """Community Member Invitation Schema."""
#
# role = fields.String(required=True)
#
# @validates("role")
# def validate_role(self, value):
# """Validate role."""
# return validate.OneOf(ROLE_TYPES)
. Output only the next line. | payload_schema = MemberInvitationPayloadSchema().fields |
Using the snippet: <|code_start|> ]
error_messages = set([item['messages'][0] for item in response.json['errors']])
assert expected == len(set(error_messages_list).intersection(error_messages))
def test_simple_flow(
app, client_with_login, location, minimal_community, headers,
es_clear
):
"""Test a simple REST API flow."""
client = client_with_login
# Create a community
res = client.post(
'/communities', headers=headers,
json=minimal_community)
assert res.status_code == 201
_assert_single_item_response(res)
created_community = res.json
id_ = created_community["id"]
# Read the community
res = client.get(f'/communities/{id_}', headers=headers)
assert res.status_code == 200
assert res.json['metadata'] == \
created_community['metadata']
read_community = res.json
<|code_end|>
, determine the next line of code. You have imports:
import copy
from io import BytesIO
from invenio_communities.communities.records.api import Community
and context (class names, function names, or code) available:
# Path: invenio_communities/communities/records/api.py
# class Community(Record):
# """Community API."""
#
# pid = PIDField('id', provider=CommunitiesIdProvider, create=False)
# schema = ConstantField(
# '$schema', 'local://communities/communities-v1.0.0.json')
#
# model_cls = models.CommunityMetadata
#
# index = IndexField(
# "communities-communities-v1.0.0",
# search_alias="communities"
# )
#
# access = CommunityAccessField()
#
# bucket_id = ModelField(dump=False)
# bucket = ModelField(dump=False)
# files = FilesField(
# store=False,
# file_cls=CommunityFile,
# # Don't delete, we'll manage in the service
# delete=False,
# )
. Output only the next line. | Community.index.refresh() |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# Copyright (C) 2022 Northwestern University.
#
# Invenio-Communities is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Member Model."""
class MemberModel(db.Model, RecordMetadataBase):
"""Member model."""
__tablename__ = "communities_members"
id = db.Column(UUIDType, primary_key=True, default=uuid.uuid4)
community_id = db.Column(
UUIDType,
<|code_end|>
, predict the immediate next line with the help of imports:
import uuid
from invenio_db import db
from invenio_accounts.models import User
from invenio_records.models import RecordMetadataBase
from sqlalchemy_utils.types import UUIDType
from ...communities.records.models import CommunityMetadata
and context (classes, functions, sometimes code) from other files:
# Path: invenio_communities/communities/records/models.py
# class CommunityMetadata(db.Model, RecordMetadataBase):
# """Represent a community."""
#
# __tablename__ = 'communities_metadata'
#
# # Enables SQLAlchemy-Continuum versioning
# __versioned__ = {}
#
# bucket_id = db.Column(UUIDType, db.ForeignKey(Bucket.id))
# bucket = db.relationship(Bucket)
. Output only the next line. | db.ForeignKey(CommunityMetadata.id, ondelete="CASCADE"), |
Given the following code snippet before the placeholder: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
if __name__ == '__main__':
dc = get_authenticated_dc()
dc.filedata.write_file("/~/test_dir/", "test_file.txt", six.b("Helllo, world!"), "text/plain")
dc.filedata.write_file("/~/test_dir/", "test_file2.txt", six.b("Hello, again!"))
for dirpath, directories, files in dc.filedata.walk("/"):
for fd_file in files:
print(fd_file)
<|code_end|>
, predict the next line using imports from the current file:
from devicecloud.examples.example_helpers import get_authenticated_dc
from devicecloud.filedata import fd_path
import six
and context including class names, function names, and sometimes code from other files:
# Path: devicecloud/examples/example_helpers.py
# def get_authenticated_dc():
# while True:
# base_url = os.environ.get('DC_BASE_URL', 'https://devicecloud.digi.com')
#
# username = os.environ.get('DC_USERNAME', None)
# if not username:
# username = input("username: ")
#
# password = os.environ.get('DC_PASSWORD', None)
# if not password:
# password = getpass("password: ")
#
# dc = DeviceCloud(username, password, base_url=base_url)
# if dc.has_valid_credentials():
# print("Credentials accepted!")
# return dc
# else:
# print("Invalid username or password provided, try again")
#
# Path: devicecloud/filedata.py
# class FileDataAPI(APIBase):
# class FileDataObject(object):
# class FileDataDirectory(FileDataObject):
# class FileDataFile(FileDataObject):
# def get_filedata(self, condition=None, page_size=1000):
# def write_file(self, path, name, data, content_type=None, archive=False,
# raw=False):
# def delete_file(self, path):
# def walk(self, root="~/"):
# def from_json(cls, fdapi, json_data):
# def __init__(self, fdapi, json_data):
# def delete(self):
# def get_data(self):
# def get_type(self):
# def get_last_modified_date(self):
# def get_content_type(self):
# def get_customer_id(self):
# def get_created_date(self):
# def get_name(self):
# def get_path(self):
# def get_full_path(self):
# def get_size(self):
# def from_json(cls, fdapi, json_data):
# def __init__(self, fdapi, data):
# def __repr__(self):
# def walk(self):
# def write_file(self, *args, **kwargs):
# def from_json(cls, fdapi, json_data):
# def __init__(self, fdapi, json_data):
# def __repr__(self):
. Output only the next line. | for fd in dc.filedata.get_filedata(fd_path == "~/"): |
Predict the next line after this snippet: <|code_start|>
PUT_FILE_DATA_COMMAND = """\
<commands>\
<put_file {offset}path="{path}" truncate="{truncate}">\
<data>{data}</data>\
</put_file>\
</commands>\
"""
PUT_FILE_FILE_COMMAND = """\
<commands>\
<put_file {offset}path="{path}" truncate="{truncate}">\
<file>{server_file}</file>\
</put_file>\
</commands>\
"""
DELETE_FILE_COMMAND = """\
<commands>\
<rm path="{path}" />\
</commands>\
"""
class TestFileInfo(unittest.TestCase):
def setUp(self):
self.fss_api = mock.Mock()
self.dev_id = '00000000-00000000-18A905FF-FF2F1BBD'
def test_eq_not_eq(self):
<|code_end|>
using the current file's imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and any relevant context from other files:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | file1 = FileInfo(self.fss_api, self.dev_id, '/a/path/file1.txt', 1436276773, 7989, |
Continue the code snippet: <|code_start|> et = getcommand.get_etree()
self.assertEqual(et.tag, 'get_file')
self.assertEqual('/a/path/here', et.get('path'))
self.assertEqual('5', et.get('offset', None))
self.assertEqual('10', et.get('length', None))
self.assertEqual(0, len(list(et)))
self.assertEqual(None, et.text)
def test_parse_empty(self):
data = GetCommand.parse_response(ET.fromstring(GET_FILE_BLOCK.format(data='')))
self.assertEqual(six.b(""), data)
def test_parse(self):
data_str = base64.b64encode(six.b("File Data")).decode('ascii')
data = GetCommand.parse_response(ET.fromstring(GET_FILE_BLOCK.format(data=data_str)))
self.assertEqual(six.b("File Data"), data)
def test_parse_error(self):
errinfo = GetCommand.parse_response(
ET.fromstring(ERROR_BLOCK.format(command='get_file', errno=1, errtext="error text")))
self.assertEqual(errinfo.errno, 1)
self.assertEqual(errinfo.message, "error text")
def test_parse_wrong_response(self):
self.assertRaises(ResponseParseError, GetCommand.parse_response, ET.fromstring('<rm />'))
class TestPutCommand(unittest.TestCase):
def test_init_defaults(self):
<|code_end|>
. Use current file imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and context (classes, functions, or code) from other files:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | putcommand = PutCommand(path='/a/path/here', file_data=six.b("some file data")) |
Continue the code snippet: <|code_start|> self.assertEqual('/a/path/here', et.get('path'))
self.assertEqual('5', et.get('offset', None))
self.assertEqual('true', et.get('truncate'))
self.assertEqual(1, len(list(et)))
data = et.find('./data')
self.assertEqual(base64.b64encode(six.b("some file data")), six.b(data.text))
def test_put_command_no_data(self):
self.assertRaises(FileSystemServiceException, PutCommand, path='/a/path/here')
def test_put_command_both_data(self):
self.assertRaises(FileSystemServiceException, PutCommand, path='/a/path/here',
file_data=six.b("some file data"), server_file='/a/file/on/server')
def test_parse(self):
self.assertIsNone(PutCommand.parse_response(ET.fromstring('<put_file />')))
def test_parse_error(self):
errinfo = PutCommand.parse_response(
ET.fromstring(ERROR_BLOCK.format(command='put_file', errno=1, errtext="error text")))
self.assertEqual(errinfo.errno, 1)
self.assertEqual(errinfo.message, "error text")
def test_parse_wrong_response(self):
self.assertRaises(ResponseParseError, PutCommand.parse_response, ET.fromstring('<rm />'))
class TestDeleteCommand(unittest.TestCase):
def test_init(self):
<|code_end|>
. Use current file imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and context (classes, functions, or code) from other files:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | delcommand = DeleteCommand(path='/a/path/here') |
Based on the snippet: <|code_start|>
def test_parse_wrong_response(self):
self.assertRaises(ResponseParseError, PutCommand.parse_response, ET.fromstring('<rm />'))
class TestDeleteCommand(unittest.TestCase):
def test_init(self):
delcommand = DeleteCommand(path='/a/path/here')
et = delcommand.get_etree()
self.assertEqual(et.tag, 'rm')
self.assertEqual('/a/path/here', et.get('path'))
self.assertEqual(0, len(list(et)))
self.assertEqual(None, et.text)
def test_parse(self):
self.assertIsNone(DeleteCommand.parse_response(ET.fromstring('<rm />')))
def test_parse_error(self):
errinfo = DeleteCommand.parse_response(
ET.fromstring(ERROR_BLOCK.format(command='rm', errno=1, errtext="error text")))
self.assertEqual(errinfo.errno, 1)
self.assertEqual(errinfo.message, "error text")
def test_parse_wrong_response(self):
self.assertRaises(ResponseParseError, DeleteCommand.parse_response, ET.fromstring('<put_file />'))
class TestCommandBlock(unittest.TestCase):
def test_init(self):
<|code_end|>
, predict the immediate next line with the help of imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and context (classes, functions, sometimes code) from other files:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | command_block = FileSystemServiceCommandBlock() |
Using the snippet: <|code_start|>class TestCommandBlock(unittest.TestCase):
def test_init(self):
command_block = FileSystemServiceCommandBlock()
et = command_block.get_etree()
self.assertEqual(et.tag, 'commands')
self.assertEqual(0, len(list(et)))
self.assertEqual(0, len(et.keys()))
def test_add_command(self):
command_block = FileSystemServiceCommandBlock()
command_block.add_command(DeleteCommand(path='/a/path'))
et = command_block.get_etree()
self.assertEqual(et.tag, 'commands')
self.assertEqual(1, len(list(et)))
self.assertEqual(0, len(et.keys()))
self.assertIsNotNone(et.find('./{}'.format(DeleteCommand.command_name)))
def test_get_command_string(self):
command_block = FileSystemServiceCommandBlock()
self.assertEqual(six.b('<commands />'), command_block.get_command_string())
command_block.add_command(DeleteCommand(path='/a/path'))
self.assertEqual(six.b('<commands><rm path="/a/path" /></commands>'), command_block.get_command_string())
class TestFileSystemServiceAPI(HttpTestBase):
def setUp(self):
HttpTestBase.setUp(self)
self.fss_api = self.dc.get_fss_api()
self.sci_api = mock.Mock()
self.fss_api._sci_api = self.sci_api
<|code_end|>
, determine the next line of code. You have imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and context (class names, function names, or code) available:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | self.target = AllTarget() |
Given snippet: <|code_start|> self.assertEqual(errinfo.message, "error text")
def test_parse_wrong_response(self):
self.assertRaises(ResponseParseError, DeleteCommand.parse_response, ET.fromstring('<put_file />'))
class TestCommandBlock(unittest.TestCase):
def test_init(self):
command_block = FileSystemServiceCommandBlock()
et = command_block.get_etree()
self.assertEqual(et.tag, 'commands')
self.assertEqual(0, len(list(et)))
self.assertEqual(0, len(et.keys()))
def test_add_command(self):
command_block = FileSystemServiceCommandBlock()
command_block.add_command(DeleteCommand(path='/a/path'))
et = command_block.get_etree()
self.assertEqual(et.tag, 'commands')
self.assertEqual(1, len(list(et)))
self.assertEqual(0, len(et.keys()))
self.assertIsNotNone(et.find('./{}'.format(DeleteCommand.command_name)))
def test_get_command_string(self):
command_block = FileSystemServiceCommandBlock()
self.assertEqual(six.b('<commands />'), command_block.get_command_string())
command_block.add_command(DeleteCommand(path='/a/path'))
self.assertEqual(six.b('<commands><rm path="/a/path" /></commands>'), command_block.get_command_string())
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import base64
import unittest
import mock
import six
from xml.etree import ElementTree as ET
from devicecloud.file_system_service import FileInfo, DirectoryInfo, FileSystemServiceException, \
_parse_command_response, ResponseParseError, \
ErrorInfo, LsInfo, _parse_error_tree, LsCommand, GetCommand, PutCommand, DeleteCommand, \
FileSystemServiceCommandBlock
from devicecloud.sci import AllTarget
from devicecloud.test.unit.test_utilities import HttpTestBase
and context:
# Path: devicecloud/file_system_service.py
# class FileSystemServiceException(Exception):
# class ResponseParseError(FileSystemServiceException):
# class ErrorInfo(object):
# class FileInfo(object):
# class DirectoryInfo(object):
# class FileSystemServiceCommandBlock(object):
# class FileSystemServiceCommandABC(object):
# class LsCommand(FileSystemServiceCommandABC):
# class GetCommand(FileSystemServiceCommandABC):
# class PutCommand(FileSystemServiceCommandABC):
# class DeleteCommand(FileSystemServiceCommandABC):
# class FileSystemServiceAPI(SCIAPIBase):
# def _parse_command_response(response):
# def _parse_error_tree(error):
# def __init__(self, errno, message):
# def __str__(self):
# def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
# def get_data(self):
# def delete(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self, fssapi, device_id, path, last_modified):
# def list_contents(self):
# def __str__(self):
# def __eq__(self, other):
# def __init__(self):
# def add_command(self, command):
# def get_command_string(self):
# def get_etree(self):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, hash='any'):
# def get_etree(self):
# def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
# def __init__(self, path, offset=None, length=None):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path, file_data=None, server_file=None, offset=None, truncate=False):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def __init__(self, path):
# def get_etree(self):
# def parse_response(cls, response, **kwargs):
# def send_command_block(self, target, command_block):
# def list_files(self, target, path, hash='any'):
# def get_file(self, target, path, offset=None, length=None):
# def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
# def delete_file(self, target, path):
# def get_modified_items(self, target, path, last_modified_cutoff):
# def exists(self, target, path, path_sep="/"):
# FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
#
# Path: devicecloud/sci.py
# class AllTarget(TargetABC):
# """Target all devices"""
#
# def __init__(self):
# pass
#
# def to_xml(self):
# return '<device id="all"/>'
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
which might include code, classes, or functions. Output only the next line. | class TestFileSystemServiceAPI(HttpTestBase): |
Here is a snippet: <|code_start|> "resultTotalRows": "0",
"requestedStartRow": "0",
"resultSize": "0",
"requestedSize": "1000",
"remainingSize": "0",
"items": []
}
"""
class TestMonitorAPI(HttpTestBase):
def test_create_tcp_monitor(self):
self.prepare_response("POST", "/ws/Monitor", data=CREATE_MONITOR_GOOD_RESPONSE)
mon = self.dc.monitor.create_tcp_monitor(['topA', 'topB'], batch_size=10, batch_duration=0,
compression='gzip', format_type='json')
self.assertEqual(self._get_last_request().body, six.b(CREATE_TCP_MONITOR_GOOD_REQUEST))
self.assertEqual(mon.get_id(), 178008)
def test_create_http_monitor(self):
self.prepare_response("POST", "/ws/Monitor", data=CREATE_MONITOR_GOOD_RESPONSE)
mon = self.dc.monitor.create_http_monitor(['topA', 'topB'], 'http://digi.com', transport_token=None,
transport_method='PUT', connect_timeout=0, response_timeout=0,
batch_size=1, batch_duration=0, compression='none',
format_type='json')
self.assertEqual(self._get_last_request().body, six.b(CREATE_HTTP_MONITOR_GOOD_REQUEST))
self.assertEqual(mon.get_id(), 178008)
def test_get_tcp_monitors(self):
self.prepare_response("GET", "/ws/Monitor", data=GET_TCP_MONITOR_SINGLE_FOUND)
<|code_end|>
. Write the next line using the current file imports:
from devicecloud.monitor import MON_TOPIC_ATTR, MON_TRANSPORT_TYPE_ATTR
from devicecloud.test.unit.test_utilities import HttpTestBase
import six
and context from other files:
# Path: devicecloud/monitor.py
# MON_TOPIC_ATTR = Attribute("monTopic")
#
# MON_TRANSPORT_TYPE_ATTR = Attribute("monTransportType")
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
, which may include functions, classes, or code. Output only the next line. | mons = list(self.dc.monitor.get_monitors((MON_TOPIC_ATTR == "DeviceCore") & |
Continue the code snippet: <|code_start|> "requestedStartRow": "0",
"resultSize": "0",
"requestedSize": "1000",
"remainingSize": "0",
"items": []
}
"""
class TestMonitorAPI(HttpTestBase):
def test_create_tcp_monitor(self):
self.prepare_response("POST", "/ws/Monitor", data=CREATE_MONITOR_GOOD_RESPONSE)
mon = self.dc.monitor.create_tcp_monitor(['topA', 'topB'], batch_size=10, batch_duration=0,
compression='gzip', format_type='json')
self.assertEqual(self._get_last_request().body, six.b(CREATE_TCP_MONITOR_GOOD_REQUEST))
self.assertEqual(mon.get_id(), 178008)
def test_create_http_monitor(self):
self.prepare_response("POST", "/ws/Monitor", data=CREATE_MONITOR_GOOD_RESPONSE)
mon = self.dc.monitor.create_http_monitor(['topA', 'topB'], 'http://digi.com', transport_token=None,
transport_method='PUT', connect_timeout=0, response_timeout=0,
batch_size=1, batch_duration=0, compression='none',
format_type='json')
self.assertEqual(self._get_last_request().body, six.b(CREATE_HTTP_MONITOR_GOOD_REQUEST))
self.assertEqual(mon.get_id(), 178008)
def test_get_tcp_monitors(self):
self.prepare_response("GET", "/ws/Monitor", data=GET_TCP_MONITOR_SINGLE_FOUND)
mons = list(self.dc.monitor.get_monitors((MON_TOPIC_ATTR == "DeviceCore") &
<|code_end|>
. Use current file imports:
from devicecloud.monitor import MON_TOPIC_ATTR, MON_TRANSPORT_TYPE_ATTR
from devicecloud.test.unit.test_utilities import HttpTestBase
import six
and context (classes, functions, or code) from other files:
# Path: devicecloud/monitor.py
# MON_TOPIC_ATTR = Attribute("monTopic")
#
# MON_TRANSPORT_TYPE_ATTR = Attribute("monTransportType")
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | (MON_TRANSPORT_TYPE_ATTR == "tcp"))) |
Given the code snippet: <|code_start|> "monStatus": "INACTIVE",
"monBatchDuration": "10"
},
{
"monId": "178007",
"cstId": "7603",
"monTopic": "DeviceCore,FileDataCore,FileData,DataPoint",
"monTransportType": "http",
"monFormatType": "json",
"monBatchSize": "1",
"monCompression": "none",
"monStatus": "INACTIVE",
"monBatchDuration": "0"
}
]
}
"""
GET_MONITOR_NONE_FOUND = """\
{
"resultTotalRows": "0",
"requestedStartRow": "0",
"resultSize": "0",
"requestedSize": "1000",
"remainingSize": "0",
"items": []
}
"""
<|code_end|>
, generate the next line using the imports in this file:
from devicecloud.monitor import MON_TOPIC_ATTR, MON_TRANSPORT_TYPE_ATTR
from devicecloud.test.unit.test_utilities import HttpTestBase
import six
and context (functions, classes, or occasionally code) from other files:
# Path: devicecloud/monitor.py
# MON_TOPIC_ATTR = Attribute("monTopic")
#
# MON_TRANSPORT_TYPE_ATTR = Attribute("monTransportType")
#
# Path: devicecloud/test/unit/test_utilities.py
# class HttpTestBase(unittest.TestCase):
# def setUp(self):
# httpretty.enable()
# # setup Device Cloud ping response
# self.prepare_response("GET", "/ws/DeviceCore?size=1", "", status=200)
# self.dc = DeviceCloud('user', 'pass')
#
# def tearDown(self):
# httpretty.disable()
# httpretty.reset()
#
# def _get_last_request(self):
# return httpretty.last_request()
#
# def _get_last_request_params(self):
# # Get the query params from the last request as a dictionary
# params = urllib_parse.parse_qs(urllib_parse.urlparse(self._get_last_request().path).query)
# return {k: v[0] for k, v in params.items()} # convert from list values to single-value
#
# def prepare_response(self, method, path, data=None, status=200, match_querystring=False, **kwargs):
# # TODO:
# # Should probably assert on more request headers and
# # respond with correct content type, etc.
# if data is not None:
# kwargs['body'] = data
# httpretty.register_uri(method,
# "https://devicecloud.digi.com{}".format(path),
# match_querystring=match_querystring,
# status=status,
# **kwargs)
#
# def prepare_json_response(self, method, path, data, status=200):
# self.prepare_response(method, path, json.dumps(data), status=status)
. Output only the next line. | class TestMonitorAPI(HttpTestBase): |
Given snippet: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
"""Provide access to Device Cloud filedata API"""
fd_path = Attribute("fdPath")
fd_name = Attribute("fdName")
fd_type = Attribute("fdType")
fd_customer_id = Attribute("customer_id")
fd_created_date = Attribute("fdCreatedDate")
fd_last_modified_date = Attribute("fdLastModifiedDate")
fd_content_type = Attribute("fdContentType")
fd_size = Attribute("fdSize")
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import base64
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
and context:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
which might include code, classes, or functions. Output only the next line. | class FileDataAPI(APIBase): |
Given snippet: <|code_start|>fd_path = Attribute("fdPath")
fd_name = Attribute("fdName")
fd_type = Attribute("fdType")
fd_customer_id = Attribute("customer_id")
fd_created_date = Attribute("fdCreatedDate")
fd_last_modified_date = Attribute("fdLastModifiedDate")
fd_content_type = Attribute("fdContentType")
fd_size = Attribute("fdSize")
class FileDataAPI(APIBase):
"""Encapsulate data and logic required to interact with Device Cloud file data store"""
def get_filedata(self, condition=None, page_size=1000):
"""Return a generator over all results matching the provided condition
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the filedata that will be retrieved from
file data store. If a condition is unspecified, the following condition
will be used ``fd_path == '~/'``. This condition will match all file
data in this accounts "home" directory (a sensible root).
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page. Regardless
of the size specified, :meth:`.get_filedata` will continue to fetch pages
and yield results until all items have been fetched.
:return: Generator yielding :class:`.FileDataObject` instances matching the
provided conditions.
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import base64
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
and context:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
which might include code, classes, or functions. Output only the next line. | condition = validate_type(condition, type(None), Expression, *six.string_types) |
Given the code snippet: <|code_start|>
def __init__(self, fdapi, json_data):
self._fdapi = fdapi
self._json_data = json_data
def delete(self):
"""Delete this file or directory"""
return self._fdapi.delete_file(self.get_full_path())
def get_data(self):
"""Get the data associated with this filedata object
:returns: Data associated with this object or None if none exists
:rtype: str (Python2)/bytes (Python3) or None
"""
# NOTE: we assume that the "embed" option is used
base64_data = self._json_data.get("fdData")
if base64_data is None:
return None
else:
# need to convert to bytes() with python 3
return base64.decodestring(six.b(base64_data))
def get_type(self):
"""Get the type (file/directory) of this object"""
return self._json_data["fdType"]
def get_last_modified_date(self):
"""Get the last modified datetime of this object"""
<|code_end|>
, generate the next line using the imports in this file:
import base64
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
and context (functions, classes, or occasionally code) from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | return iso8601_to_dt(self._json_data["fdLastModifiedDate"]) |
Continue the code snippet: <|code_start|>fd_path = Attribute("fdPath")
fd_name = Attribute("fdName")
fd_type = Attribute("fdType")
fd_customer_id = Attribute("customer_id")
fd_created_date = Attribute("fdCreatedDate")
fd_last_modified_date = Attribute("fdLastModifiedDate")
fd_content_type = Attribute("fdContentType")
fd_size = Attribute("fdSize")
class FileDataAPI(APIBase):
"""Encapsulate data and logic required to interact with Device Cloud file data store"""
def get_filedata(self, condition=None, page_size=1000):
"""Return a generator over all results matching the provided condition
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the filedata that will be retrieved from
file data store. If a condition is unspecified, the following condition
will be used ``fd_path == '~/'``. This condition will match all file
data in this accounts "home" directory (a sensible root).
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page. Regardless
of the size specified, :meth:`.get_filedata` will continue to fetch pages
and yield results until all items have been fetched.
:return: Generator yielding :class:`.FileDataObject` instances matching the
provided conditions.
"""
<|code_end|>
. Use current file imports:
import base64
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
and context (classes, functions, or code) from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | condition = validate_type(condition, type(None), Expression, *six.string_types) |
Given the code snippet: <|code_start|>
class AsyncRequestProxy(object):
"""An object representing an asynychronous SCI request.
Can be used for polling the status of the corresponding request.
:ivar job_id: the ID in device cloud of the job
:ivar response: the response to the request if completed
:ivar completed: True if the request has completed, False otherwise; queries on read
"""
def __init__(self, job_id, conn):
self.job_id = job_id
self._conn = conn
self.response = None
@property
def completed(self):
if self.response is not None:
return True
resp = self._conn.get('/ws/sci/{0}'.format(self.job_id))
dom = ET.fromstring(resp.content)
status = dom.find('.//status')
if status is not None and status.text == 'complete':
self.response = resp.content
return True
else:
return False
<|code_end|>
, generate the next line using the imports in this file:
from devicecloud.apibase import APIBase
from xml.etree import ElementTree as ET
import six
and context (functions, classes, or occasionally code) from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
. Output only the next line. | class ServerCommandInterfaceAPI(APIBase): |
Based on the snippet: <|code_start|> :type last_modified: int
:param size: The size of the file
:type size: int
:param hash: The files hash
:param hash_type: The method used to produce the hash
:ivar device_id: The Device ID of the device this file is on
:ivar path: The path to this file on the device
:ivar last_modified: The last time the file was modified
:ivar size: The size of the file
:ivar hash: The files hash
:ivar hash_type: The method used to produce the hash
"""
def __init__(self, fssapi, device_id, path, last_modified, size, hash, hash_type):
self._fssapi = fssapi
self.device_id = device_id
self.path = path
self.last_modified = last_modified
self.size = size
self.hash = hash
self.hash_type = hash_type
def get_data(self):
"""Get the contents of this file
:return: The contents of this file
:rtype: six.binary_type
"""
<|code_end|>
, predict the immediate next line with the help of imports:
import base64
import xml.etree.ElementTree as ET
import six
from collections import namedtuple
from devicecloud.sci import DeviceTarget
from devicecloud.apibase import SCIAPIBase
and context (classes, functions, sometimes code) from other files:
# Path: devicecloud/sci.py
# class DeviceTarget(TargetABC):
# """Target a specific device"""
#
# def __init__(self, device_id):
# self._device_id = device_id
#
# def to_xml(self):
# return '<device id="{}"/>'.format(self._device_id)
#
# Path: devicecloud/apibase.py
# class SCIAPIBase(object):
# """Base class for API classes using SCI to communicate
#
# :type _sci_api: devicecloud.sci.ServerCommandInterfaceAPI
# """
# def __init__(self, sci_api):
# self._sci_api = sci_api
. Output only the next line. | target = DeviceTarget(self.device_id) |
Given the following code snippet before the placeholder: <|code_start|> def parse_response(cls, response, **kwargs):
"""Parse the server response for this put file command
This will parse xml of the following form::
<rm />
or with an error::
<rm>
<error ... />
</rm>
:param response: The XML root of the response for a delete file command
:type response: :class:`xml.etree.ElementTree.Element`
:return: None if everything was ok or an :class:`~ErrorInfo` if the xml contained an error
"""
if response.tag != cls.command_name:
raise ResponseParseError(
"Received response of type {}, DeleteCommand can only parse responses of type {}".format(response.tag,
cls.command_name))
error = response.find('./error')
if error is not None:
return _parse_error_tree(error)
return None
FILE_SYSTEM_COMMANDS = [LsCommand, GetCommand, PutCommand, DeleteCommand]
<|code_end|>
, predict the next line using imports from the current file:
import base64
import xml.etree.ElementTree as ET
import six
from collections import namedtuple
from devicecloud.sci import DeviceTarget
from devicecloud.apibase import SCIAPIBase
and context including class names, function names, and sometimes code from other files:
# Path: devicecloud/sci.py
# class DeviceTarget(TargetABC):
# """Target a specific device"""
#
# def __init__(self, device_id):
# self._device_id = device_id
#
# def to_xml(self):
# return '<device id="{}"/>'.format(self._device_id)
#
# Path: devicecloud/apibase.py
# class SCIAPIBase(object):
# """Base class for API classes using SCI to communicate
#
# :type _sci_api: devicecloud.sci.ServerCommandInterfaceAPI
# """
# def __init__(self, sci_api):
# self._sci_api = sci_api
. Output only the next line. | class FileSystemServiceAPI(SCIAPIBase): |
Here is a snippet: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
def show_group_tree(dc):
stats = {} # group -> devices count including children
def count_nodes(group):
count_for_this_node = \
len(list(dc.devicecore.get_devices(group_path == group.get_path())))
subnode_count = 0
for child in group.get_children():
subnode_count += count_nodes(child)
total = count_for_this_node + subnode_count
stats[group] = total
return total
count_nodes(dc.devicecore.get_group_tree_root())
print(stats)
dc.devicecore.get_group_tree_root().print_subtree()
if __name__ == '__main__':
dc = get_authenticated_dc()
devices = dc.devicecore.get_devices(
<|code_end|>
. Write the next line using the current file imports:
from devicecloud.devicecore import dev_mac, group_path
from devicecloud.examples.example_helpers import get_authenticated_dc
and context from other files:
# Path: devicecloud/devicecore.py
# ADD_GROUP_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <grpPath>{group_path}</grpPath>
# </DeviceCore>
# """
# TAGS_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <dpTags>{tags}</dpTags>
# </DeviceCore>
# """
# class DeviceCoreAPI(APIBase):
# class Group(object):
# class Device(object):
# def __init__(self, conn, sci):
# def get_devices(self, condition=None, page_size=1000):
# def get_group_tree_root(self, page_size=1000):
# def get_groups(self, condition=None, page_size=1000):
# def delete_device(self, dev):
# def provision_device(self, **kwargs):
# def provision_devices(self, devices):
# def write_tag(tag, val):
# def maybe_write_element(tag, val):
# def __init__(self, group_id, name, description, path, parent_id):
# def from_json(cls, json_data):
# def __repr__(self):
# def print_subtree(self, fobj=sys.stdout, level=0):
# def is_root(self):
# def add_child(self, group):
# def get_children(self):
# def get_id(self):
# def get_name(self):
# def get_description(self):
# def get_path(self):
# def get_parent_id(self):
# def __init__(self, conn, sci, device_json):
# def __repr__(self):
# def get_device_json(self, use_cached=True):
# def get_tags(self, use_cached=True):
# def is_connected(self, use_cached=True):
# def get_connectware_id(self, use_cached=True):
# def get_device_id(self, use_cached=True):
# def get_ip(self, use_cached=True):
# def get_mac(self, use_cached=True):
# def get_mac_last4(self, use_cached=True):
# def get_registration_dt(self, use_cached=True):
# def get_meid(self, use_cached=True):
# def get_customer_id(self, use_cached=True):
# def get_group_id(self, use_cached=True):
# def get_group_path(self, use_cached=True):
# def get_vendor_id(self, use_cached=True):
# def get_device_type(self, use_cached=True):
# def get_firmware_level(self, use_cached=True):
# def get_firmware_level_description(self, use_cached=True):
# def get_restricted_status(self, use_cached=True):
# def get_last_known_ip(self, use_cached=True):
# def get_global_ip(self, use_cached=True):
# def get_last_connected_dt(self, use_cached=True):
# def get_contact(self, use_cached=True):
# def get_description(self, use_cached=True):
# def get_location(self, use_cached=True):
# def get_latlon(self, use_cached=True):
# def get_user_metadata(self, use_cached=True):
# def get_zb_pan_id(self, use_cached=True):
# def get_zb_extended_address(self, use_cached=True):
# def get_server_id(self, use_cached=True):
# def get_provision_id(self, use_cached=True):
# def get_current_connect_pw(self, use_cached=True):
# def add_to_group(self, group_path):
# def remove_from_group(self):
# def add_tag(self, new_tags):
# def remove_tag(self, tag):
#
# Path: devicecloud/examples/example_helpers.py
# def get_authenticated_dc():
# while True:
# base_url = os.environ.get('DC_BASE_URL', 'https://devicecloud.digi.com')
#
# username = os.environ.get('DC_USERNAME', None)
# if not username:
# username = input("username: ")
#
# password = os.environ.get('DC_PASSWORD', None)
# if not password:
# password = getpass("password: ")
#
# dc = DeviceCloud(username, password, base_url=base_url)
# if dc.has_valid_credentials():
# print("Credentials accepted!")
# return dc
# else:
# print("Invalid username or password provided, try again")
, which may include functions, classes, or code. Output only the next line. | (dev_mac == '00:40:9D:50:B0:EA') |
Given the following code snippet before the placeholder: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
def show_group_tree(dc):
stats = {} # group -> devices count including children
def count_nodes(group):
count_for_this_node = \
<|code_end|>
, predict the next line using imports from the current file:
from devicecloud.devicecore import dev_mac, group_path
from devicecloud.examples.example_helpers import get_authenticated_dc
and context including class names, function names, and sometimes code from other files:
# Path: devicecloud/devicecore.py
# ADD_GROUP_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <grpPath>{group_path}</grpPath>
# </DeviceCore>
# """
# TAGS_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <dpTags>{tags}</dpTags>
# </DeviceCore>
# """
# class DeviceCoreAPI(APIBase):
# class Group(object):
# class Device(object):
# def __init__(self, conn, sci):
# def get_devices(self, condition=None, page_size=1000):
# def get_group_tree_root(self, page_size=1000):
# def get_groups(self, condition=None, page_size=1000):
# def delete_device(self, dev):
# def provision_device(self, **kwargs):
# def provision_devices(self, devices):
# def write_tag(tag, val):
# def maybe_write_element(tag, val):
# def __init__(self, group_id, name, description, path, parent_id):
# def from_json(cls, json_data):
# def __repr__(self):
# def print_subtree(self, fobj=sys.stdout, level=0):
# def is_root(self):
# def add_child(self, group):
# def get_children(self):
# def get_id(self):
# def get_name(self):
# def get_description(self):
# def get_path(self):
# def get_parent_id(self):
# def __init__(self, conn, sci, device_json):
# def __repr__(self):
# def get_device_json(self, use_cached=True):
# def get_tags(self, use_cached=True):
# def is_connected(self, use_cached=True):
# def get_connectware_id(self, use_cached=True):
# def get_device_id(self, use_cached=True):
# def get_ip(self, use_cached=True):
# def get_mac(self, use_cached=True):
# def get_mac_last4(self, use_cached=True):
# def get_registration_dt(self, use_cached=True):
# def get_meid(self, use_cached=True):
# def get_customer_id(self, use_cached=True):
# def get_group_id(self, use_cached=True):
# def get_group_path(self, use_cached=True):
# def get_vendor_id(self, use_cached=True):
# def get_device_type(self, use_cached=True):
# def get_firmware_level(self, use_cached=True):
# def get_firmware_level_description(self, use_cached=True):
# def get_restricted_status(self, use_cached=True):
# def get_last_known_ip(self, use_cached=True):
# def get_global_ip(self, use_cached=True):
# def get_last_connected_dt(self, use_cached=True):
# def get_contact(self, use_cached=True):
# def get_description(self, use_cached=True):
# def get_location(self, use_cached=True):
# def get_latlon(self, use_cached=True):
# def get_user_metadata(self, use_cached=True):
# def get_zb_pan_id(self, use_cached=True):
# def get_zb_extended_address(self, use_cached=True):
# def get_server_id(self, use_cached=True):
# def get_provision_id(self, use_cached=True):
# def get_current_connect_pw(self, use_cached=True):
# def add_to_group(self, group_path):
# def remove_from_group(self):
# def add_tag(self, new_tags):
# def remove_tag(self, tag):
#
# Path: devicecloud/examples/example_helpers.py
# def get_authenticated_dc():
# while True:
# base_url = os.environ.get('DC_BASE_URL', 'https://devicecloud.digi.com')
#
# username = os.environ.get('DC_USERNAME', None)
# if not username:
# username = input("username: ")
#
# password = os.environ.get('DC_PASSWORD', None)
# if not password:
# password = getpass("password: ")
#
# dc = DeviceCloud(username, password, base_url=base_url)
# if dc.has_valid_credentials():
# print("Credentials accepted!")
# return dc
# else:
# print("Invalid username or password provided, try again")
. Output only the next line. | len(list(dc.devicecore.get_devices(group_path == group.get_path()))) |
Based on the snippet: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
def show_group_tree(dc):
stats = {} # group -> devices count including children
def count_nodes(group):
count_for_this_node = \
len(list(dc.devicecore.get_devices(group_path == group.get_path())))
subnode_count = 0
for child in group.get_children():
subnode_count += count_nodes(child)
total = count_for_this_node + subnode_count
stats[group] = total
return total
count_nodes(dc.devicecore.get_group_tree_root())
print(stats)
dc.devicecore.get_group_tree_root().print_subtree()
if __name__ == '__main__':
<|code_end|>
, predict the immediate next line with the help of imports:
from devicecloud.devicecore import dev_mac, group_path
from devicecloud.examples.example_helpers import get_authenticated_dc
and context (classes, functions, sometimes code) from other files:
# Path: devicecloud/devicecore.py
# ADD_GROUP_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <grpPath>{group_path}</grpPath>
# </DeviceCore>
# """
# TAGS_TEMPLATE = \
# """
# <DeviceCore>
# <devConnectwareId>{connectware_id}</devConnectwareId>
# <dpTags>{tags}</dpTags>
# </DeviceCore>
# """
# class DeviceCoreAPI(APIBase):
# class Group(object):
# class Device(object):
# def __init__(self, conn, sci):
# def get_devices(self, condition=None, page_size=1000):
# def get_group_tree_root(self, page_size=1000):
# def get_groups(self, condition=None, page_size=1000):
# def delete_device(self, dev):
# def provision_device(self, **kwargs):
# def provision_devices(self, devices):
# def write_tag(tag, val):
# def maybe_write_element(tag, val):
# def __init__(self, group_id, name, description, path, parent_id):
# def from_json(cls, json_data):
# def __repr__(self):
# def print_subtree(self, fobj=sys.stdout, level=0):
# def is_root(self):
# def add_child(self, group):
# def get_children(self):
# def get_id(self):
# def get_name(self):
# def get_description(self):
# def get_path(self):
# def get_parent_id(self):
# def __init__(self, conn, sci, device_json):
# def __repr__(self):
# def get_device_json(self, use_cached=True):
# def get_tags(self, use_cached=True):
# def is_connected(self, use_cached=True):
# def get_connectware_id(self, use_cached=True):
# def get_device_id(self, use_cached=True):
# def get_ip(self, use_cached=True):
# def get_mac(self, use_cached=True):
# def get_mac_last4(self, use_cached=True):
# def get_registration_dt(self, use_cached=True):
# def get_meid(self, use_cached=True):
# def get_customer_id(self, use_cached=True):
# def get_group_id(self, use_cached=True):
# def get_group_path(self, use_cached=True):
# def get_vendor_id(self, use_cached=True):
# def get_device_type(self, use_cached=True):
# def get_firmware_level(self, use_cached=True):
# def get_firmware_level_description(self, use_cached=True):
# def get_restricted_status(self, use_cached=True):
# def get_last_known_ip(self, use_cached=True):
# def get_global_ip(self, use_cached=True):
# def get_last_connected_dt(self, use_cached=True):
# def get_contact(self, use_cached=True):
# def get_description(self, use_cached=True):
# def get_location(self, use_cached=True):
# def get_latlon(self, use_cached=True):
# def get_user_metadata(self, use_cached=True):
# def get_zb_pan_id(self, use_cached=True):
# def get_zb_extended_address(self, use_cached=True):
# def get_server_id(self, use_cached=True):
# def get_provision_id(self, use_cached=True):
# def get_current_connect_pw(self, use_cached=True):
# def add_to_group(self, group_path):
# def remove_from_group(self):
# def add_tag(self, new_tags):
# def remove_tag(self, tag):
#
# Path: devicecloud/examples/example_helpers.py
# def get_authenticated_dc():
# while True:
# base_url = os.environ.get('DC_BASE_URL', 'https://devicecloud.digi.com')
#
# username = os.environ.get('DC_USERNAME', None)
# if not username:
# username = input("username: ")
#
# password = os.environ.get('DC_PASSWORD', None)
# if not password:
# password = getpass("password: ")
#
# dc = DeviceCloud(username, password, base_url=base_url)
# if dc.has_valid_credentials():
# print("Credentials accepted!")
# return dc
# else:
# print("Invalid username or password provided, try again")
. Output only the next line. | dc = get_authenticated_dc() |
Predict the next line after this snippet: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
"""Module with functionality for building queries against cloud resources
This functionality is somewhat poorly documented in Device Cloud documentation
in the `Compound Queries <http://ftp1.digi.com/support/documentation/html/90002008/90002008_P/Default.htm#ProgrammingTopics/ResourceConventions.htm#CompQueries%3FTocPath%3DDevice%20Cloud%20Programming%20Guide%7CResource%20Conventions%7C_____3>`_
section.
"""
def _quoted(value):
"""Return a single-quoted and escaped (percent-encoded) version of value
This function will also perform transforms of known data types to a representation
that will be handled by Device Cloud. For instance, datetime objects will be
converted to ISO8601.
"""
if isinstance(value, datetime.datetime):
<|code_end|>
using the current file's imports:
import datetime
from devicecloud.util import isoformat, to_none_or_dt
and any relevant context from other files:
# Path: devicecloud/util.py
# def isoformat(dt):
# """Return an ISO-8601 formatted string from the provided datetime object"""
# if not isinstance(dt, datetime.datetime):
# raise TypeError("Must provide datetime.datetime object to isoformat")
#
# if dt.tzinfo is None:
# raise ValueError("naive datetime objects are not allowed beyond the library boundaries")
#
# return dt.isoformat().replace("+00:00", "Z") # nicer to look at
#
# def to_none_or_dt(input):
# """Convert ``input`` to either None or a datetime object
#
# If the input is None, None will be returned.
# If the input is a datetime object, it will be converted to a datetime
# object with UTC timezone info. If the datetime object is naive, then
# this method will assume the object is specified according to UTC and
# not local or some other timezone.
# If the input to the function is a string, this method will attempt to
# parse the input as an ISO-8601 formatted string.
#
# :param input: Input data (expected to be either str, None, or datetime object)
# :return: datetime object from input or None if already None
# :rtype: datetime or None
#
# """
# if input is None:
# return input
# elif isinstance(input, datetime.datetime):
# arrow_dt = arrow.Arrow.fromdatetime(input, input.tzinfo or 'utc')
# return arrow_dt.to('utc').datetime
# if isinstance(input, six.string_types):
# # try to convert from ISO8601
# return iso8601_to_dt(input)
# else:
# raise TypeError("Not a string, NoneType, or datetime object")
. Output only the next line. | value = isoformat(to_none_or_dt(value)) |
Given the code snippet: <|code_start|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015-2018 Digi International Inc.
"""Module with functionality for building queries against cloud resources
This functionality is somewhat poorly documented in Device Cloud documentation
in the `Compound Queries <http://ftp1.digi.com/support/documentation/html/90002008/90002008_P/Default.htm#ProgrammingTopics/ResourceConventions.htm#CompQueries%3FTocPath%3DDevice%20Cloud%20Programming%20Guide%7CResource%20Conventions%7C_____3>`_
section.
"""
def _quoted(value):
"""Return a single-quoted and escaped (percent-encoded) version of value
This function will also perform transforms of known data types to a representation
that will be handled by Device Cloud. For instance, datetime objects will be
converted to ISO8601.
"""
if isinstance(value, datetime.datetime):
<|code_end|>
, generate the next line using the imports in this file:
import datetime
from devicecloud.util import isoformat, to_none_or_dt
and context (functions, classes, or occasionally code) from other files:
# Path: devicecloud/util.py
# def isoformat(dt):
# """Return an ISO-8601 formatted string from the provided datetime object"""
# if not isinstance(dt, datetime.datetime):
# raise TypeError("Must provide datetime.datetime object to isoformat")
#
# if dt.tzinfo is None:
# raise ValueError("naive datetime objects are not allowed beyond the library boundaries")
#
# return dt.isoformat().replace("+00:00", "Z") # nicer to look at
#
# def to_none_or_dt(input):
# """Convert ``input`` to either None or a datetime object
#
# If the input is None, None will be returned.
# If the input is a datetime object, it will be converted to a datetime
# object with UTC timezone info. If the datetime object is naive, then
# this method will assume the object is specified according to UTC and
# not local or some other timezone.
# If the input to the function is a string, this method will attempt to
# parse the input as an ISO-8601 formatted string.
#
# :param input: Input data (expected to be either str, None, or datetime object)
# :return: datetime object from input or None if already None
# :rtype: datetime or None
#
# """
# if input is None:
# return input
# elif isinstance(input, datetime.datetime):
# arrow_dt = arrow.Arrow.fromdatetime(input, input.tzinfo or 'utc')
# return arrow_dt.to('utc').datetime
# if isinstance(input, six.string_types):
# # try to convert from ISO8601
# return iso8601_to_dt(input)
# else:
# raise TypeError("Not a string, NoneType, or datetime object")
. Output only the next line. | value = isoformat(to_none_or_dt(value)) |
Predict the next line after this snippet: <|code_start|>#
# Copyright (c) 2015-2018 Digi International Inc.
dev_mac = Attribute('devMac')
group_id = Attribute('grpId')
group_path = Attribute('grpPath')
dev_connectware_id = Attribute('devConnectwareId')
# TODO: Can we support location based device lookups? (e.g. lat/long?)
ADD_GROUP_TEMPLATE = \
"""
<DeviceCore>
<devConnectwareId>{connectware_id}</devConnectwareId>
<grpPath>{group_path}</grpPath>
</DeviceCore>
"""
TAGS_TEMPLATE = \
"""
<DeviceCore>
<devConnectwareId>{connectware_id}</devConnectwareId>
<dpTags>{tags}</dpTags>
</DeviceCore>
"""
<|code_end|>
using the current file's imports:
import sys
import xml.etree.ElementTree as ET
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
from xml.sax.saxutils import escape
and any relevant context from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | class DeviceCoreAPI(APIBase): |
Continue the code snippet: <|code_start|> APIBase.__init__(self, conn)
self._sci = sci
def get_devices(self, condition=None, page_size=1000):
"""Iterates over each :class:`Device` for this device cloud account
Examples::
# get a list of all devices
all_devices = list(dc.devicecore.get_devices())
# build a mapping of devices by their vendor id using a
# dict comprehension
devices = dc.devicecore.get_devices() # generator object
devs_by_vendor_id = {d.get_vendor_id(): d for d in devices}
# iterate over all devices in 'minnesota' group and
# print the device mac and location
for device in dc.get_devices(group_path == 'minnesota'):
print "%s at %s" % (device.get_mac(), device.get_location())
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the devicecore. If unspecified,
an iterator over all devices will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Iterator over each :class:`~Device` in this device cloud
account in the form of a generator object.
"""
<|code_end|>
. Use current file imports:
import sys
import xml.etree.ElementTree as ET
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
from xml.sax.saxutils import escape
and context (classes, functions, or code) from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | condition = validate_type(condition, type(None), Expression, *six.string_types) |
Given the code snippet: <|code_start|> """Get this device's device id"""
device_json = self.get_device_json(use_cached)
return device_json["id"].get("devId")
def get_ip(self, use_cached=True):
"""Get the last known IP of this device"""
device_json = self.get_device_json(use_cached)
return device_json.get("dpLastKnownIp")
def get_mac(self, use_cached=True):
"""Get the MAC address of this device"""
device_json = self.get_device_json(use_cached)
return device_json.get("devMac")
def get_mac_last4(self, use_cached=True):
"""Get the last 4 characters in the device mac address hex (e.g. 00:40:9D:58:17:5B -> 175B)
This is useful for use as a short reference to the device. It is not guaranteed to
be unique (obviously) but will often be if you don't have too many devices.
"""
chunks = self.get_mac(use_cached).split(":")
mac4 = "%s%s" % (chunks[-2], chunks[-1])
return mac4.upper()
def get_registration_dt(self, use_cached=True):
"""Get the datetime of when this device was added to Device Cloud"""
device_json = self.get_device_json(use_cached)
start_date_iso8601 = device_json.get("devRecordStartDate")
if start_date_iso8601:
<|code_end|>
, generate the next line using the imports in this file:
import sys
import xml.etree.ElementTree as ET
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
from xml.sax.saxutils import escape
and context (functions, classes, or occasionally code) from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | return iso8601_to_dt(start_date_iso8601) |
Given the following code snippet before the placeholder: <|code_start|> APIBase.__init__(self, conn)
self._sci = sci
def get_devices(self, condition=None, page_size=1000):
"""Iterates over each :class:`Device` for this device cloud account
Examples::
# get a list of all devices
all_devices = list(dc.devicecore.get_devices())
# build a mapping of devices by their vendor id using a
# dict comprehension
devices = dc.devicecore.get_devices() # generator object
devs_by_vendor_id = {d.get_vendor_id(): d for d in devices}
# iterate over all devices in 'minnesota' group and
# print the device mac and location
for device in dc.get_devices(group_path == 'minnesota'):
print "%s at %s" % (device.get_mac(), device.get_location())
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the devicecore. If unspecified,
an iterator over all devices will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Iterator over each :class:`~Device` in this device cloud
account in the form of a generator object.
"""
<|code_end|>
, predict the next line using imports from the current file:
import sys
import xml.etree.ElementTree as ET
import six
from devicecloud.apibase import APIBase
from devicecloud.conditions import Attribute, Expression
from devicecloud.util import iso8601_to_dt, validate_type
from xml.sax.saxutils import escape
and context including class names, function names, and sometimes code from other files:
# Path: devicecloud/apibase.py
# class APIBase(object):
# """Base class for all API Classes
#
# :type _conn: devicecloud.DeviceCloudConnection
# """
# def __init__(self, conn):
# self._conn = conn
#
# Path: devicecloud/conditions.py
# class Attribute(object):
# """An attribute is a piece of data on which we may perform comparisons
#
# Comparisons performed to attributes will in turn generate new
# :class:`.Comparison` instances.
# """
#
# def __init__(self, name):
# self.name = name
#
# def __str__(self):
# return self.name
#
# def __gt__(self, value):
# return Comparison(self, '>', value)
#
# def __lt__(self, value):
# return Comparison(self, '<', value)
#
# def __eq__(self, value):
# return Comparison(self, '=', value)
#
# def like(self, value):
# return Comparison(self, ' like ', value)
#
# class Expression(object):
# r"""A condition is an evaluable filter
#
# Examples of conditions would include the following:
# * fdType='file'
# * fdName like 'sample%gas'
#
# Conditions may also be compound. E.g.
# * (fdType='file' and fdName like 'sample%gas')
#
# """
#
# def __init__(self):
# pass
#
# def __and__(self, rhs):
# return Combination(self, " and ", rhs)
#
# def __or__(self, rhs):
# return Combination(self, " or ", rhs)
#
# and_ = __and__ # alternate syntax
# or_ = __or__ # alternate syntax
#
# def compile(self):
# raise NotImplementedError("Should be implemented in subclass")
#
# Path: devicecloud/util.py
# def iso8601_to_dt(iso8601):
# """Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# # We could just use arrow.get() but that is more permissive than we actually want.
# # Internal (but still public) to arrow is the actual parser where we can be
# # a bit more specific
# parser = DateTimeParser()
# try:
# arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
# return arrow_dt.to('utc').datetime
# except ParserError as pe:
# raise ValueError("Provided was not a valid ISO8601 string: %r" % pe)
#
# def validate_type(input, *types):
# """Raise TypeError if the type of ``input`` is one of the args
#
# If the input value is one of the types specified, just return
# the input value.
#
# """
# if not isinstance(input, types):
# raise TypeError("Input expected to one of following types: %s" % (types, ))
# return input
. Output only the next line. | condition = validate_type(condition, type(None), Expression, *six.string_types) |
Predict the next line for this snippet: <|code_start|> random_state : random_state
Set the initial random state.
Returns
-------
numpy.ndarray
Sampled outcome.
"""
return func
class DeltaProdLik(object):
r"""
Product of Kronecker delta likelihoods.
The product can be written as
.. math::
\prod_i \delta[y_i = x_i]
Parameters
----------
link : link_func
Link function establishing :math:`g(y_i) = x_i`. Defaults to ``None``, which
leads to the identity link function.
"""
def __init__(self, link=None):
if link is None:
<|code_end|>
with the help of current file imports:
from numpy import ascontiguousarray
from ..link import IdentityLink, LogitLink, LogLink
import scipy.stats as st
import scipy.stats as st
import scipy.stats as st
and context from other files:
# Path: glimix_core/link/_link.py
# class IdentityLink(object):
# """
# Identity link function, g(x) = x.
# """
#
# @_value_doc
# def value(self, x):
# return asarray(x, float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(x, float)
#
# class LogitLink(object):
# """
# Logit link function, g(x) = log(x/(1 - x)).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x / (1 - x)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(1 / (1 + exp(-x)), float)
#
# class LogLink(object):
# """
# Log link function, g(x) = log(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(exp(x), float)
, which may contain function names, class names, or code. Output only the next line. | link = IdentityLink() |
Here is a snippet: <|code_start|> return _aca(x)
@property
def sample_size(self):
r"""Get the number of samples."""
assert self.outcome is not None
return len(self.outcome)
class BernoulliProdLik(object):
r"""
Product of Bernoulli likelihoods.
The product can be written as
.. math::
\prod_i p_i^{y_i} (1-p_i)^{1-y_i}
where :math:`p_i` is the probability of success.
Parameters
----------
link : link_func
Link function establishing :math:`g(p_i) = x_i`. Defaults to ``None``, which
leads to the :class:`glimix_core.link.LogitLink` link function.
"""
def __init__(self, link=None):
if link is None:
<|code_end|>
. Write the next line using the current file imports:
from numpy import ascontiguousarray
from ..link import IdentityLink, LogitLink, LogLink
import scipy.stats as st
import scipy.stats as st
import scipy.stats as st
and context from other files:
# Path: glimix_core/link/_link.py
# class IdentityLink(object):
# """
# Identity link function, g(x) = x.
# """
#
# @_value_doc
# def value(self, x):
# return asarray(x, float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(x, float)
#
# class LogitLink(object):
# """
# Logit link function, g(x) = log(x/(1 - x)).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x / (1 - x)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(1 / (1 + exp(-x)), float)
#
# class LogLink(object):
# """
# Log link function, g(x) = log(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(exp(x), float)
, which may include functions, classes, or code. Output only the next line. | link = LogitLink |
Using the snippet: <|code_start|> r"""Mean of the number of successful trials."""
return self._link.inv(x)
@_sample_doc
def sample(self, x, random_state=None):
p = self.mean(x)
nt = ascontiguousarray(self._ntrials, dtype=int)
return _aca(st.binom(nt, p).rvs(random_state=random_state))
@property
def sample_size(self):
r"""Get the number of samples."""
assert self.nsuccesses is not None
return len(self.nsuccesses)
class PoissonProdLik(object):
r"""
Product of Poisson likelihoods.
Parameters
----------
link : link_func
Link function establishing :math:`g(y_i) = x_i`. Defaults to ``None``, which
leads to the :class:`glimix_core.link.LogitLink` link function.
"""
def __init__(self, link=None):
if link is None:
<|code_end|>
, determine the next line of code. You have imports:
from numpy import ascontiguousarray
from ..link import IdentityLink, LogitLink, LogLink
import scipy.stats as st
import scipy.stats as st
import scipy.stats as st
and context (class names, function names, or code) available:
# Path: glimix_core/link/_link.py
# class IdentityLink(object):
# """
# Identity link function, g(x) = x.
# """
#
# @_value_doc
# def value(self, x):
# return asarray(x, float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(x, float)
#
# class LogitLink(object):
# """
# Logit link function, g(x) = log(x/(1 - x)).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x / (1 - x)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(1 / (1 + exp(-x)), float)
#
# class LogLink(object):
# """
# Log link function, g(x) = log(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(exp(x), float)
. Output only the next line. | link = LogLink() |
Continue the code snippet: <|code_start|> """
return kron(self.A, self.X)
def value(self):
"""
Kronecker mean function.
Returns
-------
𝐦 : ndarray
(A⊗X)vec(B).
"""
return self.AX @ self._vecB.value
def gradient(self):
"""
Gradient of the linear mean function.
Returns
-------
vecB : ndarray
Derivative of M over vec(B).
"""
return {"vecB": self.AX}
@property
def B(self):
"""
Effect-sizes parameter, B.
"""
<|code_end|>
. Use current file imports:
from numpy import asarray, kron, zeros
from optimix import Function, Vector
from glimix_core._util import unvec, vec
and context (classes, functions, or code) from other files:
# Path: glimix_core/_util/_array.py
# def unvec(x, shape):
# return reshape(x, shape, order="F")
#
# def vec(x):
# return reshape(x, (-1,) + x.shape[2:], order="F")
. Output only the next line. | return unvec(self._vecB.value, (self.X.shape[1], self.A.shape[0])) |
Given the code snippet: <|code_start|> """
Kronecker mean function.
Returns
-------
𝐦 : ndarray
(A⊗X)vec(B).
"""
return self.AX @ self._vecB.value
def gradient(self):
"""
Gradient of the linear mean function.
Returns
-------
vecB : ndarray
Derivative of M over vec(B).
"""
return {"vecB": self.AX}
@property
def B(self):
"""
Effect-sizes parameter, B.
"""
return unvec(self._vecB.value, (self.X.shape[1], self.A.shape[0]))
@B.setter
def B(self, v):
<|code_end|>
, generate the next line using the imports in this file:
from numpy import asarray, kron, zeros
from optimix import Function, Vector
from glimix_core._util import unvec, vec
and context (functions, classes, or occasionally code) from other files:
# Path: glimix_core/_util/_array.py
# def unvec(x, shape):
# return reshape(x, shape, order="F")
#
# def vec(x):
# return reshape(x, (-1,) + x.shape[2:], order="F")
. Output only the next line. | self._vecB.value = vec(asarray(v, float)) |
Predict the next line after this snippet: <|code_start|>
def test_givencov():
K = RandomState(0).randn(5, 5)
K = K @ K.T
<|code_end|>
using the current file's imports:
from numpy.random import RandomState
from numpy.testing import assert_allclose
from glimix_core.cov import GivenCov
and any relevant context from other files:
# Path: glimix_core/cov/_given.py
# class GivenCov(Function):
# """
# Given covariance function, K = s⋅K₀.
#
# The covariance matrix is the provided matrix K₀ scaled by s: K = s⋅K₀.
#
# Example
# -------
#
# .. doctest::
#
# >>> from glimix_core.cov import GivenCov
# >>> from numpy import dot
# >>> from numpy.random import RandomState
# >>>
# >>> G = RandomState(0).randn(5, 3)
# >>> K0 = dot(G, G.T)
# >>> cov = GivenCov(K0)
# >>> cov.scale = 1.3
# >>> cov.name = "K"
# >>> print(cov)
# GivenCov(K0=...): K
# scale: 1.3
# """
#
# def __init__(self, K0):
# """
# Constructor.
#
# Parameters
# ----------
# K0 : array_like
# A semi-definite positive matrix.
# """
# from numpy_sugar.linalg import check_symmetry
#
# self._logscale = Scalar(0.0)
# Function.__init__(self, "GivenCov", logscale=self._logscale)
# self._logscale.bounds = (-20.0, +10)
# if not check_symmetry(K0):
# raise ValueError("The provided covariance-matrix is not symmetric.")
# self._K0 = K0
#
# @property
# def scale(self):
# """
# Scale parameter, s.
# """
# return float(exp(self._logscale.value))
#
# @scale.setter
# def scale(self, scale):
# from numpy_sugar import epsilon
#
# scale = max(scale, epsilon.tiny)
# self._logscale.value = log(scale)
#
# def value(self):
# """
# Covariance matrix, s⋅K₀.
#
# Returns
# -------
# K : ndarray
# s⋅K₀.
# """
# return self.scale * self._K0
#
# def gradient(self):
# """
# Derivative of the covariance matrix over log(s).
#
# Returns
# -------
# logscale : float
# s⋅K₀.
# """
# return dict(logscale=self.scale * self._K0)
#
# def __str__(self):
# return format_function(self, {"K0": "..."}, [("scale", self.scale)])
. Output only the next line. | cov = GivenCov(K) |
Given the code snippet: <|code_start|>
def test_probit_link():
link = ProbitLink()
assert_allclose(link.value(link.inv(3.2)), 3.2)
def test_logit_link():
<|code_end|>
, generate the next line using the imports in this file:
from numpy.testing import assert_allclose
from glimix_core.link import LogitLink, LogLink, ProbitLink
and context (functions, classes, or occasionally code) from other files:
# Path: glimix_core/link/_link.py
# class LogitLink(object):
# """
# Logit link function, g(x) = log(x/(1 - x)).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x / (1 - x)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(1 / (1 + exp(-x)), float)
#
# class LogLink(object):
# """
# Log link function, g(x) = log(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(exp(x), float)
#
# class ProbitLink(object):
# """
# Probit link function, g(x) = 𝚽⁻¹(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(_normal_icdf(asarray(x, float)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(_normal_cdf(asarray(x, float)), float)
. Output only the next line. | link = LogitLink() |
Continue the code snippet: <|code_start|>
def test_probit_link():
link = ProbitLink()
assert_allclose(link.value(link.inv(3.2)), 3.2)
def test_logit_link():
link = LogitLink()
assert_allclose(link.value(link.inv(3.2)), 3.2)
def test_loglink_link():
<|code_end|>
. Use current file imports:
from numpy.testing import assert_allclose
from glimix_core.link import LogitLink, LogLink, ProbitLink
and context (classes, functions, or code) from other files:
# Path: glimix_core/link/_link.py
# class LogitLink(object):
# """
# Logit link function, g(x) = log(x/(1 - x)).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x / (1 - x)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(1 / (1 + exp(-x)), float)
#
# class LogLink(object):
# """
# Log link function, g(x) = log(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(log(x), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(exp(x), float)
#
# class ProbitLink(object):
# """
# Probit link function, g(x) = 𝚽⁻¹(x).
# """
#
# @_value_doc
# def value(self, x):
# return asarray(_normal_icdf(asarray(x, float)), float)
#
# @_inv_doc
# @staticmethod
# def inv(x):
# return asarray(_normal_cdf(asarray(x, float)), float)
. Output only the next line. | link = LogLink() |
Next line prediction: <|code_start|>
@scale.setter
def scale(self, scale):
scale = max(scale, epsilon.tiny)
self._logscale.value = log(scale)
def value(self):
"""
Covariance matrix, s⋅K₀.
Returns
-------
K : ndarray
s⋅K₀.
"""
return self.scale * self._K0
def gradient(self):
"""
Derivative of the covariance matrix over log(s).
Returns
-------
logscale : float
s⋅K₀.
"""
return dict(logscale=self.scale * self._K0)
def __str__(self):
<|code_end|>
. Use current file imports:
(from numpy import exp, log
from optimix import Function, Scalar
from .._util import format_function
from numpy_sugar.linalg import check_symmetry
from numpy_sugar import epsilon)
and context including class names, function names, or small code snippets from other files:
# Path: glimix_core/_util/format.py
# def format_function(func, params, attrs=None):
# if attrs is None:
# attrs = []
# tname = type(func).__name__
# name = func.name
# kwargs_input = [f"{arg}={val}" for arg, val in params.items()]
# input = ", ".join(kwargs_input)
# msg = f"{tname}({input})"
# if name is not None:
# msg += f": {name}"
#
# msg += "\n"
# for a in attrs:
# msg += _format_named_arr(a[0], a[1])
# return msg
. Output only the next line. | return format_function(self, {"K0": "..."}, [("scale", self.scale)]) |
Using the snippet: <|code_start|> """
Dimension of the matrix, d.
It corresponds to the number of rows and to the number of columns.
"""
return self._I.shape[0]
def value(self):
"""
Covariance matrix.
Returns
-------
K : ndarray
s⋅I, for scale s and a d×d identity matrix I.
"""
return self.scale * self._I
def gradient(self):
"""
Derivative of the covariance matrix over log(s), s⋅I.
Returns
-------
logscale : ndarray
s⋅I, for scale s and a d×d identity matrix I.
"""
return dict(logscale=self.value())
def __str__(self):
<|code_end|>
, determine the next line of code. You have imports:
from numpy import exp, eye, log
from optimix import Function, Scalar
from .._util import format_function
from numpy_sugar import epsilon
and context (class names, function names, or code) available:
# Path: glimix_core/_util/format.py
# def format_function(func, params, attrs=None):
# if attrs is None:
# attrs = []
# tname = type(func).__name__
# name = func.name
# kwargs_input = [f"{arg}={val}" for arg, val in params.items()]
# input = ", ".join(kwargs_input)
# msg = f"{tname}({input})"
# if name is not None:
# msg += f": {name}"
#
# msg += "\n"
# for a in attrs:
# msg += _format_named_arr(a[0], a[1])
# return msg
. Output only the next line. | return format_function(self, {"dim": self._I.shape[0]}, [("scale", self.scale)]) |
Predict the next line for this snippet: <|code_start|> return K + self._epsilon * eye(K.shape[0])
def gradient(self):
"""
Derivative of the covariance matrix over the parameters of L.
Returns
-------
Lu : ndarray
Derivative of K over the lower triangular part of L.
"""
L = self.L
self._grad_Lu[:] = 0
for i in range(len(self._tril1[0])):
row = self._tril1[0][i]
col = self._tril1[1][i]
self._grad_Lu[row, :, i] = L[:, col]
self._grad_Lu[:, row, i] += L[:, col]
m = len(self._tril1[0])
for i in range(len(self._diag[0])):
row = self._diag[0][i]
col = self._diag[1][i]
self._grad_Lu[row, :, m + i] = L[row, col] * L[:, col]
self._grad_Lu[:, row, m + i] += L[row, col] * L[:, col]
return {"Lu": self._grad_Lu}
def __str__(self):
<|code_end|>
with the help of current file imports:
from typing import Any, Dict
from numpy import diag_indices_from, dot, exp, eye, inf, log, tril_indices_from, zeros
from optimix import Function, Vector
from .._util import format_function
from numpy_sugar import epsilon
from numpy.linalg import svd
from numpy.linalg import slogdet
and context from other files:
# Path: glimix_core/_util/format.py
# def format_function(func, params, attrs=None):
# if attrs is None:
# attrs = []
# tname = type(func).__name__
# name = func.name
# kwargs_input = [f"{arg}={val}" for arg, val in params.items()]
# input = ", ".join(kwargs_input)
# msg = f"{tname}({input})"
# if name is not None:
# msg += f": {name}"
#
# msg += "\n"
# for a in attrs:
# msg += _format_named_arr(a[0], a[1])
# return msg
, which may contain function names, class names, or code. Output only the next line. | return format_function(self, {"dim": self._L.shape[0]}, [("L", self.L)]) |
Here is a snippet: <|code_start|>def test_util_check_economic_qs():
A = ones((3, 2))
B = ones((3, 1))
C = ones(2)
with pytest.raises(ValueError):
check_economic_qs(A)
with pytest.raises(ValueError):
check_economic_qs((A, C))
A[0, 0] = inf
QS = ((A, B), C)
with pytest.raises(ValueError):
check_economic_qs(QS)
A[0, 0] = 1
C[0] = nan
with pytest.raises(ValueError):
check_economic_qs(QS)
def test_util_check_covariates():
A = ones(2)
B = ones((1, 2))
with pytest.raises(ValueError):
<|code_end|>
. Write the next line using the current file imports:
import pytest
from numpy import array, asarray, block, inf, nan, ones
from numpy.linalg import inv, pinv, svd
from numpy.testing import assert_allclose
from numpy_sugar.linalg import ddot
from glimix_core._util import check_covariates, check_economic_qs, check_outcome, hinv
from glimix_core._util.solve import heigvals, hsvd
and context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/solve.py
# def hinv(a, b, d):
# rcond = 1e-7
#
# b = atleast_1d(b)
# d = atleast_1d(d)
# a = float(a)
# cond = heigvals(a, b, d)
# norm = a * d - b * b
# with errstate(invalid="ignore", divide="ignore"):
# ai = d / norm
# bi = -b / norm
# di = a / norm
#
# nok = cond > 1 / rcond
# ai[nok], bi[nok], di[nok] = _hinv_svd(a, b[nok], d[nok])
# return ai, bi, di
#
# Path: glimix_core/_util/solve.py
# def heigvals(a, b, d):
# a = float(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
# T = a + d
# D = a * d - b * b
#
# t0 = T / 2
# t1 = sqrt(maximum(T * T / 4 - D, 0))
# eig0 = t0 + t1
# eig1 = t0 - t1
#
# eig0 = absolute(eig0)
# eig1 = absolute(eig1)
#
# with errstate(invalid="ignore", divide="ignore"):
# return nan_to_num(maximum(eig0, eig1) / minimum(eig0, eig1))
#
# def hsvd(a, b, d):
# a = atleast_1d(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
#
# aa = a * a
# bb = b * b
# dd = d * d
# ab = a * b
# bd = b * d
#
# e = aa - dd
# s1 = aa + 2 * bb + dd
# s2 = sqrt(e ** 2 + 4 * (ab + bd) ** 2)
#
# t = 2 * ab + 2 * bd
# theta = arctan2(t, e) / 2
# psi = arctan2(t, aa - dd) / 2
#
# Ct = cos(theta)
# St = sin(theta)
# Cp = cos(psi)
# Sp = sin(psi)
#
# s11 = (a * Ct + b * St) * Cp + (b * Ct + d * St) * Sp
# s22 = (a * St - b * Ct) * Sp + (-b * St + d * Ct) * Cp
#
# U = [[Ct, -St], [St, Ct]]
# S = [sqrt((s1 + s2) / 2), sqrt(maximum((s1 - s2) / 2, 0.0))]
#
# VT = [[sign(s11) * Cp, sign(s11) * Sp], [-sign(s22) * Sp, sign(s22) * Cp]]
#
# # U S V.T
# return U, S, VT
, which may include functions, classes, or code. Output only the next line. | check_covariates(A) |
Predict the next line for this snippet: <|code_start|>
def test_util_check_economic_qs():
A = ones((3, 2))
B = ones((3, 1))
C = ones(2)
with pytest.raises(ValueError):
<|code_end|>
with the help of current file imports:
import pytest
from numpy import array, asarray, block, inf, nan, ones
from numpy.linalg import inv, pinv, svd
from numpy.testing import assert_allclose
from numpy_sugar.linalg import ddot
from glimix_core._util import check_covariates, check_economic_qs, check_outcome, hinv
from glimix_core._util.solve import heigvals, hsvd
and context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/solve.py
# def hinv(a, b, d):
# rcond = 1e-7
#
# b = atleast_1d(b)
# d = atleast_1d(d)
# a = float(a)
# cond = heigvals(a, b, d)
# norm = a * d - b * b
# with errstate(invalid="ignore", divide="ignore"):
# ai = d / norm
# bi = -b / norm
# di = a / norm
#
# nok = cond > 1 / rcond
# ai[nok], bi[nok], di[nok] = _hinv_svd(a, b[nok], d[nok])
# return ai, bi, di
#
# Path: glimix_core/_util/solve.py
# def heigvals(a, b, d):
# a = float(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
# T = a + d
# D = a * d - b * b
#
# t0 = T / 2
# t1 = sqrt(maximum(T * T / 4 - D, 0))
# eig0 = t0 + t1
# eig1 = t0 - t1
#
# eig0 = absolute(eig0)
# eig1 = absolute(eig1)
#
# with errstate(invalid="ignore", divide="ignore"):
# return nan_to_num(maximum(eig0, eig1) / minimum(eig0, eig1))
#
# def hsvd(a, b, d):
# a = atleast_1d(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
#
# aa = a * a
# bb = b * b
# dd = d * d
# ab = a * b
# bd = b * d
#
# e = aa - dd
# s1 = aa + 2 * bb + dd
# s2 = sqrt(e ** 2 + 4 * (ab + bd) ** 2)
#
# t = 2 * ab + 2 * bd
# theta = arctan2(t, e) / 2
# psi = arctan2(t, aa - dd) / 2
#
# Ct = cos(theta)
# St = sin(theta)
# Cp = cos(psi)
# Sp = sin(psi)
#
# s11 = (a * Ct + b * St) * Cp + (b * Ct + d * St) * Sp
# s22 = (a * St - b * Ct) * Sp + (-b * St + d * Ct) * Cp
#
# U = [[Ct, -St], [St, Ct]]
# S = [sqrt((s1 + s2) / 2), sqrt(maximum((s1 - s2) / 2, 0.0))]
#
# VT = [[sign(s11) * Cp, sign(s11) * Sp], [-sign(s22) * Sp, sign(s22) * Cp]]
#
# # U S V.T
# return U, S, VT
, which may contain function names, class names, or code. Output only the next line. | check_economic_qs(A) |
Predict the next line for this snippet: <|code_start|> A[0, 0] = inf
QS = ((A, B), C)
with pytest.raises(ValueError):
check_economic_qs(QS)
A[0, 0] = 1
C[0] = nan
with pytest.raises(ValueError):
check_economic_qs(QS)
def test_util_check_covariates():
A = ones(2)
B = ones((1, 2))
with pytest.raises(ValueError):
check_covariates(A)
B[0, 0] = inf
with pytest.raises(ValueError):
check_covariates(B)
def test_util_check_outcome():
y = ones(5)
y[0] = nan
with pytest.raises(ValueError):
<|code_end|>
with the help of current file imports:
import pytest
from numpy import array, asarray, block, inf, nan, ones
from numpy.linalg import inv, pinv, svd
from numpy.testing import assert_allclose
from numpy_sugar.linalg import ddot
from glimix_core._util import check_covariates, check_economic_qs, check_outcome, hinv
from glimix_core._util.solve import heigvals, hsvd
and context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/solve.py
# def hinv(a, b, d):
# rcond = 1e-7
#
# b = atleast_1d(b)
# d = atleast_1d(d)
# a = float(a)
# cond = heigvals(a, b, d)
# norm = a * d - b * b
# with errstate(invalid="ignore", divide="ignore"):
# ai = d / norm
# bi = -b / norm
# di = a / norm
#
# nok = cond > 1 / rcond
# ai[nok], bi[nok], di[nok] = _hinv_svd(a, b[nok], d[nok])
# return ai, bi, di
#
# Path: glimix_core/_util/solve.py
# def heigvals(a, b, d):
# a = float(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
# T = a + d
# D = a * d - b * b
#
# t0 = T / 2
# t1 = sqrt(maximum(T * T / 4 - D, 0))
# eig0 = t0 + t1
# eig1 = t0 - t1
#
# eig0 = absolute(eig0)
# eig1 = absolute(eig1)
#
# with errstate(invalid="ignore", divide="ignore"):
# return nan_to_num(maximum(eig0, eig1) / minimum(eig0, eig1))
#
# def hsvd(a, b, d):
# a = atleast_1d(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
#
# aa = a * a
# bb = b * b
# dd = d * d
# ab = a * b
# bd = b * d
#
# e = aa - dd
# s1 = aa + 2 * bb + dd
# s2 = sqrt(e ** 2 + 4 * (ab + bd) ** 2)
#
# t = 2 * ab + 2 * bd
# theta = arctan2(t, e) / 2
# psi = arctan2(t, aa - dd) / 2
#
# Ct = cos(theta)
# St = sin(theta)
# Cp = cos(psi)
# Sp = sin(psi)
#
# s11 = (a * Ct + b * St) * Cp + (b * Ct + d * St) * Sp
# s22 = (a * St - b * Ct) * Sp + (-b * St + d * Ct) * Cp
#
# U = [[Ct, -St], [St, Ct]]
# S = [sqrt((s1 + s2) / 2), sqrt(maximum((s1 - s2) / 2, 0.0))]
#
# VT = [[sign(s11) * Cp, sign(s11) * Sp], [-sign(s22) * Sp, sign(s22) * Cp]]
#
# # U S V.T
# return U, S, VT
, which may contain function names, class names, or code. Output only the next line. | check_outcome((y,), "poisson") |
Given the following code snippet before the placeholder: <|code_start|> check_outcome((y,), "poisson")
y[0] = 0.5
want = array([0.5, 1.0, 1.0, 1.0, 1.0])
assert_allclose(check_outcome(y, "poisson"), want)
x = ones(4)
with pytest.raises(ValueError):
check_outcome((y, x), "bernoulli")
x = ones(5)
with pytest.raises(ValueError):
check_outcome(y, "normal")
def test_util_check_poisson_outcome():
y = ones(5)
y[0] = 25000 + 1
want = array(
[2.50000000e04, 1.00000000e00, 1.00000000e00, 1.00000000e00, 1.00000000e00]
)
with pytest.warns(UserWarning):
assert_allclose(check_outcome(y, "poisson"), want)
def test_hsvd():
A = asarray([[1.2, -0.2], [-0.2, 1.1]])
<|code_end|>
, predict the next line using imports from the current file:
import pytest
from numpy import array, asarray, block, inf, nan, ones
from numpy.linalg import inv, pinv, svd
from numpy.testing import assert_allclose
from numpy_sugar.linalg import ddot
from glimix_core._util import check_covariates, check_economic_qs, check_outcome, hinv
from glimix_core._util.solve import heigvals, hsvd
and context including class names, function names, and sometimes code from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/solve.py
# def hinv(a, b, d):
# rcond = 1e-7
#
# b = atleast_1d(b)
# d = atleast_1d(d)
# a = float(a)
# cond = heigvals(a, b, d)
# norm = a * d - b * b
# with errstate(invalid="ignore", divide="ignore"):
# ai = d / norm
# bi = -b / norm
# di = a / norm
#
# nok = cond > 1 / rcond
# ai[nok], bi[nok], di[nok] = _hinv_svd(a, b[nok], d[nok])
# return ai, bi, di
#
# Path: glimix_core/_util/solve.py
# def heigvals(a, b, d):
# a = float(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
# T = a + d
# D = a * d - b * b
#
# t0 = T / 2
# t1 = sqrt(maximum(T * T / 4 - D, 0))
# eig0 = t0 + t1
# eig1 = t0 - t1
#
# eig0 = absolute(eig0)
# eig1 = absolute(eig1)
#
# with errstate(invalid="ignore", divide="ignore"):
# return nan_to_num(maximum(eig0, eig1) / minimum(eig0, eig1))
#
# def hsvd(a, b, d):
# a = atleast_1d(a)
# b = atleast_1d(b)
# d = atleast_1d(d)
#
# aa = a * a
# bb = b * b
# dd = d * d
# ab = a * b
# bd = b * d
#
# e = aa - dd
# s1 = aa + 2 * bb + dd
# s2 = sqrt(e ** 2 + 4 * (ab + bd) ** 2)
#
# t = 2 * ab + 2 * bd
# theta = arctan2(t, e) / 2
# psi = arctan2(t, aa - dd) / 2
#
# Ct = cos(theta)
# St = sin(theta)
# Cp = cos(psi)
# Sp = sin(psi)
#
# s11 = (a * Ct + b * St) * Cp + (b * Ct + d * St) * Sp
# s22 = (a * St - b * Ct) * Sp + (-b * St + d * Ct) * Cp
#
# U = [[Ct, -St], [St, Ct]]
# S = [sqrt((s1 + s2) / 2), sqrt(maximum((s1 - s2) / 2, 0.0))]
#
# VT = [[sign(s11) * Cp, sign(s11) * Sp], [-sign(s22) * Sp, sign(s22) * Cp]]
#
# # U S V.T
# return U, S, VT
. Output only the next line. | SVD0 = hsvd(A[0, 0], A[0, 1], A[1, 1]) |
Continue the code snippet: <|code_start|>
def test_mean_kron():
random = RandomState(0)
# number of trais
p = 2
# number of covariates
c = 3
# sample size
n = 4
A = random.randn(p, p)
X = random.randn(n, c)
B = random.randn(p, c)
<|code_end|>
. Use current file imports:
from numpy import kron, ravel
from numpy.random import RandomState
from numpy.testing import assert_allclose
from glimix_core.mean import KronMean
and context (classes, functions, or code) from other files:
# Path: glimix_core/mean/_kron.py
# class KronMean(Function):
# """
# Kronecker mean function, (A⊗X)vec(B).
#
# Let
#
# - n be the number of samples;
# - p the number of traits; and
# - c the number of covariates.
#
# The mathematical representation is
#
# 𝐦 = (A⊗X)vec(B)
#
# where A is a p×p trait design matrix of fixed effects and X is a n×c sample design
# matrix of fixed effects. B is a c×p matrix of fixed-effect sizes.
# """
#
# def __init__(self, A, X):
# """
# Constructor.
#
# Parameters
# ----------
# A : array_like
# p×p array.
# X : array_like
# n×c array.
# """
# self._A = asarray(A, float)
# self._X = asarray(X, float)
# vecB = zeros((X.shape[1], A.shape[0])).ravel()
# self._vecB = Vector(vecB)
# self._nparams = vecB.size
# Function.__init__(self, "KronMean", vecB=self._vecB)
#
# @property
# def nparams(self):
# """
# Number of parameters.
# """
# return self._nparams
#
# @property
# def A(self):
# """
# Matrix A.
# """
# return self._A
#
# @property
# def X(self):
# """
# Matrix X.
# """
# return self._X
#
# @property
# def AX(self):
# """
# A ⊗ X.
# """
# return kron(self.A, self.X)
#
# def value(self):
# """
# Kronecker mean function.
#
# Returns
# -------
# 𝐦 : ndarray
# (A⊗X)vec(B).
# """
# return self.AX @ self._vecB.value
#
# def gradient(self):
# """
# Gradient of the linear mean function.
#
# Returns
# -------
# vecB : ndarray
# Derivative of M over vec(B).
# """
# return {"vecB": self.AX}
#
# @property
# def B(self):
# """
# Effect-sizes parameter, B.
# """
# return unvec(self._vecB.value, (self.X.shape[1], self.A.shape[0]))
#
# @B.setter
# def B(self, v):
# self._vecB.value = vec(asarray(v, float))
#
# def __str__(self):
# tname = type(self).__name__
# msg = "{}(A=..., X=...)".format(tname)
# if self.name is not None:
# msg += ": {}".format(self.name)
# msg += "\n"
# mat = format(self.B)
# msg += " B: " + "\n ".join(mat.split("\n"))
# return msg
. Output only the next line. | mean = KronMean(A, X) |
Predict the next line after this snippet: <|code_start|> @scale.setter
def scale(self, scale):
scale = max(scale, epsilon.tiny)
self._logscale.value = log(scale)
def value(self):
"""
Covariance matrix.
Returns
-------
K : ndarray
s⋅XXᵀ.
"""
X = self.X
return self.scale * (X @ X.T)
def gradient(self):
"""
Derivative of the covariance matrix over log(s).
Returns
-------
logscale : ndarray
s⋅XXᵀ.
"""
return dict(logscale=self.value())
def __str__(self):
<|code_end|>
using the current file's imports:
from numpy import exp, log
from optimix import Function, Scalar
from .._util import format_function
from numpy_sugar import epsilon
and any relevant context from other files:
# Path: glimix_core/_util/format.py
# def format_function(func, params, attrs=None):
# if attrs is None:
# attrs = []
# tname = type(func).__name__
# name = func.name
# kwargs_input = [f"{arg}={val}" for arg, val in params.items()]
# input = ", ".join(kwargs_input)
# msg = f"{tname}({input})"
# if name is not None:
# msg += f": {name}"
#
# msg += "\n"
# for a in attrs:
# msg += _format_named_arr(a[0], a[1])
# return msg
. Output only the next line. | return format_function(self, {}, [("scale", self.scale)]) |
Predict the next line after this snippet: <|code_start|> ----------
y : array_like
Outcome variable.
lik : tuple
Likelihood definition. The first item is one of the following likelihood names:
``"Bernoulli"``, ``"Binomial"``, ``"Normal"``, and ``"Poisson"``. For
`Binomial`, the second item is an array of outcomes.
X : array_like
Covariates.
QS : tuple
Economic eigen decomposition.
"""
def __init__(self, y, lik, X, QS=None):
y = ascontiguousarray(y, float)
X = asarray(X, float)
Function.__init__(
self,
"GLMM",
beta=Vector(zeros(X.shape[1])),
logscale=Scalar(0.0),
logitdelta=Scalar(0.0),
)
if not isinstance(lik, (tuple, list)):
lik = (lik,)
self._lik = (lik[0].lower(),) + tuple(ascontiguousarray(i) for i in lik[1:])
self._y = check_outcome(y, self._lik)
<|code_end|>
using the current file's imports:
from copy import copy
from numpy import asarray, ascontiguousarray, clip, dot, exp, log, zeros
from optimix import Function, Scalar, Vector
from .._util import (
check_covariates,
check_economic_qs,
check_outcome,
economic_qs_zeros,
)
from numpy_sugar.linalg import ddot, sum2diag
from numpy_sugar import epsilon
and any relevant context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/eigen.py
# def economic_qs_zeros(n: int):
# """
# Eigen decomposition of a zero matrix.
# """
#
# Q0 = empty((n, 0))
# Q1 = eye(n)
# S0 = empty(0)
#
# return ((Q0, Q1), S0)
. Output only the next line. | self._X = check_covariates(X) |
Predict the next line for this snippet: <|code_start|> Likelihood definition. The first item is one of the following likelihood names:
``"Bernoulli"``, ``"Binomial"``, ``"Normal"``, and ``"Poisson"``. For
`Binomial`, the second item is an array of outcomes.
X : array_like
Covariates.
QS : tuple
Economic eigen decomposition.
"""
def __init__(self, y, lik, X, QS=None):
y = ascontiguousarray(y, float)
X = asarray(X, float)
Function.__init__(
self,
"GLMM",
beta=Vector(zeros(X.shape[1])),
logscale=Scalar(0.0),
logitdelta=Scalar(0.0),
)
if not isinstance(lik, (tuple, list)):
lik = (lik,)
self._lik = (lik[0].lower(),) + tuple(ascontiguousarray(i) for i in lik[1:])
self._y = check_outcome(y, self._lik)
self._X = check_covariates(X)
if QS is None:
self._QS = economic_qs_zeros(self._y.shape[0])
else:
<|code_end|>
with the help of current file imports:
from copy import copy
from numpy import asarray, ascontiguousarray, clip, dot, exp, log, zeros
from optimix import Function, Scalar, Vector
from .._util import (
check_covariates,
check_economic_qs,
check_outcome,
economic_qs_zeros,
)
from numpy_sugar.linalg import ddot, sum2diag
from numpy_sugar import epsilon
and context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/eigen.py
# def economic_qs_zeros(n: int):
# """
# Eigen decomposition of a zero matrix.
# """
#
# Q0 = empty((n, 0))
# Q1 = eye(n)
# S0 = empty(0)
#
# return ((Q0, Q1), S0)
, which may contain function names, class names, or code. Output only the next line. | self._QS = check_economic_qs(QS) |
Continue the code snippet: <|code_start|> Parameters
----------
y : array_like
Outcome variable.
lik : tuple
Likelihood definition. The first item is one of the following likelihood names:
``"Bernoulli"``, ``"Binomial"``, ``"Normal"``, and ``"Poisson"``. For
`Binomial`, the second item is an array of outcomes.
X : array_like
Covariates.
QS : tuple
Economic eigen decomposition.
"""
def __init__(self, y, lik, X, QS=None):
y = ascontiguousarray(y, float)
X = asarray(X, float)
Function.__init__(
self,
"GLMM",
beta=Vector(zeros(X.shape[1])),
logscale=Scalar(0.0),
logitdelta=Scalar(0.0),
)
if not isinstance(lik, (tuple, list)):
lik = (lik,)
self._lik = (lik[0].lower(),) + tuple(ascontiguousarray(i) for i in lik[1:])
<|code_end|>
. Use current file imports:
from copy import copy
from numpy import asarray, ascontiguousarray, clip, dot, exp, log, zeros
from optimix import Function, Scalar, Vector
from .._util import (
check_covariates,
check_economic_qs,
check_outcome,
economic_qs_zeros,
)
from numpy_sugar.linalg import ddot, sum2diag
from numpy_sugar import epsilon
and context (classes, functions, or code) from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/eigen.py
# def economic_qs_zeros(n: int):
# """
# Eigen decomposition of a zero matrix.
# """
#
# Q0 = empty((n, 0))
# Q1 = eye(n)
# S0 = empty(0)
#
# return ((Q0, Q1), S0)
. Output only the next line. | self._y = check_outcome(y, self._lik) |
Predict the next line for this snippet: <|code_start|> Outcome variable.
lik : tuple
Likelihood definition. The first item is one of the following likelihood names:
``"Bernoulli"``, ``"Binomial"``, ``"Normal"``, and ``"Poisson"``. For
`Binomial`, the second item is an array of outcomes.
X : array_like
Covariates.
QS : tuple
Economic eigen decomposition.
"""
def __init__(self, y, lik, X, QS=None):
y = ascontiguousarray(y, float)
X = asarray(X, float)
Function.__init__(
self,
"GLMM",
beta=Vector(zeros(X.shape[1])),
logscale=Scalar(0.0),
logitdelta=Scalar(0.0),
)
if not isinstance(lik, (tuple, list)):
lik = (lik,)
self._lik = (lik[0].lower(),) + tuple(ascontiguousarray(i) for i in lik[1:])
self._y = check_outcome(y, self._lik)
self._X = check_covariates(X)
if QS is None:
<|code_end|>
with the help of current file imports:
from copy import copy
from numpy import asarray, ascontiguousarray, clip, dot, exp, log, zeros
from optimix import Function, Scalar, Vector
from .._util import (
check_covariates,
check_economic_qs,
check_outcome,
economic_qs_zeros,
)
from numpy_sugar.linalg import ddot, sum2diag
from numpy_sugar import epsilon
and context from other files:
# Path: glimix_core/_util/check.py
# def check_covariates(X):
# if not X.ndim == 2:
# raise ValueError("Covariates must be a bidimensional array.")
#
# if not npall(isfinite(X)):
# raise ValueError("Covariates must have finite values only.")
#
# return X
#
# def check_economic_qs(QS):
# if not isinstance(QS, tuple):
# raise ValueError("QS must be a tuple.")
#
# if not isinstance(QS[0], tuple):
# raise ValueError("QS[0] must be a tuple.")
#
# fmsg = "QS has non-finite values."
#
# if not all(npall(isfinite(Q)) for Q in QS[0]):
# raise ValueError(fmsg)
#
# if not npall(isfinite(QS[1])):
# raise ValueError(fmsg)
#
# return QS
#
# def check_outcome(y, lik):
# if not isinstance(lik, (list, tuple)):
# lik = (lik,)
#
# str_err = "The first item of ``lik`` has to be a string."
# if not isinstance(lik[0], str):
# raise ValueError(str_err)
#
# lik_name = lik[0].lower()
#
# y = ascontiguousarray(y, float)
# lik = lik[:1] + tuple(ascontiguousarray(i, float) for i in lik[1:])
#
# if not npall(isfinite(y)):
# raise ValueError("Outcome must be finite.")
#
# if lik_name == "poisson":
# return _check_poisson_outcome(y)
#
# if lik_name in ("binomial", "normal"):
# if len(lik) != 2:
# msg = "``lik`` must be a tuple of two elements for"
# msg += " {} likelihood.".format(lik_name[0].upper() + lik_name[1:])
# raise ValueError(msg)
#
# return y
#
# Path: glimix_core/_util/eigen.py
# def economic_qs_zeros(n: int):
# """
# Eigen decomposition of a zero matrix.
# """
#
# Q0 = empty((n, 0))
# Q1 = eye(n)
# S0 = empty(0)
#
# return ((Q0, Q1), S0)
, which may contain function names, class names, or code. Output only the next line. | self._QS = economic_qs_zeros(self._y.shape[0]) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.