code
stringlengths 1
25.8M
| language
stringclasses 18
values | source
stringclasses 4
values | repo
stringclasses 78
values | path
stringlengths 0
268
|
|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ReliefColorsWidget.py
---------------------
Date : December 2016
Copyright : (C) 2016 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'December 2016'
__copyright__ = '(C) 2016, Alexander Bruy'
import os
import codecs
from qgis.PyQt import uic
from qgis.PyQt.QtCore import pyqtSlot, QDir
from qgis.PyQt.QtGui import QColor, QBrush
from qgis.PyQt.QtWidgets import (QTreeWidgetItem,
QFileDialog,
QMessageBox,
QInputDialog,
QColorDialog
)
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import QgsApplication, QgsMapLayer
from qgis.analysis import QgsRelief
from processing.gui.wrappers import WidgetWrapper
from processing.tools import system
pluginPath = os.path.dirname(__file__)
WIDGET, BASE = uic.loadUiType(os.path.join(pluginPath, 'reliefcolorswidgetbase.ui'))
class ReliefColorsWidget(BASE, WIDGET):
    """Editor widget for relief shading classes.

    Each class is a row in ``reliefClassTree`` holding a lower elevation
    bound (column 0), an upper bound (column 1) and a display color
    (column 2, stored as the item's background brush).

    NOTE: the ``on_<object>_<signal>`` method names below are bound by
    Qt's automatic signal/slot connection performed by ``setupUi`` —
    do not rename them.
    """

    def __init__(self):
        super(ReliefColorsWidget, self).__init__(None)
        self.setupUi(self)

        # Standard QGIS theme icons for the tool buttons.
        self.btnAdd.setIcon(QgsApplication.getThemeIcon('/symbologyAdd.svg'))
        self.btnRemove.setIcon(QgsApplication.getThemeIcon('/symbologyRemove.svg'))
        self.btnUp.setIcon(QgsApplication.getThemeIcon('/mActionArrowUp.svg'))
        self.btnDown.setIcon(QgsApplication.getThemeIcon('/mActionArrowDown.svg'))
        self.btnLoad.setIcon(QgsApplication.getThemeIcon('/mActionFileOpen.svg'))
        self.btnSave.setIcon(QgsApplication.getThemeIcon('/mActionFileSave.svg'))
        self.btnAuto.setIcon(QgsApplication.getThemeIcon('/mActionReload.svg'))

        # Source raster for the "auto" classification; set via setLayer().
        self.layer = None

    @pyqtSlot()
    def on_btnAdd_clicked(self):
        """Append a new class row with zeroed bounds and a neutral grey."""
        item = QTreeWidgetItem()
        item.setText(0, '0.00')
        item.setText(1, '0.00')
        item.setBackground(2, QBrush(QColor(127, 127, 127)))
        self.reliefClassTree.addTopLevelItem(item)

    @pyqtSlot()
    def on_btnRemove_clicked(self):
        """Remove every currently selected class row."""
        # (the original also rebound the loop variable to None, which
        # had no effect and has been dropped)
        for item in self.reliefClassTree.selectedItems():
            self.reliefClassTree.invisibleRootItem().removeChild(item)

    @pyqtSlot()
    def on_btnDown_clicked(self):
        """Move the selected rows one position down, keeping selection."""
        selectedItems = self.reliefClassTree.selectedItems()
        for item in selectedItems:
            currentIndex = self.reliefClassTree.indexOfTopLevelItem(item)
            if currentIndex < self.reliefClassTree.topLevelItemCount() - 1:
                self.reliefClassTree.takeTopLevelItem(currentIndex)
                self.reliefClassTree.insertTopLevelItem(currentIndex + 1, item)
                self.reliefClassTree.setCurrentItem(item)

    @pyqtSlot()
    def on_btnUp_clicked(self):
        """Move the selected rows one position up, keeping selection."""
        selectedItems = self.reliefClassTree.selectedItems()
        for item in selectedItems:
            currentIndex = self.reliefClassTree.indexOfTopLevelItem(item)
            if currentIndex > 0:
                self.reliefClassTree.takeTopLevelItem(currentIndex)
                self.reliefClassTree.insertTopLevelItem(currentIndex - 1, item)
                self.reliefClassTree.setCurrentItem(item)

    @pyqtSlot()
    def on_btnLoad_clicked(self):
        """Replace the current classes with ones read from an XML file.

        The file is expected to contain ``ReliefColor`` elements with
        ``MinElevation``/``MaxElevation``/``red``/``green``/``blue``
        attributes (the format written by on_btnSave_clicked).
        """
        fileName, _ = QFileDialog.getOpenFileName(None,
                                                  self.tr('Import Colors and elevations from XML'),
                                                  QDir.homePath(),
                                                  self.tr('XML files (*.xml *.XML)'))
        if fileName == '':
            return

        doc = QDomDocument()
        with codecs.open(fileName, 'r', encoding='utf-8') as f:
            content = f.read()

        if not doc.setContent(content):
            QMessageBox.critical(None,
                                 self.tr('Error parsing XML'),
                                 self.tr('The XML file could not be loaded'))
            return

        self.reliefClassTree.clear()
        reliefColorList = doc.elementsByTagName('ReliefColor')
        for i in range(reliefColorList.length()):
            elem = reliefColorList.at(i).toElement()
            item = QTreeWidgetItem()
            item.setText(0, elem.attribute('MinElevation'))
            item.setText(1, elem.attribute('MaxElevation'))
            # NOTE(review): int() raises on missing/non-numeric color
            # attributes; malformed files surface as an exception.
            item.setBackground(2, QBrush(QColor(int(elem.attribute('red')),
                                                int(elem.attribute('green')),
                                                int(elem.attribute('blue')))))
            self.reliefClassTree.addTopLevelItem(item)

    @pyqtSlot()
    def on_btnSave_clicked(self):
        """Write the current classes to an XML file (see on_btnLoad_clicked)."""
        fileName, _ = QFileDialog.getSaveFileName(None,
                                                  self.tr('Export Colors and elevations as XML'),
                                                  QDir.homePath(),
                                                  self.tr('XML files (*.xml *.XML)'))
        if fileName == '':
            return

        if not fileName.lower().endswith('.xml'):
            fileName += '.xml'

        doc = QDomDocument()
        colorsElem = doc.createElement('ReliefColors')
        doc.appendChild(colorsElem)

        colors = self.reliefColors()
        for c in colors:
            elem = doc.createElement('ReliefColor')
            elem.setAttribute('MinElevation', str(c.minElevation))
            elem.setAttribute('MaxElevation', str(c.maxElevation))
            elem.setAttribute('red', str(c.color.red()))
            elem.setAttribute('green', str(c.color.green()))
            elem.setAttribute('blue', str(c.color.blue()))
            colorsElem.appendChild(elem)

        with codecs.open(fileName, 'w', encoding='utf-8') as f:
            f.write(doc.toString(2))

    @pyqtSlot()
    def on_btnAuto_clicked(self):
        """Compute optimized relief classes from the current layer."""
        if self.layer is None:
            return
        relief = QgsRelief(self.layer, system.getTempFilename(), 'GTiff')
        colors = relief.calculateOptimizedReliefClasses()
        self.populateColors(colors)

    @pyqtSlot(QTreeWidgetItem, int)
    def on_reliefClassTree_itemDoubleClicked(self, item, column):
        """Edit the double-clicked cell: bounds via a spin dialog, color
        via the color picker."""
        if not item:
            return

        if column == 0:
            d, ok = QInputDialog.getDouble(None,
                                           self.tr('Enter lower elevation class bound'),
                                           self.tr('Elevation'),
                                           float(item.text(0)),
                                           decimals=2)
            if ok:
                item.setText(0, str(d))
        elif column == 1:
            d, ok = QInputDialog.getDouble(None,
                                           self.tr('Enter upper elevation class bound'),
                                           self.tr('Elevation'),
                                           float(item.text(1)),
                                           decimals=2)
            if ok:
                item.setText(1, str(d))
        elif column == 2:
            c = QColorDialog.getColor(item.background(2).color(),
                                      None,
                                      self.tr('Select color for relief class'))
            if c.isValid():
                item.setBackground(2, QBrush(c))

    def reliefColors(self):
        """Return the tree contents as a list of QgsRelief.ReliefColor."""
        colors = []
        for i in range(self.reliefClassTree.topLevelItemCount()):
            item = self.reliefClassTree.topLevelItem(i)
            if item:
                c = QgsRelief.ReliefColor(item.background(2).color(),
                                          float(item.text(0)),
                                          float(item.text(1)))
                colors.append(c)
        return colors

    def populateColors(self, colors):
        """Replace the tree contents with the given ReliefColor list."""
        self.reliefClassTree.clear()
        for c in colors:
            item = QTreeWidgetItem()
            item.setText(0, str(c.minElevation))
            item.setText(1, str(c.maxElevation))
            item.setBackground(2, QBrush(c.color))
            self.reliefClassTree.addTopLevelItem(item)

    def setLayer(self, layer):
        """Remember the source layer used by the auto-classify button."""
        self.layer = layer

    def setValue(self, value):
        """Populate the tree from a 'min,max,r,g,b;min,max,r,g,b;…' string.

        An empty or None value now simply clears the tree; previously it
        raised an IndexError when splitting the empty row.
        """
        self.reliefClassTree.clear()
        if not value:
            return
        for r in value.split(';'):
            v = r.split(',')
            item = QTreeWidgetItem()
            item.setText(0, v[0])
            item.setText(1, v[1])
            # int() tolerates the leading space produced by value().
            color = QColor(int(v[2]), int(v[3]), int(v[4]))
            item.setBackground(2, QBrush(color))
            self.reliefClassTree.addTopLevelItem(item)

    def value(self):
        """Serialize the classes to the 'min,max,r,g,b;…' string format
        consumed by setValue()."""
        return ';'.join('{:f}, {:f}, {:d}, {:d}, {:d}'.format(c.minElevation,
                                                              c.maxElevation,
                                                              c.color.red(),
                                                              c.color.green(),
                                                              c.color.blue())
                        for c in self.reliefColors())
class ReliefColorsWidgetWrapper(WidgetWrapper):
    """Processing wrapper exposing ReliefColorsWidget as a parameter
    widget and keeping its source layer in sync with the parent
    layer parameter."""

    def createWidget(self):
        """Instantiate the wrapped editor widget."""
        return ReliefColorsWidget()

    def postInitialize(self, wrappers):
        """Find the wrapper of our parent parameter, seed the layer from
        it and subscribe to its changes."""
        parent_name = self.param.parent
        for other in wrappers:
            if other.param.name != parent_name:
                continue
            self.setLayer(other.value())
            other.widgetValueHasChanged.connect(self.parentValueChanged)
            break

    def parentValueChanged(self, wrapper):
        """Parent layer parameter changed: propagate the new value."""
        self.setLayer(wrapper.parameterValue())

    def setLayer(self, layer):
        """Pass the layer (reduced to its source path when it is a map
        layer object) down to the widget."""
        source = layer.source() if isinstance(layer, QgsMapLayer) else layer
        self.widget.setLayer(source)

    def setValue(self, value):
        """Delegate deserialization to the widget."""
        self.widget.setValue(value)

    def value(self):
        """Delegate serialization to the widget."""
        return self.widget.value()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
class ModuleDocFragment(object):
    """Shared documentation fragment for callback plugins that support
    configurable task-result formatting (JSON/YAML, indentation, line
    width and pretty-printing)."""

    # Parsed by Ansible's plugin documentation tooling; the YAML content
    # (option names, choices, env/ini mappings, version_added) is data
    # consumed at runtime — do not edit for style.
    DOCUMENTATION = r"""
    options:
      result_format:
        name: Format of the task result
        description:
          - Define the task result format used in the callback output.
          - These formats do not cause the callback to emit valid JSON or YAML formats.
          - The output contains these formats interspersed with other non-machine parsable data.
        type: str
        default: json
        env:
          - name: ANSIBLE_CALLBACK_RESULT_FORMAT
        ini:
          - key: callback_result_format
            section: defaults
        choices:
            - json
            - yaml
        version_added: '2.13'
      result_indentation:
        name: Indentation of the result
        description:
          - Allows to configure indentation for YAML and verbose/pretty JSON.
          - Please note that for O(result_format=yaml), only values between 2 and 9 will be handled as expected by PyYAML.
            If indentation is set to 1, or to 10 or larger, the first level of indentation will be used,
            but all further indentations will be by 2 spaces.
        type: int
        default: 4
        env:
          - name: ANSIBLE_CALLBACK_RESULT_INDENTATION
        ini:
          - key: callback_result_indentation
            section: defaults
        version_added: '2.20'
      result_yaml_line_width:
        name: Line width of YAML output
        description:
          - Configure the line width used for YAML. The YAML serializer will try to break longer lines.
        type: str
        default: default
        choices:
            default: Use PyYAML's default value, which is around 80 characters.
            no-break: Disable line breaks.
            terminal-width: Use the detected terminal width that is also used for other output of this callback.
        env:
          - name: ANSIBLE_CALLBACK_YAML_LINE_WIDTH
        ini:
          - key: callback_result_yaml_line_width
            section: defaults
        version_added: '2.21'
      pretty_results:
        name: Configure output for readability
        description:
          - Configure the result format to be more readable.
          - When O(result_format) is set to V(yaml) this option defaults to V(true), and defaults
            to V(false) when configured to V(json).
          - Setting this option to V(true) will force V(json) and V(yaml) results to always be pretty
            printed regardless of verbosity.
          - When set to V(true) and used with the V(yaml) result format, this option will
            modify module responses in an attempt to produce a more human friendly output at the expense
            of correctness, and should not be relied upon to aid in writing variable manipulations
            or conditionals. For correctness, set this option to V(false) or set O(result_format) to V(json).
        type: bool
        default: null
        env:
          - name: ANSIBLE_CALLBACK_FORMAT_PRETTY
        ini:
          - key: callback_format_pretty
            section: defaults
        version_added: '2.13'
    """
|
python
|
github
|
https://github.com/ansible/ansible
|
lib/ansible/plugins/doc_fragments/result_format_callback.py
|
---
applies_to:
stack:
serverless:
mapped_pages:
- https://www.elastic.co/guide/en/elasticsearch/reference/current/index-phrases.html
---
# index_phrases [index-phrases]
If enabled, two-term word combinations (*shingles*) are indexed into a separate field. This allows exact phrase queries (no slop) to run more efficiently, at the expense of a larger index. Note that this works best when stopwords are not removed, as phrases containing stopwords will not use the subsidiary field and will fall back to a standard phrase query. Accepts `true` or `false` (default: `false`).
|
unknown
|
github
|
https://github.com/elastic/elasticsearch
|
docs/reference/elasticsearch/mapping-reference/index-phrases.md
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Previewer bundles."""
from __future__ import unicode_literals
from invenio.ext.assets import Bundle, CleanCSSFilter, RequireJSFilter
# PDF.js-based previewer: vendored generic build plus local viewer and
# fullscreen glue code, minified into a single asset.
pdfjs = Bundle(
    "vendors/pdfjs-build/generic/web/compatibility.js",
    "vendors/pdfjs-build/generic/web/l10n.js",
    "vendors/pdfjs-build/generic/build/pdf.js",
    "js/previewer/pdfjs/viewer.js",
    "js/previewer/pdfjs/fullscreen.js",
    filters="uglifyjs",
    output="previewer/pdfjs.js",
    weight=20,
    bower={
        "pdfjs-build": "latest"
    }
)
# Fallback/alternative PDF viewer script (pdftk-based previewer).
pdftk = Bundle(
    "js/previewer/pdf_viewer.js",
    filters="uglifyjs",
    output="previewer/pdftk.js",
    weight=20
)
# Stylesheet for the PDF.js viewer above.
pdfjscss = Bundle(
    "css/previewer/pdfjs/viewer.css",
    filters=CleanCSSFilter(),
    output="previewer/pdfjs.css",
    weight=20
)
# CSV previewer built with RequireJS; pulls in d3 for rendering.
csv_previewer = Bundle(
    "js/previewer/csv_previewer/init.js",
    filters=RequireJSFilter(),
    output="previewer/csv_previewer.js",
    weight=20,
    bower={
        "d3": "latest"
    }
)
# Stylesheet for the pdftk-based viewer.
pdftkcss = Bundle(
    "css/previewer/pdf_viewer.css",
    filters=CleanCSSFilter(),
    output="previewer/pdftk.css",
    weight=20
)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
# Copyright 2016 Jakub Jursa <jakub.jursa1@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_nova_host_aggregate
short_description: Manage OpenStack host aggregates
extends_documentation_fragment: openstack
author: "Jakub Jursa (@kuboj)"
version_added: "2.3"
description:
- Create, update, or delete OpenStack host aggregates. If a aggregate
with the supplied name already exists, it will be updated with the
new name, new availability zone, new metadata and new list of hosts.
options:
name:
description: Name of the aggregate.
required: true
metadata:
description: Metadata dict.
availability_zone:
description: Availability zone to create aggregate into.
hosts:
description: List of hosts to set for an aggregate.
state:
description: Should the resource be present or absent.
choices: [present, absent]
default: present
requirements:
- "python >= 2.7"
- "openstacksdk"
'''
EXAMPLES = '''
# Create a host aggregate
- os_nova_host_aggregate:
cloud: mycloud
state: present
name: db_aggregate
hosts:
- host1
- host2
metadata:
type: dbcluster
# Delete an aggregate
- os_nova_host_aggregate:
cloud: mycloud
state: absent
name: db_aggregate
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def _needs_update(module, aggregate):
new_metadata = (module.params['metadata'] or {})
if module.params['availability_zone'] is not None:
new_metadata['availability_zone'] = module.params['availability_zone']
if ((module.params['name'] != aggregate.name) or
(module.params['hosts'] is not None and set(module.params['hosts']) != set(aggregate.hosts)) or
(module.params['availability_zone'] is not None and module.params['availability_zone'] != aggregate.availability_zone) or
(module.params['metadata'] is not None and new_metadata != aggregate.metadata)):
return True
return False
def _system_state_change(module, aggregate):
state = module.params['state']
if state == 'absent' and aggregate:
return True
if state == 'present':
if aggregate is None:
return True
return _needs_update(module, aggregate)
return False
def main():
    """Ansible entry point: converge an OpenStack host aggregate to the
    requested name, availability zone, metadata and host list, then exit
    via module.exit_json/fail_json."""
    argument_spec = openstack_full_argument_spec(
        name=dict(required=True),
        metadata=dict(required=False, default=None, type='dict'),
        availability_zone=dict(required=False, default=None),
        hosts=dict(required=False, default=None, type='list'),
        state=dict(default='present', choices=['absent', 'present']),
    )
    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **module_kwargs)

    name = module.params['name']
    metadata = module.params['metadata']
    availability_zone = module.params['availability_zone']
    hosts = module.params['hosts']
    state = module.params['state']

    # availability_zone is managed through its own parameter; drop any
    # copy the user placed inside metadata to avoid conflicting values.
    if metadata is not None:
        metadata.pop('availability_zone', None)

    sdk, cloud = openstack_cloud_from_module(module)
    try:
        aggregates = cloud.search_aggregates(name_or_id=name)
        if len(aggregates) == 1:
            aggregate = aggregates[0]
        elif len(aggregates) == 0:
            aggregate = None
        else:
            # search by a unique name/id should never match more than one
            raise Exception("Should not happen")

        if module.check_mode:
            # Check mode: report the predicted change without touching
            # the cloud.
            module.exit_json(changed=_system_state_change(module, aggregate))

        if state == 'present':
            if aggregate is None:
                # Create first, then attach hosts and metadata — the
                # create call only accepts name and availability zone.
                aggregate = cloud.create_aggregate(name=name,
                                                   availability_zone=availability_zone)
                if hosts:
                    for h in hosts:
                        cloud.add_host_to_aggregate(aggregate.id, h)
                if metadata:
                    cloud.set_aggregate_metadata(aggregate.id, metadata)
                changed = True
            else:
                if _needs_update(module, aggregate):
                    # NOTE(review): update_aggregate is only invoked when
                    # availability_zone is set, so a name-only change is
                    # never applied — confirm whether that is intended.
                    if availability_zone is not None:
                        aggregate = cloud.update_aggregate(aggregate.id, name=name,
                                                           availability_zone=availability_zone)
                    if metadata is not None:
                        metas = metadata
                        # Keys present on the aggregate but absent from
                        # the request are set to None so the cloud
                        # unsets them (availability_zone excepted).
                        for i in (set(aggregate.metadata.keys()) - set(metadata.keys())):
                            if i != 'availability_zone':
                                metas[i] = None
                        cloud.set_aggregate_metadata(aggregate.id, metas)
                    if hosts is not None:
                        # Reconcile host membership by set difference.
                        for i in (set(aggregate.hosts) - set(hosts)):
                            cloud.remove_host_from_aggregate(aggregate.id, i)
                        for i in (set(hosts) - set(aggregate.hosts)):
                            cloud.add_host_to_aggregate(aggregate.id, i)
                    changed = True
                else:
                    changed = False
            module.exit_json(changed=changed)

        elif state == 'absent':
            if aggregate is None:
                changed = False
            else:
                # Hosts must be detached before the aggregate can be
                # deleted.
                if hosts:
                    for h in hosts:
                        cloud.remove_host_from_aggregate(aggregate.id, h)
                cloud.delete_aggregate(aggregate.id)
                changed = True
            module.exit_json(changed=changed)

    except sdk.exceptions.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
|
unknown
|
codeparrot/codeparrot-clean
| ||
import collections
import collections.abc
import itertools

from sympy import S, Tuple, MatrixBase
from sympy import S, Tuple, diff, MatrixBase
from sympy.tensor.array import ImmutableDenseNDimArray
from sympy.tensor.array.ndim_array import NDimArray
def _arrayfy(a):
    """Coerce matrices, lists and tuples to an ImmutableDenseNDimArray;
    pass N-dim arrays and anything else (scalars) through unchanged."""
    convertible = (MatrixBase, list, tuple, Tuple)
    if isinstance(a, convertible) and not isinstance(a, NDimArray):
        return ImmutableDenseNDimArray(a)
    return a
def tensorproduct(*args):
    """
    Tensor product among scalars or array-like objects.

    Examples
    ========

    >>> from sympy.tensor.array import tensorproduct, Array
    >>> from sympy.abc import x, y, z, t
    >>> A = Array([[1, 2], [3, 4]])
    >>> B = Array([x, y])
    >>> tensorproduct(A, B)
    [[[x, y], [2*x, 2*y]], [[3*x, 3*y], [4*x, 4*y]]]
    >>> tensorproduct(A, x)
    [[x, 2*x], [3*x, 4*x]]
    >>> tensorproduct(A, B, B)
    [[[[x**2, x*y], [x*y, y**2]], [[2*x**2, 2*x*y], [2*x*y, 2*y**2]]], [[[3*x**2, 3*x*y], [3*x*y, 3*y**2]], [[4*x**2, 4*x*y], [4*x*y, 4*y**2]]]]

    Applying this function on two matrices will result in a rank 4 array.

    >>> from sympy import Matrix, eye
    >>> m = Matrix([[x, y], [z, t]])
    >>> p = tensorproduct(eye(3), m)
    >>> p
    [[[[x, y], [z, t]], [[0, 0], [0, 0]], [[0, 0], [0, 0]]], [[[0, 0], [0, 0]], [[x, y], [z, t]], [[0, 0], [0, 0]]], [[[0, 0], [0, 0]], [[0, 0], [0, 0]], [[x, y], [z, t]]]]
    """
    # Empty product is the multiplicative identity.
    if len(args) == 0:
        return S.One
    if len(args) == 1:
        return _arrayfy(args[0])
    if len(args) > 2:
        # N-ary product reduces pairwise, left to right.
        return tensorproduct(tensorproduct(args[0], args[1]), *args[2:])

    # length of args is 2:
    a, b = map(_arrayfy, args)

    if not isinstance(a, NDimArray) or not isinstance(b, NDimArray):
        # At least one operand is a scalar: ordinary multiplication.
        return a*b

    # Kronecker-style flattening: every element of `a` times every
    # element of `b` in row-major order; shapes concatenate.
    al = list(a)
    bl = list(b)

    product_list = [i*j for i in al for j in bl]
    return ImmutableDenseNDimArray(product_list, a.shape + b.shape)
def tensorcontraction(array, *contraction_axes):
    """
    Contraction of an array-like object on the specified axes.

    Examples
    ========

    >>> from sympy.tensor.array import Array, tensorcontraction
    >>> from sympy import Matrix, eye
    >>> tensorcontraction(eye(3), (0, 1))
    3
    >>> A = Array(range(18), (3, 2, 3))
    >>> A
    [[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]], [[12, 13, 14], [15, 16, 17]]]
    >>> tensorcontraction(A, (0, 2))
    [21, 30]

    Matrix multiplication may be emulated with a proper combination of
    ``tensorcontraction`` and ``tensorproduct``

    >>> from sympy.tensor.array import tensorproduct
    >>> from sympy.abc import a,b,c,d,e,f,g,h
    >>> m1 = Matrix([[a, b], [c, d]])
    >>> m2 = Matrix([[e, f], [g, h]])
    >>> p = tensorproduct(m1, m2)
    >>> p
    [[[[a*e, a*f], [a*g, a*h]], [[b*e, b*f], [b*g, b*h]]], [[[c*e, c*f], [c*g, c*h]], [[d*e, d*f], [d*g, d*h]]]]
    >>> tensorcontraction(p, (1, 2))
    [[a*e + b*g, a*f + b*h], [c*e + d*g, c*f + d*h]]
    >>> m1*m2
    Matrix([
    [a*e + b*g, a*f + b*h],
    [c*e + d*g, c*f + d*h]])
    """
    array = _arrayfy(array)

    # Verify contraction_axes: each group must be an iterable of axes of
    # equal dimension, and no axis may appear twice.
    taken_dims = set()
    for axes_group in contraction_axes:
        # collections.Iterable was removed in Python 3.10; the ABC lives
        # in collections.abc.
        if not isinstance(axes_group, collections.abc.Iterable):
            raise ValueError("collections of contraction axes expected")

        dim = array.shape[axes_group[0]]

        for d in axes_group:
            if d in taken_dims:
                raise ValueError("dimension specified more than once")
            if dim != array.shape[d]:
                raise ValueError("cannot contract between axes of different dimension")
            taken_dims.add(d)

    rank = array.rank()
    remaining_shape = [dim for i, dim in enumerate(array.shape) if i not in taken_dims]

    # cum_shape[i] is the row-major stride (in flat elements) of axis i.
    cum_shape = [0]*rank
    _cumul = 1
    for i in range(rank):
        cum_shape[rank - i - 1] = _cumul
        _cumul *= int(array.shape[rank - i - 1])

    # For every non-contracted axis, the flat-index contribution of each
    # position along that axis.
    remaining_indices = [[cum_shape[i]*j for j in range(array.shape[i])]
                         for i in range(rank) if i not in taken_dims]

    contracted_array = []
    for icontrib in itertools.product(*remaining_indices):
        i = sum(icontrib)
        isum = S.Zero
        # Sum over the diagonal of every contraction group.
        for axes_group in contraction_axes:
            for js in range(array.shape[axes_group[0]]):
                isum += array[i + sum([cum_shape[ig]*js for ig in axes_group])]
        contracted_array.append(isum)

    if len(remaining_indices) == 0:
        # Full contraction: the result is a scalar.
        assert len(contracted_array) == 1
        return contracted_array[0]

    return type(array)(contracted_array, remaining_shape)
def derive_by_array(expr, dx):
    r"""
    Derivative by arrays. Supports both arrays and scalars.

    Given the array `A_{i_1, \ldots, i_N}` and the array `X_{j_1, \ldots, j_M}`
    this function will return a new array `B` defined by

    `B_{j_1,\ldots,j_M,i_1,\ldots,i_N} := \frac{\partial A_{i_1,\ldots,i_N}}{\partial X_{j_1,\ldots,j_M}}`

    Examples
    ========

    >>> from sympy.tensor.array import derive_by_array
    >>> from sympy.abc import x, y, z, t
    >>> from sympy import cos
    >>> derive_by_array(cos(x*t), x)
    -t*sin(t*x)
    >>> derive_by_array(cos(x*t), [x, y, z, t])
    [-t*sin(t*x), 0, 0, -x*sin(t*x)]
    >>> derive_by_array([x, y**2*z], [[x, y], [z, t]])
    [[[1, 0], [0, 2*y*z]], [[0, y**2], [0, 0]]]
    """
    # collections.Iterable was removed in Python 3.10; use the ABC from
    # collections.abc instead.
    array_types = (collections.abc.Iterable, MatrixBase, NDimArray)

    if isinstance(dx, array_types):
        dx = ImmutableDenseNDimArray(dx)
        # Every differentiation variable must be derivable-by (symbols,
        # not arbitrary expressions).
        for i in dx:
            if not i._diff_wrt:
                raise ValueError("cannot derive by this array")

    if isinstance(expr, array_types):
        expr = ImmutableDenseNDimArray(expr)
        if isinstance(dx, array_types):
            # Array-by-array: dx axes come first in the result.
            new_array = [[y.diff(x) for y in expr] for x in dx]
            return type(expr)(new_array, dx.shape + expr.shape)
        else:
            return expr.diff(dx)
    else:
        if isinstance(dx, array_types):
            return ImmutableDenseNDimArray([expr.diff(i) for i in dx], dx.shape)
        else:
            return diff(expr, dx)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from .fields import ObjectReferenceTypeField
class ObjectReference(object):
    """Polymorphic reference to exactly one Workspace, Vault or Card.

    The concrete class is expected to provide ``type``, ``to_workspace``,
    ``to_vault`` and ``to_card`` attributes (presumably ORM fields —
    confirm against the model definitions).
    """

    def compute_type(self, force=None):
        """Derive ``self.type`` from whichever to_* reference is set.

        Runs only when ``type`` is unset, unless ``force`` is truthy.
        Raises RuntimeError when more than one reference is set, or none.
        """
        if not self.type or force:
            type_set = False
            if self.to_workspace:
                if type_set:
                    raise RuntimeError('Role can reference only one object')
                self.type = ObjectReferenceTypeField.TYPE_WORKSPACE
                type_set = True
            if self.to_vault:
                if type_set:
                    raise RuntimeError('Role can reference only one object')
                self.type = ObjectReferenceTypeField.TYPE_VAULT
                type_set = True
            if self.to_card:
                if type_set:
                    raise RuntimeError('Role can reference only one object')
                self.type = ObjectReferenceTypeField.TYPE_CARD
                type_set = True
            if not self.type:
                raise RuntimeError('Role has no associated object')

    def get_object(self):
        """Return the referenced object according to ``self.type``.

        Raises RuntimeError when no reference is set.
        """
        self.compute_type()
        if self.type == ObjectReferenceTypeField.TYPE_WORKSPACE:
            return self.to_workspace
        if self.type == ObjectReferenceTypeField.TYPE_VAULT:
            return self.to_vault
        if self.type == ObjectReferenceTypeField.TYPE_CARD:
            return self.to_card
        raise RuntimeError('Role has no associated object')

    def set_object(self, object):
        """Point this reference at ``object`` (a Workspace, Vault or
        Card instance), clearing the other slots first.

        Passing a falsy object clears the reference entirely. Any other
        type leaves all slots None, and compute_type then raises.
        """
        self.to_workspace = None
        self.to_vault = None
        self.to_card = None
        self.type = None
        if not object:
            return
        # Dispatch on class name rather than isinstance — presumably to
        # avoid circular model imports; confirm.
        if object.__class__.__name__ == 'Workspace':
            self.to_workspace = object
        if object.__class__.__name__ == 'Vault':
            self.to_vault = object
        if object.__class__.__name__ == 'Card':
            self.to_card = object
        self.compute_type()
        return
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START encrypted-keyset-example]
"""A command-line utility for generating, encrypting and storing keysets."""
from __future__ import absolute_import
from __future__ import division
# Placeholder for import for type annotations
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import tink
from tink import aead
from tink.integration import gcpkms
FLAGS = flags.FLAGS
# CLI flags: --mode selects the operation; the remaining flags locate the
# keyset file, the Cloud KMS key-encryption key, credentials and the
# input/output files. Required flags are enforced before app.run().
flags.DEFINE_enum('mode', None, ['generate', 'encrypt', 'decrypt'],
                  'The operation to perform.')
flags.DEFINE_string('keyset_path', None,
                    'Path to the keyset used for encryption.')
flags.DEFINE_string('kek_uri', None,
                    'The Cloud KMS URI of the key encryption key.')
flags.DEFINE_string('gcp_credential_path', None,
                    'Path to the GCP credentials JSON file.')
flags.DEFINE_string('input_path', None, 'Path to the input file.')
flags.DEFINE_string('output_path', None, 'Path to the output file.')
flags.DEFINE_string('associated_data', None,
                    'Optional associated data to use with the '
                    'encryption operation.')
def main(argv):
  """Generate an encrypted keyset, or encrypt/decrypt a file with it.

  Mode 'generate' creates a new AES128-GCM keyset and writes it to
  --keyset_path encrypted under the Cloud KMS key-encryption key (KEK).
  Modes 'encrypt'/'decrypt' read that encrypted keyset back and apply it
  to --input_path, writing --output_path. Returns a non-zero int on
  failure (app.run treats the return value as the exit status).
  """
  del argv  # Unused.

  associated_data = b'' if not FLAGS.associated_data else bytes(
      FLAGS.associated_data, 'utf-8')

  # Initialise Tink
  try:
    aead.register()
  except tink.TinkError as e:
    logging.error('Error initialising Tink: %s', e)
    return 1

  # Read the GCP credentials and set up a client
  try:
    gcpkms.GcpKmsClient.register_client(
        FLAGS.kek_uri, FLAGS.gcp_credential_path)
  except tink.TinkError as e:
    logging.error('Error initializing GCP client: %s', e)
    return 1

  # Create an AEAD primitive from the key-encryption key (KEK) for encrypting
  # Tink keysets
  try:
    handle = tink.KeysetHandle.generate_new(
        aead.aead_key_templates.create_kms_aead_key_template(
            key_uri=FLAGS.kek_uri))
    gcp_aead = handle.primitive(aead.Aead)
  except tink.TinkError as e:
    logging.exception('Error creating KMS AEAD primitive: %s', e)
    return 1

  if FLAGS.mode == 'generate':
    # [START generate-a-new-keyset]
    # Generate a new keyset
    try:
      key_template = aead.aead_key_templates.AES128_GCM
      keyset_handle = tink.KeysetHandle.generate_new(key_template)
    except tink.TinkError as e:
      logging.exception('Error creating primitive: %s', e)
      return 1
    # [END generate-a-new-keyset]

    # [START encrypt-a-keyset]
    # Encrypt the keyset_handle with the remote key-encryption key (KEK)
    with open(FLAGS.keyset_path, 'wt') as keyset_file:
      try:
        keyset_handle.write(tink.JsonKeysetWriter(keyset_file), gcp_aead)
      except tink.TinkError as e:
        logging.exception('Error writing key: %s', e)
        return 1
    return 0
    # [END encrypt-a-keyset]

  # Use the keyset to encrypt/decrypt data

  # Read the encrypted keyset into a keyset_handle
  with open(FLAGS.keyset_path, 'rt') as keyset_file:
    try:
      text = keyset_file.read()
      keyset_handle = tink.KeysetHandle.read(
          tink.JsonKeysetReader(text), gcp_aead)
    except tink.TinkError as e:
      logging.exception('Error reading key: %s', e)
      return 1

  # Get the primitive
  try:
    cipher = keyset_handle.primitive(aead.Aead)
  except tink.TinkError as e:
    logging.error('Error creating primitive: %s', e)
    return 1

  with open(FLAGS.input_path, 'rb') as input_file:
    input_data = input_file.read()
    if FLAGS.mode == 'decrypt':
      output_data = cipher.decrypt(input_data, associated_data)
    elif FLAGS.mode == 'encrypt':
      output_data = cipher.encrypt(input_data, associated_data)
    else:
      logging.error(
          'Error mode not supported. Please choose "encrypt" or "decrypt".')
      return 1

    with open(FLAGS.output_path, 'wb') as output_file:
      output_file.write(output_data)


if __name__ == '__main__':
  # These flags have no usable defaults; fail fast if any is missing.
  flags.mark_flags_as_required([
      'mode', 'keyset_path', 'kek_uri', 'gcp_credential_path'])
  app.run(main)
# [END encrypted-keyset-example]
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Implementation of compression/decompression based on the popular
* gzip compressed file format.
*
* @see <a href="http://www.gzip.org/">gzip</a>
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
package org.apache.hadoop.io.compress.zlib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
|
java
|
github
|
https://github.com/apache/hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/package-info.java
|
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package terraform
import (
"strings"
"testing"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/states"
"github.com/zclconf/go-cty/cty"
)
// TestGraphNodeImportStateExecute verifies that executing a
// graphNodeImportState against a mock provider records exactly one
// imported resource and preserves the imported resource's ID.
func TestGraphNodeImportStateExecute(t *testing.T) {
	state := states.NewState()
	provider := testProvider("aws")

	// Canned import response: one aws_instance whose id is "bar".
	provider.ImportResourceStateResponse = &providers.ImportResourceStateResponse{
		ImportedResources: []providers.ImportedResource{
			{
				TypeName: "aws_instance",
				State: cty.ObjectVal(map[string]cty.Value{
					"id": cty.StringVal("bar"),
				}),
			},
		},
	}
	provider.ConfigureProvider(providers.ConfigureProviderRequest{})

	// Mock evaluation context wiring the state, provider and a minimal
	// aws_instance schema (just a required "id" attribute).
	ctx := &MockEvalContext{
		Scope:            evalContextModuleInstance{Addr: addrs.RootModuleInstance},
		StateState:       state.SyncWrapper(),
		ProviderProvider: provider,
		ProviderSchemaSchema: providers.GetProviderSchemaResponse{
			ResourceTypes: map[string]providers.Schema{
				"aws_instance": {
					Body: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"id": {
								Type:     cty.String,
								Required: true,
							},
						},
					},
				},
			},
		},
	}

	// Import a new aws_instance.foo, this time with ID=bar. The original
	// aws_instance.foo object should be removed from state and replaced with
	// the new.
	node := graphNodeImportState{
		Addr: addrs.Resource{
			Mode: addrs.ManagedResourceMode,
			Type: "aws_instance",
			Name: "foo",
		}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
		ID: "bar",
		ResolvedProvider: addrs.AbsProviderConfig{
			Provider: addrs.NewDefaultProvider("aws"),
			Module:   addrs.RootModule,
		},
	}

	diags := node.Execute(ctx, walkImport)
	if diags.HasErrors() {
		t.Fatalf("Unexpected error: %s", diags.Err())
	}

	// Exactly one imported resource is expected from the canned response.
	if len(node.states) != 1 {
		t.Fatalf("Wrong result! Expected one imported resource, got %d", len(node.states))
	}

	// Verify the ID for good measure
	id := node.states[0].State.GetAttr("id")
	if !id.RawEquals(cty.StringVal("bar")) {
		t.Fatalf("Wrong result! Expected id \"bar\", got %q", id.AsString())
	}
}
// TestGraphNodeImportStateSubExecute verifies that graphNodeImportStateSub
// writes an already-imported resource into the synchronized state under the
// target address, attributed to the resolved provider.
func TestGraphNodeImportStateSubExecute(t *testing.T) {
	state := states.NewState()
	provider := testProvider("aws")
	provider.ConfigureProvider(providers.ConfigureProviderRequest{})
	ctx := &MockEvalContext{
		StateState:       state.SyncWrapper(),
		ProviderProvider: provider,
		ProviderSchemaSchema: providers.ProviderSchema{
			ResourceTypes: map[string]providers.Schema{
				"aws_instance": {
					Body: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"id": {
								Type:     cty.String,
								Computed: true,
							},
						},
					},
				},
			},
		},
	}
	// The resource object as returned by the provider's import step.
	importedResource := providers.ImportedResource{
		TypeName: "aws_instance",
		State:    cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("bar")}),
	}
	node := graphNodeImportStateSub{
		TargetAddr: addrs.Resource{
			Mode: addrs.ManagedResourceMode,
			Type: "aws_instance",
			Name: "foo",
		}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
		State: importedResource,
		ResolvedProvider: addrs.AbsProviderConfig{
			Provider: addrs.NewDefaultProvider("aws"),
			Module:   addrs.RootModule,
		},
	}
	diags := node.Execute(ctx, walkImport)
	if diags.HasErrors() {
		t.Fatalf("Unexpected error: %s", diags.Err())
	}
	// check for resource in state
	actual := strings.TrimSpace(state.String())
	expected := `aws_instance.foo:
  ID = bar
  provider = provider["registry.terraform.io/hashicorp/aws"]`
	if actual != expected {
		t.Fatalf("bad state after import: \n%s", actual)
	}
}
// TestGraphNodeImportStateSubExecuteNull verifies that the import fails when
// the provider's follow-up read reports the resource as non-existent (a null
// object), rather than silently importing an empty state.
func TestGraphNodeImportStateSubExecuteNull(t *testing.T) {
	state := states.NewState()
	provider := testProvider("aws")
	provider.ReadResourceFn = func(req providers.ReadResourceRequest) (resp providers.ReadResourceResponse) {
		// return null indicating that the requested resource does not exist
		resp.NewState = cty.NullVal(cty.Object(map[string]cty.Type{
			"id": cty.String,
		}))
		return resp
	}
	ctx := &MockEvalContext{
		StateState:       state.SyncWrapper(),
		ProviderProvider: provider,
		ProviderSchemaSchema: providers.ProviderSchema{
			ResourceTypes: map[string]providers.Schema{
				"aws_instance": {
					Body: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"id": {
								Type:     cty.String,
								Computed: true,
							},
						},
					},
				},
			},
		},
	}
	// The provider claims a successful import, but the read above says the
	// object is gone — Execute must surface that as an error.
	importedResource := providers.ImportedResource{
		TypeName: "aws_instance",
		State:    cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("bar")}),
	}
	node := graphNodeImportStateSub{
		TargetAddr: addrs.Resource{
			Mode: addrs.ManagedResourceMode,
			Type: "aws_instance",
			Name: "foo",
		}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
		State: importedResource,
		ResolvedProvider: addrs.AbsProviderConfig{
			Provider: addrs.NewDefaultProvider("aws"),
			Module:   addrs.RootModule,
		},
	}
	diags := node.Execute(ctx, walkImport)
	if !diags.HasErrors() {
		t.Fatal("expected error for non-existent resource")
	}
}
|
go
|
github
|
https://github.com/hashicorp/terraform
|
internal/terraform/transform_import_state_test.go
|
#!/usr/bin/env python
import sys, os
from distutils.core import setup
from distutils.command.install_scripts import install_scripts
# Release version of the Markdown distribution (also embedded in download_url).
version = '2.0.3'
class md_install_scripts(install_scripts):
""" Customized install_scripts. Create markdown.bat for win32. """
def run(self):
install_scripts.run(self)
if sys.platform == 'win32':
try:
script_dir = os.path.join(sys.prefix, 'Scripts')
script_path = os.path.join(script_dir, 'markdown')
bat_str = '@"%s" "%s" %%*' % (sys.executable, script_path)
bat_path = os.path.join(self.install_dir, 'markdown.bat')
f = file(bat_path, 'w')
f.write(bat_str)
f.close()
print 'Created:', bat_path
except Exception, e:
print 'ERROR: Unable to create %s: %s' % (bat_path, e)
# Distribution metadata handed to distutils' setup() below.
data = dict(
    name = 'Markdown',
    version = version,
    url = 'http://www.freewisdom.org/projects/python-markdown',
    download_url = 'http://pypi.python.org/packages/source/M/Markdown/Markdown-%s.tar.gz' % version,
    description = 'Python implementation of Markdown.',
    author = 'Manfred Stienstra and Yuri takhteyev',
    author_email = 'yuri [at] freewisdom.org',
    maintainer = 'Waylan Limberg',
    maintainer_email = 'waylan [at] gmail.com',
    license = 'BSD License',
    packages = ['markdown', 'markdown.extensions'],
    scripts = ['bin/markdown'],
    # Adds the win32 .bat shim after the normal script install (see class above).
    cmdclass = {'install_scripts': md_install_scripts},
    classifiers = ['Development Status :: 5 - Production/Stable',
                   'License :: OSI Approved :: BSD License',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 2.3',
                   'Programming Language :: Python :: 2.4',
                   'Programming Language :: Python :: 2.5',
                   'Programming Language :: Python :: 2.6',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.0',
                   'Topic :: Communications :: Email :: Filters',
                   'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
                   'Topic :: Internet :: WWW/HTTP :: Site Management',
                   'Topic :: Software Development :: Documentation',
                   'Topic :: Software Development :: Libraries :: Python Modules',
                   'Topic :: Text Processing :: Filters',
                   'Topic :: Text Processing :: Markup :: HTML',
                   ],
    zip_safe = False
    )
# elementtree became part of the stdlib (xml.etree) in Python 2.5; require
# the standalone package only on older interpreters.
if sys.version[:3] < '2.5':
    data['install_requires'] = ['elementtree']
setup(**data)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import sys
sys.path += ["../"]
from mingus.containers.NoteContainer import NoteContainer
from mingus.containers.Note import Note
import unittest
class test_NoteContainers(unittest.TestCase):
    """Unit tests for mingus.containers.NoteContainer.

    Every ``self.assert_`` call was replaced with ``self.assertTrue``:
    ``assert_`` is a deprecated alias of ``assertTrue`` in unittest.
    """

    def setUp(self):
        # Shared fixtures. n5 contains a duplicate "A" to verify that
        # containers de-duplicate (see test_get_note_names).
        self.n1 = NoteContainer()
        self.n2 = NoteContainer("A")
        self.n3 = NoteContainer(["A", "C", "E"])
        self.n4 = NoteContainer(["A", "C", "E", "F", "G"])
        self.n5 = NoteContainer(["A", "C", "E", "F", "G", "A"])

    def test_add_note(self):
        """add_note and the +/- operators on single notes and lists."""
        self.assertEqual(self.n2 , self.n2.add_note("A"))
        self.assertEqual(NoteContainer("A"), self.n1.add_note("A"))
        self.n1 - "A"
        self.assertEqual(self.n3 + ["F", "G"], self.n4)
        self.assertEqual(self.n2 + ["C", "E"], self.n3 - ["F", "G"])
        self.n2 - ["C", "E"]

    def test_add_notes(self):
        """add_notes accepts strings, [name, octave] pairs, Note objects,
        lists of Notes and whole NoteContainers."""
        self.assertEqual(self.n3, self.n1.add_notes(["A", "C", "E"]))
        self.n1.empty()
        self.assertEqual(self.n3, self.n1.add_notes([["A", 4], ["C", 5], ["E", 5]]))
        self.n1.empty()
        self.assertEqual(self.n2, self.n1.add_notes(Note("A")))
        self.n1.empty()
        self.assertEqual(self.n2, self.n1.add_notes([Note("A")]))
        self.n1.empty()
        self.assertEqual(self.n2, self.n1.add_notes("A"))
        self.n1.empty()
        self.assertEqual(self.n3, self.n2 + NoteContainer([["C", 5], ["E", 5]]))
        self.n2 = NoteContainer("A")

    def test_remove_note(self):
        """remove_note deletes one note name at a time down to empty."""
        n = NoteContainer(["C", "E", "G"])
        n.remove_note("C")
        self.assertEqual(NoteContainer(["E", "G"]), n)
        n.remove_note("E")
        self.assertEqual(NoteContainer(["G"]), n)
        n.remove_note("G")
        self.assertEqual(NoteContainer([]), n)

    def test_determine(self):
        """determine names the chord, also after transposition."""
        n = NoteContainer(["C", "E", "G"])
        self.assertEqual(["C major triad"], n.determine())
        n.transpose("3")
        self.assertEqual(["E major triad"], n.determine())

    def test_remove_notes(self):
        # Placeholder: remove_notes is not covered yet.
        pass

    def test_sort(self):
        """sort orders notes; accidentals keep their relative position."""
        n1 = NoteContainer(["Eb", "Gb", "C"])
        n2 = NoteContainer(["Eb", "Gb", "Cb"])
        n1.sort()
        n2.sort()
        self.assertEqual(Note("Eb"), n1[0])
        self.assertEqual(Note("Gb"), n2[1])

    def test_getitem(self):
        """Indexing yields Note objects with the expected octaves."""
        self.assertEqual(self.n2[0], Note("A"))
        self.assertEqual(self.n3[0], Note("A"))
        self.assertEqual(self.n4[0], Note("A"))
        self.assertEqual(self.n4[1], Note("C", 5))
        self.assertEqual(self.n4[2], Note("E", 5))

    def test_transpose(self):
        """transpose shifts every note by the given interval, octaves intact."""
        n = NoteContainer(["C", "E", "G"])
        self.assertEqual(NoteContainer(["E", "G#", "B"]), n.transpose("3"))
        n = NoteContainer(["C-6", "E-4", "G-2"])
        self.assertEqual(NoteContainer(["E-6", "G#-4", "B-2"]), n.transpose("3"))

    def test_get_note_names(self):
        """get_note_names returns unique names (n5's duplicate "A" collapses)."""
        self.assertEqual(['A', 'C', 'E'], self.n3.get_note_names())
        self.assertEqual(['A', 'C', 'E', 'F', 'G'], self.n4.get_note_names())
        self.assertEqual(['A', 'C', 'E', 'F', 'G'], self.n5.get_note_names())

    def test_from_chord_shorthand(self):
        self.assertEqual(self.n3, NoteContainer().from_chord_shorthand("Am"))

    def test_from_progression_shorthand(self):
        self.assertEqual(self.n3, NoteContainer().from_progression_shorthand("VI"))

    def test_from_interval_shorthand(self):
        """Intervals can be built upwards (default) or downwards (False)."""
        self.assertEqual(NoteContainer(['C-4', 'G-4']), NoteContainer().from_interval_shorthand("C", "5"))
        self.assertEqual(NoteContainer(['F-3', 'C-4']), NoteContainer().from_interval_shorthand("C", "5", False))

    def test_is_consonant(self):
        """Triads are consonant; sevenths and diminished chords are not."""
        self.assertTrue(NoteContainer().from_chord("Am").is_consonant())
        self.assertTrue(NoteContainer().from_chord("C").is_consonant())
        self.assertTrue(NoteContainer().from_chord("G").is_consonant())
        self.assertTrue(NoteContainer().from_chord("Dm").is_consonant())
        self.assertTrue(NoteContainer().from_chord("E").is_consonant())
        self.assertTrue(not NoteContainer().from_chord("E7").is_consonant())
        self.assertTrue(not NoteContainer().from_chord("Am7").is_consonant())
        self.assertTrue(not NoteContainer().from_chord("Gdim").is_consonant())

    def test_is_perfect_consonant(self):
        """Perfect consonance: unisons/octaves, fifths, and fourths unless
        fourths are excluded via the False flag."""
        self.assertTrue(NoteContainer(['A', 'E']).is_perfect_consonant())
        self.assertTrue(NoteContainer(['A-4', 'A-6']).is_perfect_consonant())
        self.assertTrue(NoteContainer(['A', 'D']).is_perfect_consonant())
        self.assertTrue(not NoteContainer(['A', 'D']).is_perfect_consonant(False))
        self.assertTrue(not NoteContainer().from_chord("Am").is_perfect_consonant())
        self.assertTrue(not NoteContainer().from_chord("C").is_perfect_consonant())
        self.assertTrue(not NoteContainer().from_chord("G").is_perfect_consonant())
        self.assertTrue(not NoteContainer().from_chord("Dm").is_perfect_consonant())
        self.assertTrue(not NoteContainer().from_chord("E").is_perfect_consonant())

    def test_is_imperfect_consonant(self):
        """Imperfect consonance: thirds and sixths, not seconds/fifths/octaves."""
        self.assertTrue(NoteContainer(['A', 'C']).is_imperfect_consonant())
        self.assertTrue(NoteContainer(['A', 'C#']).is_imperfect_consonant())
        self.assertTrue(NoteContainer(['A', 'F']).is_imperfect_consonant())
        self.assertTrue(NoteContainer(['A', 'F#']).is_imperfect_consonant())
        self.assertTrue(not NoteContainer(['A', 'B']).is_imperfect_consonant())
        self.assertTrue(not NoteContainer(['A', 'E']).is_imperfect_consonant())
        self.assertTrue(not NoteContainer(['A-4', 'A-5']).is_imperfect_consonant())

    def test_is_dissonant(self):
        """Dissonance is the complement of consonance for these chords."""
        self.assertTrue(NoteContainer().from_chord("E7").is_dissonant())
        self.assertTrue(NoteContainer().from_chord("Am7").is_dissonant())
        self.assertTrue(NoteContainer().from_chord("Gdim").is_dissonant())
        self.assertTrue(not NoteContainer().from_chord("Am").is_dissonant())
        self.assertTrue(not NoteContainer().from_chord("C").is_dissonant())
        self.assertTrue(not NoteContainer().from_chord("G").is_dissonant())
        self.assertTrue(not NoteContainer().from_chord("Dm").is_dissonant())
def suite():
    """Build a TestSuite containing every test in test_NoteContainers."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(test_NoteContainers)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import cgi
import urllib, urllib2, Cookie
import cookielib
import re
import logging
from urlparse import urlparse
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch
################################################################################
class URLOpener:
def __init__(self):
self.cookie = Cookie.SimpleCookie()
self.jSessionId = ""
def open(self, url, data = None):
if data is None:
method = urlfetch.GET
else:
method = urlfetch.POST
while url is not None:
try:
o = urlparse(url)
path = o.path
str = "Getting url ["+url+"] cookie ["+self._makeCookieHeader(path)+"]"
if data != None:
str += " data ["+data+"]"
logging.debug(str)
response = urlfetch.fetch(url=url,
payload=data,
method=method,
headers=self._getHeaders(path),
allow_truncated=False,
follow_redirects=False,
deadline=10
)
data = None # Next request will be a get, so no need to send the data again.
method = urlfetch.GET
cookieStr = response.headers.get('Set-cookie', '')
if self.jSessionId == "":
if cookieStr.find("JSESSIONID") != -1:
pattern = re.compile('JSESSIONID=(.*?);')
match = pattern.search(cookieStr)
if match != None:
self.jSessionId = match.group(1)
logging.debug("Received cookies: ["+cookieStr + "]\n")
self.cookie.load(response.headers.get('Set-cookie', '')) # Load the cookies from the response
# Change cookie to the gathered JSESSIONID
url = response.headers.get('location')
except urllib2.URLError, e:
logging.error("Generic error")
self.response.out.write("Error")
handleError(e)
#except DownloadError:
# logging.error("Download error")
#except:
# logging.error("Other error")
return response
def _getHeaders(self, path):
headers = {
# 'Content-Type': 'text/html',
'User-agent': "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12",
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Keep-Alive': '300',
'Connection': 'keep-alive',
'Accept-Language': 'en-us,en;q=0.5',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Cookie' : self._makeCookieHeader(path)
}
return headers
def _makeCookieHeader(self, path):
cookieHeader = ""
logStr = ""
for k,v in self.cookie.items():
# if v.key == "JSESSIONID":
# if self.jSessionId != "":
# logging.debug("\n==== Replaced jsession ====\n")
# v.value = self.jSessionId
if 'path' in v:
if path.find(v['path'], 0, len(v['path'])) != -1:
logging.debug("\n==== "+v['path']+" ====\n")
cookieHeader += "%s=%s; " % (v.key, v.value)
elif v["path"] == "/,":
logging.debug("\n==== "+v['path']+" ====\n")
cookieHeader += "%s=%s; " % (v.key, v.value)
else:
logging.debug("\n==== Not Including "+v['path']+" ====\n")
else:
cookieHeader += "%s=%s; " % (v.key, v.value)
# return self.cookie.output("")
return cookieHeader
################################################################################
class BooksParser:
    """Pulls book entries out of a bookstore section page.

    setup() locates every book row via the first regex ('requirement') and
    records [start, end) offsets for each row; next() then runs every regex
    against one row and appends the captured fields (or "NOT_FOUND") to
    `results`.
    """
    # Legacy class-level defaults; instances re-initialize these in __init__.
    result = ""
    results = ""
    posList = ""
    posEndList = ""
    data = ""
    count = 0
    # One regex per field; each has a single capture group.
    regex = {
        'requirement': "<tr class=\"book course-(?P<requirement>[\w ]+)\">",
        'image': "<td class=\"book-cover\"><a href=\"(?P<image>.*?)\"",
        'title': '<span class=\"book-title\">(?P<title>.*?)</span>',
        'author': '<span class=\"book-meta book-author\">(?P<author>.*?)</span>',
        'isbn': '<span class=\"isbn\">(?P<isbn>\d+)</span>',
        'copyright': '<span class=\"book-meta book-copyright\">(?P<copyright>.*?)</span>',
        'publisher': '<span class=\"book-meta book-publisher\">(?P<publisher>.*?)</span>',
        'edition': '<span class=\"book-meta book-edition\">(?P<edition>.*?)</span>',
        'binding': '<span class=\"book-meta book-binding\">(?P<binding>.*?)</span>',
        'priceNew': "<input type=\"hidden\" name=\"product-new-price-\d+\" id=\"product-new-price-\d+\" value=\"(?P<priceNew>\d+)\" />",
        'priceUsed': "<input type=\"hidden\" name=\"product-used-price-\d+\" id=\"product-used-price-\d+\" value=\"(?P<priceUsed>\d+)\" />",
        'priceNewRent': "<input type=\"hidden\" name=\"product-new-rental-price-\d+\" id=\"product-new-rental-price-\d+\" value=\"(?P<priceNewRent>\d+)\" />",
        'priceUsedRent': "<input type=\"hidden\" name=\"product-used-rental-price-\d+\" id=\"product-used-rental-price-\d+\" value=\"(?P<priceUsedRent>\d+)\" />",
        'availNew': "<td class=\"price\"><label for=\"radio-sku-new_\d+\">(?P<availNew>.*?)</label>",
        'availUsed': "<td class=\"price\"><label for=\"radio-sku-used_\d+\">(?P<availUsed>.*?)</label>",
        'availNewRent': "<td class=\"price\"><label for=\"radio-radio-sku-new-rental_\d+\">(?P<availNewRent>.*?)</label>",
        'availUsedRent': "<td class=\"price\"><label for=\"radio-radio-sku-used-rental_\d+\">(?P<availUsedRent>.*?)</label>"
    }
    # Order in which fields are extracted per row. The duplicate 'priceUsed'
    # that used to appear here was redundant (same group re-matched with the
    # same result) and has been removed.
    regexKeys = [ 'requirement', 'image', 'title', 'author', 'isbn', 'copyright', 'publisher', 'edition', 'binding', 'priceNew', 'priceUsed', 'priceNewRent', 'priceUsedRent', 'availNew', 'availUsed', 'availNewRent', 'availUsedRent']

    def __init__(self):
        self.results = list()
        self.posList = list()
        self.posEndList = list()
        self.data = ""
        self.count = 0
        # Scratch dict for one parsed book; next() overwrites every key in
        # regexKeys before copying it into results.
        self.result = {
            'requirement': "",
            'image': "",
            'priceNew': "",
            'priceUsed': "",
            'priceNewRent': "",
            'priceUsedRent': "",
            'availNew': "",
            'availUsed': "",
            'availNewRent': "",
            'availUsedRent': "",
            'isbn': ""
        }

    def setData(self, data):
        """Store the raw HTML to be parsed."""
        self.data = data

    def setup(self):
        """Find every book row and push its [start, end) offsets onto the
        position stacks consumed by next()."""
        # Trim the "Recommended Products" tail — not supported yet.
        # TO-DO: support Recommended Products
        endOffset = self.data.find("Recommended Products")
        if endOffset != -1:
            self.data = self.data[0:endOffset]
        # The first regex is assumed to match exactly once per book, so the
        # span of book i runs from its own match start to the start of
        # book i+1 (the last book runs to the end of the data).
        k = self.regexKeys[0]
        start = None
        startPrev = 0
        flagFirst = True
        for match in re.finditer(self.regex[k], self.data):
            start = match.start(0)
            if flagFirst:
                flagFirst = False
            else:
                self.posList.append(startPrev)
                self.posEndList.append(start)
            startPrev = start
        # BUG FIX: only record the final row when at least one match was
        # found; previously `start` was unbound here for pages with no books,
        # raising NameError.
        if start is not None:
            self.posList.append(start)
            self.posEndList.append(len(self.data))
        # Reverse so next() can pop() rows in document order.
        self.posList.reverse()
        self.posEndList.reverse()

    def next(self):
        """Parse the next book row into `results`.

        Returns True when a row was parsed, False when none remain. Fields
        whose regex does not match inside the row are set to "NOT_FOUND".
        """
        if len(self.posList) == 0:
            return False
        pos = self.posList.pop()
        posEnd = self.posEndList.pop()
        for k in self.regexKeys:
            match = re.compile(self.regex[k]).search(self.data, pos, posEnd)
            if match is None:
                self.result[k] = "NOT_FOUND"
            else:
                self.result[k] = match.group(1)
        # Copy: self.result is reused for the following row.
        self.results.append(dict(self.result))
        return True
class CampusTerm(webapp.RequestHandler):
    """GET /campusterm: scrape the bookstore home page for the campus/term
    <select> box and return its options as JSON."""
    url = "http://www.billsbookstore.com/"
    # Matches the whole <select> element listing campus|term pairs.
    regexSelect = '<select name=\"selTerm\" id=\"fTerm\" class=\"box\" title=\"Select a campus term\">(?P<select>.*?)</select>'
    # Matches one option: value is "campusId|termId", text is the campus name.
    regexOption = '<option value=\"(\d+)\|(\d+)\">(.*?)</option>'

    def get(self):
        urlFull = self.url
        try:
            result = urllib2.urlopen(urlFull)
            data = result.read()
            pattern = re.compile(self.regexSelect)
            match = pattern.search(data)
            datatmp = match.group(1)
            pattern = re.compile(self.regexOption)
            pos = 0;
            results = list()
            # Walk every <option> inside the select box.
            while True:
                match = pattern.search(datatmp, pos)
                if match == None:
                    break;
                results.append({ 'campustermId' : match.group(1)+'|'+match.group(2), 'campusId' : match.group(1), 'termId' : match.group(2), 'campusName' : match.group(3) })
                pos = match.end(0)
            self.response.out.write(simplejson.dumps(results))
        except urllib2.URLError, e:
            handleError(e)
class Department(webapp.RequestHandler):
    """GET /dept?campus=..&term=..: list departments for a campus/term as JSON."""
    url = "http://www.billsbookstore.com/textbooks_xml.asp?control=campus"
    # Matches one <department .../> element: id, abbreviation, name.
    regexDept = '<department id=\"(\d+)\" abrev=\"(.*?)\" name=\"(.*?)\" />'

    def get(self):
        # NOTE(review): the local is named 'dept' but holds the 'campus'
        # parameter; the URL below sends it as &campus=.
        dept = self.request.get('campus')
        term = self.request.get('term')
        urlFull = self.url+'&campus='+dept+'&term='+term
        try:
            result = urllib2.urlopen(urlFull)
            data = result.read()
            pattern = re.compile(self.regexDept)
            pos = 0;
            results = list()
            # Collect every department element in document order.
            while True:
                match = pattern.search(data, pos)
                if match == None:
                    break;
                results.append({ 'deptId' : match.group(1), 'deptAbrev': match.group(2), 'deptName' : match.group(3) })
                pos = match.end(0)
            self.response.out.write(simplejson.dumps(results))
        except urllib2.URLError, e:
            handleError(e)
class Course(webapp.RequestHandler):
    """GET /course?dept=..&term=..: list courses for a department as JSON."""
    url = "http://www.billsbookstore.com/textbooks_xml.asp?control=department"
    # Matches one <course .../> element: id and course number.
    regexCourse = '<course id=\"(\d+)\" name=\"(\d+)\s+\" />'

    def get(self):
        dept = self.request.get('dept')
        term = self.request.get('term')
        urlFull = self.url+'&dept='+dept+'&term='+term
        try:
            result = urllib2.urlopen(urlFull)
            data = result.read()
            pattern = re.compile(self.regexCourse)
            pos = 0;
            results = list()
            # Collect every course element in document order.
            while True:
                match = pattern.search(data, pos)
                if match == None:
                    break;
                results.append({ 'courseId' : match.group(1), 'courseNumber' : match.group(2) })
                pos = match.end(0)
            self.response.out.write(simplejson.dumps(results))
        except urllib2.URLError, e:
            handleError(e)
class Section(webapp.RequestHandler):
    """GET /section?course=..&term=..: list sections for a course as JSON."""
    url = "http://www.billsbookstore.com/textbooks_xml.asp?control=course"
    # Matches one <section .../> element: id, name, instructor.
    regexSection = '<section id=\"(\d+)\" name=\"(.*?)\" instructor=\"(.*?)\" />'

    def get(self):
        # NOTE(review): local is named 'dept' but holds the 'course' parameter.
        dept = self.request.get('course')
        term = self.request.get('term')
        urlFull = self.url+'&course='+dept+'&term='+term
        try:
            result = urllib2.urlopen(urlFull)
            data = result.read()
            pattern = re.compile(self.regexSection)
            pos = 0;
            results = list()
            # Collect every section element in document order.
            while True:
                match = pattern.search(data, pos)
                if match == None:
                    break;
                results.append({ 'sectionId' : match.group(1), 'sectionName' : match.group(2), 'instructor' : match.group(3) })
                pos = match.end(0)
            self.response.out.write(simplejson.dumps(results))
        except urllib2.URLError, e:
            handleError(e)
class Books(webapp.RequestHandler):
    """GET /books?id=<sectionId>: scrape the book list for one section and
    return the parsed entries (see BooksParser) as JSON."""
    url = "http://www.billsbookstore.com/textbooks_xml.asp?control=section&section="

    def get(self):
        section = self.request.get('id');
        # section = "53867" # Single book
        # section = "53857" # Many books, and a PRS clicker
        # section = "55512" # Multiple books, single section
        urlFull = self.url+section
        try:
            sp = BooksParser()
            result = urllib2.urlopen(urlFull)
            data = result.read()
            sp.setData(data)
            sp.setup()
            # Drain the parser; parsed rows accumulate in sp.results.
            while sp.next():
                True
            self.response.out.write(simplejson.dumps(sp.results))
        except urllib2.URLError, e:
            handleError(e)
class BlackBoard(webapp.RequestHandler):
    """GET /bb?username=..&password=..: log into FSU's CAS, accept the
    secure-apps agreement, request the class schedule page, and return the
    parsed schedule rows as JSON.

    NOTE(review): credentials arrive as GET query parameters, which typically
    end up in request logs — confirm this is acceptable or switch to POST.
    """
    urlLogin = 'https://bb5.fsu.edu/cas/'
    urlSession = 'https://bb5.fsu.edu/cas/login?loginurl=https%3A%2F%2Fapps.oti.fsu.edu%2FSecureLogin%2FLogin&service=https%3A%2F%2Fapps.oti.fsu.edu%2FSecureLogin%2FAuthenticator%3Fnoheader%3Dtrue%26nofooter%3Dtrue'
    urlSecureApps = 'https://apps.oti.fsu.edu/SecureLogin/servlet/PortalHandler'
    urlSchedule = 'https://apps.oti.fsu.edu/StudentClassSchedule/Schedule'

    def get(self):
        username = self.request.get('username');
        password = self.request.get('password');
        # Fresh cookie jar per request (URLOpener carries cookies manually).
        urlhandler = URLOpener()
        urlhandler.cookie.clear()
        # Step 1: CAS login.
        try:
            post_data = urllib.urlencode({
                'username':username,
                'password': password,
                'service': 'https://campus.fsu.edu/webapps/login/',
                'loginurl': 'https://campus.fsu.edu/webapps/login/bb_bb60/logincas.jsp',
                'x': 0,
                'y': 0
            })
            result = urlhandler.open(self.urlSession, post_data)
            data = result.content
            logging.debug(data)
            # CAS finishes login with a JavaScript redirect; follow it by hand.
            pattern = re.compile('window.location.href="(.*?)"')
            match = pattern.search(data)
            if match != None:
                urlRedirect = match.group(1)
                # Complete login process
                # by following window.location.href
                result = urlhandler.open(urlRedirect)
                data = result.content
        except urllib2.URLError, e:
            self.response.out.write("Error")
            handleError(e)
        # Step 2: secure-apps session + agreement acceptance.
        try:
            # Setup the session
            result = urlhandler.open(self.urlSecureApps)
            data = result.content
            # Submit accept
            post_data = urllib.urlencode({'submit' : 'Accept'})
            result = urlhandler.open(self.urlSecureApps, post_data)
            data = result.content
        except urllib2.URLError, e:
            self.response.out.write("Error")
            handleError(e)
        # Step 3: request and parse the schedule page.
        try:
            result = urlhandler.open(self.urlSchedule)
            data = result.content
            # NOTE(review): YEAR/TERM are hard-coded — confirm whether they
            # should come from request parameters.
            post_data = urllib.urlencode({
                'CALLINGURI' : '/jsp/schedule_index.jsp',
                'refer' : 'null',
                'YEAR' : 2011,
                'TERM' : 9,
                'genSched' : 'Generate A Schedule'
            })
            result = urlhandler.open(self.urlSchedule, post_data)
            data = result.content
            logging.debug(data)
            # Each <td class="only"> cell is one field of a schedule row;
            # fields repeat in groups of len(matchKeys) per row.
            matchIter = re.finditer('<td class="only" align=".*?">(.*?)</td>', data)
            count=0
            matchKeys = [ 'alert', 'num', 'course', 'section', 'session', 'sectionId', 'title', 'hours', 'building', 'room', 'days', 'begin', 'end' ]
            countMax = len(matchKeys)
            schedule = []
            klass = {}
            # TODO: alert element will have some extra html in it (<span></span>)
            for match in matchIter:
                klass[matchKeys[count]] = match.group(1)
                count += 1
                if count == countMax:
                    schedule.append(klass)
                    count = 0
                    klass = {}
            self.response.out.write(simplejson.dumps(schedule))
        except urllib2.URLError, e:
            self.response.out.write("Error")
            handleError(e)
# URL routing: one handler class per scraping endpoint.
application = webapp.WSGIApplication([('/campusterm', CampusTerm),
                                      ('/dept', Department),
                                      ('/course', Course),
                                      ('/section', Section),
                                      ('/books', Books),
                                      ('/bb', BlackBoard)],
                                     debug=True)

def main():
    # CGI entry point used by the App Engine runtime.
    run_wsgi_app(application)

if __name__ == "__main__":
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package discovery
import (
"context"
"log/slog"
"reflect"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/config"
"github.com/prometheus/prometheus/discovery/targetgroup"
)
// Discoverer provides information about target groups. It maintains a set
// of sources from which TargetGroups can originate. Whenever a discovery provider
// detects a potential change, it sends the TargetGroup through its channel.
//
// Discoverer does not know if an actual change happened.
// It does guarantee that it sends the new TargetGroup whenever a change happens.
//
// Discoverers should initially send a full set of all discoverable TargetGroups.
type Discoverer interface {
	// Run hands a channel to the discovery provider (Consul, DNS, etc.) through which
	// it can send updated target groups. It must return when the context is canceled.
	// It should not close the update channel on returning.
	Run(ctx context.Context, up chan<- []*targetgroup.Group)
}

// DiscovererMetrics are internal metrics of service discovery mechanisms.
type DiscovererMetrics interface {
	// Register registers the metrics, reporting any registration failure.
	Register() error
	// Unregister removes the metrics from their registry.
	Unregister()
}

// DiscovererOptions provides options for a Discoverer.
type DiscovererOptions struct {
	Logger *slog.Logger

	// Metrics receives the discoverer's internal metrics.
	Metrics DiscovererMetrics

	// Extra HTTP client options to expose to Discoverers. This field may be
	// ignored; Discoverer implementations must opt-in to reading it.
	HTTPClientOptions []config.HTTPClientOption

	// SetName identifies this discoverer set.
	SetName string
}
// RefreshMetrics are used by the "refresh" package.
// We define them here in the "discovery" package in order to avoid a cyclic dependency between
// "discovery" and "refresh".
type RefreshMetrics struct {
	Failures          prometheus.Counter
	Duration          prometheus.Observer
	DurationHistogram prometheus.Observer
}

// RefreshMetricsInstantiator instantiates the metrics used by the "refresh" package.
type RefreshMetricsInstantiator interface {
	// Instantiate returns refresh metrics for the given mechanism and set name.
	Instantiate(mech, setName string) *RefreshMetrics
}

// RefreshMetricsManager is an interface for registering, unregistering, and
// instantiating metrics for the "refresh" package. Refresh metrics are
// registered and unregistered outside of the service discovery mechanism. This
// is so that the same metrics can be reused across different service discovery
// mechanisms. To manage refresh metrics inside the SD mechanism, we'd need to
// use const labels which are specific to that SD. However, doing so would also
// expose too many unused metrics on the Prometheus /metrics endpoint.
type RefreshMetricsManager interface {
	DiscovererMetrics
	RefreshMetricsInstantiator
}

// A Config provides the configuration and constructor for a Discoverer.
type Config interface {
	// Name returns the name of the discovery mechanism.
	Name() string

	// NewDiscoverer returns a Discoverer for the Config
	// with the given DiscovererOptions.
	NewDiscoverer(DiscovererOptions) (Discoverer, error)

	// NewDiscovererMetrics returns the metrics used by the service discovery.
	NewDiscovererMetrics(prometheus.Registerer, RefreshMetricsInstantiator) DiscovererMetrics
}

// Configs is a slice of Config values that uses custom YAML marshaling and unmarshaling
// to represent itself as a mapping of the Config values grouped by their types.
type Configs []Config
// SetDirectory joins any relative file paths with dir.
// Only Config values that implement config.DirectorySetter participate;
// all other entries are left untouched.
func (c *Configs) SetDirectory(dir string) {
	for _, cfg := range *c {
		setter, ok := cfg.(config.DirectorySetter)
		if !ok {
			continue
		}
		setter.SetDirectory(dir)
	}
}
// UnmarshalYAML implements yaml.Unmarshaler.
// It decodes the YAML mapping into a struct built at runtime from
// configFields (defined elsewhere in this package) and then flattens that
// struct back into a slice of Config values via readConfigs.
func (c *Configs) UnmarshalYAML(unmarshal func(any) error) error {
	cfgTyp := reflect.StructOf(configFields)
	cfgPtr := reflect.New(cfgTyp)
	cfgVal := cfgPtr.Elem()
	if err := unmarshal(cfgPtr.Interface()); err != nil {
		// Report the error in terms of Configs instead of the synthetic type.
		return replaceYAMLTypeError(err, cfgTyp, configsType)
	}
	var err error
	*c, err = readConfigs(cfgVal, 0)
	return err
}
// MarshalYAML implements yaml.Marshaler.
// It is the inverse of UnmarshalYAML: the Config values are written into a
// struct built at runtime from configFields, which YAML then renders as a
// mapping grouped by mechanism type.
func (c Configs) MarshalYAML() (any, error) {
	cfgTyp := reflect.StructOf(configFields)
	cfgPtr := reflect.New(cfgTyp)
	cfgVal := cfgPtr.Elem()
	if err := writeConfigs(cfgVal, c); err != nil {
		return nil, err
	}
	return cfgPtr.Interface(), nil
}
// A StaticConfig is a Config that provides a static list of targets.
type StaticConfig []*targetgroup.Group

// Name returns the name of the service discovery mechanism.
func (StaticConfig) Name() string { return "static" }

// NewDiscoverer returns a Discoverer for the Config.
func (c StaticConfig) NewDiscoverer(DiscovererOptions) (Discoverer, error) {
	return staticDiscoverer(c), nil
}

// NewDiscovererMetrics returns NoopDiscovererMetrics because no metrics are
// needed for this service discovery mechanism.
func (StaticConfig) NewDiscovererMetrics(prometheus.Registerer, RefreshMetricsInstantiator) DiscovererMetrics {
	return &NoopDiscovererMetrics{}
}
// staticDiscoverer delivers its fixed set of target groups once, then stops.
type staticDiscoverer []*targetgroup.Group

// Run implements Discoverer: it sends the static groups (unless the context
// is canceled first) and then closes the channel.
func (c staticDiscoverer) Run(ctx context.Context, up chan<- []*targetgroup.Group) {
	// TODO: existing implementation closes up chan, but documentation explicitly forbids it...?
	// NOTE(review): removing this close could break consumers that range
	// over the channel, so the contradiction with Discoverer.Run's contract
	// is documented here rather than changed.
	defer close(up)
	select {
	case <-ctx.Done():
	case up <- c:
	}
}
|
go
|
github
|
https://github.com/prometheus/prometheus
|
discovery/discovery.go
|
from django.views.decorators.cache import cache_page
from angkot.common.decorators import wapi
from angkot.geo.models import Province, City
def _province_to_dict(province):
return dict(pid=province.id,
name=province.name,
code=province.code)
def _city_to_dict(city):
data = dict(cid=city.id,
name=city.name,
pid=city.province.id)
return (city.id, data)
@cache_page(60 * 60 * 24)
@wapi.endpoint
def province_list(req):
    """Return every enabled province, in display order, as JSON-ready dicts.

    The response is cached for 24 hours.
    """
    qs = Province.objects.filter(enabled=True).order_by('order')
    provinces = [_province_to_dict(province) for province in qs]
    return dict(provinces=provinces)
@wapi.endpoint
def city_list(req):
    """Return one page (500 rows) of enabled cities keyed by city id.

    Query params:
        page: zero-based page number. Malformed or negative values fall
              back to page 0.

    Returns a dict with the page of cities, the page number actually used,
    the number of rows in this page, and the total enabled-city count.
    """
    limit = 500
    try:
        page = int(req.GET.get('page', 0))
    except ValueError:
        page = 0
    if page < 0:
        # BUG FIX: negative pages produced negative slice bounds, which
        # Django querysets do not support.
        page = 0
    start = page * limit
    end = start + limit
    query = City.objects.filter(enabled=True) \
                        .order_by('pk')
    cities = dict(map(_city_to_dict, query[start:end]))
    # PERF: count() issues SELECT COUNT(*); len(query) would fetch and
    # materialize every enabled row just to count them.
    total = query.count()
    return dict(cities=cities,
                page=page,
                count=len(cities),
                total=total)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# frozen_string_literal: true
module ActiveJob
  module Serializers
    # Round-trips DateTime values through the job payload; serialization
    # itself is inherited from TimeObjectSerializer.
    class DateTimeSerializer < TimeObjectSerializer # :nodoc:
      # Rebuild the DateTime from the ISO 8601 string stored under "value".
      def deserialize(hash)
        DateTime.iso8601(hash["value"])
      end

      # The class this serializer handles.
      def klass
        DateTime
      end
    end
  end
end
|
ruby
|
github
|
https://github.com/rails/rails
|
activejob/lib/active_job/serializers/date_time_serializer.rb
|
# (c) 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch, MagicMock
from units.modules.utils import set_module_args
from .junos_module import TestJunosModule
jnpr_mock = MagicMock()
scp_mock = MagicMock()

# Stub the optional junos-eznc dependency in sys.modules so junos_scp can
# be imported without the real library installed.
modules = {
    'jnpr': jnpr_mock,
    'jnpr.junos': jnpr_mock.junos,
    'jnpr.junos.utils': jnpr_mock.junos.utils,
    'jnpr.junos.utils.scp': jnpr_mock.junos.utils.scp,
    # Fixed typo: was "jnpr_mock.junos.execption", which registered the
    # module under a misspelled mock attribute nothing else referenced.
    'jnpr.junos.exception': jnpr_mock.junos.exception
}
module_patcher = patch.dict('sys.modules', modules)
module_patcher.start()

# Any SCP context manager handed out by the mocked library yields scp_mock,
# so the tests below can inspect put()/get() calls.
jnpr_mock.junos.utils.scp.SCP().__enter__.return_value = scp_mock

from ansible.modules.network.junos import junos_scp
class TestJunosCommandModule(TestJunosModule):
    """Unit tests for the junos_scp module; the SCP layer is fully mocked."""

    # Module under test, consumed by the TestJunosModule harness.
    module = junos_scp

    def setUp(self):
        super(TestJunosCommandModule, self).setUp()

    def tearDown(self):
        super(TestJunosCommandModule, self).tearDown()

    def test_junos_scp_src(self):
        # A plain src should be uploaded via SCP.put() with the same path.
        set_module_args(dict(src='test.txt'))
        result = self.execute_module(changed=True)
        args, kwargs = scp_mock.put.call_args
        self.assertEqual(args[0], 'test.txt')
        self.assertEqual(result['changed'], True)

    def test_junos_scp_src_fail(self):
        # An OSError raised by SCP.put() must surface as the module's
        # failure message.
        scp_mock.put.side_effect = OSError("[Errno 2] No such file or directory: 'text.txt'")
        set_module_args(dict(src='test.txt'))
        result = self.execute_module(changed=True, failed=True)
        self.assertEqual(result['msg'], "[Errno 2] No such file or directory: 'text.txt'")

    def test_junos_scp_remote_src(self):
        # remote_src=True downloads via SCP.get() instead of uploading.
        set_module_args(dict(src='test.txt', remote_src=True))
        result = self.execute_module(changed=True)
        args, kwargs = scp_mock.get.call_args
        self.assertEqual(args[0], 'test.txt')
        self.assertEqual(result['changed'], True)

    def test_junos_scp_all(self):
        # dest/recursive options are forwarded as local_path/recursive kwargs.
        set_module_args(dict(src='test', remote_src=True, dest="tmp", recursive=True))
        result = self.execute_module(changed=True)
        args, kwargs = scp_mock.get.call_args
        self.assertEqual(args[0], 'test')
        self.assertEqual(kwargs['local_path'], 'tmp')
        self.assertEqual(kwargs['recursive'], True)
        self.assertEqual(result['changed'], True)

    def test_junos_scp_device_param(self):
        # Provider settings must be translated into jnpr Device() arguments.
        set_module_args(dict(src='test.txt',
                             provider={'username': 'unit', 'host': 'test', 'ssh_keyfile': 'path',
                                       'password': 'test', 'port': 234}))
        self.execute_module(changed=True)
        args, kwargs = jnpr_mock.junos.Device.call_args
        self.assertEqual(args[0], 'test')
        self.assertEqual(kwargs['passwd'], 'test')
        self.assertEqual(kwargs['ssh_private_key_file'], 'path')
        self.assertEqual(kwargs['port'], 234)
        self.assertEqual(kwargs['user'], 'unit')
|
unknown
|
codeparrot/codeparrot-clean
| ||
import numpy as np
from lxmls.deep_learning.utils import (
Model,
glorot_weight_init,
index2onehot,
logsumexp
)
class NumpyLogLinear(Model):
    """Multinomial logistic regression (softmax) trained with plain SGD."""

    def __init__(self, **config):
        """Initialize Glorot weights, zero bias and the SGD step size.

        Expected config keys: input_size, num_classes, learning_rate.
        """
        weight_shape = (config['input_size'], config['num_classes'])
        # after Xavier Glorot et al
        self.weight = glorot_weight_init(weight_shape, 'softmax')
        self.bias = np.zeros((1, config['num_classes']))
        self.learning_rate = config['learning_rate']

    def log_forward(self, input=None):
        """Forward pass of the computation graph.

        Returns log class probabilities, shape (batch_size, num_classes).
        """
        # Linear transformation
        z = np.dot(input, self.weight.T) + self.bias
        # Softmax implemented in log domain for numerical stability
        log_tilde_z = z - logsumexp(z, axis=1, keepdims=True)
        return log_tilde_z

    def predict(self, input=None):
        """Most probable class index for each example in the batch."""
        return np.argmax(np.exp(self.log_forward(input)), axis=1)

    def update(self, input=None, output=None):
        """Stochastic Gradient Descent update from one (input, output) batch."""
        # Probabilities of each class
        class_probabilities = np.exp(self.log_forward(input))
        batch_size, num_classes = class_probabilities.shape
        # Error derivative at softmax layer
        I = index2onehot(output, num_classes)
        error = (class_probabilities - I) / batch_size
        # Weight gradient: the sum of per-example outer products
        # outer(error[l], input[l]) equals error.T @ input, so one BLAS call
        # replaces the original O(batch_size) Python loop.
        gradient_weight = np.dot(error.T, input)
        # Bias gradient
        gradient_bias = np.sum(error, axis=0, keepdims=True)
        # SGD update
        self.weight = self.weight - self.learning_rate * gradient_weight
        self.bias = self.bias - self.learning_rate * gradient_bias
|
unknown
|
codeparrot/codeparrot-clean
| ||
import codecs, os
# Gender constants returned by Detector.getGender().
MALE = 0
FEMALE = 1
ANDROGYNOUS = 2


class Detector:
    """First-name gender detector backed by the nam_dict.txt database."""

    def __init__(self, fname=None):
        # Default to the dictionary bundled next to this module.
        fname = fname or os.path.join(os.path.dirname(__file__), 'data', "nam_dict.txt")
        self.parse(fname)

    def parse(self, fname):
        """Load the (latin-1 encoded) name database into self.names."""
        self.names = {}
        f = codecs.open(fname, encoding='iso8859-1')
        try:
            for line in f:
                self.eatNameLine(line)
        finally:
            # Close the file even if a malformed line raises.
            f.close()

    def eatNameLine(self, line):
        """Parse one data line; '#' (comment) and '=' lines are skipped."""
        if line.startswith("#") or line.startswith("="):
            return
        # Materialize as a list: on Python 3 filter() is lazy and would not
        # support indexing, so the original parts[0]/parts[1] access broke.
        parts = [p for p in line.split(" ") if p.strip() != ""]
        if "F" in parts[0]:
            self.set(parts[1], FEMALE)
        elif "M" in parts[0]:
            self.set(parts[1], MALE)
        else:
            self.set(parts[1], ANDROGYNOUS)

    def set(self, name, gender):
        """Record a gender for name; the first entry wins, later ones are ignored."""
        # dict.has_key() was removed in Python 3; `in` works on both.
        if name in self.names:
            return
        if "+" in name:
            # '+' marks a joinable compound: store all three spellings.
            for replacement in ['', '-', ' ']:
                self.set(name.replace('+', replacement), gender)
        else:
            self.names[name] = gender

    def getGender(self, name):
        """Gender constant for name; ANDROGYNOUS when the name is unknown."""
        return self.names.get(name, ANDROGYNOUS)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""Miscellaneous functions to mask Python version differences."""
import sys
import os
import math
import binascii
# Version-dependent helpers: the same names are defined in both branches so
# callers can stay version-agnostic.
if sys.version_info >= (3,0):
    # Python 3: no 2.6 string coercion needed.
    def compat26Str(x): return x

    # Python 3 requires bytes instead of bytearrays for HMAC
    # So, python 2.6 requires strings, python 3 requires 'bytes',
    # and python 2.7 can handle bytearrays...
    def compatHMAC(x): return bytes(x)

    # Re-create the Python 2 builtin on top of input().
    def raw_input(s):
        return input(s)

    # So, the python3 binascii module deals with bytearrays, and python2
    # deals with strings... I would rather deal with the "a" part as
    # strings, and the "b" part as bytearrays, regardless of python version,
    # so...

    # Hex string -> bytearray; wraps decode errors in SyntaxError.
    def a2b_hex(s):
        try:
            b = bytearray(binascii.a2b_hex(bytearray(s, "ascii")))
        except Exception as e:
            raise SyntaxError("base16 error: %s" % e)
        return b

    # Base64 string -> bytearray; wraps decode errors in SyntaxError.
    def a2b_base64(s):
        try:
            b = bytearray(binascii.a2b_base64(bytearray(s, "ascii")))
        except Exception as e:
            raise SyntaxError("base64 error: %s" % e)
        return b

    def b2a_hex(b):
        return binascii.b2a_hex(b).decode("ascii")

    def b2a_base64(b):
        return binascii.b2a_base64(b).decode("ascii")

    # Read all of stdin as raw bytes (Python 3 exposes the byte layer as
    # sys.stdin.buffer).  NOTE(review): only defined on the Python 3 branch.
    def readStdinBinary():
        return sys.stdin.buffer.read()

else:
    # Python 2 branch.
    # Python 2.6 requires strings instead of bytearrays in a couple places,
    # so we define this function so it does the conversion if needed.
    if sys.version_info < (2,7):
        def compat26Str(x): return str(x)
    else:
        def compat26Str(x): return x

    # So, python 2.6 requires strings, python 3 requires 'bytes',
    # and python 2.7 can handle bytearrays...
    def compatHMAC(x): return compat26Str(x)

    def a2b_hex(s):
        try:
            b = bytearray(binascii.a2b_hex(s))
        except Exception as e:
            raise SyntaxError("base16 error: %s" % e)
        return b

    def a2b_base64(s):
        try:
            b = bytearray(binascii.a2b_base64(s))
        except Exception as e:
            raise SyntaxError("base64 error: %s" % e)
        return b

    def b2a_hex(b):
        return binascii.b2a_hex(compat26Str(b))

    def b2a_base64(b):
        return binascii.b2a_base64(compat26Str(b))
import traceback
def formatExceptionTrace(e):
    """Format the exception currently being handled as a traceback string.

    Must be called from inside an ``except`` block.  The ``e`` argument is
    unused and kept only for backward compatibility; the traceback is taken
    from the interpreter's current exception state.
    """
    # sys.exc_type/exc_value/exc_traceback were Python 2 only (removed in
    # Python 3); sys.exc_info() provides the same triple on both versions.
    newStr = "".join(traceback.format_exception(*sys.exc_info()))
    return newStr
|
unknown
|
codeparrot/codeparrot-clean
| ||
#-*- coding: UTF-8 -*-
'''
Created on 2015年6月19日
@author: joel
'''
# Path separator and the character used to escape it.
PATH_SEPARATOR = "/"
PATH_SEPARATOR_R = "`"


def vanishSlash(jstr):
    """Escape every '/' in jstr by replacing it with '`'."""
    return jstr.replace(PATH_SEPARATOR, PATH_SEPARATOR_R)


def unVanishSlash(jstr):
    """Inverse of vanishSlash: turn every '`' back into '/'."""
    return jstr.replace(PATH_SEPARATOR_R, PATH_SEPARATOR)
def covert2int(jstr):
    """Best-effort int coercion: return int(jstr), or jstr unchanged on failure."""
    try:
        return int(jstr)
    except Exception:
        # Deliberate best-effort: non-numeric values pass through untouched.
        return jstr
def cmplist(x,y):
    '''
    Customized comparison of two path strings.

    Both strings are split on PATH_SEPARATOR, numeric segments are coerced
    to int via covert2int, and the resulting lists are compared.

    NOTE(review): Python 2 only -- cmp() was removed in Python 3, and this
    function is passed to sorted(cmp=...) by genDataByRulesPath.  A Py3
    port would also have to reproduce Py2's mixed int/str ordering.
    '''
    x = [covert2int(xx) for xx in x.split(PATH_SEPARATOR)]
    y = [covert2int(yy) for yy in y.split(PATH_SEPARATOR)]
    return cmp(x,y)
def getEvenindex(ls):
    '''
    Pair consecutive indices of ls: [(0, 1), (2, 3), ...].

    A trailing unpaired index is dropped (zip stops at the shorter side).
    Uses range() instead of the Python 2-only xrange(), and two strided
    ranges instead of filtering every index by parity.
    '''
    return list(zip(range(0, len(ls), 2), range(1, len(ls), 2)))
def genDataByRulesPath(tdict):
    """Expand a flat {"a/b/c": value} rule mapping into nested dicts/lists.

    Path segments alternate between keys and container-kind markers
    ('dict'/'list'/'none'); containers are created on demand by building
    Python source strings and running them through eval()/exec().

    NOTE(review): Python 2 only -- relies on sorted(cmp=...), generates
    dict.has_key() calls, and uses `/` integer division.  Keys without a
    PATH_SEPARATOR are copied through to the result unchanged.
    """
    rdict = {}
    # Process rules in path order so parents are created before children.
    sortedkey = sorted(tdict,cmp=cmplist)
    for k,v in zip(sortedkey,[tdict[v] for v in sortedkey]):
        if isinstance(k, str) and PATH_SEPARATOR in k:
            tls = k.split(PATH_SEPARATOR)
            tls.append(v)
            # Remaining (key, kind) pairs after the current one.
            glength = len(tls)/2 - 1
            # Source-string cursor pointing at the container being filled.
            cur_exestr = 'rdict'
            for keyindex,kindindex in getEvenindex(tls):
                kind_exestr = 'isinstance(%s,dict)' %(cur_exestr)
                kind_exestr1 = 'isinstance(%s,list)' %(cur_exestr)
                # print kind_exestr,cur_exestr
                if eval(kind_exestr):#dict
                    has_exestr = cur_exestr+'.has_key("%s")'%(tls[keyindex])
                    if not eval(has_exestr):#dict
                        if glength == 0:
                            # Leaf: assign the rule's value directly.
                            if isinstance(tls[kindindex], str):
                                tmp_exestr = cur_exestr+'["'+tls[keyindex]+'"]="%s"'%(tls[kindindex])
                            else:
                                tmp_exestr = cur_exestr+'["'+tls[keyindex]+'"]=%s'%(tls[kindindex])
                        elif isinstance(tls[kindindex], str) and tls[kindindex].lower() == 'list':
                            tmp_exestr =cur_exestr+'["'+tls[keyindex]+'"]=[]'
                        elif isinstance(tls[kindindex], str) and tls[kindindex].lower() == 'dict':
                            tmp_exestr =cur_exestr+'["'+tls[keyindex]+'"]={}'
                        exec(tmp_exestr)
                        cur_exestr += '["'+tls[keyindex]+'"]'
                    else:
                        # Key already exists: just descend into it.
                        cur_exestr += '["'+tls[keyindex]+'"]'
                elif eval(kind_exestr1):#list
                    index = 0
                    if isinstance(tls[keyindex], str) and tls[keyindex].lower() != 'none':
                        index = int(tls[keyindex])
                    len_exestr = 'len(%s)'%(cur_exestr)
                    length = eval(len_exestr)
                    if isinstance(tls[kindindex], str) and tls[kindindex].lower() == 'none' and length == 0:
                        index = 0
                    if index >= length:
                        # Target slot doesn't exist yet: append a new element.
                        if glength == 0:
                            if isinstance(tls[kindindex], str):
                                tmp_exestr = cur_exestr+'.append("%s")'%(tls[kindindex])
                            else:
                                tmp_exestr = cur_exestr+'.append(%s)'%(tls[kindindex])
                        elif isinstance(tls[kindindex], str) and tls[kindindex].lower() == 'list':
                            # NOTE(review): '[append([])' looks malformed
                            # (probably meant '.append([])'); exec() of this
                            # string would raise SyntaxError -- confirm.
                            if length == 0:
                                tmp_exestr =cur_exestr+'[append([])'
                            else:
                                tmp_exestr =cur_exestr+'[append([])'
                        elif isinstance(tls[kindindex], str) and tls[kindindex].lower() == 'dict':
                            tmp_exestr =cur_exestr+'.append({})'
                        exec(tmp_exestr)
                        cur_exestr += '['+str(index)+']'
                    else:
                        cur_exestr += '['+str(index)+']'
                else:
                    pass
                glength -= 1
        else:
            # Plain key: copied through unchanged.
            rdict[k]=v
    return rdict
|
unknown
|
codeparrot/codeparrot-clean
| ||
//go:build !linux && !freebsd
package zfs
// checkRootdirFs is a no-op on platforms where the ZFS graph driver is
// unsupported (anything other than Linux and FreeBSD, per the build tag).
func checkRootdirFs(rootdir string) error {
	return nil
}
// getMountpoint returns the layer id unchanged; there is no per-platform
// mountpoint translation on unsupported platforms.
func getMountpoint(id string) string {
	return id
}
|
go
|
github
|
https://github.com/moby/moby
|
daemon/graphdriver/zfs/zfs_unsupported.go
|
""" Constants for this app as well as the external API. """
class OrderStatus(object):
    """Constants representing all known order statuses. """
    # Display-string values; presumably these mirror the statuses reported
    # by the E-Commerce service -- confirm against its API documentation.
    OPEN = 'Open'
    FULFILLMENT_ERROR = 'Fulfillment Error'
    COMPLETE = 'Complete'
class Messages(object):
    """ Strings used to populate response messages. """
    # All entries are str.format() templates; callers supply the named
    # placeholders ({username}, {course_id}, {order_number}, ...).
    NO_ECOM_API = u'E-Commerce API not setup. Enrolled {username} in {course_id} directly.'
    NO_SKU_ENROLLED = u'The {enrollment_mode} mode for {course_id} does not have a SKU. Enrolling {username} directly.'
    ENROLL_DIRECTLY = u'Enroll {username} in {course_id} directly because no need for E-Commerce baskets and orders.'
    ORDER_COMPLETED = u'Order {order_number} was completed.'
    ORDER_INCOMPLETE_ENROLLED = u'Order {order_number} was created, but is not yet complete. User was enrolled.'
    NO_HONOR_MODE = u'Course {course_id} does not have an honor mode.'
    NO_DEFAULT_ENROLLMENT_MODE = u'Course {course_id} does not have an honor or audit mode.'
    ENROLLMENT_EXISTS = u'User {username} is already enrolled in {course_id}.'
    ENROLLMENT_CLOSED = u'Enrollment is closed for {course_id}.'
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Security Policy
## Supported Versions
Currently, we are providing security updates for the latest release in the v1.x series:
| Version | Supported |
| ------- | ------------------ |
| Latest v1.x | :white_check_mark: |
## Reporting a Vulnerability
If you believe you have found a security vulnerability in `libuv`, please use the [GitHub's private vulnerability reporting feature](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing-information-about-vulnerabilities/privately-reporting-a-security-vulnerability#privately-reporting-a-security-vulnerability) in the [libuv repository](https://github.com/libuv/libuv) to report it to us.
This will allow us to assess the risk, and make a fix available before we add a bug report to the GitHub repository.
Please do:
* Provide as much information as you can about the vulnerability.
* Provide details about your configuration and environment, if applicable.
Please do not:
* Post any information about the vulnerability in public places.
* Attempt to exploit the vulnerability yourself.
We take all security bugs seriously. Thank you for improving the security of `libuv`. We appreciate your efforts and responsible disclosure and will make every effort to acknowledge your contributions.
|
unknown
|
github
|
https://github.com/nodejs/node
|
deps/uv/SECURITY.md
|
"""
To run this, you'll need to have installed.
* glmnet-python
* scikit-learn (of course)
Does two benchmarks
First, we fix a training set and increase the number of
samples. Then we plot the computation time as function of
the number of samples.
In the second benchmark, we increase the number of dimensions of the
training set. Then we plot the computation time as function of
the number of dimensions.
In both cases, only 10% of the features are informative.
"""
import gc
from time import time
import numpy as np
from sklearn.datasets import make_regression
# Regularization strength shared by both Lasso implementations.
alpha = 0.1
# alpha = 0.01


def rmse(a, b):
    """Root-mean-square error between two arrays."""
    diff = a - b
    return np.sqrt((diff ** 2).mean())
def bench(factory, X, Y, X_test, Y_test, ref_coef):
    """Fit factory(alpha=alpha) on (X, Y), print timing/quality, return fit time."""
    gc.collect()

    # Time only the fit itself.
    start = time()
    model = factory(alpha=alpha).fit(X, Y)
    elapsed = time() - start

    print("duration: %0.3fs" % elapsed)
    print("rmse: %f" % rmse(Y_test, model.predict(X_test)))
    print("mean coef abs diff: %f" % abs(ref_coef - model.coef_.ravel()).mean())
    return elapsed
if __name__ == "__main__":
    # Delayed import of matplotlib.pyplot
    import matplotlib.pyplot as plt

    from glmnet.elastic_net import Lasso as GlmnetLasso
    from sklearn.linear_model import Lasso as ScikitLasso

    # Benchmark 1: fixed dimensionality, growing number of samples.
    scikit_results = []
    glmnet_results = []
    n = 20
    step = 500
    n_features = 1000
    # Integer division: make_regression expects an int for n_informative;
    # `/` would produce a float on Python 3.
    n_informative = n_features // 10
    n_test_samples = 1000
    for i in range(1, n + 1):
        print("==================")
        print("Iteration %s of %s" % (i, n))
        print("==================")

        X, Y, coef_ = make_regression(
            n_samples=(i * step) + n_test_samples,
            n_features=n_features,
            noise=0.1,
            n_informative=n_informative,
            coef=True,
        )

        # Hold out the last n_test_samples rows for evaluation.
        X_test = X[-n_test_samples:]
        Y_test = Y[-n_test_samples:]
        X = X[: (i * step)]
        Y = Y[: (i * step)]

        print("benchmarking scikit-learn: ")
        scikit_results.append(bench(ScikitLasso, X, Y, X_test, Y_test, coef_))
        print("benchmarking glmnet: ")
        glmnet_results.append(bench(GlmnetLasso, X, Y, X_test, Y_test, coef_))

    plt.clf()
    xx = range(0, n * step, step)
    plt.title("Lasso regression on sample dataset (%d features)" % n_features)
    plt.plot(xx, scikit_results, "b-", label="scikit-learn")
    plt.plot(xx, glmnet_results, "r-", label="glmnet")
    plt.legend()
    plt.xlabel("number of samples to classify")
    plt.ylabel("Time (s)")
    plt.show()

    # now do a benchmark where the number of points is fixed
    # and the variable is the number of features
    scikit_results = []
    glmnet_results = []
    n = 20
    step = 100
    n_samples = 500

    for i in range(1, n + 1):
        print("==================")
        print("Iteration %02d of %02d" % (i, n))
        print("==================")
        n_features = i * step
        # Same integer-division fix as above.
        n_informative = n_features // 10

        X, Y, coef_ = make_regression(
            n_samples=(i * step) + n_test_samples,
            n_features=n_features,
            noise=0.1,
            n_informative=n_informative,
            coef=True,
        )

        X_test = X[-n_test_samples:]
        Y_test = Y[-n_test_samples:]
        X = X[:n_samples]
        Y = Y[:n_samples]

        print("benchmarking scikit-learn: ")
        scikit_results.append(bench(ScikitLasso, X, Y, X_test, Y_test, coef_))
        print("benchmarking glmnet: ")
        glmnet_results.append(bench(GlmnetLasso, X, Y, X_test, Y_test, coef_))

    xx = np.arange(100, 100 + n * step, step)
    plt.figure("scikit-learn vs. glmnet benchmark results")
    plt.title("Regression in high dimensional spaces (%d samples)" % n_samples)
    plt.plot(xx, scikit_results, "b-", label="scikit-learn")
    plt.plot(xx, glmnet_results, "r-", label="glmnet")
    plt.legend()
    plt.xlabel("number of features")
    plt.ylabel("Time (s)")
    plt.axis("tight")
    plt.show()
|
python
|
github
|
https://github.com/scikit-learn/scikit-learn
|
benchmarks/bench_glmnet.py
|
{
"STRLEN": {
"summary": "Returns the length of a string value.",
"complexity": "O(1)",
"group": "string",
"since": "2.2.0",
"arity": 2,
"function": "strlenCommand",
"command_flags": [
"READONLY",
"FAST"
],
"acl_categories": [
"STRING"
],
"key_specs": [
{
"flags": [
"RO"
],
"begin_search": {
"index": {
"pos": 1
}
},
"find_keys": {
"range": {
"lastkey": 0,
"step": 1,
"limit": 0
}
}
}
],
"reply_schema": {
"description": "The length of the string value stored at key, or 0 when key does not exist.",
"type": "integer",
"minimum": 0
},
"arguments": [
{
"name": "key",
"type": "key",
"key_spec_index": 0
}
]
}
}
|
json
|
github
|
https://github.com/redis/redis
|
src/commands/strlen.json
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for TestRunResults."""
import unittest
from base_test_result import BaseTestResult
from base_test_result import TestRunResults
from base_test_result import ResultType
class TestTestRunResults(unittest.TestCase):
    """Tests for TestRunResults aggregation, merging and report formatting."""

    def setUp(self):
        # Two PASS results share the name 'p1'; per testGetAll only one
        # survives -- presumably equality/dedup by name is implemented in
        # BaseTestResult (confirm there).
        self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
        other_p1 = BaseTestResult('p1', ResultType.PASS)
        self.p2 = BaseTestResult('p2', ResultType.PASS)
        self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
        self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
        self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
        self.tr = TestRunResults()
        self.tr.AddResult(self.p1)
        self.tr.AddResult(other_p1)
        self.tr.AddResult(self.p2)
        self.tr.AddResults(set([self.f1, self.c1, self.u1]))

    def testGetAll(self):
        # Empty symmetric difference <=> exactly these five results.
        self.assertFalse(
            self.tr.GetAll().symmetric_difference(
                [self.p1, self.p2, self.f1, self.c1, self.u1]))

    def testGetPass(self):
        self.assertFalse(self.tr.GetPass().symmetric_difference(
            [self.p1, self.p2]))

    def testGetNotPass(self):
        self.assertFalse(self.tr.GetNotPass().symmetric_difference(
            [self.f1, self.c1, self.u1]))

    def testGetAddTestRunResults(self):
        # Merging another run: union of both runs, still de-duplicated.
        tr2 = TestRunResults()
        other_p1 = BaseTestResult('p1', ResultType.PASS)
        f2 = BaseTestResult('f2', ResultType.FAIL)
        tr2.AddResult(other_p1)
        tr2.AddResult(f2)
        tr2.AddTestRunResults(self.tr)
        self.assertFalse(
            tr2.GetAll().symmetric_difference(
                [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))

    def testGetLogs(self):
        # Only FAIL and CRASH logs appear; PASS/UNKNOWN logs are omitted.
        log_print = ('[FAIL] f1:\n'
                     'failure1\n'
                     '[CRASH] c1:\n'
                     'crash1')
        self.assertEqual(self.tr.GetLogs(), log_print)

    def testGetShortForm(self):
        # One-line counters; note the trailing space in the expected string.
        short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
                       'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
        self.assertEqual(self.tr.GetShortForm(), short_print)

    def testGetLongForm(self):
        # Multi-line report listing non-passing test names per bucket.
        long_print = ('ALL (5 tests)\n'
                      'PASS (2 tests)\n'
                      'FAIL (1 tests): [f1]\n'
                      'CRASH (1 tests): [c1]\n'
                      'TIMEOUT (0 tests): []\n'
                      'UNKNOWN (1 tests): [u1]')
        self.assertEqual(self.tr.GetLongForm(), long_print)

    def testRunPassed(self):
        # A run with failures did not pass; an empty run counts as passed.
        self.assertFalse(self.tr.DidRunPass())
        tr2 = TestRunResults()
        self.assertTrue(tr2.DidRunPass())


if __name__ == '__main__':
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#include "test/jemalloc_test.h"
#include "jemalloc/internal/prof_sys.h"
/* Filename handed to mallctl("prof.dump"); the intercept below checks it. */
static const char *test_filename = "test_filename";
/* Set by the intercept so tests can verify a dump open was attempted. */
static bool did_prof_dump_open;

/*
 * Replacement prof_dump_open_file hook: records the call, verifies the
 * filename pointer, and redirects the dump to /dev/null.
 */
static int
prof_dump_open_file_intercept(const char *filename, int mode) {
	int fd;

	did_prof_dump_open = true;

	/*
	 * Stronger than a strcmp() - verifying that we internally directly use
	 * the caller supplied char pointer.
	 */
	expect_ptr_eq(filename, test_filename,
	    "Dump file name should be \"%s\"", test_filename);

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return fd;
}
/*
 * Happy path: mallctl("prof.dump") with an explicit filename must succeed
 * and invoke the file-open hook with exactly the supplied name.
 */
TEST_BEGIN(test_mdump_normal) {
	test_skip_if(!config_prof);

	/* Save the hook so it can be restored for later tests. */
	prof_dump_open_file_t *open_file_orig = prof_dump_open_file;

	void *p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");

	prof_dump_open_file = prof_dump_open_file_intercept;
	did_prof_dump_open = false;
	expect_d_eq(mallctl("prof.dump", NULL, NULL, (void *)&test_filename,
	    sizeof(test_filename)), 0,
	    "Unexpected mallctl failure while dumping");
	expect_true(did_prof_dump_open, "Expected a profile dump");

	dallocx(p, 0);

	prof_dump_open_file = open_file_orig;
}
TEST_END
/* Open hook that always fails, simulating an unopenable dump file. */
static int
prof_dump_open_file_error(const char *filename, int mode) {
	return -1;
}
/*
 * In the context of test_mdump_output_error, prof_dump_write_file_count is the
 * total number of times prof_dump_write_file_error() is expected to be called.
 * In the context of test_mdump_maps_error, prof_dump_write_file_count is the
 * total number of times prof_dump_write_file_error() is expected to be called
 * starting from the one that contains an 'M' (beginning the "MAPPED_LIBRARIES"
 * header).
 */
static int prof_dump_write_file_count;

/*
 * Write hook that fails exactly when the countdown above reaches zero; see
 * the comment on prof_dump_write_file_count for per-test interpretation.
 */
static ssize_t
prof_dump_write_file_error(int fd, const void *s, size_t len) {
	--prof_dump_write_file_count;

	expect_d_ge(prof_dump_write_file_count, 0,
	    "Write is called after error occurs");

	if (prof_dump_write_file_count == 0) {
		return -1;
	} else {
		/*
		 * Any non-negative number indicates success, and for
		 * simplicity we just use 0.  When prof_dump_write_file_count
		 * is positive, it means that we haven't reached the write that
		 * we want to fail; when prof_dump_write_file_count is
		 * negative, it means that we've already violated the
		 * expect_d_ge(prof_dump_write_file_count, 0) statement above,
		 * but instead of aborting, we continue the rest of the test,
		 * and we indicate that all the writes after the failed write
		 * are successful.
		 */
		return 0;
	}
}
/*
 * Arrange for the count'th write to fail, trigger a dump, and verify that
 * it errors out with EFAULT after exactly that many writes.
 */
static void
expect_write_failure(int count) {
	prof_dump_write_file_count = count;
	expect_d_eq(mallctl("prof.dump", NULL, NULL, (void *)&test_filename,
	    sizeof(test_filename)), EFAULT, "Dump should err");
	expect_d_eq(prof_dump_write_file_count, 0,
	    "Dumping stopped after a wrong number of writes");
}
/*
 * Failure injection on the dump output: both open() failure and the n-th
 * write() failure must abort the dump and surface an error from mallctl.
 */
TEST_BEGIN(test_mdump_output_error) {
	test_skip_if(!config_prof);
	test_skip_if(!config_debug);

	/* Save both hooks so they can be restored at the end. */
	prof_dump_open_file_t *open_file_orig = prof_dump_open_file;
	prof_dump_write_file_t *write_file_orig = prof_dump_write_file;

	prof_dump_write_file = prof_dump_write_file_error;

	void *p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");

	/*
	 * When opening the dump file fails, there shouldn't be any write, and
	 * mallctl() should return failure.
	 */
	prof_dump_open_file = prof_dump_open_file_error;
	expect_write_failure(0);

	/*
	 * When the n-th write fails, there shouldn't be any more write, and
	 * mallctl() should return failure.
	 */
	prof_dump_open_file = prof_dump_open_file_intercept;
	expect_write_failure(1); /* First write fails. */
	expect_write_failure(2); /* Second write fails. */

	dallocx(p, 0);

	prof_dump_open_file = open_file_orig;
	prof_dump_write_file = write_file_orig;
}
TEST_END
/* Maps-open hook that always fails (simulates a missing maps file). */
static int
prof_dump_open_maps_error() {
	return -1;
}
/* Becomes true once the dump output transitions to the maps section. */
static bool started_piping_maps_file;

/*
 * Write hook that succeeds for the main dump and delegates to the failing
 * writer once the "MAPPED_LIBRARIES" section begins.
 */
static ssize_t
prof_dump_write_maps_file_error(int fd, const void *s, size_t len) {
	/* The main dump doesn't contain any capital 'M'. */
	if (!started_piping_maps_file && strchr(s, 'M') != NULL) {
		started_piping_maps_file = true;
	}

	if (started_piping_maps_file) {
		return prof_dump_write_file_error(fd, s, len);
	} else {
		/* Return success when we haven't started piping maps. */
		return 0;
	}
}
/*
 * Like expect_write_failure(), but counts only writes in the maps section,
 * and skips the check entirely when no maps file can be opened.
 */
static void
expect_maps_write_failure(int count) {
	int mfd = prof_dump_open_maps();
	if (mfd == -1) {
		/* No need to continue if we just can't find the maps file. */
		return;
	}
	close(mfd);

	started_piping_maps_file = false;
	expect_write_failure(count);
	expect_true(started_piping_maps_file, "Should start piping maps");
}
/*
 * Failure injection on the maps section: an unopenable maps file is
 * tolerated (dump still succeeds), but a failing maps write aborts the dump.
 */
TEST_BEGIN(test_mdump_maps_error) {
	test_skip_if(!config_prof);
	test_skip_if(!config_debug);

	/* Save all three hooks so they can be restored. */
	prof_dump_open_file_t *open_file_orig = prof_dump_open_file;
	prof_dump_write_file_t *write_file_orig = prof_dump_write_file;
	prof_dump_open_maps_t *open_maps_orig = prof_dump_open_maps;

	prof_dump_open_file = prof_dump_open_file_intercept;
	prof_dump_write_file = prof_dump_write_maps_file_error;

	void *p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");

	/*
	 * When opening the maps file fails, there shouldn't be any maps write,
	 * and mallctl() should return success.
	 */
	prof_dump_open_maps = prof_dump_open_maps_error;
	started_piping_maps_file = false;
	prof_dump_write_file_count = 0;
	expect_d_eq(mallctl("prof.dump", NULL, NULL, (void *)&test_filename,
	    sizeof(test_filename)), 0,
	    "mallctl should not fail in case of maps file opening failure");
	expect_false(started_piping_maps_file, "Shouldn't start piping maps");
	expect_d_eq(prof_dump_write_file_count, 0,
	    "Dumping stopped after a wrong number of writes");

	/*
	 * When the n-th maps write fails (given that we are able to find the
	 * maps file), there shouldn't be any more maps write, and mallctl()
	 * should return failure.
	 */
	prof_dump_open_maps = open_maps_orig;
	expect_maps_write_failure(1); /* First write fails. */
	expect_maps_write_failure(2); /* Second write fails. */

	dallocx(p, 0);

	prof_dump_open_file = open_file_orig;
	prof_dump_write_file = write_file_orig;
}
TEST_END
int
main(void) {
	/* Run the prof.dump tests in registration order. */
	return test(
	    test_mdump_normal,
	    test_mdump_output_error,
	    test_mdump_maps_error);
}
|
c
|
github
|
https://github.com/redis/redis
|
deps/jemalloc/test/unit/prof_mdump.c
|
#!/usr/bin/env python
from tests.compat import mock, unittest
from boto.ec2.connection import EC2Connection
INSTANCE_STATUS_RESPONSE = br"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeInstanceStatusResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
<requestId>3be1508e-c444-4fef-89cc-0b1223c4f02fEXAMPLE</requestId>
<nextToken>page-2</nextToken>
<instanceStatusSet />
</DescribeInstanceStatusResponse>
"""
class TestInstanceStatusResponseParsing(unittest.TestCase):
    """Parsing tests for DescribeInstanceStatus responses (canned XML)."""

    def _connection_with_canned_response(self):
        """Build an EC2Connection whose make_request returns the fixture XML."""
        conn = EC2Connection(aws_access_key_id='aws_access_key_id',
                             aws_secret_access_key='aws_secret_access_key')
        response = mock.Mock()
        response.read.return_value = INSTANCE_STATUS_RESPONSE
        response.status = 200
        conn.make_request = mock.Mock(return_value=response)
        return conn

    def test_next_token(self):
        # Default call: no IncludeAllInstances parameter, nextToken parsed.
        conn = self._connection_with_canned_response()
        statuses = conn.get_all_instance_status()
        self.assertNotIn('IncludeAllInstances', conn.make_request.call_args[0][1])
        self.assertEqual(statuses.next_token, 'page-2')

    def test_include_all_instances(self):
        # include_all_instances=True must be sent as the string 'true'.
        conn = self._connection_with_canned_response()
        statuses = conn.get_all_instance_status(include_all_instances=True)
        self.assertIn('IncludeAllInstances', conn.make_request.call_args[0][1])
        self.assertEqual('true', conn.make_request.call_args[0][1]['IncludeAllInstances'])
        self.assertEqual(statuses.next_token, 'page-2')


if __name__ == '__main__':
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
"""Plots model samples."""
import argparse
import numpy as np
import random
import sys
import json
from scipy.misc import imread, imsave, imresize
import os
from plat.utils import anchors_from_image, get_json_vectors, offset_from_string
from plat.canvas_layout import create_mine_canvas
import plat.sampling
from PIL import Image
import importlib
from plat import zoo
# RGBA image planes: arrays in this module are (channels, height, width).
channels = 4

# modified from http://stackoverflow.com/a/3375291/1010653
def alpha_composite(src, src_mask, dst):
    '''
    Return the alpha composite of src and dst.

    Parameters:
    src -- RGBA in range 0.0 - 1.0, shape (4, H, W)
    src_mask -- alpha mask for src, or None for fully opaque
    dst -- RGBA in range 0.0 - 1.0, shape (4, H, W)

    The algorithm comes from http://en.wikipedia.org/wiki/Alpha_compositing
    '''
    out = np.empty(dst.shape, dtype='float')
    src_shape = src.shape
    # NOTE(review): a 1x1 source returns `out` uninitialized (np.empty
    # garbage), exactly as the original did -- confirm callers never hit
    # this path with data they actually read.
    if src_shape[1] == 1 and src_shape[2] == 1:
        return out
    alpha = np.index_exp[3:, :, :]
    rgb = np.index_exp[:3, :, :]
    # epsilon keeps the denominator away from zero for fully transparent dst.
    epsilon = 0.001
    if src_mask is not None:
        src_a = np.maximum(src_mask, epsilon)
    else:
        src_a = 1.0
    dst_a = np.maximum(dst[alpha], epsilon)
    out[alpha] = src_a + dst_a * (1 - src_a)
    old_setting = np.seterr(invalid='ignore')
    out[rgb] = (src[rgb] * src_a + dst[rgb] * dst_a * (1 - src_a)) / out[alpha]
    np.seterr(**old_setting)
    # Bug fix: np.clip returns a new array; the original discarded the
    # result, so out-of-range values were never actually clipped.
    np.clip(out, 0, 1.0, out=out)
    return out
def additive_composite(src, src_mask, dst):
    '''
    Return the additive composite of src and dst.

    src, dst -- RGBA arrays in range 0.0 - 1.0, shape (4, H, W)
    src_mask -- alpha mask for src, or None to force full opacity
    '''
    out = np.empty(dst.shape, dtype='float')
    alpha = np.index_exp[3:, :, :]
    rgb = np.index_exp[:3, :, :]
    if src_mask is not None:
        out[alpha] = np.maximum(src_mask, dst[alpha])
    else:
        out[alpha] = 1.0
    out[rgb] = np.maximum(src[rgb], dst[rgb])
    # Bug fix: clip in place; the original called np.clip without using its
    # return value, so the clamp to [0, 1] never took effect.
    np.clip(out, 0, 1.0, out=out)
    return out
# gsize = 64
# gsize2 = gsize/2
class Canvas:
"""Simple Canvas Thingy"""
def __init__(self, width, height, xmin, xmax, ymin, ymax, mask_name, image_size, do_check_bounds, init_black=False):
self.pixels = np.zeros((channels, height, width))
if init_black:
alpha_channel = np.index_exp[3:, :, :]
self.pixels[alpha_channel] = 1.0
self.canvas_xmin = 0
self.canvas_xmax = width
self.canvas_ymin = 0
self.canvas_ymax = height
self.xmin = xmin
self.xmax = xmax
self.ymin = ymin
self.ymax = ymax
self.do_check_bounds = do_check_bounds
self.canvas_xspread = self.canvas_xmax - self.canvas_xmin
self.canvas_yspread = self.canvas_ymax - self.canvas_ymin
self.xspread = self.xmax - self.xmin
self.yspread = self.ymax - self.ymin
self.xspread_ratio = float(self.canvas_xspread) / self.xspread
self.yspread_ratio = float(self.canvas_yspread) / self.yspread
self.gsize = image_size
self.gsize2 = image_size/2
self.gsize4 = image_size/4
if mask_name is not None:
_, _, mask_images = anchors_from_image("mask/{}_mask{}.png".format(mask_name, image_size), image_size=(image_size, image_size))
# _, _, mask_images = anchors_from_image("mask/rounded_mask{}.png".format(gsize), image_size=(gsize, gsize))
# _, _, mask_images = anchors_from_image("mask/hexagons/hex1_{}_blur.png".format(gsize), image_size=(gsize, gsize))
self.mask = mask_images[0][0]
else:
self.mask = None
# To map
# [A, B] --> [a, b]
# use this formula
# (val - A)*(b-a)/(B-A) + a
# A,B is virtual
# a,b is canvas
def map_to_canvas(self, x, y):
new_x = int((x - self.xmin) * self.xspread_ratio + self.canvas_xmin)
new_y = int((y - self.ymin) * self.yspread_ratio + self.canvas_ymin)
return new_x, new_y
def set_background(self, fname):
rawim = imread(fname);
h, w, c = rawim.shape
if h > self.canvas_ymax:
h = self.canvas_ymax
if w > self.canvas_xmax:
w = self.canvas_xmax
s_im = np.asarray([rawim[:,:,0]/255.0, rawim[:,:,1]/255.0, rawim[:,:,2]/255.0])
self.pixels[0:3, 0:h, 0:w] = s_im[:,0:h,0:w]
self.pixels[3, 0:h, 0:w] = 1
def place_square(self, x, y, s):
square = np.zeros((channels, self.gsize, self.gsize))
square.fill(1)
cx, cy = self.map_to_canvas(x, y)
self.pixels[:, (cy-self.gsize2):(cy+self.gsize2), (cx-self.gsize2):(cx+self.gsize2)] = square
def check_bounds(self, cx, cy, border):
if not self.do_check_bounds:
return True
if (cx < self.canvas_xmin) or (cy < self.canvas_ymin) or (cx > self.canvas_xmax - border) or (cy > self.canvas_ymax - border):
return False
return True
def get_anchor(self, x, y, im_size):
cx, cy = self.map_to_canvas(x, y)
im_size = int(im_size)
border = self.gsize2
anchor_im = self.pixels[0:3, cy-border:cy+border, cx-border:cx+border]
anchor = np.zeros([3, im_size, im_size])
tc, th, tw = anchor_im.shape
anchor[:, 0:th, 0:tw] = anchor_im
return anchor.astype('float32')
def place_image(self, im, x, y, additive=False, scale=None):
# print("place_image {} at {}, {} with scale {}".format(im.shape, x, y, scale))
if scale is not None:
border = int(scale)
slices = [
slice(0, 4),
slice(y, y+border),
slice(x, x+border)
]
out_stack = np.dstack(im)
out_stack = (255 * out_stack).astype(np.uint8)
rawim = imresize(out_stack, (border, border))
s_im = np.asarray([rawim[:,:,0]/255.0, rawim[:,:,1]/255.0, rawim[:,:,2]/255.0])
else:
cx, cy = self.map_to_canvas(x, y)
border = self.gsize2
slices = [
slice(0, 4),
slice(cy-border, cy+border),
slice(cx-border, cx+border)
]
s_im = im
if not self.check_bounds(x, y, border):
return
if additive:
self.pixels[slices] = additive_composite(s_im, self.mask, self.pixels[slices])
else:
self.pixels[slices] = alpha_composite(s_im, self.mask, self.pixels[slices])
def save(self, save_path):
    """Write the canvas to `save_path` as an 8-bit image file."""
    print("Preparing image file {}".format(save_path))
    stacked = np.dstack(self.pixels)
    as_bytes = (255 * stacked).astype(np.uint8)
    Image.fromarray(as_bytes).save(save_path)
def apply_anchor_offsets(anchor, offsets, a, b, a_indices_str, b_indices_str):
    """Return `anchor` displaced along two offset directions.

    `a` and `b` are sliders in [0, 1]; each is remapped to [-1, 1] and
    scales the offset vector selected by its index string.
    """
    scale_a = 2.0 * (a - 0.5)
    scale_b = 2.0 * (b - 0.5)
    dim = len(anchor)
    direction_a = offset_from_string(a_indices_str, offsets, dim)
    direction_b = offset_from_string(b_indices_str, offsets, dim)
    return anchor + scale_a * direction_a + scale_b * direction_b
def make_mask_layout(height, width, radius):
    """Build a uint8 grid mask: 255 inside `radius` of center, else 0.

    Cell centers use a hex-style packing: even rows are shifted a full
    unit, odd rows half a unit, before measuring distance to the center.
    """
    mask = np.zeros((height, width)).astype(np.uint8)
    center = np.array([width / 2.0, height / 2.0])
    for row in range(height):
        shift = 1.0 if row % 2 == 0 else 0.5
        for col in range(width):
            point = np.array([col + shift, row + 0.5])
            if np.linalg.norm(point - center) < radius:
                mask[row][col] = 255
    return mask
def canvas(parser, context, args):
    """Render decoded model samples onto a 2-D canvas and save as an image.

    Anchor latents come from images, a "mine" grid, random sampling, or
    JSON offset files; placement comes from a JSON layout, a mask image,
    or a computed radial mask.  `parser` and `context` are part of the
    plugin signature: `parser` is rebuilt locally and `context` is unused.
    """
    parser = argparse.ArgumentParser(description="Plot model samples")
    parser.add_argument("--model", dest='model', type=str, default=None,
                        help="name of model in plat zoo")
    parser.add_argument("--model-file", dest='model_file', type=str, default=None,
                        help="path to the saved model")
    parser.add_argument("--model-type", dest='model_type', type=str, default=None,
                        help="the type of model (usually inferred from filename)")
    parser.add_argument("--model-interface", dest='model_interface', type=str,
                        default=None,
                        help="class interface for model (usually inferred from model-type)")
    parser.add_argument("--width", type=int, default=512,
                        help="width of canvas to render in pixels")
    parser.add_argument("--height", type=int, default=512,
                        help="height of canvas to render in pixels")
    parser.add_argument("--rows", type=int, default=3,
                        help="number of rows of anchors")
    parser.add_argument("--cols", type=int, default=3,
                        help="number of columns of anchors")
    parser.add_argument("--xmin", type=int, default=0,
                        help="min x in virtual space")
    parser.add_argument("--xmax", type=int, default=100,
                        help="max x in virtual space")
    parser.add_argument("--ymin", type=int, default=0,
                        help="min y in virtual space")
    parser.add_argument("--ymax", type=int, default=100,
                        help="max y in virtual space")
    parser.add_argument("--outfile", dest='save_path', type=str, default="canvas_%DATE%_%MODEL%_%SEQ%.png",
                        help="where to save the generated samples")
    parser.add_argument("--seed", type=int,
                        default=None, help="Optional random seed")
    parser.add_argument('--do-check-bounds', dest='do_check_bounds', default=False, action='store_true',
                        help="clip to drawing bounds")
    parser.add_argument('--background-image', dest='background_image', default=None,
                        help="use image initial background")
    parser.add_argument('--anchor-image', dest='anchor_image', default=None,
                        help="use image as source of anchors")
    parser.add_argument('--anchor-mine', dest='anchor_mine', default=None,
                        help="use image as single source of mine coordinates")
    parser.add_argument('--anchor-canvas', dest='anchor_canvas', default=False, action='store_true',
                        help="anchor image from canvas")
    parser.add_argument('--random-mine', dest='random_mine', default=False, action='store_true',
                        help="use random sampling as source of mine coordinates")
    parser.add_argument('--additive', dest='additive', default=False, action='store_true',
                        help="use additive compositing")
    parser.add_argument('--mask-name', dest='mask_name', default=None,
                        help="prefix name for alpha mask to use (full/rounded/hex")
    parser.add_argument('--mask-layout', dest='mask_layout', default=None,
                        help="use image as source of mine grid points")
    parser.add_argument('--mask-scale', dest='mask_scale', default=1.0, type=float,
                        help="Scale mask layout (squeeze)")
    parser.add_argument('--mask-width', dest='mask_width', type=int, default=15,
                        help="width for computed mask")
    parser.add_argument('--mask-height', dest='mask_height', type=int, default=15,
                        help="height for computed mask")
    parser.add_argument('--mask-radius', dest='mask_radius', default=None, type=float,
                        help="radius for computed mask")
    parser.add_argument('--layout', dest='layout', default=None,
                        help="layout json file")
    parser.add_argument('--layout-scale', dest='layout_scale', default=1, type=int,
                        help="Scale layout")
    parser.add_argument('--batch-size', dest='batch_size', type=int, default=100,
                        help="number of images to decode at once")
    parser.add_argument('--passthrough', dest='passthrough', default=False, action='store_true',
                        help="Use originals instead of reconstructions")
    parser.add_argument('--anchor-offset', dest='anchor_offset', default=None,
                        help="use json file as source of each anchors offsets")
    parser.add_argument('--anchor-offset-a', dest='anchor_offset_a', default="42", type=str,
                        help="which indices to combine for offset a")
    parser.add_argument('--anchor-offset-b', dest='anchor_offset_b', default="31", type=str,
                        help="which indices to combine for offset b")
    parser.add_argument("--image-size", dest='image_size', type=int, default=64,
                        help="size of (offset) images")
    parser.add_argument('--global-offset', dest='global_offset', default=None,
                        help="use json file as source of global offsets")
    parser.add_argument('--global-indices', dest='global_indices', default=None, type=str,
                        help="offset indices to apply globally")
    parser.add_argument('--global-scale', dest='global_scale', default=1.0, type=float,
                        help="scaling factor for global offset")
    args = parser.parse_args(args)

    template_dict = {}
    if args.seed:
        np.random.seed(args.seed)
        random.seed(args.seed)

    # Optional offset applied to every latent vector before decoding.
    global_offset = None
    if args.global_offset is not None:
        offsets = get_json_vectors(args.global_offset)
        global_offset = plat.sampling.get_global_offset(offsets, args.global_indices, args.global_scale)

    anchor_images = None
    if args.anchor_image is not None:
        _, _, anchor_images = anchors_from_image(args.anchor_image, image_size=(args.image_size, args.image_size))
    elif args.anchor_mine is not None:
        _, _, anchor_images = anchors_from_image(args.anchor_mine, image_size=(args.image_size, args.image_size))
        basename = os.path.basename(args.anchor_mine)
        template_dict["BASENAME"] = os.path.splitext(basename)[0]

    anchors = None
    if not args.passthrough:
        dmodel = zoo.load_model(args.model, args.model_file, args.model_type, args.model_interface)
        # Bug fix: anchor_images is None when neither --anchor-image nor
        # --anchor-mine was given; slicing None raised a TypeError and made
        # the random-anchor fallback below unreachable.
        if anchor_images is not None:
            workq = anchor_images[:]
            anchors_list = []
            while len(workq) > 0:
                print("Processing {} anchors".format(args.batch_size))
                curq = workq[:args.batch_size]
                workq = workq[args.batch_size:]
                cur_anchors = dmodel.encode_images(curq)
                for c in cur_anchors:
                    anchors_list.append(c)
            anchors = np.asarray(anchors_list)

    if anchors is None:
        anchors = np.random.normal(loc=0, scale=1, size=(args.cols * args.rows, 100))

    anchor_offsets = None
    if args.anchor_offset is not None:
        # compute anchors as offsets from existing anchor
        anchor_offsets = get_json_vectors(args.anchor_offset)

    # Local `canvas` deliberately shadows this function inside its body.
    canvas = Canvas(args.width, args.height, args.xmin, args.xmax, args.ymin, args.ymax, args.mask_name, args.image_size, args.do_check_bounds)
    if args.background_image is not None:
        canvas.set_background(args.background_image)

    workq = []
    do_hex = True

    if args.layout:
        # Placement driven by an explicit JSON layout of (x, y) cells.
        with open(args.layout) as json_file:
            layout_data = json.load(json_file)
        xy = np.array(layout_data["xy"])
        grid_size = layout_data["size"]
        roots = layout_data["r"]
        if "s" in layout_data:
            s = layout_data["s"]
        else:
            s = None
        for i, pair in enumerate(xy):
            x = pair[0] * canvas.canvas_xmax / grid_size[0]
            y = pair[1] * canvas.canvas_ymax / grid_size[1]
            # Bug fix: s[i] was read before the `s is None` check, so layouts
            # without an "s" entry crashed with a TypeError.  Fall back to a
            # half-cell offset when no per-item size is given (assumes unit
            # cell size — TODO confirm against layout producers).
            if s is None:
                scale = args.layout_scale
                half_cell = 0.5
            else:
                scale = s[i] * args.layout_scale
                half_cell = 0.5 * s[i]
            a = (pair[0] + half_cell) / float(grid_size[0])
            b = (pair[1] + half_cell) / float(grid_size[1])
            r = roots[i]
            if args.passthrough:
                output_image = anchor_images[r]
                canvas.place_image(output_image, x, y, args.additive, scale=scale)
            else:
                if args.anchor_mine is not None or args.random_mine:
                    z = create_mine_canvas(args.rows, args.cols, b, a, anchors)
                elif anchor_offsets is not None:
                    z = apply_anchor_offsets(anchors[r], anchor_offsets, a, b, args.anchor_offset_a, args.anchor_offset_b)
                else:
                    z = anchors[r]
                if global_offset is not None:
                    z = z + global_offset
                workq.append({
                    "z": z,
                    "x": x,
                    "y": y,
                    "s": scale
                })
    elif args.mask_layout or args.mask_radius:
        # Placement driven by a mask image or a computed radial mask.
        if args.mask_layout:
            rawim = imread(args.mask_layout)
            if len(rawim.shape) == 2:
                im_height, im_width = rawim.shape
                mask_layout = rawim
            else:
                im_height, im_width, _ = rawim.shape
                mask_layout = rawim[:, :, 0]
        else:
            im_height, im_width = args.mask_height, args.mask_width
            mask_layout = make_mask_layout(im_height, im_width, args.mask_radius)

        for xpos in range(im_width):
            for ypos in range(im_height):
                a = float(xpos) / (im_width - 1)
                # Hex packing: shift every other row by half a cell.
                if do_hex and ypos % 2 == 0:
                    a = a + 0.5 / (im_width - 1)
                x = args.mask_scale * canvas.xmax * a
                b = float(ypos) / (im_height - 1)
                y = args.mask_scale * canvas.ymax * b
                if not mask_layout[ypos][xpos] > 128:
                    pass
                elif args.passthrough:
                    if args.anchor_canvas:
                        cur_anchor_image = canvas.get_anchor(x, y, args.image_size)
                    else:
                        cur_anchor_image = anchor_images[0]
                    canvas.place_image(cur_anchor_image, x, y, args.additive, None)
                else:
                    if args.anchor_canvas:
                        cur_anchor_image = canvas.get_anchor(x, y, args.image_size)
                        zs = dmodel.encode_images([cur_anchor_image])
                        z = zs[0]
                    elif len(anchors) == 1 or anchor_offsets is not None:
                        z = apply_anchor_offsets(anchors[0], anchor_offsets, a, b, args.anchor_offset_a, args.anchor_offset_b)
                    else:
                        z = create_mine_canvas(args.rows, args.cols, b, a, anchors)

                    if global_offset is not None:
                        z = z + global_offset

                    workq.append({
                        "z": z,
                        "x": x,
                        "y": y,
                        "s": None
                    })

    # Decode queued latents in batches and composite them onto the canvas.
    while len(workq) > 0:
        curq = workq[:args.batch_size]
        workq = workq[args.batch_size:]
        latents = [e["z"] for e in curq]
        images = dmodel.sample_at(np.array(latents))
        for i in range(len(curq)):
            canvas.place_image(images[i], curq[i]["x"], curq[i]["y"], args.additive, scale=curq[i]["s"])

    template_dict["SIZE"] = args.image_size
    outfile = plat.sampling.emit_filename(args.save_path, template_dict, args)
    canvas.save(outfile)
if __name__ == '__main__':
    # CLI entry point: delegate to canvas(), which rebuilds its own parser;
    # the context argument is unused by canvas().
    parser = argparse.ArgumentParser(description="Plot model samples on canvas")
    canvas(parser, None, sys.argv[1:])
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""unittest.TestCase for multi-statement transaction passthrough tests."""
from typing import Optional
from buildscripts.resmokelib import logging
from buildscripts.resmokelib.testing.testcases import jsrunnerfile
class MultiStmtTxnTestCase(jsrunnerfile.JSRunnerFileTestCase):
    """Test case for multi statement transactions."""

    REGISTERED_NAME = "multi_stmt_txn_passthrough"

    def __init__(
        self,
        logger: logging.Logger,
        multi_stmt_txn_test_files: list[str],
        shell_executable: Optional[str] = None,
        shell_options: Optional[dict] = None,
    ):
        """Initialize MultiStmtTxnTestCase for a single JS test file."""
        # This passthrough runs exactly one test file per case.
        assert len(multi_stmt_txn_test_files) == 1
        super().__init__(
            logger,
            "Multi-statement Transaction Passthrough",
            multi_stmt_txn_test_files[0],
            test_runner_file="jstests/libs/txns/txn_passthrough_runner.js",
            shell_executable=shell_executable,
            shell_options=shell_options,
        )

    @property
    def multi_stmt_txn_test_file(self):
        """Return the name of the test file."""
        return self.test_name

    def _populate_test_data(self, test_data):
        # Expose the test file and fixture process ids to the shell, and
        # restrict implicit sharding on create for timeseries suites.
        test_data["multiStmtTxnTestFile"] = self.multi_stmt_txn_test_file
        test_data["peerPids"] = self.fixture.pids()
        is_timeseries = (
            "/timeseries/" in self.test_name or "\\timeseries\\" in self.test_name
        )
        test_data["implicitlyShardOnCreateCollectionOnly"] = is_timeseries
|
python
|
github
|
https://github.com/mongodb/mongo
|
buildscripts/resmokelib/testing/testcases/multi_stmt_txn_test.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
from heat.common import identifier
from heat.common import template_format
from heat.engine import environment
from heat.engine.resources.aws.cfn.wait_condition_handle \
import WaitConditionHandle
from heat.engine.resources.aws.ec2 import instance
from heat.engine.resources.openstack.nova.server import Server
from heat.engine.scheduler import TaskRunner
from heat.engine import service
from heat.engine import stack as stk
from heat.engine import template as tmpl
from heat.tests import common
from heat.tests import utils
TEST_TEMPLATE_METADATA = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "",
"Parameters" : {
"KeyName" : {"Type" : "String", "Default": "mine" },
},
"Resources" : {
"S1": {
"Type": "AWS::EC2::Instance",
"Metadata" : {
"AWS::CloudFormation::Init" : {
"config" : {
"files" : {
"/tmp/random_file" : {
"content" : { "Fn::Join" : ["", [
"s2-ip=", {"Fn::GetAtt": ["S2", "PublicIp"]}
]]},
"mode" : "000400",
"owner" : "root",
"group" : "root"
}
}
}
}
},
"Properties": {
"ImageId" : "a",
"InstanceType" : "m1.large",
"KeyName" : { "Ref" : "KeyName" },
"UserData" : "#!/bin/bash -v\n"
}
},
"S2": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : "a",
"InstanceType" : "m1.large",
"KeyName" : { "Ref" : "KeyName" },
"UserData" : "#!/bin/bash -v\n"
}
}
}
}
'''
TEST_TEMPLATE_WAIT_CONDITION = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a WaitCondition.",
"Parameters" : {
"KeyName" : {"Type" : "String", "Default": "mine" },
},
"Resources" : {
"WH" : {
"Type" : "AWS::CloudFormation::WaitConditionHandle"
},
"S1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : "a",
"InstanceType" : "m1.large",
"KeyName" : { "Ref" : "KeyName" },
"UserData" : { "Fn::Join" : [ "", [ "#!/bin/bash -v\n",
"echo ",
{ "Ref" : "WH" },
"\n" ] ] }
}
},
"WC" : {
"Type" : "AWS::CloudFormation::WaitCondition",
"DependsOn": "S1",
"Properties" : {
"Handle" : {"Ref" : "WH"},
"Timeout" : "5"
}
},
"S2": {
"Type": "AWS::EC2::Instance",
"Metadata" : {
"test" : {"Fn::GetAtt": ["WC", "Data"]}
},
"Properties": {
"ImageId" : "a",
"InstanceType" : "m1.large",
"KeyName" : { "Ref" : "KeyName" },
"UserData" : "#!/bin/bash -v\n"
}
}
}
}
'''
TEST_TEMPLATE_SERVER = '''
heat_template_version: 2013-05-23
resources:
instance1:
type: OS::Nova::Server
metadata: {"template_data": {get_attr: [instance2, first_address]}}
properties:
image: cirros-0.3.2-x86_64-disk
flavor: m1.small
key_name: stack_key
instance2:
type: OS::Nova::Server
metadata: {'apples': 'pears'}
properties:
image: cirros-0.3.2-x86_64-disk
flavor: m1.small
key_name: stack_key
'''
class MetadataRefreshTests(common.HeatTestCase):
    """Exercises metadata refresh when a resource's Fn::GetAtt value changes."""

    @mock.patch.object(instance.Instance, 'handle_create')
    @mock.patch.object(instance.Instance, 'check_create_complete')
    @mock.patch.object(instance.Instance, 'FnGetAtt')
    def test_FnGetAtt_metadata_updated(self, mock_get,
                                       mock_check, mock_handle):
        """Tests that metadata gets updated when FnGetAtt return changes."""
        # Setup
        temp = template_format.parse(TEST_TEMPLATE_METADATA)
        template = tmpl.Template(temp,
                                 env=environment.Environment({}))
        ctx = utils.dummy_context()
        stack = stk.Stack(ctx, 'test_stack', template, disable_rollback=True)
        stack.store()
        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.stub_FlavorConstraint_validate()
        # Configure FnGetAtt to return different values on subsequent calls
        mock_get.side_effect = [
            '10.0.0.1',
            '10.0.0.2',
        ]
        # Initial resolution of the metadata
        stack.create()
        # Sanity check on S2
        s2 = stack['S2']
        self.assertEqual((s2.CREATE, s2.COMPLETE), s2.state)
        # Verify S1 is using the initial value from S2
        s1 = stack['S1']
        content = self._get_metadata_content(s1.metadata_get())
        self.assertEqual('s2-ip=10.0.0.1', content)
        # Run metadata update to pick up the new value from S2
        s1.metadata_update()
        s2.metadata_update()
        # Verify the updated value is correct in S1
        content = self._get_metadata_content(s1.metadata_get())
        self.assertEqual('s2-ip=10.0.0.2', content)
        # Verify outgoing calls
        mock_get.assert_has_calls([
            mock.call('PublicIp'),
            mock.call('PublicIp')])
        self.assertEqual(2, mock_handle.call_count)
        self.assertEqual(2, mock_check.call_count)

    @staticmethod
    def _get_metadata_content(m):
        # Drill into the cfn-init structure defined by TEST_TEMPLATE_METADATA.
        tmp = m['AWS::CloudFormation::Init']['config']['files']
        return tmp['/tmp/random_file']['content']
class WaitConditionMetadataUpdateTests(common.HeatTestCase):
    """Exercises WaitCondition metadata propagation via resource signals."""

    def setUp(self):
        super(WaitConditionMetadataUpdateTests, self).setUp()
        # Real engine service so resource_signal() follows the RPC code path.
        self.man = service.EngineService('a-host', 'a-topic')
        self.man.create_periodic_tasks()

    @mock.patch.object(instance.Instance, 'handle_create')
    @mock.patch.object(instance.Instance, 'check_create_complete')
    @mock.patch.object(instance.Instance, 'is_service_available')
    @mock.patch.object(TaskRunner, '_sleep')
    @mock.patch.object(WaitConditionHandle, 'identifier')
    def test_wait_metadata(self, mock_identifier, mock_sleep, mock_available,
                           mock_check, mock_handle):
        """Tests a wait condition metadata update after a signal call."""
        # Setup Stack
        temp = template_format.parse(TEST_TEMPLATE_WAIT_CONDITION)
        template = tmpl.Template(temp)
        ctx = utils.dummy_context()
        stack = stk.Stack(ctx, 'test-stack', template, disable_rollback=True)
        stack.store()
        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.stub_FlavorConstraint_validate()
        res_id = identifier.ResourceIdentifier('test_tenant_id', stack.name,
                                               stack.id, '', 'WH')
        mock_identifier.return_value = res_id
        watch = stack['WC']
        inst = stack['S2']
        # Setup Sleep Behavior
        # The patched TaskRunner._sleep drives the test: the first sleep
        # asserts the wait condition data is still empty; every later sleep
        # injects a SUCCESS signal so the wait condition can complete.
        self.run_empty = True

        def check_empty(sleep_time):
            self.assertEqual('{}', watch.FnGetAtt('Data'))
            self.assertIsNone(inst.metadata_get()['test'])

        def update_metadata(unique_id, data, reason):
            self.man.resource_signal(ctx,
                                     dict(stack.identifier()),
                                     'WH',
                                     {'Data': data, 'Reason': reason,
                                      'Status': 'SUCCESS',
                                      'UniqueId': unique_id},
                                     sync_call=True)

        def post_success(sleep_time):
            update_metadata('123', 'foo', 'bar')

        def side_effect_popper(sleep_time):
            if self.run_empty:
                self.run_empty = False
                check_empty(sleep_time)
            else:
                post_success(sleep_time)

        mock_sleep.side_effect = side_effect_popper

        # Test Initial Creation
        stack.create()
        self.assertEqual((stack.CREATE, stack.COMPLETE), stack.state)
        self.assertEqual('{"123": "foo"}', watch.FnGetAtt('Data'))
        self.assertEqual('{"123": "foo"}', inst.metadata_get()['test'])

        # Test Update
        update_metadata('456', 'blarg', 'wibble')
        self.assertEqual({'123': 'foo', '456': 'blarg'},
                         jsonutils.loads(watch.FnGetAtt('Data')))
        # Cached metadata stays stale until explicitly refreshed.
        self.assertEqual('{"123": "foo"}',
                         inst.metadata_get()['test'])
        self.assertEqual(
            {'123': 'foo', '456': 'blarg'},
            jsonutils.loads(inst.metadata_get(refresh=True)['test']))

        # Verify outgoing calls
        self.assertTrue(mock_available.call_count > 0)
        self.assertEqual(2, mock_handle.call_count)
        self.assertEqual(2, mock_check.call_count)
class MetadataRefreshServerTests(common.HeatTestCase):
    """Exercises metadata refresh for OS::Nova::Server with resource-set data."""

    @mock.patch.object(Server, 'handle_create')
    @mock.patch.object(Server, 'check_create_complete')
    @mock.patch.object(Server, 'FnGetAtt')
    def test_FnGetAtt_metadata_update(self, mock_get, mock_check, mock_handle):
        # Metadata set directly on the resource must survive later
        # metadata_update() calls that re-resolve template metadata.
        temp = template_format.parse(TEST_TEMPLATE_SERVER)
        template = tmpl.Template(temp,
                                 env=environment.Environment({}))
        ctx = utils.dummy_context()
        stack = stk.Stack(ctx, 'test-stack', template, disable_rollback=True)
        stack.store()
        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.stub_FlavorConstraint_validate()
        # Note dummy addresses are from TEST-NET-1 ref rfc5737
        mock_get.side_effect = ['192.0.2.1', '192.0.2.2', '192.0.2.2']

        # Test
        stack.create()
        self.assertEqual((stack.CREATE, stack.COMPLETE), stack.state)
        s1 = stack['instance1']
        md = s1.metadata_get()
        self.assertEqual({u'template_data': '192.0.2.1'}, md)

        # Now set some metadata via the resource, like is done by
        # _populate_deployments_metadata. This should be persisted over
        # calls to metadata_update()
        new_md = {u'template_data': '192.0.2.2', 'set_by_rsrc': 'orange'}
        s1.metadata_set(new_md)
        md = s1.metadata_get(refresh=True)
        self.assertEqual(new_md, md)
        s1.metadata_update()
        md = s1.metadata_get(refresh=True)
        self.assertEqual(new_md, md)

        # Verify outgoing calls
        mock_get.assert_has_calls([
            mock.call('first_address'),
            mock.call('first_address')])
        self.assertEqual(2, mock_handle.call_count)
        self.assertEqual(2, mock_check.call_count)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
#############################################################################################
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, see http://www.gnu.org/licenses #
# or write to the Free Software Foundation, Inc., 51 Franklin Street, #
# Fifth Floor, Boston, MA 02110-1301 USA #
# #
#############################################################################################
import sys
import random
from numpy import array
import esvm.parse
import esvm.plots
from esvm.datafuncs import MotifDataDef, fastawrite_sequence, arffwrite_sequence, arffwrite_real
from esvm.mldata import init_datasetfile
if __name__ == '__main__':
if len(sys.argv)<3 or (sys.argv[1]=='motif' and sys.argv[2]!='arff' and sys.argv[2]!='fasta') \
or (sys.argv[1]=='motif' and sys.argv[2]=='fasta' and len(sys.argv)<9) \
or (sys.argv[1]=='motif' and sys.argv[2]=='arff' and len(sys.argv)<14) \
or (sys.argv[1]=='cloud' and len(sys.argv)<7) or (sys.argv[1]!='motif') \
and (sys.argv[1]!='cloud'):
sys.stderr.write( "usage: %s motif fasta MOTIF numSeq seqLenRange"+\
"positionRange mutationRate output.fa\n"+\
"or: %s motif arff MOTIFPOS numSeq-pos seqLenRange-pos "+\
"positionRange-pos mutationRate-pos \\\n"+\
"motif-neg numSeq-neg seqLenRange-neg positionRange-neg "+\
"mutationRange-neg output.arff\n"+\
"or: %s cloud numpoints dimensions fractionOfPositives "+\
"cloudWidth output.arff\n" % (sys.argv[0],sys.argv[0],sys.argv[0]) )
sys.exit(-1)
random.seed()
if sys.argv[1] == 'motif':
if sys.argv[2]=='fasta':
# generate sequences in FASTA format
p = MotifDataDef()
p.motif = sys.argv[3]
p.numseq = int(sys.argv[4])
(p.seqlenmin,p.seqlenmax) = esvm.parse.parse_range(sys.argv[5])
(p.posstart,p.posend) = esvm.parse.parse_range(sys.argv[6])
p.mutrate = float(sys.argv[7])
filename = sys.argv[8]
fastawrite_sequence(filename, p)
else:
# generate sequences in ARFF format
assert(sys.argv[2]=='arff')
p = MotifDataDef()
p.motif = sys.argv[3]
p.numseq = int(sys.argv[4])
(p.seqlenmin,p.seqlenmax) = esvm.parse.parse_range(sys.argv[5])
(p.posstart,p.posend) = esvm.parse.parse_range(sys.argv[6])
p.mutrate = float(sys.argv[7])
n = MotifDataDef()
n.motif = sys.argv[8]
n.numseq = int(sys.argv[9])
(n.seqlenmin,n.seqlenmax) = esvm.parse.parse_range(sys.argv[10])
(n.posstart,n.posend) = esvm.parse.parse_range(sys.argv[11])
n.mutrate = float(sys.argv[12])
filename = sys.argv[13]
arffwrite_sequence(filename, p, n)
elif sys.argv[1] == 'cloud':
# generate a data cloud in ARFF format
numpoint = int(sys.argv[2])
numfeat = int(sys.argv[3])
fracpos = float(sys.argv[4])
width = float(sys.argv[5])
filename = sys.argv[6]
arffwrite_real(filename, numpoint, numfeat, fracpos, width)
if len(sys.argv)>=8:
fp = init_datasetfile(filename,'vec')
(examples,labels) = fp.readlines()
pointcloud = []
for ix in xrange(numpoint):
pointcloud.append(array([labels[ix],examples[0,ix],examples[1,ix]]))
esvm.plots.plotcloud(pointcloud,sys.argv[7],'Pointcloud')
#(examples,labels,metadata)=arffwrite_real(filename, numpoint, numfeat, fracpos, width)
#if len(sys.argv)>=8:
# plots.plotcloud(pointcloud,sys.argv[7],metadata)
else:
print 'Unknown option %s\n' % sys.argv[1]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from tarfile import TarFile, TarInfo
from matplotlib import pylab as pl
import numpy as n
import getopt as opt
from python_util.util import *
from math import sqrt, ceil, floor
from python_util.gpumodel import IGPUModel
import random as r
import numpy.random as nr
from convnet import ConvNet
from python_util.options import *
from PIL import Image
from time import sleep
class ShowNetError(Exception):
    """Signals a shownet request the loaded convnet cannot satisfy."""
class ShowConvNet(ConvNet):
def __init__(self, op, load_dic):
    # Thin wrapper: all real initialization happens in the ConvNet base class.
    ConvNet.__init__(self, op, load_dic)
def init_data_providers(self):
    # Data providers are only needed when predictions will actually run on
    # the GPU (--show-preds); otherwise install a no-op stand-in so plotting
    # paths that never touch data still work.
    self.need_gpu = self.op.get_value('show_preds')
    class Dummy:
        def advance_batch(self):
            pass
    if self.need_gpu:
        ConvNet.init_data_providers(self)
    else:
        self.train_data_provider = self.test_data_provider = Dummy()
def import_model(self):
    # The compiled model library is only imported when a GPU pass will run.
    if self.need_gpu:
        ConvNet.import_model(self)
def init_model_state(self):
    # Remember which softmax layer to read predictions from (--show-preds).
    if self.op.get_value('show_preds'):
        self.softmax_name = self.op.get_value('show_preds')
def init_model_lib(self):
    # GPU model library initialization is skipped for plot-only invocations.
    if self.need_gpu:
        ConvNet.init_model_lib(self)
def plot_cost(self):
    # Draw training vs. test error curves for the cost layer selected by
    # --show-cost at index --cost-idx.  NOTE: Python 2 code (xrange,
    # list-returning map, integer division semantics).
    if self.show_cost not in self.train_outputs[0][0]:
        raise ShowNetError("Cost function with name '%s' not defined by given convnet." % self.show_cost)
    # print self.test_outputs
    # Each layer declares an 'outputFilter' expression; eval() it to pull
    # the scalar error at cost_idx out of each logged (outputs, cases) pair.
    train_errors = [eval(self.layers[self.show_cost]['outputFilter'])(o[0][self.show_cost], o[1])[self.cost_idx] for o in self.train_outputs]
    test_errors = [eval(self.layers[self.show_cost]['outputFilter'])(o[0][self.show_cost], o[1])[self.cost_idx] for o in self.test_outputs]
    if self.smooth_test_errors:
        # Running mean over the last len(test_batch_range) test points.
        test_errors = [sum(test_errors[max(0,i-len(self.test_batch_range)):i])/(i-max(0,i-len(self.test_batch_range))) for i in xrange(1,len(test_errors)+1)]
    numbatches = len(self.train_batch_range)
    # Test error is logged once per testing_freq batches; tile and pad so
    # the curve lines up one-to-one with the per-batch training curve.
    test_errors = n.row_stack(test_errors)
    test_errors = n.tile(test_errors, (1, self.testing_freq))
    test_errors = list(test_errors.flatten())
    test_errors += [test_errors[-1]] * max(0,len(train_errors) - len(test_errors))
    test_errors = test_errors[:len(train_errors)]
    numepochs = len(train_errors) / float(numbatches)
    pl.figure(1)
    x = range(0, len(train_errors))
    pl.plot(x, train_errors, 'k-', label='Training set')
    pl.plot(x, test_errors, 'r-', label='Test set')
    pl.legend()
    # Tick once per epoch, labelling only every epoch_label_gran-th epoch
    # (granularity rounded up to a multiple of 10 when >= 10 epochs).
    ticklocs = range(numbatches, len(train_errors) - len(train_errors) % numbatches + 1, numbatches)
    epoch_label_gran = int(ceil(numepochs / 20.))
    epoch_label_gran = int(ceil(float(epoch_label_gran) / 10) * 10) if numepochs >= 10 else epoch_label_gran
    ticklabels = map(lambda x: str((x[1] / numbatches)) if x[0] % epoch_label_gran == epoch_label_gran-1 else '', enumerate(ticklocs))
    pl.xticks(ticklocs, ticklabels)
    pl.xlabel('Epoch')
    # pl.ylabel(self.show_cost)
    pl.title('%s[%d]' % (self.show_cost, self.cost_idx))
    # print "plotted cost"
def make_filter_fig(self, filters, filter_start, fignum, _title, num_filters, combine_chans, FILTERS_PER_ROW=16):
    # Render up to MAX_ROWS*FILTERS_PER_ROW filters as one tiled image.
    # `filters` is indexed (channel, pixel, filter).  With combine_chans the
    # 3 channels merge into a single RGB tile per filter; otherwise each
    # channel is drawn side by side in grayscale.
    # NOTE: Python 2 (xrange; integer '/' for the y, x grid position).
    MAX_ROWS = 24
    MAX_FILTERS = FILTERS_PER_ROW * MAX_ROWS
    num_colors = filters.shape[0]
    f_per_row = int(ceil(FILTERS_PER_ROW / float(1 if combine_chans else num_colors)))
    filter_end = min(filter_start+MAX_FILTERS, num_filters)
    filter_rows = int(ceil(float(filter_end - filter_start) / f_per_row))

    # Assumes square filters (pixels is a perfect square) — TODO confirm.
    filter_pixels = filters.shape[1]
    filter_size = int(sqrt(filters.shape[1]))
    fig = pl.figure(fignum)
    fig.text(.5, .95, '%s %dx%d filters %d-%d' % (_title, filter_size, filter_size, filter_start, filter_end-1), horizontalalignment='center')
    num_filters = filter_end - filter_start
    # One big backing image with a 1-pixel gutter between tiles.
    if not combine_chans:
        bigpic = n.zeros((filter_size * filter_rows + filter_rows + 1, filter_size*num_colors * f_per_row + f_per_row + 1), dtype=n.single)
    else:
        bigpic = n.zeros((3, filter_size * filter_rows + filter_rows + 1, filter_size * f_per_row + f_per_row + 1), dtype=n.single)

    for m in xrange(filter_start,filter_end ):
        filter = filters[:,:,m]
        y, x = (m - filter_start) / f_per_row, (m - filter_start) % f_per_row
        if not combine_chans:
            for c in xrange(num_colors):
                filter_pic = filter[c,:].reshape((filter_size,filter_size))
                bigpic[1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
                       1 + (1 + filter_size*num_colors) * x + filter_size*c:1 + (1 + filter_size*num_colors) * x + filter_size*(c+1)] = filter_pic
        else:
            filter_pic = filter.reshape((3, filter_size,filter_size))
            bigpic[:,
                   1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
                   1 + (1 + filter_size) * x:1 + (1 + filter_size) * x + filter_size] = filter_pic

    pl.xticks([])
    pl.yticks([])
    if not combine_chans:
        pl.imshow(bigpic, cmap=pl.cm.gray, interpolation='nearest')
    else:
        # (C, H, W) -> (H, W, C) for imshow.
        bigpic = bigpic.swapaxes(0,2).swapaxes(0,1)
        pl.imshow(bigpic, interpolation='nearest')
def plot_filters(self):
    # Visualize the input weights of the layer named by --show-filters.
    # NOTE: Python 2 (integer '/' inside the reshape size computations).
    FILTERS_PER_ROW = 16
    filter_start = 0 # First filter to show
    if self.show_filters not in self.layers:
        raise ShowNetError("Layer with name '%s' not defined by given convnet." % self.show_filters)
    layer = self.layers[self.show_filters]
    filters = layer['weights'][self.input_idx]
    # filters = filters - filters.min()
    # filters = filters / filters.max()
    # Reshape the raw weight matrix to (channels, pixels, filters),
    # depending on the layer type.
    if layer['type'] == 'fc': # Fully-connected layer
        num_filters = layer['outputs']
        channels = self.channels
        filters = filters.reshape(channels, filters.shape[0]/channels, filters.shape[1])
    elif layer['type'] in ('conv', 'local'): # Conv layer
        num_filters = layer['filters']
        channels = layer['filterChannels'][self.input_idx]
        if layer['type'] == 'local':
            filters = filters.reshape((layer['modules'], channels, layer['filterPixels'][self.input_idx], num_filters))
            filters = filters[:, :, :, self.local_plane] # first map for now (modules, channels, pixels)
            filters = filters.swapaxes(0,2).swapaxes(0,1)
            num_filters = layer['modules']
            # filters = filters.swapaxes(0,1).reshape(channels * layer['filterPixels'][self.input_idx], num_filters * layer['modules'])
            # num_filters *= layer['modules']
            FILTERS_PER_ROW = layer['modulesX']
        else:
            filters = filters.reshape(channels, filters.shape[0]/channels, filters.shape[1])

    # Convert YUV filters to RGB
    if self.yuv_to_rgb and channels == 3:
        R = filters[0,:,:] + 1.28033 * filters[2,:,:]
        G = filters[0,:,:] + -0.21482 * filters[1,:,:] + -0.38059 * filters[2,:,:]
        B = filters[0,:,:] + 2.12798 * filters[1,:,:]
        filters[0,:,:], filters[1,:,:], filters[2,:,:] = R, G, B
    combine_chans = not self.no_rgb and channels == 3

    # Make sure you don't modify the backing array itself here -- so no -= or /=
    if self.norm_filters:
        #print filters.shape
        # Per-filter standardization (zero mean, unit variance) across all
        # channel/pixel positions.
        filters = filters - n.tile(filters.reshape((filters.shape[0] * filters.shape[1], filters.shape[2])).mean(axis=0).reshape(1, 1, filters.shape[2]), (filters.shape[0], filters.shape[1], 1))
        filters = filters / n.sqrt(n.tile(filters.reshape((filters.shape[0] * filters.shape[1], filters.shape[2])).var(axis=0).reshape(1, 1, filters.shape[2]), (filters.shape[0], filters.shape[1], 1)))
        #filters = filters - n.tile(filters.min(axis=0).min(axis=0), (3, filters.shape[1], 1))
        #filters = filters / n.tile(filters.max(axis=0).max(axis=0), (3, filters.shape[1], 1))
    #else:
    # Rescale into [0, 1] for display.  NOTE(review): this runs even when
    # norm_filters is set (the #else: above is commented out) — confirm
    # that double normalization is intended.
    filters = filters - filters.min()
    filters = filters / filters.max()

    self.make_filter_fig(filters, filter_start, 2, 'Layer %s' % self.show_filters, num_filters, combine_chans, FILTERS_PER_ROW=FILTERS_PER_ROW)
    def plot_predictions(self):
        """Run the softmax over one test batch and either render a grid of
        images with their top predicted classes, or (with --save-preds)
        write one prediction-named PNG per image into a tar archive."""
        epoch, batch, data = self.get_next_batch(train=False) # get a test batch
        num_classes = self.test_data_provider.get_num_classes()
        NUM_ROWS = 2
        NUM_COLS = 4
        # Show a small grid normally; export the whole batch when saving.
        NUM_IMGS = NUM_ROWS * NUM_COLS if not self.save_preds else data[0].shape[1]
        NUM_TOP_CLASSES = min(num_classes, 5) # show this many top labels
        NUM_OUTPUTS = self.model_state['layers'][self.softmax_name]['outputs']
        # Output column treated as the positive class in the save path below
        # (assumes a binary-classification softmax -- TODO confirm).
        PRED_IDX = 1
        # Keep only the first comma-separated synonym of each class name.
        label_names = [lab.split(',')[0] for lab in self.test_data_provider.batch_meta['label_names']]
        if self.only_errors:
            # Mistakes are identified after the forward pass, so predictions
            # are needed for every image in the batch.
            preds = n.zeros((data[0].shape[1], NUM_OUTPUTS), dtype=n.single)
        else:
            preds = n.zeros((NUM_IMGS, NUM_OUTPUTS), dtype=n.single)
            #rand_idx = nr.permutation(n.r_[n.arange(1), n.where(data[1] == 552)[1], n.where(data[1] == 795)[1], n.where(data[1] == 449)[1], n.where(data[1] == 274)[1]])[:NUM_IMGS]
            # Random subset; the GPU feature writer needs C-contiguous arrays.
            rand_idx = nr.randint(0, data[0].shape[1], NUM_IMGS)
            if NUM_IMGS < data[0].shape[1]:
                data = [n.require(d[:,rand_idx], requirements='C') for d in data]
        # data += [preds]
        # Run the model
        # startFeatureWriter fills `preds` in place with the softmax outputs.
        print [d.shape for d in data], preds.shape
        self.libmodel.startFeatureWriter(data, [preds], [self.softmax_name])
        IGPUModel.finish_batch(self)
        print preds
        # Convert from the provider's flat layout to a plottable image array.
        data[0] = self.test_data_provider.get_plottable_data(data[0])
        if self.save_preds:
            if not gfile.Exists(self.save_preds):
                gfile.MakeDirs(self.save_preds)
            preds_thresh = preds > 0.5 # Binarize predictions
            # Rescale pixels to clamped 8-bit for PNG export.
            data[0] = data[0] * 255.0
            data[0][data[0]<0] = 0
            data[0][data[0]>255] = 255
            data[0] = n.require(data[0], dtype=n.uint8)
            dir_name = '%s_predictions_batch_%d' % (os.path.basename(self.save_file), batch)
            tar_name = os.path.join(self.save_preds, '%s.tar' % dir_name)
            tfo = gfile.GFile(tar_name, "w")
            tf = TarFile(fileobj=tfo, mode='w')
            for img_idx in xrange(NUM_IMGS):
                img = data[0][img_idx,:,:,:]
                imsave = Image.fromarray(img)
                # Encode correctness vs. the thresholded prediction in the
                # file name so results can be browsed by outcome.
                prefix = "CORRECT" if data[1][0,img_idx] == preds_thresh[img_idx,PRED_IDX] else "FALSE_POS" if preds_thresh[img_idx,PRED_IDX] == 1 else "FALSE_NEG"
                file_name = "%s_%.2f_%d_%05d_%d.png" % (prefix, preds[img_idx,PRED_IDX], batch, img_idx, data[1][0,img_idx])
                # gf = gfile.GFile(file_name, "w")
                # Render the PNG in memory and stream it into the tar.
                file_string = StringIO()
                imsave.save(file_string, "PNG")
                tarinf = TarInfo(os.path.join(dir_name, file_name))
                tarinf.size = file_string.tell()
                file_string.seek(0)
                tf.addfile(tarinf, file_string)
            tf.close()
            tfo.close()
            # gf.close()
            print "Wrote %d prediction PNGs to %s" % (preds.shape[0], tar_name)
        else:
            fig = pl.figure(3, figsize=(12,9))
            fig.text(.4, .95, '%s test samples' % ('Mistaken' if self.only_errors else 'Random'))
            if self.only_errors:
                # what the net got wrong
                if NUM_OUTPUTS > 1:
                    err_idx = [i for i,p in enumerate(preds.argmax(axis=1)) if p not in n.where(data[2][:,i] > 0)[0]]
                else:
                    err_idx = n.where(data[1][0,:] != preds[:,0].T)[0]
                    print err_idx
                # Keep at most NUM_IMGS of the mistakes, chosen at random.
                err_idx = r.sample(err_idx, min(len(err_idx), NUM_IMGS))
                data[0], data[1], preds = data[0][:,err_idx], data[1][:,err_idx], preds[err_idx,:]
            import matplotlib.gridspec as gridspec
            import matplotlib.colors as colors
            cconv = colors.ColorConverter()
            # Each image row is paired with a shorter bar-chart row below it.
            gs = gridspec.GridSpec(NUM_ROWS*2, NUM_COLS,
                                   width_ratios=[1]*NUM_COLS, height_ratios=[2,1]*NUM_ROWS )
            #print data[1]
            for row in xrange(NUM_ROWS):
                for col in xrange(NUM_COLS):
                    img_idx = row * NUM_COLS + col
                    if data[0].shape[0] <= img_idx:
                        break
                    # Top cell: the image itself, without axis ticks.
                    pl.subplot(gs[(row * 2) * NUM_COLS + col])
                    #pl.subplot(NUM_ROWS*2, NUM_COLS, row * 2 * NUM_COLS + col + 1)
                    pl.xticks([])
                    pl.yticks([])
                    img = data[0][img_idx,:,:,:]
                    pl.imshow(img, interpolation='lanczos')
                    # A single label row holds class indices; multiple rows
                    # are an indicator matrix (multi-label case).
                    show_title = data[1].shape[0] == 1
                    true_label = [int(data[1][0,img_idx])] if show_title else n.where(data[1][:,img_idx]==1)[0]
                    #print true_label
                    #print preds[img_idx,:].shape
                    #print preds[img_idx,:].max()
                    true_label_names = [label_names[i] for i in true_label]
                    # The NUM_TOP_CLASSES highest-probability classes, ascending.
                    img_labels = sorted(zip(preds[img_idx,:], label_names), key=lambda x: x[0])[-NUM_TOP_CLASSES:]
                    #print img_labels
                    # Bottom cell: horizontal bars, true classes tinted red.
                    axes = pl.subplot(gs[(row * 2 + 1) * NUM_COLS + col])
                    height = 0.5
                    ylocs = n.array(range(NUM_TOP_CLASSES))*height
                    pl.barh(ylocs, [l[0] for l in img_labels], height=height, \
                            color=['#ffaaaa' if l[1] in true_label_names else '#aaaaff' for l in img_labels])
                    #pl.title(", ".join(true_labels))
                    if show_title:
                        pl.title(", ".join(true_label_names), fontsize=15, fontweight='bold')
                    else:
                        print true_label_names
                    pl.yticks(ylocs + height/2, [l[1] for l in img_labels], x=1, backgroundcolor=cconv.to_rgba('0.65', alpha=0.5), weight='bold')
                    for line in enumerate(axes.get_yticklines()):
                        line[1].set_visible(False)
                    #pl.xticks([width], [''])
                    #pl.yticks([])
                    pl.xticks([])
                    pl.ylim(0, ylocs[-1] + height)
                    pl.xlim(0, 1)
def start(self):
self.op.print_values()
# print self.show_cost
if self.show_cost:
self.plot_cost()
if self.show_filters:
self.plot_filters()
if self.show_preds:
self.plot_predictions()
if pl:
pl.show()
sys.exit(0)
@classmethod
def get_options_parser(cls):
op = ConvNet.get_options_parser()
for option in list(op.options):
if option not in ('gpu', 'load_file', 'inner_size', 'train_batch_range', 'test_batch_range', 'multiview_test', 'data_path', 'pca_noise', 'scalar_mean'):
op.delete_option(option)
op.add_option("show-cost", "show_cost", StringOptionParser, "Show specified objective function", default="")
op.add_option("show-filters", "show_filters", StringOptionParser, "Show learned filters in specified layer", default="")
op.add_option("norm-filters", "norm_filters", BooleanOptionParser, "Individually normalize filters shown with --show-filters", default=0)
op.add_option("input-idx", "input_idx", IntegerOptionParser, "Input index for layer given to --show-filters", default=0)
op.add_option("cost-idx", "cost_idx", IntegerOptionParser, "Cost function return value index for --show-cost", default=0)
op.add_option("no-rgb", "no_rgb", BooleanOptionParser, "Don't combine filter channels into RGB in layer given to --show-filters", default=False)
op.add_option("yuv-to-rgb", "yuv_to_rgb", BooleanOptionParser, "Convert RGB filters to YUV in layer given to --show-filters", default=False)
op.add_option("channels", "channels", IntegerOptionParser, "Number of channels in layer given to --show-filters (fully-connected layers only)", default=0)
op.add_option("show-preds", "show_preds", StringOptionParser, "Show predictions made by given softmax on test set", default="")
op.add_option("save-preds", "save_preds", StringOptionParser, "Save predictions to given path instead of showing them", default="")
op.add_option("only-errors", "only_errors", BooleanOptionParser, "Show only mistaken predictions (to be used with --show-preds)", default=False, requires=['show_preds'])
op.add_option("local-plane", "local_plane", IntegerOptionParser, "Local plane to show", default=0)
op.add_option("smooth-test-errors", "smooth_test_errors", BooleanOptionParser, "Use running average for test error plot?", default=1)
op.options['load_file'].default = None
return op
if __name__ == "__main__":
    #nr.seed(6)
    try:
        # Parse command-line options, restore the saved checkpoint, and run
        # the requested visualizations.
        op = ShowConvNet.get_options_parser()
        op, load_dic = IGPUModel.parse_options(op)
        model = ShowConvNet(op, load_dic)
        model.start()
    except (UnpickleError, ShowNetError, opt.GetoptError), e:
        # Known failure modes get a short human-readable message instead of
        # a traceback.
        print "----------------"
        print "Error:"
        print e
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright 2016 The Cockroach Authors.
//
// Use of this software is governed by the CockroachDB Software License
// included in the /LICENSE file.
package bench
import (
"context"
"fmt"
"math/rand"
"net"
"net/url"
"os/exec"
"testing"
"time"
"github.com/cockroachdb/cockroach/pkg/base"
"github.com/cockroachdb/cockroach/pkg/security/username"
"github.com/cockroachdb/cockroach/pkg/testutils/pgurlutils"
"github.com/cockroachdb/cockroach/pkg/testutils/serverutils"
"github.com/cockroachdb/cockroach/pkg/testutils/skip"
"github.com/cockroachdb/cockroach/pkg/testutils/sqlutils"
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/cockroachdb/cockroach/pkg/util/retry"
)
// BenchmarkPgbenchQuery runs a batch of queries very similar to those that
// PGBench issues in its TPC-B(ish) mode.
func BenchmarkPgbenchQuery(b *testing.B) {
	defer log.Scope(b).Close(b)
	ForEachDB(b, func(b *testing.B, db *sqlutils.SQLRunner) {
		const accounts = 20000
		if err := SetupBenchDB(db.DB, accounts, true /*quiet*/); err != nil {
			b.Fatal(err)
		}
		// A fixed seed keeps the workload deterministic across runs.
		rng := rand.New(rand.NewSource(5432))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			if err := RunOne(db.DB, rng, accounts); err != nil {
				b.Fatal(err)
			}
		}
		b.StopTimer()
	})
}
// BenchmarkPgbenchQueryParallel runs the same TPC-B(ish) batch of queries as
// BenchmarkPgbenchQuery, but issues them from concurrent goroutines.
func BenchmarkPgbenchQueryParallel(b *testing.B) {
	defer log.Scope(b).Close(b)
	ForEachDB(b, func(b *testing.B, db *sqlutils.SQLRunner) {
		const accounts = 20000
		if err := SetupBenchDB(db.DB, accounts, true /*quiet*/); err != nil {
			b.Fatal(err)
		}
		// Concurrent transactions can conflict, so each iteration retries
		// with exponential backoff before reporting the error.
		opts := retry.Options{
			InitialBackoff: 1 * time.Millisecond,
			MaxBackoff:     200 * time.Millisecond,
			Multiplier:     2,
		}
		b.ResetTimer()
		b.RunParallel(func(pb *testing.PB) {
			rng := rand.New(rand.NewSource(5432))
			r := retry.Start(opts)
			var err error
			for pb.Next() {
				r.Reset()
				for r.Next() {
					if err = RunOne(db.DB, rng, accounts); err == nil {
						break
					}
				}
				if err != nil {
					b.Fatal(err)
				}
			}
		})
		b.StopTimer()
	})
}
// execPgbench shells out to the pgbench binary against the given connection
// URL, skipping the benchmark when pgbench is not installed.
func execPgbench(b *testing.B, pgURL url.URL) {
	if _, err := exec.LookPath("pgbench"); err != nil {
		skip.IgnoreLint(b, "pgbench is not available on PATH")
	}
	cmd, err := SetupExec(pgURL, "bench", 20000, b.N)
	if err != nil {
		b.Fatal(err)
	}
	b.ResetTimer()
	out, err := cmd.CombinedOutput()
	// Surface pgbench's output when verbose or on failure.
	if testing.Verbose() || err != nil {
		fmt.Println(string(out))
	}
	if err != nil {
		b.Log(cmd)
		b.Fatal(err)
	}
	b.StopTimer()
}
// BenchmarkPgbenchExec runs the external pgbench tool against an in-process
// CockroachDB server and, when one is listening locally, against Postgres.
func BenchmarkPgbenchExec(b *testing.B) {
	defer log.Scope(b).Close(b)
	b.Run("Cockroach", func(b *testing.B) {
		srv := serverutils.StartServerOnly(b, base.TestServerArgs{Insecure: true})
		defer srv.Stopper().Stop(context.Background())
		pgURL, cleanup := pgurlutils.PGUrl(
			b, srv.AdvSQLAddr(), "benchmarkCockroach", url.User(username.RootUser))
		defer cleanup()
		pgURL.RawQuery = "sslmode=disable"
		execPgbench(b, pgURL)
	})
	b.Run("Postgres", func(b *testing.B) {
		pgURL := url.URL{
			Scheme:   "postgres",
			Host:     "localhost:5432",
			RawQuery: "sslmode=disable&dbname=postgres",
		}
		// Probe for a locally running Postgres; skip rather than fail when
		// nothing is listening.
		conn, err := net.Dial("tcp", pgURL.Host)
		if err != nil {
			skip.IgnoreLintf(b, "unable to connect to postgres server on %s: %s", pgURL.Host, err)
		} else {
			conn.Close()
		}
		execPgbench(b, pgURL)
	})
}
|
go
|
github
|
https://github.com/cockroachdb/cockroach
|
pkg/bench/pgbench_test.go
|
from lamson import view, encoding, queue
from config import settings
def mail_to_you_is_bouncing(message):
    """Build the notice sent to a user whose incoming list mail is bouncing.

    NOTE(review): view.respond(locals(), ...) exposes the local variable
    names (``message``, ``reason``) to the template, so renaming locals in
    this function would change template behavior.
    """
    reason = message.bounce.error_for_humans()
    msg = view.respond(locals(), 'mail/you_bounced.msg',
                       From='unbounce@librelist.com',
                       To=message.bounce.original['to'],
                       Subject="Email to you is bouncing.")
    # Attach the machine-readable bounce details when the bounce parser
    # produced them.
    if message.bounce.report:
        for report in message.bounce.report:
            msg.attach('bounce_report.msg', content_type='text/plain', data=encoding.to_string(report),
                       disposition='attachment')
    if message.bounce.notification:
        msg.attach('notification_report.msg', content_type='text/plain',
                   data=encoding.to_string(message.bounce.notification),
                   disposition='attachment')
    return msg
def you_are_now_unbounced(message):
    """Build the confirmation notice sent when a user's address is cleared
    from the bounce state.

    NOTE(review): view.respond(locals(), ...) exposes the local name
    ``message`` to the template, so it must keep that name.
    """
    msg = view.respond(locals(), 'mail/you_are_unbounced.msg',
                       From='noreply@librelist.com',
                       To=message['from'],
                       Subject="You are now unbounced.")
    return msg
def archive_bounce(message):
    """Push a bounced message onto the configured bounce-archive queue."""
    queue.Queue(settings.BOUNCE_ARCHIVE).push(message)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from django.contrib.webdesign.lorem_ipsum import words, paragraphs
from django import template
register = template.Library()
class LoremNode(template.Node):
    """Renders lorem-ipsum filler text: words ('w'), HTML paragraphs ('p'),
    or plain paragraph blocks (anything else)."""

    def __init__(self, count, method, common):
        self.count = count
        self.method = method
        self.common = common

    def render(self, context):
        # The count is a compiled filter expression; fall back to 1 when it
        # does not resolve to an integer.
        try:
            count = int(self.count.resolve(context))
        except (ValueError, TypeError):
            count = 1
        if self.method == 'w':
            return words(count, common=self.common)
        paras = paragraphs(count, common=self.common)
        if self.method == 'p':
            paras = ['<p>%s</p>' % p for p in paras]
        return u'\n\n'.join(paras)
@register.tag
def lorem(parser, token):
    """
    Creates random Latin text useful for providing test data in templates.
    Usage format::
        {% lorem [count] [method] [random] %}
    ``count`` is a number (or variable) containing the number of paragraphs or
    words to generate (default is 1).
    ``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
    plain-text paragraph blocks (default is ``b``).
    ``random`` is the word ``random``, which if given, does not use the common
    paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
    Examples:
    * ``{% lorem %}`` will output the common "lorem ipsum" paragraph
    * ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
      and two random paragraphs each wrapped in HTML ``<p>`` tags
    * ``{% lorem 2 w random %}`` will output two random latin words
    """
    bits = list(token.split_contents())
    tagname = bits[0]
    # Optional trailing "random" keyword disables the common paragraph.
    common = True
    if bits[-1] == 'random':
        common = False
        bits.pop()
    # Optional method flag; default to plain paragraph blocks.
    method = bits.pop() if bits[-1] in ('w', 'p', 'b') else 'b'
    # Optional count; everything but the tag name itself should be consumed
    # by now, otherwise the syntax is wrong.
    count = bits.pop() if len(bits) > 1 else '1'
    count = parser.compile_filter(count)
    if len(bits) != 1:
        raise template.TemplateSyntaxError("Incorrect format for %r tag" % tagname)
    return LoremNode(count, method, common)
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package stackruntime
import (
"context"
"encoding/json"
"fmt"
"path"
"path/filepath"
"sort"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty-debug/ctydebug"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/internal/checks"
"github.com/hashicorp/terraform/internal/depsfile"
"github.com/hashicorp/terraform/internal/getproviders/providerreqs"
"github.com/hashicorp/terraform/internal/stacks/stackruntime/hooks"
"github.com/hashicorp/terraform/internal/addrs"
terraformProvider "github.com/hashicorp/terraform/internal/builtin/providers/terraform"
"github.com/hashicorp/terraform/internal/collections"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/lang/marks"
"github.com/hashicorp/terraform/internal/plans"
"github.com/hashicorp/terraform/internal/providers"
default_testing_provider "github.com/hashicorp/terraform/internal/providers/testing"
"github.com/hashicorp/terraform/internal/stacks/stackaddrs"
"github.com/hashicorp/terraform/internal/stacks/stackplan"
"github.com/hashicorp/terraform/internal/stacks/stackruntime/internal/stackeval"
stacks_testing_provider "github.com/hashicorp/terraform/internal/stacks/stackruntime/testing"
"github.com/hashicorp/terraform/internal/stacks/stackstate"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/tfdiags"
"github.com/hashicorp/terraform/version"
)
// TestPlan_valid runs the same set of configurations as TestValidate_valid.
//
// Plan should execute the same set of validations as validate, so we expect
// all of the following to be valid for both plan and validate.
//
// We also want to make sure the static and dynamic evaluations are not
// returning duplicate / conflicting diagnostics. This test will tell us if
// either plan or validate is reporting diagnostics the others are missing.
func TestPlan_valid(t *testing.T) {
	for name, tc := range validConfigurations {
		t.Run(name, func(t *testing.T) {
			if tc.skip {
				// Test case was added ahead of the implementation being ready.
				t.SkipNow()
			}
			ctx := context.Background()

			locks := depsfile.NewLocks()
			for _, provider := range []string{"testing", "other"} {
				locks.SetProvider(
					addrs.NewDefaultProvider(provider),
					providerreqs.MustParseVersion("0.0.0"),
					providerreqs.MustParseVersionConstraints("=0.0.0"),
					providerreqs.PreferredHashes([]providerreqs.Hash{}),
				)
			}

			planTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
			if err != nil {
				t.Fatal(err)
			}

			// A single factory backs every provider address: hashicorp/testing,
			// terraform.io/builtin/testing, and hashicorp/other all expose the
			// same resources and data sources, which lets the provider
			// aliasing feature be exercised.
			factory := func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			}

			tctx := TestContext{
				config: loadMainBundleConfigForTest(t, name),
				providers: map[addrs.Provider]providers.Factory{
					addrs.NewDefaultProvider("testing"): factory,
					addrs.NewBuiltInProvider("testing"): factory,
					addrs.NewDefaultProvider("other"):   factory,
				},
				dependencyLocks: *locks,
				timestamp:       &planTimestamp,
			}

			tctx.Plan(t, ctx, nil, TestCycle{
				planInputs: tc.planInputVars,
				// Planned changes are not interesting here; the test only
				// asserts that no diagnostics are returned.
				wantPlannedChanges: nil,
				wantPlannedDiags:   nil,
			})
		})
	}
}
// TestPlan_invalid runs the same set of configurations as TestValidate_invalid.
//
// Plan should execute the same set of validations as validate, so we expect
// all of the following to be invalid for both plan and validate.
//
// We also want to make sure the static and dynamic evaluations are not
// returning duplicate / conflicting diagnostics. This test will tell us if
// either plan or validate is reporting diagnostics the others are missing.
//
// The dynamic validation that happens during the plan *might* introduce
// additional diagnostics that are not present in the static validation. These
// should be added manually into this function.
func TestPlan_invalid(t *testing.T) {
	for name, tc := range invalidConfigurations {
		t.Run(name, func(t *testing.T) {
			if tc.skip {
				// Test case was added ahead of the implementation being ready.
				t.SkipNow()
			}
			ctx := context.Background()

			locks := depsfile.NewLocks()
			locks.SetProvider(
				addrs.NewDefaultProvider("testing"),
				providerreqs.MustParseVersion("0.0.0"),
				providerreqs.MustParseVersionConstraints("=0.0.0"),
				providerreqs.PreferredHashes([]providerreqs.Hash{}),
			)

			planTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
			if err != nil {
				t.Fatal(err)
			}

			// Both hashicorp/testing and terraform.io/builtin/testing resolve
			// to the same provider implementation so the provider aliasing
			// feature can be exercised.
			factory := func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			}

			tctx := TestContext{
				config: loadMainBundleConfigForTest(t, name),
				providers: map[addrs.Provider]providers.Factory{
					addrs.NewDefaultProvider("testing"): factory,
					addrs.NewBuiltInProvider("testing"): factory,
				},
				dependencyLocks: *locks,
				timestamp:       &planTimestamp,
			}

			tctx.Plan(t, ctx, nil, TestCycle{
				planInputs: tc.planInputVars,
				// Only the diagnostics matter for this test.
				wantPlannedChanges: nil,
				wantPlannedDiags:   tc.diags(),
			})
		})
	}
}
// TestPlan uses a generic framework for running plan integration tests
// against Stacks. Generally, new tests should be added into this function
// rather than copying the large amount of duplicate code from the other
// tests in this file.
//
// If you are editing other tests in this file, please consider moving them
// into this test function so they can reuse the shared setup and boilerplate
// code managing the boring parts of the test.
func TestPlan(t *testing.T) {
fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
if err != nil {
t.Fatal(err)
}
tcs := map[string]struct {
path string
state *stackstate.State
store *stacks_testing_provider.ResourceStore
cycle TestCycle
}{
"empty-destroy-with-data-source": {
path: path.Join("with-data-source", "dependent"),
cycle: TestCycle{
planMode: plans.DestroyMode,
planInputs: map[string]cty.Value{
"id": cty.StringVal("foo"),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.data"),
PlanApplyable: true,
PlanComplete: true,
Action: plans.Delete,
Mode: plans.DestroyMode,
RequiredComponents: collections.NewSet(mustAbsComponent("component.self")),
PlannedOutputValues: make(map[string]cty.Value),
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self"),
PlanComplete: true,
PlanApplyable: true,
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedOutputValues: map[string]cty.Value{
"id": cty.StringVal("foo"),
},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: mustStackInputVariable("id"),
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.StringVal("foo"),
DeleteOnApply: true,
},
},
},
},
"deferred-provider-with-write-only": {
path: "with-write-only-attribute",
cycle: TestCycle{
planInputs: map[string]cty.Value{
"providers": cty.UnknownVal(cty.Set(cty.String)),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.main"),
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"datasource_id": mustPlanDynamicValueDynamicType(cty.StringVal("datasource")),
"resource_id": mustPlanDynamicValueDynamicType(cty.StringVal("resource")),
"write_only_input": mustPlanDynamicValueDynamicType(cty.StringVal("secret")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"datasource_id": nil,
"resource_id": nil,
"write_only_input": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.main.data.testing_write_only_data_source.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("data.testing_write_only_data_source.data"),
PrevRunAddr: mustAbsResourceInstance("data.testing_write_only_data_source.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Read,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("datasource"),
"value": cty.UnknownVal(cty.String),
"write_only": cty.NullVal(cty.String),
})),
AfterSensitivePaths: []cty.Path{
cty.GetAttrPath("write_only"),
},
},
ActionReason: plans.ResourceInstanceReadBecauseDependencyPending,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.WriteOnlyDataSourceSchema,
},
DeferredReason: providers.DeferredReasonProviderConfigUnknown,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.main.testing_write_only_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_write_only_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_write_only_resource.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("resource"),
"value": cty.UnknownVal(cty.String),
"write_only": cty.NullVal(cty.String),
})),
AfterSensitivePaths: []cty.Path{
cty.GetAttrPath("write_only"),
},
},
},
PriorStateSrc: nil,
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.WriteOnlyResourceSchema,
},
DeferredReason: providers.DeferredReasonProviderConfigUnknown,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: mustStackInputVariable("providers"),
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
},
},
},
"deferred-provider-with-data-sources": {
path: path.Join("with-data-source", "deferred-provider-for-each"),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("data_known", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("data_known"),
"value": cty.StringVal("known"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"providers": cty.UnknownVal(cty.Set(cty.String)),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.const"),
PlanApplyable: true,
PlanComplete: true,
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("data_known")),
"resource": mustPlanDynamicValueDynamicType(cty.StringVal("resource_known")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"resource": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.const.data.testing_data_source.data"),
ChangeSrc: nil,
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "data_known",
"value": "known",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingDataSourceSchema,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.const.testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("resource_known"),
"value": cty.StringVal("known"),
})),
},
},
PriorStateSrc: nil,
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.main[*]"),
PlanApplyable: false, // only deferred changes
PlanComplete: false, // deferred
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("data_unknown")),
"resource": mustPlanDynamicValueDynamicType(cty.StringVal("resource_unknown")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"resource": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: stackaddrs.AbsComponentInstance{
Item: stackaddrs.ComponentInstance{
Component: stackaddrs.Component{
Name: "main",
},
Key: addrs.WildcardKey,
},
},
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: mustAbsResourceInstance("data.testing_data_source.data"),
},
},
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("data.testing_data_source.data"),
PrevRunAddr: mustAbsResourceInstance("data.testing_data_source.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Read,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("data_unknown"),
"value": cty.UnknownVal(cty.String),
})),
},
ActionReason: plans.ResourceInstanceReadBecauseDependencyPending,
},
PriorStateSrc: nil,
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingDataSourceSchema,
},
DeferredReason: providers.DeferredReasonProviderConfigUnknown,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: stackaddrs.AbsComponentInstance{
Item: stackaddrs.ComponentInstance{
Component: stackaddrs.Component{
Name: "main",
},
Key: addrs.WildcardKey,
},
},
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: mustAbsResourceInstance("testing_resource.data"),
},
},
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("resource_unknown"),
"value": cty.UnknownVal(cty.String),
})),
},
},
PriorStateSrc: nil,
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
DeferredReason: providers.DeferredReasonProviderConfigUnknown,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: mustStackInputVariable("providers"),
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
},
},
},
"removed embedded component duplicate": {
path: filepath.Join("with-single-input", "removed-component-from-stack-dynamic"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"for_each_input": cty.MapVal(map[string]cty.Value{
"foo": cty.StringVal("bar"),
}),
"simple_input": cty.MapVal(map[string]cty.Value{
"foo": cty.StringVal("bar"),
}),
"for_each_removed": cty.SetVal([]cty.Value{
cty.StringVal("foo"),
}),
"simple_removed": cty.SetVal([]cty.Value{
cty.StringVal("foo"),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot remove component instance",
Detail: "The component instance stack.for_each.component.self[\"foo\"] is targeted by a component block and cannot be removed. The relevant component is defined at git::https://example.com/test.git//with-single-input/for-each-component/for-each-component.tfcomponent.hcl:15,1-17.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-component-from-stack-dynamic/removed-component-from-stack-dynamic.tfcomponent.hcl",
Start: hcl.Pos{Line: 38, Column: 1, Byte: 505},
End: hcl.Pos{Line: 38, Column: 8, Byte: 512},
},
})
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot remove component instance",
Detail: "The component instance stack.simple[\"foo\"].component.self is targeted by a component block and cannot be removed. The relevant component is defined at git::https://example.com/test.git//with-single-input/valid/valid.tfcomponent.hcl:19,1-17.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-component-from-stack-dynamic/removed-component-from-stack-dynamic.tfcomponent.hcl",
Start: hcl.Pos{Line: 60, Column: 1, Byte: 811},
End: hcl.Pos{Line: 60, Column: 8, Byte: 818},
},
})
return diags
}),
},
},
"deferred-embedded-stack-update": {
path: path.Join("with-single-input", "deferred-embedded-stack-for-each"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.a[\"deferred\"].component.self")).
AddInputVariable("id", cty.StringVal("deferred")).
AddInputVariable("input", cty.StringVal("deferred"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.a[\"deferred\"].component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "deferred",
"value": "deferred",
}),
})).
AddInput("stacks", cty.MapVal(map[string]cty.Value{
"deferred": cty.StringVal("deferred"),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("deferred", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("deferred"),
"value": cty.StringVal("deferred"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"stacks": cty.UnknownVal(cty.Map(cty.String)),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.StringKey("deferred")),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
},
),
PlanApplyable: false, // Everything is deferred, so nothing to apply.
PlanComplete: false,
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("deferred")),
"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
},
PlannedOutputValues: map[string]cty.Value{},
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"input": nil,
},
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
DeferredReason: providers.DeferredReasonDeferredPrereq,
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.StringKey("deferred")),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
},
),
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
},
},
ProviderConfigAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
PrevRunAddr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
ProviderAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: plans.ChangeSrc{
Action: plans.Update,
Before: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("deferred"),
"value": cty.StringVal("deferred"),
}), stacks_testing_provider.TestingResourceSchema.Body),
After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("deferred"),
"value": cty.UnknownVal(cty.String),
}), stacks_testing_provider.TestingResourceSchema.Body),
AfterSensitivePaths: nil,
},
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "deferred",
"value": "deferred",
}),
Dependencies: make([]addrs.ConfigResource, 0),
},
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "stacks"},
Action: plans.Update,
Before: cty.MapVal(map[string]cty.Value{
"deferred": cty.StringVal("deferred"),
}),
After: cty.UnknownVal(cty.Map(cty.String)),
},
},
},
},
"deferred-embedded-stack-create": {
path: path.Join("with-single-input", "deferred-embedded-stack-for-each"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"stacks": cty.UnknownVal(cty.Map(cty.String)),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.WildcardKey),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
},
),
PlanApplyable: false, // Everything is deferred, so nothing to apply.
PlanComplete: false,
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
},
PlannedOutputValues: map[string]cty.Value{},
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"input": nil,
},
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
DeferredReason: providers.DeferredReasonDeferredPrereq,
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.WildcardKey),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
},
),
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
},
},
ProviderConfigAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
PrevRunAddr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
ProviderAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"value": cty.UnknownVal(cty.String),
}), stacks_testing_provider.TestingResourceSchema.Body),
AfterSensitivePaths: nil,
},
},
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "stacks"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Map(cty.String)),
},
},
},
},
"deferred-embedded-stack-and-component-for-each": {
path: path.Join("with-single-input", "deferred-embedded-stack-and-component-for-each"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"stacks": cty.UnknownVal(cty.Map(cty.Set(cty.String))),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.WildcardKey),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
Key: addrs.WildcardKey,
},
),
PlanApplyable: false, // Everything is deferred, so nothing to apply.
PlanComplete: false,
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
},
PlannedOutputValues: map[string]cty.Value{},
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"input": nil,
},
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
DeferredReason: providers.DeferredReasonDeferredPrereq,
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: stackaddrs.Absolute(
stackaddrs.RootStackInstance.Child("a", addrs.WildcardKey),
stackaddrs.ComponentInstance{
Component: stackaddrs.Component{Name: "self"},
Key: addrs.WildcardKey,
},
),
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
},
},
ProviderConfigAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
PrevRunAddr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
ProviderAddr: addrs.AbsProviderConfig{
Module: addrs.RootModule,
Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
},
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"value": cty.UnknownVal(cty.String),
}), stacks_testing_provider.TestingResourceSchema.Body),
AfterSensitivePaths: nil,
},
},
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "stacks"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Map(cty.Set(cty.String))),
},
},
},
},
"removed block targets stack not in configuration or state": {
path: filepath.Join("with-single-input", "removed-stack-instance-dynamic"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"input": cty.MapValEmpty(cty.String),
"removed": cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.MapValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed-direct"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetValEmpty(cty.String),
},
},
},
},
"embedded stack in state but not in configuration": {
path: filepath.Join("with-single-input", "valid"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.child.component.self"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.child.component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "leftover",
"value": "leftover",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("leftover", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("leftover"),
"value": cty.StringVal("leftover"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"input": cty.StringVal("input"),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Unclaimed component instance",
Detail: "The component instance stack.child.component.self is not claimed by any component or removed block in the configuration. Make sure it is instantiated by a component block, or targeted for removal by a removed block.",
})
}),
},
},
"removed and stack block target the same stack": {
path: filepath.Join("with-single-input", "removed-stack-instance-dynamic"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"input": cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
"removed": cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot remove stack instance",
Detail: "The stack instance stack.simple[\"component\"] is targeted by an embedded stack block and cannot be removed. The relevant embedded stack is defined at git::https://example.com/test.git//with-single-input/removed-stack-instance-dynamic/removed-stack-instance-dynamic.tfcomponent.hcl:25,1-15.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-stack-instance-dynamic/removed-stack-instance-dynamic.tfcomponent.hcl",
Start: hcl.Pos{Line: 36, Column: 1, Byte: 441},
End: hcl.Pos{Line: 36, Column: 8, Byte: 448},
},
})
}),
},
},
"removed targets stack block in embedded stack that exists": {
path: filepath.Join("with-single-input", "removed-stack-from-embedded-stack"),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"input": cty.MapVal(map[string]cty.Value{
"component": cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
}),
"removed": cty.MapVal(map[string]cty.Value{
"component": cty.MapVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"input": cty.StringVal("component"),
}),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot remove stack instance",
Detail: "The stack instance stack.embedded[\"component\"].stack.simple[\"component\"] is targeted by an embedded stack block and cannot be removed. The relevant embedded stack is defined at git::https://example.com/test.git//with-single-input/removed-stack-instance-dynamic/removed-stack-instance-dynamic.tfcomponent.hcl:25,1-15.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-stack-from-embedded-stack/removed-stack-from-embedded-stack.tfcomponent.hcl",
Start: hcl.Pos{Line: 28, Column: 1, Byte: 360},
End: hcl.Pos{Line: 28, Column: 8, Byte: 367},
},
})
}),
},
},
"removed block targets component inside removed stack": {
path: filepath.Join("with-single-input", "removed-stack-instance-dynamic"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.simple[\"component\"].component.self")).
AddInputVariable("id", cty.StringVal("component")).
AddInputVariable("input", cty.StringVal("component"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.simple[\"component\"].component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("component", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"removed": cty.MapVal(map[string]cty.Value{
"component": cty.StringVal("component"),
}),
"removed-direct": cty.SetVal([]cty.Value{
cty.StringVal("component"),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot remove component instance",
Detail: "The component instance stack.simple[\"component\"].component.self is targeted by a component block and cannot be removed. The relevant component is defined at git::https://example.com/test.git//with-single-input/valid/valid.tfcomponent.hcl:19,1-17.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-stack-instance-dynamic/removed-stack-instance-dynamic.tfcomponent.hcl",
Start: hcl.Pos{Line: 51, Column: 1, Byte: 708},
End: hcl.Pos{Line: 51, Column: 8, Byte: 715},
},
})
}),
},
},
"removed block targets orphaned component": {
path: filepath.Join("with-single-input", "removed-component-from-stack-dynamic"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.simple[\"component\"].component.self")).
AddInputVariable("id", cty.StringVal("component")).
AddInputVariable("input", cty.StringVal("component"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.simple[\"component\"].component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("component", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"simple_input": cty.MapValEmpty(cty.String),
"simple_removed": cty.SetVal([]cty.Value{
cty.StringVal("component"),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid removed block",
Detail: "The component instance stack.simple[\"component\"].component.self could not be removed. The linked removed block was not executed because the `from` attribute of the removed block targets a component or embedded stack within an orphaned embedded stack.\n\nIn order to remove an entire stack, update your removed block to target the entire removed stack itself instead of the specific elements within it.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-component-from-stack-dynamic/removed-component-from-stack-dynamic.tfcomponent.hcl",
Start: hcl.Pos{Line: 60, Column: 1, Byte: 811},
End: hcl.Pos{Line: 60, Column: 8, Byte: 818},
},
})
}),
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: false,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "for_each_input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.MapValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "for_each_removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "simple_input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.MapValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "simple_removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("component"),
}),
},
},
},
},
"removed block targets orphaned stack": {
path: filepath.Join("with-single-input", "removed-stack-from-embedded-stack"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.embedded[\"component\"].stack.simple[\"component\"].component.self")).
AddInputVariable("id", cty.StringVal("component")).
AddInputVariable("input", cty.StringVal("component"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.embedded[\"component\"].stack.simple[\"component\"].component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("component", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"input": cty.MapValEmpty(cty.Map(cty.String)),
"removed": cty.MapVal(map[string]cty.Value{
"component": cty.MapVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"input": cty.StringVal("component"),
}),
}),
},
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid removed block",
Detail: "The component instance stack.embedded[\"component\"].stack.simple[\"component\"].component.self could not be removed. The linked removed block was not executed because the `from` attribute of the removed block targets a component or embedded stack within an orphaned embedded stack.\n\nIn order to remove an entire stack, update your removed block to target the entire removed stack itself instead of the specific elements within it.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/removed-stack-from-embedded-stack/removed-stack-from-embedded-stack.tfcomponent.hcl",
Start: hcl.Pos{Line: 28, Column: 1, Byte: 360},
End: hcl.Pos{Line: 28, Column: 8, Byte: 367},
},
})
}),
},
},
"removed block targets orphaned component without config definition": {
path: filepath.Join("with-single-input", "orphaned-component"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.embedded.component.self")).
AddInputVariable("id", cty.StringVal("component")).
AddInputVariable("input", cty.StringVal("component"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.embedded.component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("component", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})).
Build(),
cycle: TestCycle{
wantPlannedDiags: initDiags(func(diags tfdiags.Diagnostics) tfdiags.Diagnostics {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid removed block",
Detail: "The component instance stack.embedded.component.self could not be removed. The linked removed block was not executed because the `from` attribute of the removed block targets a component or embedded stack within an orphaned embedded stack.\n\nIn order to remove an entire stack, update your removed block to target the entire removed stack itself instead of the specific elements within it.",
Subject: &hcl.Range{
Filename: "git::https://example.com/test.git//with-single-input/orphaned-component/orphaned-component.tfcomponent.hcl",
Start: hcl.Pos{Line: 10, Column: 1, Byte: 131},
End: hcl.Pos{Line: 10, Column: 8, Byte: 138},
},
})
}),
},
},
"unknown embedded stack with internal component targeted by concrete removed block": {
path: filepath.Join("with-single-input", "removed-stack-instance-dynamic"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.simple[\"component\"].component.self")).
AddInputVariable("id", cty.StringVal("component")).
AddInputVariable("input", cty.StringVal("component"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.simple[\"component\"].component.self.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("component", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})).
Build(),
cycle: TestCycle{
planInputs: map[string]cty.Value{
"removed": cty.UnknownVal(cty.Map(cty.String)),
},
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("stack.simple[\"component\"].component.self"),
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"input": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.simple[\"component\"].component.self.testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Delete,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("component"),
"value": cty.StringVal("component"),
})),
After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
},
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "component",
"value": "component",
}),
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
DeferredReason: providers.DeferredReasonDeferredPrereq,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.MapValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Map(cty.String)),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed-direct"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetValEmpty(cty.String),
},
},
},
},
"remove partial stack": {
path: filepath.Join("with-single-input", "multiple-components", "removed"),
state: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("stack.multiple.component.one")).
AddInputVariable("id", cty.StringVal("one")).
AddInputVariable("input", cty.StringVal("one"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("stack.multiple.component.one.testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "one",
"value": "one",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("one", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("one"),
"value": cty.StringVal("one"),
})).
Build(),
cycle: TestCycle{
wantPlannedChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("stack.multiple.component.one"),
PlanApplyable: true,
PlanComplete: true,
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("one")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("one")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"id": nil,
"input": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.multiple.component.one.testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ProviderAddr: mustDefaultRootProvider("testing"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Delete,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("one"),
"value": cty.StringVal("one"),
})),
After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
},
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "one",
"value": "one",
}),
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("stack.multiple.component.two"),
PlanApplyable: true,
PlanComplete: true,
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedOutputValues: make(map[string]cty.Value),
PlanTimestamp: fakePlanTimestamp,
},
},
},
},
}
for name, tc := range tcs {
t.Run(name, func(t *testing.T) {
ctx := context.Background()
lock := depsfile.NewLocks()
lock.SetProvider(
addrs.NewDefaultProvider("testing"),
providerreqs.MustParseVersion("0.0.0"),
providerreqs.MustParseVersionConstraints("=0.0.0"),
providerreqs.PreferredHashes([]providerreqs.Hash{}),
)
store := tc.store
if store == nil {
store = stacks_testing_provider.NewResourceStore()
}
testContext := TestContext{
timestamp: &fakePlanTimestamp,
config: loadMainBundleConfigForTest(t, tc.path),
providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
return stacks_testing_provider.NewProviderWithData(t, store), nil
},
},
dependencyLocks: *lock,
}
testContext.Plan(t, ctx, tc.state, tc.cycle)
})
}
}
// TestPlanWithMissingInputVariable confirms that a component configuration
// referencing an input variable that was never declared in the stack
// produces exactly one "Reference to undeclared input variable" error
// diagnostic pointing at the offending reference.
func TestPlanWithMissingInputVariable(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "plan-undeclared-variable-in-component")

	planTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}

	plannedCh := make(chan stackplan.PlannedChange, 8)
	diagnosticsCh := make(chan tfdiags.Diagnostic, 2)

	request := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewBuiltInProvider("terraform"): func() (providers.Interface, error) {
				return terraformProvider.NewProvider(), nil
			},
		},
		ForcePlanTimestamp: &planTimestamp,
	}
	response := PlanResponse{
		PlannedChanges: plannedCh,
		Diagnostics:    diagnosticsCh,
	}
	go Plan(ctx, &request, &response)
	_, gotDiags := collectPlanOutput(plannedCh, diagnosticsCh)

	// Normalize the diagnostics to a consistent underlying type via ForRPC
	// so the diff below doesn't depend on which concrete implementation is
	// in use.
	gotDiags = gotDiags.ForRPC()

	var wantDiags tfdiags.Diagnostics
	wantDiags = wantDiags.Append(&hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Reference to undeclared input variable",
		Detail:   `There is no variable "input" block declared in this stack.`,
		Subject: &hcl.Range{
			Filename: mainBundleSourceAddrStr("plan-undeclared-variable-in-component/undeclared-variable.tfcomponent.hcl"),
			Start:    hcl.Pos{Line: 17, Column: 13, Byte: 250},
			End:      hcl.Pos{Line: 17, Column: 22, Byte: 259},
		},
	})
	wantDiags = wantDiags.ForRPC()

	if diff := cmp.Diff(wantDiags, gotDiags); diff != "" {
		t.Errorf("wrong diagnostics\n%s", diff)
	}
}
// TestPlanWithNoValueForRequiredVariable confirms that planning a stack whose
// root input variable has no default and receives no value produces exactly
// one "No value for required variable" error diagnostic at the variable's
// declaration.
func TestPlanWithNoValueForRequiredVariable(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "plan-no-value-for-required-variable")

	planTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}

	plannedCh := make(chan stackplan.PlannedChange, 8)
	diagnosticsCh := make(chan tfdiags.Diagnostic, 2)

	request := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewBuiltInProvider("terraform"): func() (providers.Interface, error) {
				return terraformProvider.NewProvider(), nil
			},
		},
		ForcePlanTimestamp: &planTimestamp,
	}
	response := PlanResponse{
		PlannedChanges: plannedCh,
		Diagnostics:    diagnosticsCh,
	}
	go Plan(ctx, &request, &response)
	_, gotDiags := collectPlanOutput(plannedCh, diagnosticsCh)

	// Normalize the diagnostics to a consistent underlying type via ForRPC
	// so the diff below doesn't depend on which concrete implementation is
	// in use.
	gotDiags = gotDiags.ForRPC()

	var wantDiags tfdiags.Diagnostics
	wantDiags = wantDiags.Append(&hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "No value for required variable",
		Detail:   `The root input variable "var.beep" is not set, and has no default value.`,
		Subject: &hcl.Range{
			Filename: mainBundleSourceAddrStr("plan-no-value-for-required-variable/unset-variable.tfcomponent.hcl"),
			Start:    hcl.Pos{Line: 1, Column: 1, Byte: 0},
			End:      hcl.Pos{Line: 1, Column: 16, Byte: 15},
		},
	})
	wantDiags = wantDiags.ForRPC()

	if diff := cmp.Diff(wantDiags, gotDiags); diff != "" {
		t.Errorf("wrong diagnostics\n%s", diff)
	}
}
// TestPlanWithVariableDefaults verifies that a root input variable's declared
// default value is applied both when the variable is left entirely
// unspecified and when the caller supplies an explicit null value for it.
// In both cases the planned changes must reflect the default ("BEEP").
func TestPlanWithVariableDefaults(t *testing.T) {
	// Test that defaults are applied correctly for both unspecified input
	// variables and those with an explicit null value.
	testCases := map[string]struct {
		inputs map[stackaddrs.InputVariable]ExternalInputValue
	}{
		"unspecified": {
			// No entry for "beep" at all; the declared default should apply.
			inputs: make(map[stackaddrs.InputVariable]ExternalInputValue),
		},
		"explicit null": {
			// "beep" is present but null; the default should still apply.
			inputs: map[stackaddrs.InputVariable]ExternalInputValue{
				{Name: "beep"}: {
					Value: cty.NullVal(cty.DynamicPseudoType),
					DefRange: tfdiags.SourceRange{Filename: "fake.tfcomponent.hcl"},
				},
			},
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			ctx := context.Background()
			cfg := loadMainBundleConfigForTest(t, "plan-variable-defaults")
			fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
			if err != nil {
				t.Fatal(err)
			}
			changesCh := make(chan stackplan.PlannedChange, 8)
			diagsCh := make(chan tfdiags.Diagnostic, 2)
			req := PlanRequest{
				Config: cfg,
				InputValues: tc.inputs,
				ForcePlanTimestamp: &fakePlanTimestamp,
			}
			resp := PlanResponse{
				PlannedChanges: changesCh,
				Diagnostics: diagsCh,
			}
			go Plan(ctx, &req, &resp)
			gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
			if len(diags) != 0 {
				t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
			}
			// Expected planned changes: the input variable resolves to its
			// default "BEEP" regardless of which test case we're in, and the
			// output values reflect that.
			wantChanges := []stackplan.PlannedChange{
				&stackplan.PlannedChangeApplyable{
					Applyable: true,
				},
				&stackplan.PlannedChangeHeader{
					TerraformVersion: version.SemVer,
				},
				&stackplan.PlannedChangeOutputValue{
					Addr: stackaddrs.OutputValue{Name: "beep"},
					Action: plans.Create,
					Before: cty.NullVal(cty.DynamicPseudoType),
					After: cty.StringVal("BEEP"),
				},
				&stackplan.PlannedChangeOutputValue{
					Addr: stackaddrs.OutputValue{Name: "defaulted"},
					Action: plans.Create,
					Before: cty.NullVal(cty.DynamicPseudoType),
					After: cty.StringVal("BOOP"),
				},
				&stackplan.PlannedChangeOutputValue{
					Addr: stackaddrs.OutputValue{Name: "specified"},
					Action: plans.Create,
					Before: cty.NullVal(cty.DynamicPseudoType),
					After: cty.StringVal("BEEP"),
				},
				&stackplan.PlannedChangePlannedTimestamp{
					PlannedTimestamp: fakePlanTimestamp,
				},
				&stackplan.PlannedChangeRootInputValue{
					Addr: stackaddrs.InputVariable{
						Name: "beep",
					},
					Action: plans.Create,
					Before: cty.NullVal(cty.DynamicPseudoType),
					After: cty.StringVal("BEEP"),
				},
			}
			// Emission order is scheduler-dependent, so sort into a stable
			// order before diffing.
			sort.SliceStable(gotChanges, func(i, j int) bool {
				return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
			})
			if diff := cmp.Diff(wantChanges, gotChanges, ctydebug.CmpOptions); diff != "" {
				t.Errorf("wrong changes\n%s", diff)
			}
		})
	}
}
// TestPlanWithComplexVariableDefaults verifies that optional-attribute
// defaults on object-typed input variables are populated during planning:
// the caller supplies an empty object for "optional" and the expected plan
// shows its attributes filled in from the declared defaults, for resources
// in both a root component and a nested stack's component.
func TestPlanWithComplexVariableDefaults(t *testing.T) {
	ctx := context.Background()
	// NOTE(review): path.Join with a single argument is a no-op; this could
	// be written as just "complex-inputs" (but the path import may be used
	// elsewhere in this file, so it's left as-is here).
	cfg := loadMainBundleConfigForTest(t, path.Join("complex-inputs"))
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	// Unbuffered channels here; collectPlanOutput drains them concurrently
	// with the Plan goroutine below.
	changesCh := make(chan stackplan.PlannedChange)
	diagsCh := make(chan tfdiags.Diagnostic)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "optional"}: {
				Value: cty.EmptyObjectVal, // This should be populated by defaults.
				DefRange: tfdiags.SourceRange{},
			},
		},
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	changes, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Fatalf("unexpected diagnostics: %s", diags)
	}
	// Emission order is scheduler-dependent; sort before diffing.
	sort.SliceStable(changes, func(i, j int) bool {
		return plannedChangeSortKey(changes[i]) < plannedChangeSortKey(changes[j])
	})
	// The expected plan: three resource instances per component, carrying
	// values derived from the "default", "optional_default", and "optional"
	// input variables respectively.
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: mustAbsComponentInstance("component.self"),
			PlanComplete: true,
			PlanApplyable: true,
			Action: plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](),
			PlannedInputValues: map[string]plans.DynamicValue{
				"input": mustPlanDynamicValueDynamicType(cty.ListVal([]cty.Value{
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("cec9bc39"),
						"value": cty.StringVal("hello, mercury!"),
					}),
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("78d8b3d7"),
						"value": cty.StringVal("hello, venus!"),
					}),
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.NullVal(cty.String),
						"value": cty.StringVal("hello, earth!"),
					}),
				})),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"input": nil,
			},
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data[0]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[0]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[0]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("cec9bc39"),
						"value": cty.StringVal("hello, mercury!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data[1]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[1]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[1]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("78d8b3d7"),
						"value": cty.StringVal("hello, venus!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data[2]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[2]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[2]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						// "id" was null in the input, so the provider plans
						// it as unknown (to be decided at apply time).
						"id": cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, earth!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: mustAbsComponentInstance("stack.child.component.parent"),
			PlanComplete: true,
			PlanApplyable: true,
			Action: plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](),
			PlannedInputValues: map[string]plans.DynamicValue{
				"input": mustPlanDynamicValueDynamicType(cty.ListVal([]cty.Value{
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("cec9bc39"),
						"value": cty.StringVal("hello, mercury!"),
					}),
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("78d8b3d7"),
						"value": cty.StringVal("hello, venus!"),
					}),
					cty.ObjectVal(map[string]cty.Value{
						"id": cty.NullVal(cty.String),
						"value": cty.StringVal("hello, earth!"),
					}),
				})),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"input": nil,
			},
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.child.component.parent.testing_resource.data[0]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[0]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[0]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("cec9bc39"),
						"value": cty.StringVal("hello, mercury!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.child.component.parent.testing_resource.data[1]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[1]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[1]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("78d8b3d7"),
						"value": cty.StringVal("hello, venus!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.child.component.parent.testing_resource.data[2]"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: mustAbsResourceInstance("testing_resource.data[2]"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data[2]"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, earth!"),
					})),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "default"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.ObjectVal(map[string]cty.Value{
				"id": cty.StringVal("cec9bc39"),
				"value": cty.StringVal("hello, mercury!"),
			}),
		},
		&stackplan.PlannedChangeRootInputValue{
			// The empty object supplied by the caller, populated with the
			// declared optional-attribute defaults.
			Addr: stackaddrs.InputVariable{Name: "optional"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.ObjectVal(map[string]cty.Value{
				"id": cty.NullVal(cty.String),
				"value": cty.StringVal("hello, earth!"),
			}),
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "optional_default"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.ObjectVal(map[string]cty.Value{
				"id": cty.StringVal("78d8b3d7"),
				"value": cty.StringVal("hello, venus!"),
			}),
		},
	}
	if diff := cmp.Diff(wantChanges, changes, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithSingleResource plans a configuration containing one component
// with a single terraform_data resource, and asserts the full set of planned
// changes including the resource's planned new value (encoded as a raw
// msgpack byte sequence, since terraform_data uses DynamicPseudoType
// attributes which need explicitly-typed encoding).
func TestPlanWithSingleResource(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "with-single-resource")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewBuiltInProvider("terraform"): func() (providers.Interface, error) {
				return terraformProvider.NewProvider(), nil
			},
		},
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	// The order of emission for our planned changes is unspecified since it
	// depends on how the various goroutines get scheduled, and so we'll
	// arbitrarily sort gotChanges lexically by the name of the change type
	// so that we have some dependable order to diff against below.
	sort.Slice(gotChanges, func(i, j int) bool {
		ic := gotChanges[i]
		jc := gotChanges[j]
		return fmt.Sprintf("%T", ic) < fmt.Sprintf("%T", jc)
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				"input": cty.StringVal("hello"),
				// "output" is computed by the provider, so it's unknown
				// until apply.
				"output": cty.UnknownVal(cty.String),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr: stackaddrs.OutputValue{Name: "obj"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.ObjectVal(map[string]cty.Value{
				"input": cty.StringVal("hello"),
				"output": cty.UnknownVal(cty.String),
			}),
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
				Component: stackaddrs.Absolute(
					stackaddrs.RootStackInstance,
					stackaddrs.ComponentInstance{
						Component: stackaddrs.Component{Name: "self"},
					},
				),
				Item: addrs.AbsResourceInstanceObject{
					ResourceInstance: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "terraform_data",
						Name: "main",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("terraform.io/builtin/terraform"),
			},
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "terraform_data",
					Name: "main",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				PrevRunAddr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "terraform_data",
					Name: "main",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.NewBuiltInProvider("terraform"),
				},
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: plans.DynamicValue{
						// This is an object conforming to the terraform_data
						// resource type's schema.
						//
						// FIXME: Should write this a different way that is
						// scrutable and won't break each time something gets
						// added to the terraform_data schema. (We can't use
						// mustPlanDynamicValue here because the resource type
						// uses DynamicPseudoType attributes, which require
						// explicitly-typed encoding.)
						0x84, 0xa2, 0x69, 0x64, 0xc7, 0x03, 0x0c, 0x81,
						0x01, 0xc2, 0xa5, 0x69, 0x6e, 0x70, 0x75, 0x74,
						0x92, 0xc4, 0x08, 0x22, 0x73, 0x74, 0x72, 0x69,
						0x6e, 0x67, 0x22, 0xa5, 0x68, 0x65, 0x6c, 0x6c,
						0x6f, 0xa6, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74,
						0x92, 0xc4, 0x08, 0x22, 0x73, 0x74, 0x72, 0x69,
						0x6e, 0x67, 0x22, 0xd4, 0x00, 0x00, 0xb0, 0x74,
						0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x73, 0x5f,
						0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0xc0,
					},
				},
			},
			// The following is schema for the real terraform_data resource
			// type from the real terraform.io/builtin/terraform provider
			// maintained elsewhere in this codebase. If that schema changes
			// in future then this should change to match it.
			Schema: providers.Schema{
				Body: &configschema.Block{
					Attributes: map[string]*configschema.Attribute{
						"input": {Type: cty.DynamicPseudoType, Optional: true},
						"output": {Type: cty.DynamicPseudoType, Computed: true},
						"triggers_replace": {Type: cty.DynamicPseudoType, Optional: true},
						"id": {Type: cty.String, Computed: true},
					},
				},
				Identity: &configschema.Object{
					Attributes: map[string]*configschema.Attribute{
						"id": {
							Type: cty.String,
							Description: "The unique identifier for the data store.",
							Required: true,
						},
					},
					Nesting: configschema.NestingSingle,
				},
			},
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithEphemeralInputVariables verifies how ephemeral root input
// variables are recorded in a plan: their values are never persisted (the
// planned "After" is always null), and RequiredOnApply is true only when the
// caller actually supplied a value during planning.
func TestPlanWithEphemeralInputVariables(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "variable-ephemeral")
	t.Run("with variables set", func(t *testing.T) {
		changesCh := make(chan stackplan.PlannedChange, 8)
		diagsCh := make(chan tfdiags.Diagnostic, 2)
		fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
		if err != nil {
			t.Fatal(err)
		}
		req := PlanRequest{
			Config: cfg,
			InputValues: map[stackaddrs.InputVariable]stackeval.ExternalInputValue{
				{Name: "eph"}: {Value: cty.StringVal("eph value")},
				{Name: "noneph"}: {Value: cty.StringVal("noneph value")},
			},
			ForcePlanTimestamp: &fakePlanTimestamp,
		}
		resp := PlanResponse{
			PlannedChanges: changesCh,
			Diagnostics: diagsCh,
		}
		go Plan(ctx, &req, &resp)
		gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
		if len(diags) != 0 {
			t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
		}
		wantChanges := []stackplan.PlannedChange{
			&stackplan.PlannedChangeApplyable{
				Applyable: true,
			},
			&stackplan.PlannedChangeHeader{
				TerraformVersion: version.SemVer,
			},
			&stackplan.PlannedChangePlannedTimestamp{
				PlannedTimestamp: fakePlanTimestamp,
			},
			&stackplan.PlannedChangeRootInputValue{
				Addr: stackaddrs.InputVariable{
					Name: "eph",
				},
				Action: plans.Create,
				Before: cty.NullVal(cty.DynamicPseudoType),
				// The ephemeral value itself is not persisted in the plan,
				// so "After" is null even though a value was supplied...
				After: cty.NullVal(cty.String), // ephemeral
				// ...and the caller must re-supply it at apply time.
				RequiredOnApply: true,
			},
			&stackplan.PlannedChangeRootInputValue{
				Addr: stackaddrs.InputVariable{
					Name: "noneph",
				},
				Action: plans.Create,
				Before: cty.NullVal(cty.DynamicPseudoType),
				After: cty.StringVal("noneph value"),
			},
		}
		// Emission order is scheduler-dependent; sort before diffing.
		sort.SliceStable(gotChanges, func(i, j int) bool {
			return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
		})
		if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
			t.Errorf("wrong changes\n%s", diff)
		}
	})
	t.Run("without variables set", func(t *testing.T) {
		changesCh := make(chan stackplan.PlannedChange, 8)
		diagsCh := make(chan tfdiags.Diagnostic, 2)
		fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
		if err != nil {
			t.Fatal(err)
		}
		req := PlanRequest{
			InputValues: map[stackaddrs.InputVariable]stackeval.ExternalInputValue{
				// Intentionally not set for this subtest.
			},
			Config: cfg,
			ForcePlanTimestamp: &fakePlanTimestamp,
		}
		resp := PlanResponse{
			PlannedChanges: changesCh,
			Diagnostics: diagsCh,
		}
		go Plan(ctx, &req, &resp)
		gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
		if len(diags) != 0 {
			t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
		}
		wantChanges := []stackplan.PlannedChange{
			&stackplan.PlannedChangeApplyable{
				Applyable: true,
			},
			&stackplan.PlannedChangeHeader{
				TerraformVersion: version.SemVer,
			},
			&stackplan.PlannedChangePlannedTimestamp{
				PlannedTimestamp: fakePlanTimestamp,
			},
			&stackplan.PlannedChangeRootInputValue{
				Addr: stackaddrs.InputVariable{
					Name: "eph",
				},
				Action: plans.Create,
				Before: cty.NullVal(cty.DynamicPseudoType),
				After: cty.NullVal(cty.String), // ephemeral
				// No value was supplied during planning, so none is
				// demanded at apply time either.
				RequiredOnApply: false,
			},
			&stackplan.PlannedChangeRootInputValue{
				Addr: stackaddrs.InputVariable{
					Name: "noneph",
				},
				Action: plans.Create,
				Before: cty.NullVal(cty.DynamicPseudoType),
				After: cty.NullVal(cty.String),
			},
		}
		// Emission order is scheduler-dependent; sort before diffing.
		sort.SliceStable(gotChanges, func(i, j int) bool {
			return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
		})
		if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
			t.Errorf("wrong changes\n%s", diff)
		}
	})
}
// TestPlanVariableOutputRoundtripNested verifies that a root input variable's
// default value ("default") round-trips through the nested configuration and
// comes back out unchanged via the root "msg" output value.
func TestPlanVariableOutputRoundtripNested(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "variable-output-roundtrip-nested")

	plannedCh := make(chan stackplan.PlannedChange, 8)
	diagnosticsCh := make(chan tfdiags.Diagnostic, 2)

	planTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}

	request := PlanRequest{
		Config:             cfg,
		ForcePlanTimestamp: &planTimestamp,
	}
	response := PlanResponse{
		PlannedChanges: plannedCh,
		Diagnostics:    diagnosticsCh,
	}
	go Plan(ctx, &request, &response)
	gotChanges, diags := collectPlanOutput(plannedCh, diagnosticsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}

	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr:   stackaddrs.OutputValue{Name: "msg"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.StringVal("default"),
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: planTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{
				Name: "msg",
			},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.StringVal("default"),
		},
	}

	// Emission order is scheduler-dependent, so sort into a stable order
	// before comparing against the expected sequence above.
	sort.SliceStable(gotChanges, func(a, b int) bool {
		return plannedChangeSortKey(gotChanges[a]) < plannedChangeSortKey(gotChanges[b])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanSensitiveOutput verifies that a component output value marked as
// sensitive keeps its sensitivity mark when exposed as a root stack output
// in the planned changes.
func TestPlanSensitiveOutput(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "sensitive-output")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				// The component's output carries the sensitive mark.
				"out": cty.StringVal("secret").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr: stackaddrs.OutputValue{Name: "result"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			// The mark must survive propagation to the root output value.
			After: cty.StringVal("secret").Mark(marks.Sensitive),
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
	}
	// Emission order is scheduler-dependent; sort before diffing.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanSensitiveOutputNested verifies that a sensitive output value
// produced inside a nested (child) stack keeps its sensitivity mark when it
// propagates up to the root stack's output value.
func TestPlanSensitiveOutputNested(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "sensitive-output-nested")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr: stackaddrs.OutputValue{Name: "result"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			// The sensitive mark from the child stack's component output
			// must survive propagation to the root output value.
			After: cty.StringVal("secret").Mark(marks.Sensitive),
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeComponentInstance{
			// The component lives inside the embedded "child" stack.
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance.Child("child", addrs.NoKey),
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				"out": cty.StringVal("secret").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
	}
	// Emission order is scheduler-dependent; sort before diffing.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanSensitiveOutputAsInput verifies that when one component's sensitive
// output value is fed into another component as an input, the sensitivity is
// tracked through the plan: the consuming component records the mark in its
// planned input value marks, and the derived root output stays sensitive.
func TestPlanSensitiveOutputAsInput(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "sensitive-output-as-input")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			// The consuming component, which depends on the component inside
			// the embedded "sensitive" stack.
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				mustAbsComponent("stack.sensitive.component.self"),
			),
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: map[string]plans.DynamicValue{
				// The raw value is stored unmarked...
				"secret": mustPlanDynamicValueDynamicType(cty.StringVal("secret")),
			},
			// ...with its sensitivity recorded separately as path marks.
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"secret": {
					{
						Marks: cty.NewValueMarks(marks.Sensitive),
					},
				},
			},
			PlannedOutputValues: map[string]cty.Value{
				"result": cty.StringVal("SECRET").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr: stackaddrs.OutputValue{Name: "result"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType), // MessagePack nil
			// The derived value remains sensitive at the root output.
			After: cty.StringVal("SECRET").Mark(marks.Sensitive),
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeComponentInstance{
			// The producing component inside the "sensitive" embedded stack.
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance.Child("sensitive", addrs.NoKey),
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				"out": cty.StringVal("secret").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
	}
	// Emission order is scheduler-dependent; sort before diffing.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithProviderConfig verifies that a provider configuration value
// (the "name" attribute, sourced from a root input variable) is delivered
// verbatim to both ValidateProviderConfig and ConfigureProvider during
// planning, and that the provider instance is closed once the plan is done.
func TestPlanWithProviderConfig(t *testing.T) {
	testCtx := context.Background()
	config := loadMainBundleConfigForTest(t, "with-provider-config")
	testProviderAddr := addrs.MustParseProviderSourceString("example.com/test/test")
	// The mock provider accepts exactly one required string attribute.
	schemaResp := &providers.GetProviderSchemaResponse{
		Provider: providers.Schema{
			Body: &configschema.Block{
				Attributes: map[string]*configschema.Attribute{
					"name": {
						Type:     cty.String,
						Required: true,
					},
				},
			},
		},
	}
	nameVar := stackaddrs.InputVariable{Name: "name"}
	syntheticRange := tfdiags.SourceRange{
		Filename: "fake-source",
	}
	locks := depsfile.NewLocks()
	locks.SetProvider(
		testProviderAddr,
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	t.Run("valid", func(t *testing.T) {
		plannedCh := make(chan stackplan.PlannedChange, 8)
		diagCh := make(chan tfdiags.Diagnostic, 2)
		mock := &default_testing_provider.MockProvider{
			GetProviderSchemaResponse:      schemaResp,
			ValidateProviderConfigResponse: &providers.ValidateProviderConfigResponse{},
			ConfigureProviderResponse:      &providers.ConfigureProviderResponse{},
		}
		planReq := PlanRequest{
			Config: config,
			InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
				nameVar: {
					Value:    cty.StringVal("Jackson"),
					DefRange: syntheticRange,
				},
			},
			ProviderFactories: map[addrs.Provider]providers.Factory{
				testProviderAddr: func() (providers.Interface, error) {
					return mock, nil
				},
			},
			DependencyLocks: *locks,
		}
		planResp := PlanResponse{
			PlannedChanges: plannedCh,
			Diagnostics:    diagCh,
		}
		go Plan(testCtx, &planReq, &planResp)
		_, gotDiags := collectPlanOutput(plannedCh, diagCh)
		if len(gotDiags) > 0 {
			t.Errorf("unexpected diagnostics\n%s", gotDiags.ErrWithWarnings().Error())
		}
		// The configured "name" must reach the validation call unchanged.
		switch {
		case !mock.ValidateProviderConfigCalled:
			t.Error("ValidateProviderConfig wasn't called")
		default:
			got := mock.ValidateProviderConfigRequest.Config.GetAttr("name")
			want := cty.StringVal("Jackson")
			if !got.RawEquals(want) {
				t.Errorf("wrong name in ValidateProviderConfig\ngot: %#v\nwant: %#v", got, want)
			}
		}
		// ...and likewise reach the final configuration call.
		switch {
		case !mock.ConfigureProviderCalled:
			t.Error("ConfigureProvider wasn't called")
		default:
			got := mock.ConfigureProviderRequest.Config.GetAttr("name")
			want := cty.StringVal("Jackson")
			if !got.RawEquals(want) {
				t.Errorf("wrong name in ConfigureProvider\ngot: %#v\nwant: %#v", got, want)
			}
		}
		// Plan must release the provider instance when it finishes.
		if !mock.CloseCalled {
			t.Error("provider wasn't closed")
		}
	})
}
// TestPlanWithRemovedResource plans against a prior state that contains a
// resource instance which no longer appears in configuration. The
// "missing-providers" case omits the provider needed to handle that orphaned
// resource and expects an explanatory diagnostic; the "valid-providers" case
// supplies it and expects a clean plan.
func TestPlanWithRemovedResource(t *testing.T) {
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}
	// Stored attribute data for the terraform_data.main instance recorded in
	// the prior state; serialized to JSON below as the stored object value.
	attrs := map[string]interface{}{
		"id": "FE1D5830765C",
		"input": map[string]interface{}{
			"value": "hello",
			"type": "string",
		},
		"output": map[string]interface{}{
			"value": nil,
			"type": "string",
		},
		"triggers_replace": nil,
	}
	attrsJSON, err := json.Marshal(attrs)
	if err != nil {
		t.Fatal(err)
	}
	// We want to see that it's adding the extra context for when a provider is
	// missing for a resource that's in state and not in config.
	expectedDiagnostic := "has resources in state that"
	// Maps fixture name to the diagnostic detail substring we expect; a nil
	// value means no diagnostics are expected for that fixture.
	tcs := make(map[string]*string)
	tcs["missing-providers"] = &expectedDiagnostic
	tcs["valid-providers"] = nil
	for name, diag := range tcs {
		t.Run(name, func(t *testing.T) {
			ctx := context.Background()
			cfg := loadMainBundleConfigForTest(t, path.Join("empty-component", name))
			req := PlanRequest{
				Config: cfg,
				ProviderFactories: map[addrs.Provider]providers.Factory{
					addrs.NewBuiltInProvider("terraform"): func() (providers.Interface, error) {
						return terraformProvider.NewProvider(), nil
					},
				},
				ForcePlanTimestamp: &fakePlanTimestamp,
				// PrevState specifies a state with a resource that is not present in
				// the current configuration. This is a common situation when a resource
				// is removed from the configuration but still exists in the state.
				PrevState: stackstate.NewStateBuilder().
					AddResourceInstance(stackstate.NewResourceInstanceBuilder().
						SetAddr(stackaddrs.AbsResourceInstanceObject{
							Component: stackaddrs.AbsComponentInstance{
								Stack: stackaddrs.RootStackInstance,
								Item: stackaddrs.ComponentInstance{
									Component: stackaddrs.Component{
										Name: "self",
									},
									Key: addrs.NoKey,
								},
							},
							Item: addrs.AbsResourceInstanceObject{
								ResourceInstance: addrs.AbsResourceInstance{
									Module: addrs.RootModuleInstance,
									Resource: addrs.ResourceInstance{
										Resource: addrs.Resource{
											Mode: addrs.ManagedResourceMode,
											Type: "terraform_data",
											Name: "main",
										},
										Key: addrs.NoKey,
									},
								},
							},
							DeposedKey: addrs.NotDeposed,
						}).
						SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
							SchemaVersion: 0,
							AttrsJSON: attrsJSON,
							Status: states.ObjectReady,
						}).
						SetProviderAddr(addrs.AbsProviderConfig{
							Module: addrs.RootModule,
							Provider: addrs.MustParseProviderSourceString("terraform.io/builtin/terraform"),
						})).
					Build(),
			}
			changesCh := make(chan stackplan.PlannedChange)
			diagsCh := make(chan tfdiags.Diagnostic)
			resp := PlanResponse{
				PlannedChanges: changesCh,
				Diagnostics: diagsCh,
			}
			go Plan(ctx, &req, &resp)
			_, diags := collectPlanOutput(changesCh, diagsCh)
			if diag != nil {
				// This fixture expects at least one diagnostic carrying the
				// extra "resources in state" context in its detail text.
				if len(diags) == 0 {
					t.Fatalf("expected diagnostics, got none")
				}
				if !strings.Contains(diags[0].Description().Detail, *diag) {
					t.Fatalf("expected diagnostic %q, got %q", *diag, diags[0].Description().Detail)
				}
			} else if len(diags) > 0 {
				t.Fatalf("unexpected diagnostics: %s", diags.ErrWithWarnings().Error())
			}
		})
	}
}
// TestPlanWithSensitivePropagation verifies that a sensitive output from one
// component ("sensitive") keeps its sensitivity mark as it flows into the
// input of a sibling component ("self") and down to the planned resource
// attribute within it.
func TestPlanWithSensitivePropagation(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input", "sensitive-input"))
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			PlanApplyable: true,
			PlanComplete: true,
			Action: plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				stackaddrs.AbsComponent{
					Stack: stackaddrs.RootStackInstance,
					Item: stackaddrs.Component{Name: "sensitive"},
				},
			),
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: map[string]plans.DynamicValue{
				"id": mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("secret")),
			},
			// "input" comes from the other component's sensitive output, so
			// it must carry the sensitivity mark; "id" has no marks.
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id": nil,
				"input": {
					{
						Marks: cty.NewValueMarks(marks.Sensitive),
					},
				},
			},
			PlannedOutputValues: make(map[string]cty.Value),
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
				Component: stackaddrs.Absolute(
					stackaddrs.RootStackInstance,
					stackaddrs.ComponentInstance{
						Component: stackaddrs.Component{Name: "self"},
					},
				),
				Item: addrs.AbsResourceInstanceObject{
					ResourceInstance: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "data",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				PrevRunAddr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "data",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.NewDefaultProvider("testing"),
				},
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
						"id": cty.UnknownVal(cty.String),
						"value": cty.StringVal("secret"),
					}), stacks_testing_provider.TestingResourceSchema.Body),
					// Sensitivity must follow the value all the way down to
					// the planned resource attribute.
					AfterSensitivePaths: []cty.Path{
						cty.GetAttrPath("value"),
					},
				},
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "sensitive"},
				},
			),
			PlanApplyable: true,
			PlanComplete: true,
			Action: plans.Create,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				"out": cty.StringVal("secret").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "id"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.NullVal(cty.String),
		},
	}
	// Emission order depends on goroutine scheduling, so sort for a stable
	// comparison against the expected slice above.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithSensitivePropagationNested is the nested-stack variant of
// TestPlanWithSensitivePropagation: the sensitive output originates from a
// component inside an embedded stack ("stack.sensitive.component.self") and
// must still arrive marked at the root component's input and planned
// resource attribute.
func TestPlanWithSensitivePropagationNested(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input", "sensitive-input-nested"))
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			// The root component depends on the component inside the
			// embedded "sensitive" stack.
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				mustAbsComponent("stack.sensitive.component.self"),
			),
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: map[string]plans.DynamicValue{
				"id": mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("secret")),
			},
			// "input" comes from the nested component's sensitive output, so
			// it must keep its sensitivity mark across the stack boundary.
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id": nil,
				"input": {
					{
						Marks: cty.NewValueMarks(marks.Sensitive),
					},
				},
			},
			PlannedOutputValues: make(map[string]cty.Value),
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
				Component: stackaddrs.Absolute(
					stackaddrs.RootStackInstance,
					stackaddrs.ComponentInstance{
						Component: stackaddrs.Component{Name: "self"},
					},
				),
				Item: addrs.AbsResourceInstanceObject{
					ResourceInstance: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "data",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				PrevRunAddr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "data",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.NewDefaultProvider("testing"),
				},
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
						"id": cty.UnknownVal(cty.String),
						"value": cty.StringVal("secret"),
					}), stacks_testing_provider.TestingResourceSchema.Body),
					// Sensitivity must survive down to the planned attribute.
					AfterSensitivePaths: []cty.Path{
						cty.GetAttrPath("value"),
					},
				},
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance.Child("sensitive", addrs.NoKey),
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: make(map[string]plans.DynamicValue),
			PlannedOutputValues: map[string]cty.Value{
				"out": cty.StringVal("secret").Mark(marks.Sensitive),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "id"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.NullVal(cty.String),
		},
	}
	// Emission order depends on goroutine scheduling, so sort for a stable
	// comparison against the expected slice above.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithForEach plans a configuration whose component uses for_each
// over a fully-known list of strings, and expects planning to finish with
// no diagnostics at all.
func TestPlanWithForEach(t *testing.T) {
	testCtx := context.Background()
	config := loadMainBundleConfigForTest(t, path.Join("with-single-input", "input-from-component-list"))
	planTS, parseErr := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if parseErr != nil {
		t.Fatal(parseErr)
	}
	plannedCh := make(chan stackplan.PlannedChange, 8)
	diagCh := make(chan tfdiags.Diagnostic, 2)
	locks := depsfile.NewLocks()
	locks.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	// Three component instances, keyed by these strings.
	componentNames := cty.ListVal([]cty.Value{
		cty.StringVal("one"),
		cty.StringVal("two"),
		cty.StringVal("three"),
	})
	planReq := PlanRequest{
		Config: config,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *locks,
		ForcePlanTimestamp: &planTS,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "components"}: {
				Value:    componentNames,
				DefRange: tfdiags.SourceRange{},
			},
		},
	}
	planResp := PlanResponse{
		PlannedChanges: plannedCh,
		Diagnostics:    diagCh,
	}
	go Plan(testCtx, &planReq, &planResp)
	_, gotDiags := collectPlanOutput(plannedCh, diagCh)
	reportDiagnosticsForTest(t, gotDiags)
	if len(gotDiags) > 0 {
		t.FailNow() // the diagnostics were already reported above
	}
}
// TestPlanWithCheckableObjects plans a configuration containing check blocks
// and asserts that a failing check surfaces as a warning diagnostic while
// the aggregated check results (input variable, output value, resource, and
// the failing check block itself) are recorded on the component's plan.
func TestPlanWithCheckableObjects(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "checkable-objects")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "foo"}: {
				Value: cty.StringVal("bar"),
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	// "bar" fails the fixture's check (which requires 'baz'), so planning
	// should emit exactly this warning, anchored at the assertion's range.
	var wantDiags tfdiags.Diagnostics
	wantDiags = wantDiags.Append(&hcl.Diagnostic{
		Severity: hcl.DiagWarning,
		Summary: "Check block assertion failed",
		Detail: `value must be 'baz'`,
		Subject: &hcl.Range{
			Filename: mainBundleSourceAddrStr("checkable-objects/checkable-objects.tf"),
			Start: hcl.Pos{Line: 41, Column: 21, Byte: 716},
			End: hcl.Pos{Line: 41, Column: 57, Byte: 752},
		},
	})
	go Plan(ctx, &req, &resp)
	gotChanges, gotDiags := collectPlanOutput(changesCh, diagsCh)
	if diff := cmp.Diff(wantDiags.ForRPC(), gotDiags.ForRPC()); diff != "" {
		t.Errorf("wrong diagnostics\n%s", diff)
	}
	// The order of emission for our planned changes is unspecified since it
	// depends on how the various goroutines get scheduled, and so we'll
	// arbitrarily sort gotChanges lexically by the name of the change type
	// so that we have some dependable order to diff against below.
	sort.Slice(gotChanges, func(i, j int) bool {
		ic := gotChanges[i]
		jc := gotChanges[j]
		return fmt.Sprintf("%T", ic) < fmt.Sprintf("%T", jc)
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "single"},
				},
			),
			Action: plans.Create,
			PlanApplyable: true,
			PlanComplete: true,
			PlannedInputValues: map[string]plans.DynamicValue{
				"foo": mustPlanDynamicValueDynamicType(cty.StringVal("bar")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{"foo": nil},
			PlannedOutputValues: map[string]cty.Value{
				"foo": cty.StringVal("bar"),
			},
			// Aggregated results for every checkable object in the module:
			// the check block fails, everything else passes.
			PlannedCheckResults: &states.CheckResults{
				ConfigResults: addrs.MakeMap(
					addrs.MakeMapElem[addrs.ConfigCheckable](
						addrs.Check{
							Name: "value_is_baz",
						}.InModule(addrs.RootModule),
						&states.CheckResultAggregate{
							Status: checks.StatusFail,
							ObjectResults: addrs.MakeMap(
								addrs.MakeMapElem[addrs.Checkable](
									addrs.Check{
										Name: "value_is_baz",
									}.Absolute(addrs.RootModuleInstance),
									&states.CheckResultObject{
										Status: checks.StatusFail,
										FailureMessages: []string{"value must be 'baz'"},
									},
								),
							),
						},
					),
					addrs.MakeMapElem[addrs.ConfigCheckable](
						addrs.InputVariable{
							Name: "foo",
						}.InModule(addrs.RootModule),
						&states.CheckResultAggregate{
							Status: checks.StatusPass,
							ObjectResults: addrs.MakeMap(
								addrs.MakeMapElem[addrs.Checkable](
									addrs.InputVariable{
										Name: "foo",
									}.Absolute(addrs.RootModuleInstance),
									&states.CheckResultObject{
										Status: checks.StatusPass,
									},
								),
							),
						},
					),
					addrs.MakeMapElem[addrs.ConfigCheckable](
						addrs.OutputValue{
							Name: "foo",
						}.InModule(addrs.RootModule),
						&states.CheckResultAggregate{
							Status: checks.StatusPass,
							ObjectResults: addrs.MakeMap(
								addrs.MakeMapElem[addrs.Checkable](
									addrs.OutputValue{
										Name: "foo",
									}.Absolute(addrs.RootModuleInstance),
									&states.CheckResultObject{
										Status: checks.StatusPass,
									},
								),
							),
						},
					),
					addrs.MakeMapElem[addrs.ConfigCheckable](
						addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "main",
						}.InModule(addrs.RootModule),
						&states.CheckResultAggregate{
							Status: checks.StatusPass,
							ObjectResults: addrs.MakeMap(
								addrs.MakeMapElem[addrs.Checkable](
									addrs.Resource{
										Mode: addrs.ManagedResourceMode,
										Type: "testing_resource",
										Name: "main",
									}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
									&states.CheckResultObject{
										Status: checks.StatusPass,
									},
								),
							),
						},
					),
				),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
				Component: stackaddrs.Absolute(
					stackaddrs.RootStackInstance,
					stackaddrs.ComponentInstance{
						Component: stackaddrs.Component{Name: "single"},
					},
				),
				Item: addrs.AbsResourceInstanceObject{
					ResourceInstance: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "main",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module: addrs.RootModule,
				Provider: addrs.NewDefaultProvider("testing"),
			},
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "main",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				PrevRunAddr: addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "testing_resource",
					Name: "main",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				ProviderAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.NewDefaultProvider("testing"),
				},
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("test"),
						"value": cty.StringVal("bar"),
					}), stacks_testing_provider.TestingResourceSchema.Body),
				},
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithDeferredResource plans a component whose resource asks to be
// deferred (via the "defer" input). The plan should still complete without
// diagnostics, but the resource change must be reported as deferred
// (DeferredReasonResourceConfigUnknown) and the component's plan marked
// incomplete and not applyable.
func TestPlanWithDeferredResource(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "deferrable-component")
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1994-09-05T08:50:00Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange)
	diagsCh := make(chan tfdiags.Diagnostic)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "id"}: {
				Value: cty.StringVal("62594ae3"),
			},
			// Setting "defer" makes the fixture's resource request deferral.
			{Name: "defer"}: {
				Value: cty.BoolVal(true),
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	reportDiagnosticsForTest(t, diags)
	if len(diags) != 0 {
		t.FailNow() // We reported the diags above
	}
	// Emission order depends on goroutine scheduling; sort for stability.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			PlanComplete: false,
			PlanApplyable: false, // We don't have any resources to apply since they're deferred.
			Action: plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id": mustPlanDynamicValueDynamicType(cty.StringVal("62594ae3")),
				"defer": mustPlanDynamicValueDynamicType(cty.BoolVal(true)),
			},
			PlannedOutputValues: map[string]cty.Value{},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp: fakePlanTimestamp,
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id": nil,
				"defer": nil,
			},
		},
		// The deferred change wraps an ordinary planned change plus the
		// provider's reason for deferring it.
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.Absolute(
						stackaddrs.RootStackInstance,
						stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{Name: "self"},
						},
					),
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_deferred_resource",
							Name: "data",
						}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_deferred_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_deferred_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module: addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id": cty.StringVal("62594ae3"),
							"value": cty.NullVal(cty.String),
							"deferred": cty.BoolVal(true),
						}), stacks_testing_provider.DeferredResourceSchema.Body),
						AfterSensitivePaths: nil,
					},
				},
				Schema: stacks_testing_provider.DeferredResourceSchema,
			},
			DeferredReason: providers.DeferredReasonResourceConfigUnknown,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "defer"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.BoolVal(true),
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "id"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.StringVal("62594ae3"),
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithDeferredComponentForEach plans a stack where the "self"
// component's for_each set is wholly unknown. The component is planned once
// under the wildcard instance key, its resource changes are deferred
// (DeferredReasonDeferredPrereq), and the downstream "child" component that
// consumes its output is deferred in turn.
func TestPlanWithDeferredComponentForEach(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input-and-output", "deferred-component-for-each"))
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			// The for_each set is unknown, which is what forces deferral.
			{Name: "components"}: {
				Value: cty.UnknownVal(cty.Set(cty.String)),
				DefRange: tfdiags.SourceRange{},
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics: diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	reportDiagnosticsForTest(t, diags)
	if len(diags) != 0 {
		t.FailNow() // We reported the diags above.
	}
	// Emission order depends on goroutine scheduling; sort for stability.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "child"},
				},
			),
			PlanApplyable: true,
			PlanComplete: false,
			Action: plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				stackaddrs.AbsComponent{
					Stack: stackaddrs.RootStackInstance,
					Item: stackaddrs.Component{
						Name: "self",
					},
				},
			),
			PlannedInputValues: map[string]plans.DynamicValue{
				"id": mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id": nil,
				"input": nil,
			},
			PlannedOutputValues: map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp: fakePlanTimestamp,
		},
		// The child's resource is deferred because its prerequisite (the
		// "self" component) was itself deferred.
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.AbsComponentInstance{
						Stack: stackaddrs.RootStackInstance,
						Item: stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{
								Name: "child",
							},
						},
					},
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.AbsResourceInstance{
							Module: addrs.RootModuleInstance,
							Resource: addrs.ResourceInstance{
								Resource: addrs.Resource{
									Mode: addrs.ManagedResourceMode,
									Type: "testing_resource",
									Name: "data",
								},
								Key: addrs.NoKey,
							},
						},
					},
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module: addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id": cty.UnknownVal(cty.String),
							"value": cty.UnknownVal(cty.String),
						}), stacks_testing_provider.TestingResourceSchema.Body),
						AfterSensitivePaths: nil,
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				Schema: stacks_testing_provider.TestingResourceSchema,
			},
			DeferredReason: providers.DeferredReasonDeferredPrereq,
		},
		// The "self" component is planned once for all unknown instances,
		// which is represented by the wildcard instance key.
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
					Key: addrs.WildcardKey,
				},
			),
			PlanApplyable: true, // TODO: Questionable? We only have outputs.
			PlanComplete: false,
			Action: plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id": mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
			},
			PlannedOutputValues: map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp: fakePlanTimestamp,
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id": nil,
				"input": nil,
			},
		},
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			DeferredReason: providers.DeferredReasonDeferredPrereq,
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.Absolute(
						stackaddrs.RootStackInstance,
						stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{Name: "self"},
							Key: addrs.WildcardKey,
						},
					),
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module: addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module: addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id": cty.UnknownVal(cty.String),
							"value": cty.UnknownVal(cty.String),
						}), stacks_testing_provider.TestingResourceSchema.Body),
						AfterSensitivePaths: nil,
					},
				},
				Schema: stacks_testing_provider.TestingResourceSchema,
			},
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{Name: "components"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After: cty.UnknownVal(cty.Set(cty.String)),
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithDeferredComponentReferences verifies planning when one
// component's for_each is known ("self" keyed by "known") and another
// component ("children") depends on an unknown for_each value: the known
// instance must plan normally and completely, while the dependent component
// is planned under the wildcard key with its resource instance deferred for
// reason DeferredReasonDeferredPrereq.
func TestPlanWithDeferredComponentReferences(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input-and-output", "deferred-component-references"))
	// Fixed timestamp so the planned changes compare deterministically below.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			// One input is fully known, the other deliberately unknown so the
			// component that consumes it must be deferred.
			{Name: "known_components"}: {
				Value:    cty.ListVal([]cty.Value{cty.StringVal("known")}),
				DefRange: tfdiags.SourceRange{},
			},
			{Name: "unknown_components"}: {
				Value:    cty.UnknownVal(cty.Set(cty.String)),
				DefRange: tfdiags.SourceRange{},
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	reportDiagnosticsForTest(t, diags)
	if len(diags) != 0 {
		t.FailNow() // We reported the diags above.
	}
	// Sort into a deterministic order before comparing against wantChanges.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		// The "children" component depends on the unknown input, so it is
		// planned once under the wildcard key with an incomplete plan.
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "children"},
					Key:       addrs.WildcardKey,
				},
			),
			PlanApplyable: true, // TODO: Questionable? We only have outputs.
			PlanComplete:  false,
			Action:        plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.UnknownVal(cty.String)),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlannedOutputValues: map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp:       fakePlanTimestamp,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				stackaddrs.AbsComponent{
					Stack: stackaddrs.RootStackInstance,
					Item: stackaddrs.Component{
						Name: "self",
					},
				},
			),
		},
		// The resource inside "children" is deferred because its component
		// has a deferred prerequisite.
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			DeferredReason: providers.DeferredReasonDeferredPrereq,
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.Absolute(
						stackaddrs.RootStackInstance,
						stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{Name: "children"},
							Key:       addrs.WildcardKey,
						},
					),
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module:   addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module:   addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id":    cty.UnknownVal(cty.String),
							"value": cty.UnknownVal(cty.String),
						}), stacks_testing_provider.TestingResourceSchema.Body),
						AfterSensitivePaths: nil,
					},
				},
				Schema: stacks_testing_provider.TestingResourceSchema,
			},
		},
		// The "self" component's for_each is known, so its single instance
		// plans normally and completely.
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
					Key:       addrs.StringKey("known"),
				}),
			PlanApplyable: true,
			PlanComplete:  true,
			Action:        plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("known")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlannedOutputValues: map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp:       fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
				Component: stackaddrs.AbsComponentInstance{
					Stack: stackaddrs.RootStackInstance,
					Item: stackaddrs.ComponentInstance{
						Component: stackaddrs.Component{
							Name: "self",
						},
						Key: addrs.StringKey("known"),
					},
				},
				Item: addrs.AbsResourceInstanceObject{
					ResourceInstance: addrs.AbsResourceInstance{
						Module: addrs.RootModuleInstance,
						Resource: addrs.ResourceInstance{
							Resource: addrs.Resource{
								Mode: addrs.ManagedResourceMode,
								Type: "testing_resource",
								Name: "data",
							},
							Key: addrs.NoKey,
						},
					},
				},
			},
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr: addrs.AbsResourceInstance{
					Module: addrs.RootModuleInstance,
					Resource: addrs.ResourceInstance{
						Resource: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						},
						Key: addrs.NoKey,
					},
				},
				PrevRunAddr: addrs.AbsResourceInstance{
					Module: addrs.RootModuleInstance,
					Resource: addrs.ResourceInstance{
						Resource: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						},
						Key: addrs.NoKey,
					},
				},
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.UnknownVal(cty.String),
						"value": cty.StringVal("known"),
					}), stacks_testing_provider.TestingResourceSchema.Body),
				},
				ProviderAddr: addrs.AbsProviderConfig{
					Module:   addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
			},
			ProviderConfigAddr: addrs.AbsProviderConfig{
				Module:   addrs.RootModule,
				Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
			},
			Schema: stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr:   stackaddrs.InputVariable{Name: "known_components"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.SetVal([]cty.Value{cty.StringVal("known")}),
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr:   stackaddrs.InputVariable{Name: "unknown_components"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.UnknownVal(cty.Set(cty.String)),
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithDeferredComponentForEachOfInvalidType verifies that an unknown
// for_each value of a type that can never be valid is rejected outright with
// a single, specific error diagnostic instead of being deferred.
func TestPlanWithDeferredComponentForEachOfInvalidType(t *testing.T) {
	cfg := loadMainBundleConfigForTest(t, "deferred-component-for-each-from-component-of-invalid-type")

	// Deterministic timestamp for the plan operation.
	planTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}

	locks := depsfile.NewLocks()
	locks.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)

	plannedCh := make(chan stackplan.PlannedChange, 8)
	diagnosticsCh := make(chan tfdiags.Diagnostic, 2)
	resp := PlanResponse{
		PlannedChanges: plannedCh,
		Diagnostics:    diagnosticsCh,
	}
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *locks,
		ForcePlanTimestamp: &planTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			// Unknown, but of a type the for_each can never accept.
			{Name: "components"}: {
				Value:    cty.UnknownVal(cty.Set(cty.String)),
				DefRange: tfdiags.SourceRange{},
			},
		},
	}

	go Plan(context.Background(), &req, &resp)
	_, diags := collectPlanOutput(plannedCh, diagnosticsCh)

	// Exactly one error diagnostic with a fixed summary and detail is expected.
	if len(diags) != 1 {
		t.Fatalf("expected 1 diagnostic, got %d: %s", len(diags), diags)
	}
	if diags[0].Severity() != tfdiags.Error {
		t.Errorf("expected error diagnostic, got %q", diags[0].Severity())
	}
	desc := diags[0].Description()
	expectedSummary := "Invalid for_each value"
	if desc.Summary != expectedSummary {
		t.Errorf("expected diagnostic with summary %q, got %q", expectedSummary, desc.Summary)
	}
	expectedDetail := "The for_each expression must produce either a map of any type or a set of strings. The keys of the map or the set elements will serve as unique identifiers for multiple instances of this component."
	if desc.Detail != expectedDetail {
		t.Errorf("expected diagnostic with detail %q, got %q", expectedDetail, desc.Detail)
	}
}
// TestPlanWithDeferredProviderForEach verifies planning when a provider
// block's for_each value is unknown: the "known" component (whose provider
// instance is identifiable) and the "unknown" component (planned under the
// wildcard key) both have their resource instances deferred with reason
// DeferredReasonProviderConfigUnknown, and neither plan is complete or
// applyable.
func TestPlanWithDeferredProviderForEach(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input", "deferred-provider-for-each"))
	// Fixed timestamp so the planned changes compare deterministically below.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange)
	diagsCh := make(chan tfdiags.Diagnostic)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			// The providers input is deliberately unknown, which is what
			// forces the provider-config-unknown deferrals below.
			{Name: "providers"}: {
				Value:    cty.UnknownVal(cty.Set(cty.String)),
				DefRange: tfdiags.SourceRange{},
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	reportDiagnosticsForTest(t, diags)
	if len(diags) != 0 {
		t.FailNow() // We reported the diags above
	}
	// Sort into a deterministic order before comparing against wantChanges.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "known"},
				}),
			PlanComplete:  false,
			PlanApplyable: false,
			Action:        plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("primary")),
			},
			PlannedOutputValues: map[string]cty.Value{},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
		},
		// Even the "known" component's resource is deferred, because its
		// provider configuration depends on the unknown for_each value.
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.Absolute(
						stackaddrs.RootStackInstance,
						stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{Name: "known"},
						},
					),
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module:   addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module:   addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id":    cty.UnknownVal(cty.String),
							"value": cty.StringVal("primary"),
						}), stacks_testing_provider.TestingResourceSchema.Body),
					},
				},
				Schema: stacks_testing_provider.TestingResourceSchema,
			},
			DeferredReason: providers.DeferredReasonProviderConfigUnknown,
		},
		// The "unknown" component is planned under the wildcard key because
		// its own instance set cannot be resolved yet.
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "unknown"},
					Key:       addrs.WildcardKey,
				}),
			PlanComplete:  false,
			PlanApplyable: false,
			Action:        plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("secondary")),
			},
			PlannedOutputValues: map[string]cty.Value{},
			PlannedCheckResults: &states.CheckResults{},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
		},
		&stackplan.PlannedChangeDeferredResourceInstancePlanned{
			ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
				ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
					Component: stackaddrs.Absolute(
						stackaddrs.RootStackInstance,
						stackaddrs.ComponentInstance{
							Component: stackaddrs.Component{Name: "unknown"},
							Key:       addrs.WildcardKey,
						},
					),
					Item: addrs.AbsResourceInstanceObject{
						ResourceInstance: addrs.Resource{
							Mode: addrs.ManagedResourceMode,
							Type: "testing_resource",
							Name: "data",
						}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					},
				},
				ProviderConfigAddr: addrs.AbsProviderConfig{
					Module:   addrs.RootModule,
					Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
				},
				ChangeSrc: &plans.ResourceInstanceChangeSrc{
					Addr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					PrevRunAddr: addrs.Resource{
						Mode: addrs.ManagedResourceMode,
						Type: "testing_resource",
						Name: "data",
					}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
					ProviderAddr: addrs.AbsProviderConfig{
						Module:   addrs.RootModule,
						Provider: addrs.MustParseProviderSourceString("hashicorp/testing"),
					},
					ChangeSrc: plans.ChangeSrc{
						Action: plans.Create,
						Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
						After: mustPlanDynamicValueSchema(cty.ObjectVal(map[string]cty.Value{
							"id":    cty.UnknownVal(cty.String),
							"value": cty.StringVal("secondary"),
						}), stacks_testing_provider.TestingResourceSchema.Body),
					},
				},
				Schema: stacks_testing_provider.TestingResourceSchema,
			},
			DeferredReason: providers.DeferredReasonProviderConfigUnknown,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr:   stackaddrs.InputVariable{Name: "providers"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.UnknownVal(cty.Set(cty.String)),
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanInvalidProvidersFailGracefully verifies that a configuration whose
// provider fails to configure produces the expected pair of error
// diagnostics while still emitting a coherent (non-applyable) set of planned
// changes rather than crashing or emitting partial garbage.
func TestPlanInvalidProvidersFailGracefully(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("invalid-providers"))
	// Fixed timestamp so the planned changes compare deterministically below.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	changesCh := make(chan stackplan.PlannedChange)
	diagsCh := make(chan tfdiags.Diagnostic)
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	changes, diags := collectPlanOutput(changesCh, diagsCh)
	// Diagnostics arrive in nondeterministic order; sort before asserting.
	sort.SliceStable(diags, diagnosticSortFunc(diags))
	expectDiagnosticsForTest(t, diags,
		expectDiagnostic(tfdiags.Error, "Provider configuration is invalid", "Cannot plan changes for this resource because its associated provider configuration is invalid."),
		expectDiagnostic(tfdiags.Error, "invalid configuration", "configure_error attribute was set"))
	sort.SliceStable(changes, func(i, j int) bool {
		return plannedChangeSortKey(changes[i]) < plannedChangeSortKey(changes[j])
	})
	wantChanges := []stackplan.PlannedChange{
		// Applyable is left false: the plan failed, so it must not be applied.
		&stackplan.PlannedChangeApplyable{},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action:              plans.Create,
			PlanTimestamp:       fakePlanTimestamp,
			PlannedInputValues:  make(map[string]plans.DynamicValue),
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
	}
	if diff := cmp.Diff(wantChanges, changes, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanWithStateManipulation exercises state-manipulation features
// (moved blocks, cross-type moves, import blocks, and removed blocks)
// through the stack planning path. Each table case provides a prior state
// and provider data store, then asserts the exact planned changes, the
// per-component change counts reported through the hook, and any expected
// warning diagnostics.
func TestPlanWithStateManipulation(t *testing.T) {
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)
	tcs := map[string]struct {
		state            *stackstate.State
		store            *stacks_testing_provider.ResourceStore
		inputs           map[string]cty.Value
		changes          []stackplan.PlannedChange
		counts           collections.Map[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]
		expectedWarnings []string
	}{
		// A moved block renames a resource of the same type; the plan should
		// be a NoOp that records the address change and one Move count.
		"moved": {
			state: stackstate.NewStateBuilder().
				AddResourceInstance(stackstate.NewResourceInstanceBuilder().
					SetAddr(mustAbsResourceInstanceObject("component.self.testing_resource.before")).
					SetProviderAddr(mustDefaultRootProvider("testing")).
					SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "moved",
							"value": "moved",
						}),
					})).
				Build(),
			store: stacks_testing_provider.NewResourceStoreBuilder().
				AddResource("moved", cty.ObjectVal(map[string]cty.Value{
					"id":    cty.StringVal("moved"),
					"value": cty.StringVal("moved"),
				})).
				Build(),
			changes: []stackplan.PlannedChange{
				&stackplan.PlannedChangeApplyable{
					Applyable: true,
				},
				&stackplan.PlannedChangeComponentInstance{
					Addr:                mustAbsComponentInstance("component.self"),
					PlanApplyable:       true,
					PlanComplete:        true,
					Action:              plans.Update,
					PlannedInputValues:  make(map[string]plans.DynamicValue),
					PlannedOutputValues: make(map[string]cty.Value),
					PlannedCheckResults: &states.CheckResults{},
					RequiredComponents:  collections.NewSet[stackaddrs.AbsComponent](),
					PlanTimestamp:       fakePlanTimestamp,
				},
				&stackplan.PlannedChangeResourceInstancePlanned{
					ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.after"),
					ChangeSrc: &plans.ResourceInstanceChangeSrc{
						Addr:         mustAbsResourceInstance("testing_resource.after"),
						PrevRunAddr:  mustAbsResourceInstance("testing_resource.before"),
						ProviderAddr: mustDefaultRootProvider("testing"),
						ChangeSrc: plans.ChangeSrc{
							Action: plans.NoOp,
							Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":    cty.StringVal("moved"),
								"value": cty.StringVal("moved"),
							})),
							After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":    cty.StringVal("moved"),
								"value": cty.StringVal("moved"),
							})),
						},
					},
					PriorStateSrc: &states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "moved",
							"value": "moved",
						}),
						Dependencies: make([]addrs.ConfigResource, 0),
					},
					ProviderConfigAddr: mustDefaultRootProvider("testing"),
					Schema:             stacks_testing_provider.TestingResourceSchema,
				},
				&stackplan.PlannedChangeHeader{
					TerraformVersion: version.SemVer,
				},
				&stackplan.PlannedChangePlannedTimestamp{
					PlannedTimestamp: fakePlanTimestamp,
				},
			},
			counts: collections.NewMap[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange](
				collections.MapElem[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]{
					K: mustAbsComponentInstance("component.self"),
					V: &hooks.ComponentInstanceChange{
						Addr: mustAbsComponentInstance("component.self"),
						Move: 1,
					},
				}),
		},
		// A moved block across resource types; still a NoOp plan with the
		// target type's schema (note the extra "deferred" attribute).
		"cross-type-moved": {
			state: stackstate.NewStateBuilder().
				AddResourceInstance(stackstate.NewResourceInstanceBuilder().
					SetAddr(mustAbsResourceInstanceObject("component.self.testing_resource.before")).
					SetProviderAddr(mustDefaultRootProvider("testing")).
					SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "moved",
							"value": "moved",
						}),
					})).
				Build(),
			store: stacks_testing_provider.NewResourceStoreBuilder().
				AddResource("moved", cty.ObjectVal(map[string]cty.Value{
					"id":    cty.StringVal("moved"),
					"value": cty.StringVal("moved"),
				})).
				Build(),
			changes: []stackplan.PlannedChange{
				&stackplan.PlannedChangeApplyable{
					Applyable: true,
				},
				&stackplan.PlannedChangeComponentInstance{
					Addr:                mustAbsComponentInstance("component.self"),
					PlanApplyable:       true,
					PlanComplete:        true,
					Action:              plans.Update,
					PlannedInputValues:  make(map[string]plans.DynamicValue),
					PlannedOutputValues: make(map[string]cty.Value),
					PlannedCheckResults: &states.CheckResults{},
					RequiredComponents:  collections.NewSet[stackaddrs.AbsComponent](),
					PlanTimestamp:       fakePlanTimestamp,
				},
				&stackplan.PlannedChangeResourceInstancePlanned{
					ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_deferred_resource.after"),
					ChangeSrc: &plans.ResourceInstanceChangeSrc{
						Addr:         mustAbsResourceInstance("testing_deferred_resource.after"),
						PrevRunAddr:  mustAbsResourceInstance("testing_resource.before"),
						ProviderAddr: mustDefaultRootProvider("testing"),
						ChangeSrc: plans.ChangeSrc{
							Action: plans.NoOp,
							Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":       cty.StringVal("moved"),
								"value":    cty.StringVal("moved"),
								"deferred": cty.False,
							})),
							After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":       cty.StringVal("moved"),
								"value":    cty.StringVal("moved"),
								"deferred": cty.False,
							})),
						},
					},
					PriorStateSrc: &states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":       "moved",
							"value":    "moved",
							"deferred": false,
						}),
						Dependencies: make([]addrs.ConfigResource, 0),
					},
					ProviderConfigAddr: mustDefaultRootProvider("testing"),
					Schema:             stacks_testing_provider.DeferredResourceSchema,
				},
				&stackplan.PlannedChangeHeader{
					TerraformVersion: version.SemVer,
				},
				&stackplan.PlannedChangePlannedTimestamp{
					PlannedTimestamp: fakePlanTimestamp,
				},
			},
			counts: collections.NewMap[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange](
				collections.MapElem[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]{
					K: mustAbsComponentInstance("component.self"),
					V: &hooks.ComponentInstanceChange{
						Addr: mustAbsComponentInstance("component.self"),
						Move: 1,
					},
				}),
		},
		// An import block adopts a pre-existing object from the provider
		// store; the resource plans as NoOp with Importing metadata.
		"import": {
			state: stackstate.NewStateBuilder().Build(), // We start with an empty state for this.
			store: stacks_testing_provider.NewResourceStoreBuilder().
				AddResource("imported", cty.ObjectVal(map[string]cty.Value{
					"id":    cty.StringVal("imported"),
					"value": cty.StringVal("imported"),
				})).
				Build(),
			inputs: map[string]cty.Value{
				"id": cty.StringVal("imported"),
			},
			changes: []stackplan.PlannedChange{
				&stackplan.PlannedChangeApplyable{
					Applyable: true,
				},
				&stackplan.PlannedChangeComponentInstance{
					Addr:          mustAbsComponentInstance("component.self"),
					PlanApplyable: true,
					PlanComplete:  true,
					// The component is still CREATE even though all the
					// instances are NoOps, because the component itself didn't
					// exist before even though all the resources might have.
					Action: plans.Create,
					PlannedInputValues: map[string]plans.DynamicValue{
						"id": mustPlanDynamicValueDynamicType(cty.StringVal("imported")),
					},
					PlannedInputValueMarks: map[string][]cty.PathValueMarks{
						"id": nil,
					},
					PlannedOutputValues: make(map[string]cty.Value),
					PlannedCheckResults: &states.CheckResults{},
					RequiredComponents:  collections.NewSet[stackaddrs.AbsComponent](),
					PlanTimestamp:       fakePlanTimestamp,
				},
				&stackplan.PlannedChangeResourceInstancePlanned{
					ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data"),
					ChangeSrc: &plans.ResourceInstanceChangeSrc{
						Addr:         mustAbsResourceInstance("testing_resource.data"),
						PrevRunAddr:  mustAbsResourceInstance("testing_resource.data"),
						ProviderAddr: mustDefaultRootProvider("testing"),
						ChangeSrc: plans.ChangeSrc{
							Action: plans.NoOp,
							Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":    cty.StringVal("imported"),
								"value": cty.StringVal("imported"),
							})),
							After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":    cty.StringVal("imported"),
								"value": cty.StringVal("imported"),
							})),
							Importing: &plans.ImportingSrc{
								ID: "imported",
							},
						},
					},
					PriorStateSrc: &states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "imported",
							"value": "imported",
						}),
						Dependencies: make([]addrs.ConfigResource, 0),
					},
					ProviderConfigAddr: mustDefaultRootProvider("testing"),
					Schema:             stacks_testing_provider.TestingResourceSchema,
				},
				&stackplan.PlannedChangeHeader{
					TerraformVersion: version.SemVer,
				},
				&stackplan.PlannedChangePlannedTimestamp{
					PlannedTimestamp: fakePlanTimestamp,
				},
				&stackplan.PlannedChangeRootInputValue{
					Addr: stackaddrs.InputVariable{
						Name: "id",
					},
					Action:          plans.Create,
					Before:          cty.NullVal(cty.DynamicPseudoType),
					After:           cty.StringVal("imported"),
					RequiredOnApply: false,
				},
			},
			counts: collections.NewMap[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange](
				collections.MapElem[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]{
					K: mustAbsComponentInstance("component.self"),
					V: &hooks.ComponentInstanceChange{
						Addr:   mustAbsComponentInstance("component.self"),
						Import: 1,
					},
				}),
		},
		// A removed block forgets a resource (Forget action, not Destroy)
		// and must emit the "no longer managed" warning.
		"removed": {
			state: stackstate.NewStateBuilder().
				AddResourceInstance(stackstate.NewResourceInstanceBuilder().
					SetAddr(mustAbsResourceInstanceObject("component.self.testing_resource.resource")).
					SetProviderAddr(mustDefaultRootProvider("testing")).
					SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "removed",
							"value": "removed",
						}),
					})).
				Build(),
			store: stacks_testing_provider.NewResourceStoreBuilder().
				AddResource("removed", cty.ObjectVal(map[string]cty.Value{
					"id":    cty.StringVal("removed"),
					"value": cty.StringVal("removed"),
				})).
				Build(),
			changes: []stackplan.PlannedChange{
				&stackplan.PlannedChangeApplyable{
					Applyable: true,
				},
				&stackplan.PlannedChangeComponentInstance{
					Addr:                mustAbsComponentInstance("component.self"),
					PlanApplyable:       true,
					PlanComplete:        true,
					Action:              plans.Update,
					PlannedInputValues:  make(map[string]plans.DynamicValue),
					PlannedOutputValues: make(map[string]cty.Value),
					PlannedCheckResults: &states.CheckResults{},
					RequiredComponents:  collections.NewSet[stackaddrs.AbsComponent](),
					PlanTimestamp:       fakePlanTimestamp,
				},
				&stackplan.PlannedChangeResourceInstancePlanned{
					ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.resource"),
					ChangeSrc: &plans.ResourceInstanceChangeSrc{
						Addr:         mustAbsResourceInstance("testing_resource.resource"),
						PrevRunAddr:  mustAbsResourceInstance("testing_resource.resource"),
						ProviderAddr: mustDefaultRootProvider("testing"),
						ChangeSrc: plans.ChangeSrc{
							Action: plans.Forget,
							Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
								"id":    cty.StringVal("removed"),
								"value": cty.StringVal("removed"),
							})),
							After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
								"id":    cty.String,
								"value": cty.String,
							}))),
						},
						ActionReason: plans.ResourceInstanceDeleteBecauseNoResourceConfig,
					},
					PriorStateSrc: &states.ResourceInstanceObjectSrc{
						Status: states.ObjectReady,
						AttrsJSON: mustMarshalJSONAttrs(map[string]any{
							"id":    "removed",
							"value": "removed",
						}),
						Dependencies: make([]addrs.ConfigResource, 0),
					},
					ProviderConfigAddr: mustDefaultRootProvider("testing"),
					Schema:             stacks_testing_provider.TestingResourceSchema,
				},
				&stackplan.PlannedChangeHeader{
					TerraformVersion: version.SemVer,
				},
				&stackplan.PlannedChangePlannedTimestamp{
					PlannedTimestamp: fakePlanTimestamp,
				},
			},
			counts: collections.NewMap[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange](
				collections.MapElem[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]{
					K: mustAbsComponentInstance("component.self"),
					V: &hooks.ComponentInstanceChange{
						Addr:   mustAbsComponentInstance("component.self"),
						Forget: 1,
					},
				}),
			expectedWarnings: []string{"Some objects will no longer be managed by Terraform"},
		},
	}
	for name, tc := range tcs {
		t.Run(name, func(t *testing.T) {
			ctx := context.Background()
			cfg := loadMainBundleConfigForTest(t, path.Join("state-manipulation", name))
			// Capture the per-component change counts via the plan hook so we
			// can compare them against tc.counts afterwards.
			gotCounts := collections.NewMap[stackaddrs.AbsComponentInstance, *hooks.ComponentInstanceChange]()
			ctx = ContextWithHooks(ctx, &stackeval.Hooks{
				ReportComponentInstancePlanned: func(ctx context.Context, span any, change *hooks.ComponentInstanceChange) any {
					gotCounts.Put(change.Addr, change)
					return span
				},
			})
			inputs := make(map[stackaddrs.InputVariable]ExternalInputValue, len(tc.inputs))
			for name, input := range tc.inputs {
				inputs[stackaddrs.InputVariable{Name: name}] = ExternalInputValue{
					Value: input,
				}
			}
			changesCh := make(chan stackplan.PlannedChange)
			diagsCh := make(chan tfdiags.Diagnostic)
			req := PlanRequest{
				Config: cfg,
				ProviderFactories: map[addrs.Provider]providers.Factory{
					addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
						return stacks_testing_provider.NewProviderWithData(t, tc.store), nil
					},
				},
				DependencyLocks:    *lock,
				InputValues:        inputs,
				ForcePlanTimestamp: &fakePlanTimestamp,
				PrevState:          tc.state,
			}
			resp := PlanResponse{
				PlannedChanges: changesCh,
				Diagnostics:    diagsCh,
			}
			go Plan(ctx, &req, &resp)
			changes, diags := collectPlanOutput(changesCh, diagsCh)
			reportDiagnosticsForTest(t, diags)
			// Require an exact count match: the previous `>` check let a
			// missing expected warning pass silently, since the loop below
			// only compares the diagnostics that actually arrived.
			if len(diags) != len(tc.expectedWarnings) {
				t.Fatalf("expected %d diagnostics, got %d", len(tc.expectedWarnings), len(diags))
			}
			for i, diag := range diags {
				if diag.Description().Summary != tc.expectedWarnings[i] {
					t.Fatalf("expected diagnostic with summary %q, got %q", tc.expectedWarnings[i], diag.Description().Summary)
				}
			}
			sort.SliceStable(changes, func(i, j int) bool {
				return plannedChangeSortKey(changes[i]) < plannedChangeSortKey(changes[j])
			})
			if diff := cmp.Diff(tc.changes, changes, changesCmpOpts); diff != "" {
				t.Errorf("wrong changes\n%s", diff)
			}
			wantCounts := tc.counts
			for key, elem := range wantCounts.All() {
				// First, make sure everything we wanted is present.
				if !gotCounts.HasKey(key) {
					t.Errorf("wrong counts: wanted %s but didn't get it", key)
				}
				// And that the values actually match.
				got, want := gotCounts.Get(key), elem
				if diff := cmp.Diff(want, got); diff != "" {
					t.Errorf("wrong counts for %s: %s", want.Addr, diff)
				}
			}
			for key := range gotCounts.All() {
				// Then, make sure we didn't get anything we didn't want.
				if !wantCounts.HasKey(key) {
					t.Errorf("wrong counts: got %s but didn't want it", key)
				}
			}
		})
	}
}
// TestPlan_plantimestamp_force_timestamp verifies that when PlanRequest
// carries a non-nil ForcePlanTimestamp, the forced value is used as the
// plan timestamp everywhere it surfaces: in each planned component
// instance's PlanTimestamp, in the component input/output values derived
// from plantimestamp() in the configuration, in the root output value,
// and in the PlannedChangePlannedTimestamp record itself.
func TestPlan_plantimestamp_force_timestamp(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "with-plantimestamp")

	// A timestamp safely in the past, so any accidental use of "now"
	// would show up as a diff against the expectations below.
	forcedPlanTimestamp := "1991-08-25T20:57:08Z"
	fakePlanTimestamp, err := time.Parse(time.RFC3339, forcedPlanTimestamp)
	if err != nil {
		t.Fatal(err)
	}

	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			// We support both hashicorp/testing and
			// terraform.io/builtin/testing as providers. This lets us
			// test the provider aliasing feature. Both providers
			// support the same set of resources and data sources.
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
			addrs.NewBuiltInProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		// No root input variables are needed; the configuration only
		// consumes plantimestamp().
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{},

		ForcePlanTimestamp: &fakePlanTimestamp,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)

	// The following will fail the test if there are any error
	// diagnostics.
	reportDiagnosticsForTest(t, diags)

	// We also want to fail if there are just warnings, since the
	// configurations here are supposed to be totally problem-free.
	if len(diags) != 0 {
		// reportDiagnosticsForTest already showed the diagnostics in
		// the log
		t.FailNow()
	}

	// Plan emits changes in an unspecified order, so sort them into a
	// deterministic order before comparing against expectations.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})

	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "second-self"},
				},
			),
			Action:              plans.Create,
			PlanApplyable:       true,
			PlanComplete:        true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"value": nil,
			},
			PlannedInputValues: map[string]plans.DynamicValue{
				// The component's input is fed from plantimestamp(), so
				// it must carry the forced timestamp string.
				"value": mustPlanDynamicValueDynamicType(cty.StringVal(forcedPlanTimestamp)),
			},
			PlannedOutputValues: map[string]cty.Value{
				"input": cty.StringVal(forcedPlanTimestamp),
				"out":   cty.StringVal(fmt.Sprintf("module-output-%s", forcedPlanTimestamp)),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action:              plans.Create,
			PlanApplyable:       true,
			PlanComplete:        true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"value": nil,
			},
			PlannedInputValues: map[string]plans.DynamicValue{
				"value": mustPlanDynamicValueDynamicType(cty.StringVal(forcedPlanTimestamp)),
			},
			PlannedOutputValues: map[string]cty.Value{
				"input": cty.StringVal(forcedPlanTimestamp),
				"out":   cty.StringVal(fmt.Sprintf("module-output-%s", forcedPlanTimestamp)),
			},
			PlanTimestamp: fakePlanTimestamp,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangeOutputValue{
			Addr:   stackaddrs.OutputValue{Name: "plantimestamp"},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.StringVal(forcedPlanTimestamp),
		},
		&stackplan.PlannedChangePlannedTimestamp{PlannedTimestamp: fakePlanTimestamp},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlan_plantimestamp_later_than_when_writing_this_test verifies that
// when ForcePlanTimestamp is nil, Plan picks a real "now" timestamp: the
// plantimestamp root output must parse as RFC3339 and must not be earlier
// than the (fixed, past) date this test was written.
func TestPlan_plantimestamp_later_than_when_writing_this_test(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "with-plantimestamp")

	// A fixed lower bound: any genuine wall-clock timestamp produced by
	// Plan must be after this date.
	dayOfWritingThisTest := "2024-06-21T06:37:08Z"
	dayOfWritingThisTestTime, err := time.Parse(time.RFC3339, dayOfWritingThisTest)
	if err != nil {
		t.Fatal(err)
	}

	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			// We support both hashicorp/testing and
			// terraform.io/builtin/testing as providers. This lets us
			// test the provider aliasing feature. Both providers
			// support the same set of resources and data sources.
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
			addrs.NewBuiltInProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		// No root input variables are needed; the configuration only
		// consumes plantimestamp().
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{},

		ForcePlanTimestamp: nil, // This is what we want to test
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	changes, diags := collectPlanOutput(changesCh, diagsCh)

	// Pull the plantimestamp root output out of the planned changes and
	// check it is a parseable RFC3339 timestamp no earlier than the
	// lower bound above.
	output := expectOutput(t, "plantimestamp", changes)
	plantimestampValue := output.After

	plantimestamp, err := time.Parse(time.RFC3339, plantimestampValue.AsString())
	if err != nil {
		t.Fatal(err)
	}

	if plantimestamp.Before(dayOfWritingThisTestTime) {
		t.Errorf("expected plantimestamp to be later than %q, got %q", dayOfWritingThisTest, plantimestampValue.AsString())
	}

	// The following will fail the test if there are any error
	// diagnostics.
	reportDiagnosticsForTest(t, diags)

	// We also want to fail if there are just warnings, since the
	// configurations here are supposed to be totally problem-free.
	if len(diags) != 0 {
		// reportDiagnosticsForTest already showed the diagnostics in
		// the log
		t.FailNow()
	}
}
// TestPlan_DependsOnUpdatesRequirements verifies that inter-component
// dependencies declared in the "with-single-input/depends-on" fixture
// show up in the RequiredComponents set of the corresponding planned
// component instances. (The dependency edges asserted below presumably
// come from depends_on declarations in the fixture configuration, which
// is not visible here — confirm against the fixture if they change.)
func TestPlan_DependsOnUpdatesRequirements(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, path.Join("with-single-input", "depends-on"))

	// Fixed timestamp so the planned changes are fully deterministic.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}

	// The request carries dependency locks that pin the testing provider
	// at version 0.0.0, matching the factory registered below.
	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)

	changesCh := make(chan stackplan.PlannedChange)
	diagsCh := make(chan tfdiags.Diagnostic)
	req := PlanRequest{
		Config: cfg,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks:    *lock,
		ForcePlanTimestamp: &fakePlanTimestamp,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "input"}: {
				Value: cty.StringVal("hello, world!"),
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	reportDiagnosticsForTest(t, diags)
	if len(diags) != 0 {
		// reportDiagnosticsForTest already logged the diagnostics; any
		// diagnostic (even a warning) is a failure for this fixture.
		t.FailNow()
	}
	// Plan emits changes in an unspecified order, so sort before diffing.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})
	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		// component.first has no dependencies: its RequiredComponents
		// set is present but empty.
		&stackplan.PlannedChangeComponentInstance{
			Addr:               mustAbsComponentInstance("component.first"),
			PlanApplyable:      true,
			PlanComplete:       true,
			Action:             plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](),
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("hello, world!")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.first.testing_resource.data"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr:        mustAbsResourceInstance("testing_resource.data"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
				ProviderAddr: mustDefaultRootProvider("testing"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
						"id":    cty.String,
						"value": cty.String,
					}))),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, world!"),
					})),
				},
			},
			ProviderConfigAddr: mustDefaultRootProvider("testing"),
			Schema:             stacks_testing_provider.TestingResourceSchema,
		},
		// component.second depends on component.first and on the "self"
		// component inside stack.second.
		&stackplan.PlannedChangeComponentInstance{
			Addr:          mustAbsComponentInstance("component.second"),
			PlanApplyable: true,
			PlanComplete:  true,
			Action:        plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				mustAbsComponent("component.first"),
				mustAbsComponent("stack.second.component.self"),
			),
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("hello, world!")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.second.testing_resource.data"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr:        mustAbsResourceInstance("testing_resource.data"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
				ProviderAddr: mustDefaultRootProvider("testing"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
						"id":    cty.String,
						"value": cty.String,
					}))),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, world!"),
					})),
				},
			},
			ProviderConfigAddr: mustDefaultRootProvider("testing"),
			Schema:             stacks_testing_provider.TestingResourceSchema,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
		// The component embedded in stack.first depends on two root
		// components, including component.empty which plans no changes
		// of its own.
		&stackplan.PlannedChangeComponentInstance{
			Addr:          mustAbsComponentInstance("stack.first.component.self"),
			PlanApplyable: true,
			PlanComplete:  true,
			Action:        plans.Create,
			RequiredComponents: collections.NewSet[stackaddrs.AbsComponent](
				mustAbsComponent("component.first"),
				mustAbsComponent("component.empty"),
			),
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("hello, world!")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.first.component.self.testing_resource.data"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr:        mustAbsResourceInstance("testing_resource.data"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
				ProviderAddr: mustDefaultRootProvider("testing"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
						"id":    cty.String,
						"value": cty.String,
					}))),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, world!"),
					})),
				},
			},
			ProviderConfigAddr: mustDefaultRootProvider("testing"),
			Schema:             stacks_testing_provider.TestingResourceSchema,
		},
		// The component embedded in stack.second carries no
		// RequiredComponents field here (unlike component.first, which
		// has an explicit empty set).
		&stackplan.PlannedChangeComponentInstance{
			Addr:          mustAbsComponentInstance("stack.second.component.self"),
			PlanApplyable: true,
			PlanComplete:  true,
			Action:        plans.Create,
			PlannedInputValues: map[string]plans.DynamicValue{
				"id":    mustPlanDynamicValueDynamicType(cty.NullVal(cty.String)),
				"input": mustPlanDynamicValueDynamicType(cty.StringVal("hello, world!")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{
				"id":    nil,
				"input": nil,
			},
			PlanTimestamp:       fakePlanTimestamp,
			PlannedOutputValues: make(map[string]cty.Value),
			PlannedCheckResults: &states.CheckResults{},
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("stack.second.component.self.testing_resource.data"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr:        mustAbsResourceInstance("testing_resource.data"),
				PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
				ProviderAddr: mustDefaultRootProvider("testing"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
						"id":    cty.String,
						"value": cty.String,
					}))),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.UnknownVal(cty.String),
						"value": cty.StringVal("hello, world!"),
					})),
				},
			},
			ProviderConfigAddr: mustDefaultRootProvider("testing"),
			Schema:             stacks_testing_provider.TestingResourceSchema,
		},
		// Root input variables recorded in the plan: "empty" resolves to
		// an empty string set, "input" to the externally supplied value.
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{
				Name: "empty",
			},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.SetValEmpty(cty.String),
		},
		&stackplan.PlannedChangeRootInputValue{
			Addr: stackaddrs.InputVariable{
				Name: "input",
			},
			Action: plans.Create,
			Before: cty.NullVal(cty.DynamicPseudoType),
			After:  cty.StringVal("hello, world!"),
		},
	}
	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
func TestPlan_RemovedBlocks(t *testing.T) {
fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
if err != nil {
t.Fatal(err)
}
lock := depsfile.NewLocks()
lock.SetProvider(
addrs.NewDefaultProvider("testing"),
providerreqs.MustParseVersion("0.0.0"),
providerreqs.MustParseVersionConstraints("=0.0.0"),
providerreqs.PreferredHashes([]providerreqs.Hash{}),
)
tcs := map[string]struct {
source string
initialState *stackstate.State
store *stacks_testing_provider.ResourceStore
inputs map[string]cty.Value
wantPlanChanges []stackplan.PlannedChange
wantPlanDiags []expectedDiagnostic
}{
"unknown removed block with nothing to remove": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
// we have a single component instance in state
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"a\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
"removed": cty.UnknownVal(cty.Set(cty.String)),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: true,
PlanApplyable: false, // all changes are no-ops
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.NoOp,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "a",
"value": "a",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
},
},
"unknown removed block with elements in state": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
// we have a single component instance in state
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"a\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.SetValEmpty(cty.String),
"removed": cty.UnknownVal(cty.Set(cty.String)),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: false, // has deferred changes
PlanApplyable: false, // only deferred changes
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Delete,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "a",
"value": "a",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
DeferredReason: providers.DeferredReasonDeferredPrereq,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetValEmpty(cty.String),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
},
},
"unknown component block with element to remove": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"a\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"b\"]")).
AddInputVariable("id", cty.StringVal("b")).
AddInputVariable("input", cty.StringVal("b"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"b\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "b",
"value": "b",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
AddResource("b", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("b"),
"value": cty.StringVal("b"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.UnknownVal(cty.Set(cty.String)),
"removed": cty.SetVal([]cty.Value{cty.StringVal("b")}),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: false, // has deferred changes
PlanApplyable: false, // only deferred changes
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.NoOp,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "a",
"value": "a",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
DeferredReason: providers.DeferredReasonDeferredPrereq,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"b\"]"),
PlanComplete: true,
PlanApplyable: true,
Action: plans.Delete,
Mode: plans.DestroyMode,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("b")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("b")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"b\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Delete,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("b"),
"value": cty.StringVal("b"),
})),
After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "b",
"value": "b",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{cty.StringVal("b")}),
},
},
},
"unknown component and removed block with element in state": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"a\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.UnknownVal(cty.Set(cty.String)),
"removed": cty.UnknownVal(cty.Set(cty.String)),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: false, // has deferred changes
PlanApplyable: false, // only deferred changes
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeDeferredResourceInstancePlanned{
ResourceInstancePlanned: stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.NoOp,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "a",
"value": "a",
}),
Status: states.ObjectReady,
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
DeferredReason: providers.DeferredReasonDeferredPrereq,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.UnknownVal(cty.Set(cty.String)),
},
},
},
"absent component": {
source: filepath.Join("with-single-input", "removed-component"),
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
},
},
"absent component instance": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"removed\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
"removed": cty.SetVal([]cty.Value{
cty.StringVal("b"), // Doesn't exist!
}),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: true,
},
// we're expecting the new component to be created
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: true,
PlanApplyable: false, // no changes
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.NoOp,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
Dependencies: make([]addrs.ConfigResource, 0),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeComponentInstanceRemoved{
Addr: mustAbsComponentInstance("component.self[\"removed\"]"),
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("b"),
}),
},
},
},
"orphaned component": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"removed\"]")).
AddInputVariable("id", cty.StringVal("removed")).
AddInputVariable("input", cty.StringVal("removed"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"removed\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "removed",
"value": "removed",
}),
})).
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"orphaned\"]")).
AddInputVariable("id", cty.StringVal("orphaned")).
AddInputVariable("input", cty.StringVal("orphaned"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"orphaned\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "orphaned",
"value": "orphaned",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("removed", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("removed"),
"value": cty.StringVal("removed"),
})).
AddResource("orphaned", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("orphaned"),
"value": cty.StringVal("orphaned"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.SetVal([]cty.Value{
cty.StringVal("added"),
}),
"removed": cty.SetVal([]cty.Value{
cty.StringVal("removed"),
}),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: false, // No! We have an unclaimed instance!
},
// we're expecting the new component to be created
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"added\"]"),
PlanComplete: true,
PlanApplyable: true,
Action: plans.Create,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("added")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("added")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"added\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Create,
Before: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("added"),
"value": cty.StringVal("added"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"removed\"]"),
PlanComplete: true,
PlanApplyable: true,
Mode: plans.DestroyMode,
Action: plans.Delete,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("removed")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("removed")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"removed\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.Delete,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("removed"),
"value": cty.StringVal("removed"),
})),
After: mustPlanDynamicValue(cty.NullVal(cty.Object(map[string]cty.Type{
"id": cty.String,
"value": cty.String,
}))),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "removed",
"value": "removed",
}),
Dependencies: make([]addrs.ConfigResource, 0),
Status: states.ObjectReady,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("added"),
}),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("removed"),
}),
},
},
wantPlanDiags: []expectedDiagnostic{
{
severity: tfdiags.Error,
summary: "Unclaimed component instance",
detail: "The component instance component.self[\"orphaned\"] is not claimed by any component or removed block in the configuration. Make sure it is instantiated by a component block, or targeted for removal by a removed block.",
},
},
},
"duplicate component": {
source: filepath.Join("with-single-input", "removed-component-instance"),
initialState: stackstate.NewStateBuilder().
AddComponentInstance(stackstate.NewComponentInstanceBuilder(mustAbsComponentInstance("component.self[\"a\"]")).
AddInputVariable("id", cty.StringVal("a")).
AddInputVariable("input", cty.StringVal("a"))).
AddResourceInstance(stackstate.NewResourceInstanceBuilder().
SetAddr(mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data")).
SetProviderAddr(mustDefaultRootProvider("testing")).
SetResourceInstanceObjectSrc(states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
})).
Build(),
store: stacks_testing_provider.NewResourceStoreBuilder().
AddResource("a", cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})).
Build(),
inputs: map[string]cty.Value{
"input": cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
"removed": cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
},
wantPlanChanges: []stackplan.PlannedChange{
&stackplan.PlannedChangeApplyable{
Applyable: false, // No! The removed block is a duplicate of the component!
},
&stackplan.PlannedChangeComponentInstance{
Addr: mustAbsComponentInstance("component.self[\"a\"]"),
PlanComplete: true,
PlanApplyable: false, // no changes
Action: plans.Update,
PlannedInputValues: map[string]plans.DynamicValue{
"id": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
"input": mustPlanDynamicValueDynamicType(cty.StringVal("a")),
},
PlannedInputValueMarks: map[string][]cty.PathValueMarks{
"input": nil,
"id": nil,
},
PlannedOutputValues: make(map[string]cty.Value),
PlannedCheckResults: &states.CheckResults{},
PlanTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeResourceInstancePlanned{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self[\"a\"].testing_resource.data"),
ChangeSrc: &plans.ResourceInstanceChangeSrc{
Addr: mustAbsResourceInstance("testing_resource.data"),
PrevRunAddr: mustAbsResourceInstance("testing_resource.data"),
ChangeSrc: plans.ChangeSrc{
Action: plans.NoOp,
Before: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"value": cty.StringVal("a"),
})),
},
ProviderAddr: mustDefaultRootProvider("testing"),
},
PriorStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]any{
"id": "a",
"value": "a",
}),
Dependencies: make([]addrs.ConfigResource, 0),
Status: states.ObjectReady,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackplan.PlannedChangeHeader{
TerraformVersion: version.SemVer,
},
&stackplan.PlannedChangePlannedTimestamp{
PlannedTimestamp: fakePlanTimestamp,
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "input"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
},
&stackplan.PlannedChangeRootInputValue{
Addr: stackaddrs.InputVariable{Name: "removed"},
Action: plans.Create,
Before: cty.NullVal(cty.DynamicPseudoType),
After: cty.SetVal([]cty.Value{
cty.StringVal("a"),
}),
},
},
wantPlanDiags: []expectedDiagnostic{
{
severity: tfdiags.Error,
summary: "Cannot remove component instance",
detail: "The component instance component.self[\"a\"] is targeted by a component block and cannot be removed. The relevant component is defined at git::https://example.com/test.git//with-single-input/removed-component-instance/removed-component-instance.tfcomponent.hcl:18,1-17.",
},
},
},
}
for name, tc := range tcs {
t.Run(name, func(t *testing.T) {
ctx := context.Background()
cfg := loadMainBundleConfigForTest(t, tc.source)
inputs := make(map[stackaddrs.InputVariable]ExternalInputValue, len(tc.inputs))
for name, input := range tc.inputs {
inputs[stackaddrs.InputVariable{Name: name}] = ExternalInputValue{
Value: input,
}
}
providers := map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
return stacks_testing_provider.NewProviderWithData(t, tc.store), nil
},
}
planChangesCh := make(chan stackplan.PlannedChange)
planDiagsCh := make(chan tfdiags.Diagnostic)
planReq := PlanRequest{
Config: cfg,
ProviderFactories: providers,
InputValues: inputs,
ForcePlanTimestamp: &fakePlanTimestamp,
PrevState: tc.initialState,
DependencyLocks: *lock,
}
planResp := PlanResponse{
PlannedChanges: planChangesCh,
Diagnostics: planDiagsCh,
}
go Plan(ctx, &planReq, &planResp)
gotPlanChanges, gotPlanDiags := collectPlanOutput(planChangesCh, planDiagsCh)
sort.SliceStable(gotPlanChanges, func(i, j int) bool {
return plannedChangeSortKey(gotPlanChanges[i]) < plannedChangeSortKey(gotPlanChanges[j])
})
sort.SliceStable(gotPlanDiags, diagnosticSortFunc(gotPlanDiags))
expectDiagnosticsForTest(t, gotPlanDiags, tc.wantPlanDiags...)
if diff := cmp.Diff(tc.wantPlanChanges, gotPlanChanges, changesCmpOpts); diff != "" {
t.Errorf("wrong changes\n%s", diff)
}
})
}
}
// TestPlanWithResourceIdentities plans the "resource-identity" fixture and
// verifies that the planned resource change carries an AfterIdentity payload
// alongside the usual Before/After object values.
func TestPlanWithResourceIdentities(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "resource-identity")

	// Pin the plan timestamp so the timestamp-bearing expected changes below
	// are deterministic.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}

	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)

	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config:             cfg,
		ForcePlanTimestamp: &fakePlanTimestamp,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}

	// Plan streams its results into the two channels in the background;
	// collectPlanOutput drains both until planning completes.
	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)
	if len(diags) != 0 {
		t.Errorf("unexpected diagnostics\n%s", diags.ErrWithWarnings().Error())
	}

	wantChanges := []stackplan.PlannedChange{
		&stackplan.PlannedChangeApplyable{
			Applyable: true,
		},
		&stackplan.PlannedChangeComponentInstance{
			Addr: stackaddrs.Absolute(
				stackaddrs.RootStackInstance,
				stackaddrs.ComponentInstance{
					Component: stackaddrs.Component{Name: "self"},
				},
			),
			Action:              plans.Create,
			PlanApplyable:       true,
			PlanComplete:        true,
			PlannedCheckResults: &states.CheckResults{},
			PlannedInputValues: map[string]plans.DynamicValue{
				"name": mustPlanDynamicValueDynamicType(cty.StringVal("example")),
			},
			PlannedInputValueMarks: map[string][]cty.PathValueMarks{"name": nil},
			PlannedOutputValues:    map[string]cty.Value{},
			PlanTimestamp:          fakePlanTimestamp,
		},
		&stackplan.PlannedChangeResourceInstancePlanned{
			ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource_with_identity.hello"),
			ChangeSrc: &plans.ResourceInstanceChangeSrc{
				Addr:         mustAbsResourceInstance("testing_resource_with_identity.hello"),
				PrevRunAddr:  mustAbsResourceInstance("testing_resource_with_identity.hello"),
				ProviderAddr: mustDefaultRootProvider("testing"),
				ChangeSrc: plans.ChangeSrc{
					Action: plans.Create,
					Before: mustPlanDynamicValue(cty.NullVal(cty.DynamicPseudoType)),
					After: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id":    cty.StringVal("example"),
						"value": cty.NullVal(cty.String),
					})),
					// The identity value is what distinguishes this test
					// from the plain testing_resource tests.
					AfterIdentity: mustPlanDynamicValue(cty.ObjectVal(map[string]cty.Value{
						"id": cty.StringVal("id:example"),
					})),
				},
			},
			ProviderConfigAddr: mustDefaultRootProvider("testing"),
			Schema:             stacks_testing_provider.TestingResourceWithIdentitySchema,
		},
		&stackplan.PlannedChangeHeader{
			TerraformVersion: version.SemVer,
		},
		&stackplan.PlannedChangePlannedTimestamp{
			PlannedTimestamp: fakePlanTimestamp,
		},
	}

	// Channel delivery order is not guaranteed, so sort before diffing.
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})

	if diff := cmp.Diff(wantChanges, gotChanges, changesCmpOpts); diff != "" {
		t.Errorf("wrong changes\n%s", diff)
	}
}
// TestPlanInvalidLocalValue plans the "invalid-local" fixture, which contains
// a local value whose expression cannot be evaluated, and checks both that
// the exact error diagnostic is reported and that the resulting plan is
// flagged as not applyable.
func TestPlanInvalidLocalValue(t *testing.T) {
	ctx := context.Background()
	cfg := loadMainBundleConfigForTest(t, "invalid-local")

	// Pin the plan timestamp so the plan output is deterministic.
	fakePlanTimestamp, err := time.Parse(time.RFC3339, "1991-08-25T20:57:08Z")
	if err != nil {
		t.Fatal(err)
	}

	lock := depsfile.NewLocks()
	lock.SetProvider(
		addrs.NewDefaultProvider("testing"),
		providerreqs.MustParseVersion("0.0.0"),
		providerreqs.MustParseVersionConstraints("=0.0.0"),
		providerreqs.PreferredHashes([]providerreqs.Hash{}),
	)

	changesCh := make(chan stackplan.PlannedChange, 8)
	diagsCh := make(chan tfdiags.Diagnostic, 2)
	req := PlanRequest{
		Config:             cfg,
		ForcePlanTimestamp: &fakePlanTimestamp,
		ProviderFactories: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
				return stacks_testing_provider.NewProvider(t), nil
			},
		},
		DependencyLocks: *lock,
		InputValues: map[stackaddrs.InputVariable]ExternalInputValue{
			{Name: "in"}: {
				Value: cty.ObjectVal(map[string]cty.Value{"name": cty.StringVal("foo")}),
			},
		},
	}
	resp := PlanResponse{
		PlannedChanges: changesCh,
		Diagnostics:    diagsCh,
	}

	go Plan(ctx, &req, &resp)
	gotChanges, diags := collectPlanOutput(changesCh, diagsCh)

	// The invalid local should surface as exactly this HCL diagnostic,
	// including source location, so regressions in positioning are caught.
	tfdiags.AssertDiagnosticsMatch(t, diags, tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Invalid operand",
		Detail:   "Unsuitable value for left operand: a number is required.",
		Subject: &hcl.Range{
			Filename: "git::https://example.com/test.git//invalid-local/invalid-local.tfcomponent.hcl",
			Start:    hcl.Pos{Line: 19, Column: 49, Byte: 377},
			End:      hcl.Pos{Line: 19, Column: 50, Byte: 378},
		},
		Context: &hcl.Range{
			Filename: "git::https://example.com/test.git//invalid-local/invalid-local.tfcomponent.hcl",
			Start:    hcl.Pos{Line: 19, Column: 49, Byte: 377},
			End:      hcl.Pos{Line: 19, Column: 54, Byte: 382},
		},
	}))

	// We don't really care about the precise content of the plan changes here,
	// we just want to ensure that the produced plan is not applyable
	sort.SliceStable(gotChanges, func(i, j int) bool {
		return plannedChangeSortKey(gotChanges[i]) < plannedChangeSortKey(gotChanges[j])
	})

	// NOTE(review): assumes the PlannedChangeApplyable record sorts first
	// under plannedChangeSortKey — confirm against that helper's ordering.
	pca, ok := gotChanges[0].(*stackplan.PlannedChangeApplyable)
	if !ok {
		t.Fatalf("expected first change to be PlannedChangeApplyable, got %T", gotChanges[0])
	}
	if pca.Applyable {
		t.Fatalf("expected plan to be not applyable due to invalid local value, but it is applyable")
	}
}
// collectPlanOutput consumes the two output channels emitting results from a
// call to [Plan], gathering everything written to them. It returns once
// changesCh has been closed by the sender, which signals that planning is
// complete; any diagnostics still buffered at that point are drained first.
func collectPlanOutput(changesCh <-chan stackplan.PlannedChange, diagsCh <-chan tfdiags.Diagnostic) ([]stackplan.PlannedChange, tfdiags.Diagnostics) {
	var (
		changes []stackplan.PlannedChange
		diags   tfdiags.Diagnostics
	)
	for {
		select {
		case c, open := <-changesCh:
			if open {
				changes = append(changes, c)
				continue
			}
			// Planning is finished. Pull any remaining diagnostics before
			// returning, unless that channel already closed.
			if diagsCh != nil {
				for d := range diagsCh {
					diags = append(diags, d)
				}
			}
			return changes, diags
		case d, open := <-diagsCh:
			if open {
				diags = append(diags, d)
				continue
			}
			// Disable this select case: a receive on a nil channel blocks
			// forever, so only the changes case remains active.
			diagsCh = nil
		}
	}
}
// expectOutput returns the planned change for the root output value with the
// given name, failing the test immediately when no such change is present.
func expectOutput(t *testing.T, name string, changes []stackplan.PlannedChange) *stackplan.PlannedChangeOutputValue {
	t.Helper()
	for _, change := range changes {
		ov, ok := change.(*stackplan.PlannedChangeOutputValue)
		if !ok {
			continue
		}
		if ov.Addr.Name == name {
			return ov
		}
	}
	t.Fatalf("expected output value %q", name)
	return nil // unreachable: Fatalf stops the test
}
|
go
|
github
|
https://github.com/hashicorp/terraform
|
internal/stacks/stackruntime/plan_test.go
|
// SPDX-License-Identifier: GPL-2.0-or-later
/* audit_watch.c -- watching inodes
*
* Copyright 2003-2009 Red Hat, Inc.
* Copyright 2005 Hewlett-Packard Development Company, L.P.
* Copyright 2005 IBM Corporation
*/
#include <linux/file.h>
#include <linux/kernel.h>
#include <linux/audit.h>
#include <linux/kthread.h>
#include <linux/mutex.h>
#include <linux/fs.h>
#include <linux/fsnotify_backend.h>
#include <linux/namei.h>
#include <linux/netlink.h>
#include <linux/refcount.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/security.h>
#include "audit.h"
/*
* Reference counting:
*
* audit_parent: lifetime is from audit_init_parent() to receipt of an FS_IGNORED
* event. Each audit_watch holds a reference to its associated parent.
*
* audit_watch: if added to lists, lifetime is from audit_init_watch() to
* audit_remove_watch(). Additionally, an audit_watch may exist
* temporarily to assist in searching existing filter data. Each
* audit_krule holds a reference to its associated watch.
*/
/* One watched pathname; linked into its parent directory's watch list. */
struct audit_watch {
	refcount_t		count;	/* reference count */
	dev_t			dev;	/* associated superblock device */
	char			*path;	/* insertion path */
	unsigned long		ino;	/* associated inode number */
	struct audit_parent	*parent; /* associated parent */
	struct list_head	wlist;	/* entry in parent->watches list */
	struct list_head	rules;	/* anchor for krule->rlist */
};

/* Groups all watches that share one parent directory inode; lifetime is
 * tied to the embedded fsnotify mark's reference count. */
struct audit_parent {
	struct list_head	watches; /* anchor for audit_watch->wlist */
	struct fsnotify_mark	mark;	/* fsnotify mark on the inode */
};

/* fsnotify handle. */
static struct fsnotify_group *audit_watch_group;

/* fsnotify events we care about. */
#define AUDIT_FS_WATCH (FS_MOVE | FS_CREATE | FS_DELETE | FS_DELETE_SELF |\
			FS_MOVE_SELF | FS_UNMOUNT)
/* Free a parent; its watch list must already be empty. */
static void audit_free_parent(struct audit_parent *parent)
{
	WARN_ON(!list_empty(&parent->watches));
	kfree(parent);
}

/* fsnotify ->free_mark callback (see audit_watch_fsnotify_ops): the mark's
 * last reference is gone, so free the containing audit_parent. */
static void audit_watch_free_mark(struct fsnotify_mark *entry)
{
	struct audit_parent *parent;

	parent = container_of(entry, struct audit_parent, mark);
	audit_free_parent(parent);
}

/* Take a reference on @parent via its embedded fsnotify mark; NULL is ok. */
static void audit_get_parent(struct audit_parent *parent)
{
	if (likely(parent))
		fsnotify_get_mark(&parent->mark);
}

/* Drop a reference on @parent; dropping the last one frees it through
 * audit_watch_free_mark(). NULL is ok. */
static void audit_put_parent(struct audit_parent *parent)
{
	if (likely(parent))
		fsnotify_put_mark(&parent->mark);
}
/*
 * Find and return the audit_parent on the given inode. If found a reference
 * is taken on this parent.
 */
static inline struct audit_parent *audit_find_parent(struct inode *inode)
{
	struct audit_parent *parent = NULL;
	struct fsnotify_mark *entry;

	entry = fsnotify_find_inode_mark(inode, audit_watch_group);
	if (entry)
		parent = container_of(entry, struct audit_parent, mark);

	return parent;
}

/* Take a reference on @watch. */
void audit_get_watch(struct audit_watch *watch)
{
	refcount_inc(&watch->count);
}

/* Drop a reference on @watch; on the final put the watch must already be
 * detached from its parent and have no rules attached. */
void audit_put_watch(struct audit_watch *watch)
{
	if (refcount_dec_and_test(&watch->count)) {
		WARN_ON(watch->parent);
		WARN_ON(!list_empty(&watch->rules));
		kfree(watch->path);
		kfree(watch);
	}
}

/* Unlink @watch from its parent and drop the references it held. */
static void audit_remove_watch(struct audit_watch *watch)
{
	list_del(&watch->wlist);
	audit_put_parent(watch->parent);
	watch->parent = NULL;
	audit_put_watch(watch); /* match initial get */
}

/* Return the pathname string this watch was created for. */
char *audit_watch_path(struct audit_watch *watch)
{
	return watch->path;
}
/* Return nonzero iff @watch currently refers to exactly the (@ino, @dev)
 * pair; a watch whose inode is still unset never matches. */
int audit_watch_compare(struct audit_watch *watch, unsigned long ino, dev_t dev)
{
	if (watch->ino == AUDIT_INO_UNSET)
		return 0;
	return watch->ino == ino && watch->dev == dev;
}
/* Initialize a parent watch entry.
 * Allocates an audit_parent for the directory at @path and attaches its
 * fsnotify mark (masked to AUDIT_FS_WATCH events) to that inode.
 * Returns the parent or an ERR_PTR. */
static struct audit_parent *audit_init_parent(const struct path *path)
{
	struct inode *inode = d_backing_inode(path->dentry);
	struct audit_parent *parent;
	int ret;

	parent = kzalloc(sizeof(*parent), GFP_KERNEL);
	if (unlikely(!parent))
		return ERR_PTR(-ENOMEM);

	INIT_LIST_HEAD(&parent->watches);

	fsnotify_init_mark(&parent->mark, audit_watch_group);
	parent->mark.mask = AUDIT_FS_WATCH;
	ret = fsnotify_add_inode_mark(&parent->mark, inode, 0);
	if (ret < 0) {
		/* mark never became visible, safe to free directly */
		audit_free_parent(parent);
		return ERR_PTR(ret);
	}

	return parent;
}
/* Initialize a watch entry.
 * Takes ownership of @path (freed by audit_put_watch() on the last put);
 * dev/ino start unset until audit_get_nd() resolves the target. */
static struct audit_watch *audit_init_watch(char *path)
{
	struct audit_watch *watch;

	watch = kzalloc(sizeof(*watch), GFP_KERNEL);
	if (unlikely(!watch))
		return ERR_PTR(-ENOMEM);

	INIT_LIST_HEAD(&watch->rules);
	refcount_set(&watch->count, 1);
	watch->path = path;
	watch->dev = AUDIT_DEV_UNSET;
	watch->ino = AUDIT_INO_UNSET;

	return watch;
}
/* Translate a watch string to kernel representation.
 * A valid watch path is absolute, does not end in '/', is only allowed on
 * the exit (or io_uring exit) filter lists with the '=' comparator, and a
 * rule may carry at most one of inode filter / watch / tree. */
int audit_to_watch(struct audit_krule *krule, char *path, int len, u32 op)
{
	struct audit_watch *watch;

	/* Watches need fsnotify support; bail out if the group could not be
	 * created at boot (see audit_watch_init()). */
	if (!audit_watch_group)
		return -EOPNOTSUPP;

	if (path[0] != '/' || path[len-1] == '/' ||
	    (krule->listnr != AUDIT_FILTER_EXIT &&
	     krule->listnr != AUDIT_FILTER_URING_EXIT) ||
	    op != Audit_equal ||
	    krule->inode_f || krule->watch || krule->tree)
		return -EINVAL;

	watch = audit_init_watch(path);
	if (IS_ERR(watch))
		return PTR_ERR(watch);

	krule->watch = watch;

	return 0;
}
/* Duplicate the given audit watch. The new watch's rules list is initialized
 * to an empty list and wlist is undefined. */
static struct audit_watch *audit_dupe_watch(struct audit_watch *old)
{
	char *path;
	struct audit_watch *new;

	/* Deep-copy the path: each watch owns (and eventually frees) its own. */
	path = kstrdup(old->path, GFP_KERNEL);
	if (unlikely(!path))
		return ERR_PTR(-ENOMEM);

	new = audit_init_watch(path);
	if (IS_ERR(new)) {
		/* audit_init_watch() failed before taking ownership of path */
		kfree(path);
		goto out;
	}

	new->dev = old->dev;
	new->ino = old->ino;
	/* The duplicate shares old's parent, so take an extra parent ref. */
	audit_get_parent(old->parent);
	new->parent = old->parent;

out:
	return new;
}
/* Emit an AUDIT_CONFIG_CHANGE record describing operation @op on the rule
 * attached to watch @w. Silently does nothing when auditing is disabled or
 * when a record buffer cannot be obtained. */
static void audit_watch_log_rule_change(struct audit_krule *r, struct audit_watch *w, char *op)
{
	struct audit_buffer *ab;

	if (!audit_enabled)
		return;
	ab = audit_log_start(audit_context(), GFP_NOFS, AUDIT_CONFIG_CHANGE);
	if (!ab)
		return;
	audit_log_session_info(ab);
	audit_log_format(ab, "op=%s path=", op);
	/* The path originated in userspace: log with untrusted escaping. */
	audit_log_untrustedstring(ab, w->path);
	audit_log_key(ab, r->filterkey);
	audit_log_format(ab, " list=%d res=1", r->listnr);
	audit_log_end(ab);
}
/* Update inode info in audit rules based on filesystem event.
 * Called from the fsnotify handler when a child named @dname of @parent's
 * directory appears or disappears: the matching watch is replaced by a
 * duplicate carrying the new (@dev, @ino), and every rule attached to it is
 * re-homed onto the new watch (and the hash bucket for the new inode). */
static void audit_update_watch(struct audit_parent *parent,
			       const struct qstr *dname, dev_t dev,
			       unsigned long ino, unsigned invalidating)
{
	struct audit_watch *owatch, *nwatch, *nextw;
	struct audit_krule *r, *nextr;
	struct audit_entry *oentry, *nentry;

	mutex_lock(&audit_filter_mutex);
	/* Run all of the watches on this parent looking for the one that
	 * matches the given dname */
	list_for_each_entry_safe(owatch, nextw, &parent->watches, wlist) {
		if (audit_compare_dname_path(dname, owatch->path,
					     AUDIT_NAME_FULL))
			continue;

		/* If the update involves invalidating rules, do the inode-based
		 * filtering now, so we don't omit records. */
		if (invalidating && !audit_dummy_context())
			audit_filter_inodes(current, audit_context());

		/* updating ino will likely change which audit_hash_list we
		 * are on so we need a new watch for the new list */
		nwatch = audit_dupe_watch(owatch);
		if (IS_ERR(nwatch)) {
			mutex_unlock(&audit_filter_mutex);
			audit_panic("error updating watch, skipping");
			return;
		}
		nwatch->dev = dev;
		nwatch->ino = ino;

		/* Move every rule from the old watch onto the new one. */
		list_for_each_entry_safe(r, nextr, &owatch->rules, rlist) {
			oentry = container_of(r, struct audit_entry, rule);
			list_del(&oentry->rule.rlist);
			list_del_rcu(&oentry->list);

			nentry = audit_dupe_rule(&oentry->rule);
			if (IS_ERR(nentry)) {
				list_del(&oentry->rule.list);
				audit_panic("error updating watch, removing");
			} else {
				int h = audit_hash_ino((u32)ino);

				/*
				 * nentry->rule.watch == oentry->rule.watch so
				 * we must drop that reference and set it to our
				 * new watch.
				 */
				audit_put_watch(nentry->rule.watch);
				audit_get_watch(nwatch);
				nentry->rule.watch = nwatch;
				list_add(&nentry->rule.rlist, &nwatch->rules);
				list_add_rcu(&nentry->list, &audit_inode_hash[h]);
				list_replace(&oentry->rule.list,
					     &nentry->rule.list);
			}
			if (oentry->rule.exe)
				audit_remove_mark(oentry->rule.exe);

			/* Free the old entry only after an RCU grace period so
			 * lockless list readers stay safe. */
			call_rcu(&oentry->rcu, audit_free_rule_rcu);
		}

		audit_remove_watch(owatch);
		goto add_watch_to_parent; /* event applies to a single watch */
	}
	mutex_unlock(&audit_filter_mutex);
	return;

add_watch_to_parent:
	list_add(&nwatch->wlist, &parent->watches);
	mutex_unlock(&audit_filter_mutex);
	return;
}
/* Remove all watches & rules associated with a parent that is going away. */
static void audit_remove_parent_watches(struct audit_parent *parent)
{
	struct audit_watch *w, *nextw;
	struct audit_krule *r, *nextr;
	struct audit_entry *e;

	mutex_lock(&audit_filter_mutex);
	list_for_each_entry_safe(w, nextw, &parent->watches, wlist) {
		list_for_each_entry_safe(r, nextr, &w->rules, rlist) {
			e = container_of(r, struct audit_entry, rule);
			/* Record the implicit rule removal in the audit log. */
			audit_watch_log_rule_change(r, w, "remove_rule");
			if (e->rule.exe)
				audit_remove_mark(e->rule.exe);
			list_del(&r->rlist);
			list_del(&r->list);
			list_del_rcu(&e->list);
			/* Defer the free past the RCU grace period. */
			call_rcu(&e->rcu, audit_free_rule_rcu);
		}
		audit_remove_watch(w);
	}
	mutex_unlock(&audit_filter_mutex);

	/* Detach the mark; the parent is freed when its last ref drops. */
	fsnotify_destroy_mark(&parent->mark, audit_watch_group);
}
/* Get path information necessary for adding watches.
 * Resolves the parent directory of watch->path into *parent and, when the
 * watched name itself already exists, fills in the watch's dev/ino. */
static int audit_get_nd(struct audit_watch *watch, struct path *parent)
{
	struct dentry *d;

	d = kern_path_parent(watch->path, parent);
	if (IS_ERR(d))
		return PTR_ERR(d);

	/* A negative dentry simply leaves dev/ino at their UNSET values. */
	if (d_is_positive(d)) {
		/* update watch filter fields */
		watch->dev = d->d_sb->s_dev;
		watch->ino = d_backing_inode(d)->i_ino;
	}

	dput(d);
	return 0;
}
/* Associate the given rule with an existing parent.
 * Caller must hold audit_filter_mutex.
 * If the parent already tracks a watch for the same path, the rule's
 * temporary watch is replaced by the existing one; otherwise the temporary
 * watch becomes the parent's watch for that path. Either way the rule ends
 * up on the chosen watch's rules list. */
static void audit_add_to_parent(struct audit_krule *krule,
				struct audit_parent *parent)
{
	struct audit_watch *w, *watch = krule->watch;
	int watch_found = 0;

	BUG_ON(!mutex_is_locked(&audit_filter_mutex));

	list_for_each_entry(w, &parent->watches, wlist) {
		if (strcmp(watch->path, w->path))
			continue;

		watch_found = 1;

		/* put krule's ref to temporary watch */
		audit_put_watch(watch);

		/* Reuse the already-registered watch for this rule. */
		audit_get_watch(w);
		krule->watch = watch = w;

		/* The existing watch already holds a parent reference, so the
		 * caller's parent ref is surplus — drop it here. */
		audit_put_parent(parent);
		break;
	}

	if (!watch_found) {
		/* First watch on this path: the watch inherits the caller's
		 * parent reference and is linked into the parent's list. */
		watch->parent = parent;

		audit_get_watch(watch);
		list_add(&watch->wlist, &parent->watches);
	}
	list_add(&krule->rlist, &watch->rules);
}
/* Find a matching watch entry, or add this one.
 * Caller must hold audit_filter_mutex.
 * On success *list is set to the inode hash bucket the rule should live on
 * (chosen by the watch's resolved inode number). */
int audit_add_watch(struct audit_krule *krule, struct list_head **list)
{
	struct audit_watch *watch = krule->watch;
	struct audit_parent *parent;
	struct path parent_path;
	int h, ret = 0;

	/*
	 * When we will be calling audit_add_to_parent, krule->watch might have
	 * been updated and watch might have been freed.
	 * So we need to keep a reference of watch.
	 */
	audit_get_watch(watch);

	mutex_unlock(&audit_filter_mutex);

	/* Avoid calling path_lookup under audit_filter_mutex. */
	ret = audit_get_nd(watch, &parent_path);

	/* caller expects mutex locked */
	mutex_lock(&audit_filter_mutex);

	if (ret) {
		audit_put_watch(watch);
		return ret;
	}

	/* either find an old parent or attach a new one */
	parent = audit_find_parent(d_backing_inode(parent_path.dentry));
	if (!parent) {
		parent = audit_init_parent(&parent_path);
		if (IS_ERR(parent)) {
			ret = PTR_ERR(parent);
			goto error;
		}
	}

	/* audit_add_to_parent() consumes our reference on parent. */
	audit_add_to_parent(krule, parent);

	/* Tell the caller which hash bucket the rule belongs in now. */
	h = audit_hash_ino((u32)watch->ino);
	*list = &audit_inode_hash[h];
error:
	path_put(&parent_path);
	audit_put_watch(watch); /* matches the get at the top */
	return ret;
}
/* Detach @krule from its watch. When that leaves the watch without rules,
 * remove the watch itself and, if the parent then has no watches left,
 * destroy the parent's fsnotify mark as well.
 * NOTE(review): presumably called with audit_filter_mutex held, like the
 * other list-manipulating helpers here — confirm at the call sites. */
void audit_remove_watch_rule(struct audit_krule *krule)
{
	struct audit_watch *watch = krule->watch;
	struct audit_parent *parent = watch->parent;

	list_del(&krule->rlist);

	if (list_empty(&watch->rules)) {
		/*
		 * audit_remove_watch() drops our reference to 'parent' which
		 * can get freed. Grab our own reference to be safe.
		 */
		audit_get_parent(parent);
		audit_remove_watch(watch);
		if (list_empty(&parent->watches))
			fsnotify_destroy_mark(&parent->mark, audit_watch_group);
		audit_put_parent(parent);
	}
}
/* Update watch data in audit rules based on fsnotify events. */
static int audit_watch_handle_event(struct fsnotify_mark *inode_mark, u32 mask,
                                    struct inode *inode, struct inode *dir,
                                    const struct qstr *dname, u32 cookie)
{
        struct audit_parent *parent;
        /* the mark is embedded in audit_parent; recover the container */
        parent = container_of(inode_mark, struct audit_parent, mark);
        if (WARN_ON_ONCE(inode_mark->group != audit_watch_group))
                return 0;
        if (mask & (FS_CREATE|FS_MOVED_TO) && inode)
                /* child appeared: propagate its dev/ino to matching watches */
                audit_update_watch(parent, dname, inode->i_sb->s_dev, inode->i_ino, 0);
        else if (mask & (FS_DELETE|FS_MOVED_FROM))
                /* child removed: reset dev/ino to the UNSET sentinels */
                audit_update_watch(parent, dname, AUDIT_DEV_UNSET, AUDIT_INO_UNSET, 1);
        else if (mask & (FS_DELETE_SELF|FS_UNMOUNT|FS_MOVE_SELF))
                /* the watched directory itself went away: drop its watches */
                audit_remove_parent_watches(parent);
        return 0;
}
/* fsnotify callbacks for the audit watch group */
static const struct fsnotify_ops audit_watch_fsnotify_ops = {
        .handle_inode_event = audit_watch_handle_event,
        .free_mark = audit_watch_free_mark,
};
/* Allocate the global fsnotify group used by audit watches.  Runs once at
 * boot via device_initcall(). */
static int __init audit_watch_init(void)
{
        audit_watch_group = fsnotify_alloc_group(&audit_watch_fsnotify_ops, 0);
        if (IS_ERR(audit_watch_group)) {
                /* store NULL rather than an ERR_PTR so later users can
                 * safely test the pointer */
                audit_watch_group = NULL;
                audit_panic("cannot create audit fsnotify group");
        }
        return 0;
}
device_initcall(audit_watch_init);
/* Duplicate @old's exe filter mark onto @new by allocating a fresh mark for
 * the same pathname.  Returns 0 on success or a negative errno. */
int audit_dupe_exe(struct audit_krule *new, struct audit_krule *old)
{
        struct audit_fsnotify_mark *audit_mark;
        char *pathname;
        /* copy the path string; freed here only on failure, so on success
         * the new mark owns it */
        pathname = kstrdup(audit_mark_path(old->exe), GFP_KERNEL);
        if (!pathname)
                return -ENOMEM;
        audit_mark = audit_alloc_mark(new, pathname, strlen(pathname));
        if (IS_ERR(audit_mark)) {
                kfree(pathname);
                return PTR_ERR(audit_mark);
        }
        new->exe = audit_mark;
        return 0;
}
/* Compare @tsk's executable against @mark by inode number and device,
 * returning the result of audit_mark_compare().  Any task other than
 * @current (or one with no mm / no exe file) yields 0. */
int audit_exe_compare(struct task_struct *tsk, struct audit_fsnotify_mark *mark)
{
        struct file *exe_file;
        unsigned long ino;
        dev_t dev;
        /* only do exe filtering if we are recording @current events/records */
        if (tsk != current)
                return 0;
        if (!current->mm)
                return 0;
        exe_file = get_mm_exe_file(current->mm);
        if (!exe_file)
                return 0;
        ino = file_inode(exe_file)->i_ino;
        dev = file_inode(exe_file)->i_sb->s_dev;
        /* drop the reference taken by get_mm_exe_file() */
        fput(exe_file);
        return audit_mark_compare(mark, ino, dev);
}
|
c
|
github
|
https://github.com/torvalds/linux
|
kernel/audit_watch.c
|
from tkinter import *  # import the Tkinter GUI toolkit
import multiwords

# Build a fixed-size window for the multi-word frequency comparison tool.
root = Tk()
root.resizable(False,False)
root.title("统计对比多个词语")
# KCC basic analysis component
# This component lists posts/replies that contain the given words
## Plugin metadata definition
KCC_PLUGIN_NAME="multiwordS"
KCC_PLUGIN_DESCRIPTION="用来统计多个词语的频率并以条形统计图显示"
KCC_PLUGIN_COPYRIGHT="kanch"
## End of plugin metadata
def btnclick():
    """Button handler: read the entered words and run the comparison."""
    root.update()  # make sure the entry widget reflects the latest input
    words = wordentry.get()
    print("word=", words)
    multiwords.compareMultiWords(words)
def centerWindow(rt):
    """Center the toplevel window ``rt`` on the screen and return it.

    Fix: the original mixed ``winfo_reqwidth()`` (requested width) with
    ``winfo_height()`` (current height); both dimensions now consistently
    use the *requested* size, which is what geometry centering needs
    before the window is mapped.
    """
    rt.update()  # force geometry computation so requested sizes are valid
    cur_width = rt.winfo_reqwidth()
    cur_height = rt.winfo_reqheight()
    scn_width, scn_height = rt.maxsize()  # usable screen area
    # '+x+y' places the top-left corner; integer division keeps it exact
    rt.geometry('%dx%d+%d+%d' % (cur_width, cur_height,
                                 (scn_width - cur_width) // 2,
                                 (scn_height - cur_height) // 2))
    return rt
# Widget layout: labels, the word-entry field, and the action button.
data = StringVar(root)   # bound to the entry widget below
scale = IntVar(root)     # declared but never referenced in this script
Label(root,text="KCC数据分析模块 - 基本分析套件\n该模块用于显示指定词语的时间频率关系图",width=35,height=5).pack()
Label(root,text="请输入要分析的词语(用空格隔开):",width=25,height=2).pack()
wordentry = Entry(root,text="请输入内容",width=25,textvariable=data)
wordentry.pack(ipadx=4,ipady=4)
Button(root, text="显示结果", width=15,relief=GROOVE,command=btnclick).pack(pady=16,ipadx=8,ipady=8)
root = centerWindow(root)
root.mainloop()  # enter the Tk event loop
|
unknown
|
codeparrot/codeparrot-clean
| ||
from cwrap import BaseCClass
from ert.enkf import EnkfPrototype
from ert.util import CTime
class EnsemblePlotDataVector(BaseCClass):
    # Thin Python wrapper around the C-level ensemble_plot_data_vector type;
    # every accessor delegates to a C function bound via EnkfPrototype.
    TYPE_NAME = "ensemble_plot_data_vector"
    _size = EnkfPrototype("int enkf_plot_tvector_size(ensemble_plot_data_vector)")
    _get_value = EnkfPrototype("double enkf_plot_tvector_iget_value(ensemble_plot_data_vector, int)")
    _get_time = EnkfPrototype("time_t enkf_plot_tvector_iget_time(ensemble_plot_data_vector, int)")
    _is_active = EnkfPrototype("bool enkf_plot_tvector_iget_active(ensemble_plot_data_vector, int)")

    def __init__(self):
        # Instances are always created from the C side; direct construction
        # is intentionally unsupported.
        raise NotImplementedError("Class can not be instantiated directly!")

    def __len__(self):
        """ @rtype: int """
        return self._size()

    def getValue(self, index):
        """ @rtype: float """
        return self._get_value(index)

    def getTime(self, index):
        """ @rtype: CTime """
        return self._get_time(index)

    def isActive(self, index):
        """ @rtype: bool """
        return self._is_active(index)

    def __repr__(self):
        return 'EnsemblePlotDataVector(size = %d) %s' % (len(self), self._ad_str())
|
unknown
|
codeparrot/codeparrot-clean
| ||
from __future__ import absolute_import
import six
from django.conf import settings
from django.utils.translation import ugettext as _
from sentry.utils.html import escape
from sentry.utils.imports import import_string
def iter_interfaces():
    """Yield ``(interface_class, [names])`` for every configured interface.

    Interfaces sharing the same import path are grouped so each class is
    imported and yielded exactly once.
    """
    names_by_path = {}
    for name, import_path in six.iteritems(settings.SENTRY_INTERFACES):
        names_by_path.setdefault(import_path, []).append(name)
    for import_path, names in six.iteritems(names_by_path):
        yield import_string(import_path), names
def get_interface(name):
    """Resolve an interface name from settings to its class.

    Raises ValueError when the name is not configured or the configured
    class cannot be imported.
    """
    if name not in settings.SENTRY_INTERFACES:
        raise ValueError('Invalid interface name: %s' % (name,))
    import_path = settings.SENTRY_INTERFACES[name]
    try:
        return import_string(import_path)
    except Exception:
        raise ValueError('Unable to load interface: %s' % (name,))
class InterfaceValidationError(Exception):
    # Error type for interface validation failures.
    pass
class Interface(object):
    """
    An interface is a structured representation of data, which may
    render differently than the default ``extra`` metadata in an event.
    """
    # All per-instance state lives in the single `_data` dict (see
    # __getattr__/__setattr__); the class attributes below are defaults.
    _data = None
    score = 0
    display_score = None
    ephemeral = False

    def __init__(self, **data):
        self._data = data or {}

    def __eq__(self, other):
        # Equal only to same-type instances with identical data dicts.
        if type(self) != type(other):
            return False
        return self._data == other._data

    def __getstate__(self):
        # Pickle only the data dict.
        return {'_data': self._data}

    def __setstate__(self, state):
        self.__dict__.update(state)
        # older pickles may lack `_data`; normalise to an empty dict
        if not hasattr(self, '_data'):
            self._data = {}

    def __getattr__(self, name):
        # Attribute reads fall through to the data dict.  Note this raises
        # KeyError (not AttributeError) for missing keys.
        return self._data[name]

    def __setattr__(self, name, value):
        # `_data` itself is stored on the instance; everything else is
        # routed into the data dict.
        if name == '_data':
            self.__dict__['_data'] = value
        else:
            self._data[name] = value

    @classmethod
    def to_python(cls, data):
        # NOTE(review): `cls(data)` passes `data` positionally while
        # __init__ accepts only **kwargs — this would TypeError unless
        # subclasses override one of the two; confirm intended usage.
        return cls(data)

    def get_api_context(self, is_public=False):
        return self.to_json()

    def to_json(self):
        # eliminate empty values for serialization to compress the keyspace
        # and save (seriously) ridiculous amounts of bytes
        # XXX(dcramer): its important that we keep zero values here, but empty
        # lists and strings get discarded as we've deemed them not important
        return dict(
            (k, v) for k, v in six.iteritems(self._data) if (v == 0 or v)
        )

    def get_path(self):
        # dotted import path of the concrete interface class
        cls = type(self)
        return '%s.%s' % (cls.__module__, cls.__name__)

    def get_alias(self):
        return self.get_slug()

    def get_hash(self):
        # Hook for subclasses; base interfaces contribute nothing to hashing.
        return []

    def compute_hashes(self, platform):
        result = self.get_hash()
        if not result:
            return []
        return [result]

    def get_slug(self):
        return type(self).__name__.lower()

    def get_title(self):
        return _(type(self).__name__)

    def get_display_score(self):
        return self.display_score or self.score

    def get_score(self):
        return self.score

    def iter_tags(self):
        return iter(())

    def to_string(self, event, is_public=False, **kwargs):
        return ''

    def to_email_html(self, event, **kwargs):
        # HTML-escape the plain-text rendering and wrap it in <pre>.
        body = self.to_string(event)
        if not body:
            return ''
        return '<pre>%s</pre>' % (escape(body),)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from typing import cast
from langchain_core.load import dumpd, load
from langchain_core.messages import AIMessage, AIMessageChunk
from langchain_core.messages import content as types
from langchain_core.messages.ai import (
InputTokenDetails,
OutputTokenDetails,
UsageMetadata,
add_ai_message_chunks,
add_usage,
subtract_usage,
)
from langchain_core.messages.tool import invalid_tool_call as create_invalid_tool_call
from langchain_core.messages.tool import tool_call as create_tool_call
from langchain_core.messages.tool import tool_call_chunk as create_tool_call_chunk
def test_serdes_message() -> None:
    """Round-trip an AIMessage through dumpd/load and pin the wire format."""
    msg = AIMessage(
        content=[{"text": "blah", "type": "text"}],
        tool_calls=[create_tool_call(name="foo", args={"bar": 1}, id="baz")],
        invalid_tool_calls=[
            create_invalid_tool_call(name="foobad", args="blah", id="booz", error="bad")
        ],
    )
    # Expected serialized constructor payload, including the auto-added
    # "type" discriminators on (invalid_)tool_calls.
    expected = {
        "lc": 1,
        "type": "constructor",
        "id": ["langchain", "schema", "messages", "AIMessage"],
        "kwargs": {
            "type": "ai",
            "content": [{"text": "blah", "type": "text"}],
            "tool_calls": [
                {"name": "foo", "args": {"bar": 1}, "id": "baz", "type": "tool_call"}
            ],
            "invalid_tool_calls": [
                {
                    "name": "foobad",
                    "args": "blah",
                    "id": "booz",
                    "error": "bad",
                    "type": "invalid_tool_call",
                }
            ],
        },
    }
    actual = dumpd(msg)
    assert actual == expected
    # Deserializing the payload must reproduce the original message.
    assert load(actual, allowed_objects=[AIMessage]) == msg
def test_serdes_message_chunk() -> None:
    """Round-trip an AIMessageChunk; tool_call_chunks are parsed and kept."""
    chunk = AIMessageChunk(
        content=[{"text": "blah", "type": "text"}],
        tool_call_chunks=[
            create_tool_call_chunk(name="foo", args='{"bar": 1}', id="baz", index=0),
            create_tool_call_chunk(
                name="foobad",
                args="blah",
                id="booz",
                index=1,
            ),
        ],
    )
    # The valid JSON args chunk becomes a tool_call; the unparsable one an
    # invalid_tool_call; the raw chunks are serialized alongside.
    expected = {
        "lc": 1,
        "type": "constructor",
        "id": ["langchain", "schema", "messages", "AIMessageChunk"],
        "kwargs": {
            "type": "AIMessageChunk",
            "content": [{"text": "blah", "type": "text"}],
            "tool_calls": [
                {"name": "foo", "args": {"bar": 1}, "id": "baz", "type": "tool_call"}
            ],
            "invalid_tool_calls": [
                {
                    "name": "foobad",
                    "args": "blah",
                    "id": "booz",
                    "error": None,
                    "type": "invalid_tool_call",
                }
            ],
            "tool_call_chunks": [
                {
                    "name": "foo",
                    "args": '{"bar": 1}',
                    "id": "baz",
                    "index": 0,
                    "type": "tool_call_chunk",
                },
                {
                    "name": "foobad",
                    "args": "blah",
                    "id": "booz",
                    "index": 1,
                    "type": "tool_call_chunk",
                },
            ],
        },
    }
    actual = dumpd(chunk)
    assert actual == expected
    assert load(actual, allowed_objects=[AIMessageChunk]) == chunk
def test_add_usage_both_none() -> None:
    """add_usage(None, None) yields an all-zero UsageMetadata."""
    result = add_usage(None, None)
    assert result == UsageMetadata(input_tokens=0, output_tokens=0, total_tokens=0)


def test_add_usage_one_none() -> None:
    """A None operand acts as the identity for add_usage."""
    usage = UsageMetadata(input_tokens=10, output_tokens=20, total_tokens=30)
    result = add_usage(usage, None)
    assert result == usage


def test_add_usage_both_present() -> None:
    """Token counts are summed field-wise."""
    usage1 = UsageMetadata(input_tokens=10, output_tokens=20, total_tokens=30)
    usage2 = UsageMetadata(input_tokens=5, output_tokens=10, total_tokens=15)
    result = add_usage(usage1, usage2)
    assert result == UsageMetadata(input_tokens=15, output_tokens=30, total_tokens=45)
def test_add_usage_with_details() -> None:
    """Nested input/output token details are summed as well."""
    usage1 = UsageMetadata(
        input_tokens=10,
        output_tokens=20,
        total_tokens=30,
        input_token_details=InputTokenDetails(audio=5),
        output_token_details=OutputTokenDetails(reasoning=10),
    )
    usage2 = UsageMetadata(
        input_tokens=5,
        output_tokens=10,
        total_tokens=15,
        input_token_details=InputTokenDetails(audio=3),
        output_token_details=OutputTokenDetails(reasoning=5),
    )
    result = add_usage(usage1, usage2)
    assert result["input_token_details"]["audio"] == 8
    assert result["output_token_details"]["reasoning"] == 15
def test_subtract_usage_both_none() -> None:
    """subtract_usage(None, None) yields an all-zero UsageMetadata."""
    result = subtract_usage(None, None)
    assert result == UsageMetadata(input_tokens=0, output_tokens=0, total_tokens=0)


def test_subtract_usage_one_none() -> None:
    """Subtracting None leaves the left operand unchanged."""
    usage = UsageMetadata(input_tokens=10, output_tokens=20, total_tokens=30)
    result = subtract_usage(usage, None)
    assert result == usage


def test_subtract_usage_both_present() -> None:
    """Token counts are subtracted field-wise."""
    usage1 = UsageMetadata(input_tokens=10, output_tokens=20, total_tokens=30)
    usage2 = UsageMetadata(input_tokens=5, output_tokens=10, total_tokens=15)
    result = subtract_usage(usage1, usage2)
    assert result == UsageMetadata(input_tokens=5, output_tokens=10, total_tokens=15)


def test_subtract_usage_with_negative_result() -> None:
    """Differences are clamped at zero rather than going negative."""
    usage1 = UsageMetadata(input_tokens=5, output_tokens=10, total_tokens=15)
    usage2 = UsageMetadata(input_tokens=10, output_tokens=20, total_tokens=30)
    result = subtract_usage(usage1, usage2)
    assert result == UsageMetadata(input_tokens=0, output_tokens=0, total_tokens=0)
def test_add_ai_message_chunks_usage() -> None:
    """Merging chunks accumulates usage_metadata, treating None as empty."""
    chunks = [
        AIMessageChunk(content="", usage_metadata=None),
        AIMessageChunk(
            content="",
            usage_metadata=UsageMetadata(
                input_tokens=2, output_tokens=3, total_tokens=5
            ),
        ),
        AIMessageChunk(
            content="",
            usage_metadata=UsageMetadata(
                input_tokens=2,
                output_tokens=3,
                total_tokens=5,
                input_token_details=InputTokenDetails(audio=1, cache_read=1),
                output_token_details=OutputTokenDetails(audio=1, reasoning=2),
            ),
        ),
    ]
    combined = add_ai_message_chunks(*chunks)
    # Counts sum across chunks; detail dicts come from the only chunk
    # that carries them.
    assert combined == AIMessageChunk(
        content="",
        usage_metadata=UsageMetadata(
            input_tokens=4,
            output_tokens=6,
            total_tokens=10,
            input_token_details=InputTokenDetails(audio=1, cache_read=1),
            output_token_details=OutputTokenDetails(audio=1, reasoning=2),
        ),
    )
def test_init_tool_calls() -> None:
    """"type" is added to tool calls on init but not on direct assignment."""
    # Test we add "type" key on init
    msg = AIMessage("", tool_calls=[{"name": "foo", "args": {"a": "b"}, "id": "abc"}])
    assert len(msg.tool_calls) == 1
    assert msg.tool_calls[0]["type"] == "tool_call"
    # Test we can assign without adding type key
    msg.tool_calls = [{"name": "bar", "args": {"c": "d"}, "id": "def"}]
def test_content_blocks() -> None:
    """Exercise content_blocks across tool calls, chunks, and merge rules."""
    # tool_calls alone surface as a single tool_call content block
    message = AIMessage(
        "",
        tool_calls=[
            {"type": "tool_call", "name": "foo", "args": {"a": "b"}, "id": "abc_123"}
        ],
    )
    assert len(message.content_blocks) == 1
    assert message.content_blocks[0]["type"] == "tool_call"
    assert message.content_blocks == [
        {"type": "tool_call", "id": "abc_123", "name": "foo", "args": {"a": "b"}}
    ]
    assert message.content == ""
    # string content is exposed as a text block ahead of tool calls
    message = AIMessage(
        "foo",
        tool_calls=[
            {"type": "tool_call", "name": "foo", "args": {"a": "b"}, "id": "abc_123"}
        ],
    )
    assert len(message.content_blocks) == 2
    assert message.content_blocks[0]["type"] == "text"
    assert message.content_blocks[1]["type"] == "tool_call"
    assert message.content_blocks == [
        {"type": "text", "text": "foo"},
        {"type": "tool_call", "id": "abc_123", "name": "foo", "args": {"a": "b"}},
    ]
    assert message.content == "foo"
    # With standard blocks
    standard_content: list[types.ContentBlock] = [
        {"type": "reasoning", "reasoning": "foo"},
        {"type": "text", "text": "bar"},
        {
            "type": "text",
            "text": "baz",
            "annotations": [{"type": "citation", "url": "http://example.com"}],
        },
        {
            "type": "image",
            "url": "http://example.com/image.png",
            "extras": {"foo": "bar"},
        },
        {
            "type": "non_standard",
            "value": {"custom_key": "custom_value", "another_key": 123},
        },
        {
            "type": "tool_call",
            "name": "foo",
            "args": {"a": "b"},
            "id": "abc_123",
        },
    ]
    # a tool call present in tool_calls but absent from content_blocks
    missing_tool_call: types.ToolCall = {
        "type": "tool_call",
        "name": "bar",
        "args": {"c": "d"},
        "id": "abc_234",
    }
    message = AIMessage(
        content_blocks=standard_content,
        tool_calls=[
            {"type": "tool_call", "name": "foo", "args": {"a": "b"}, "id": "abc_123"},
            missing_tool_call,
        ],
    )
    # the missing tool call is appended to the rendered blocks
    assert message.content_blocks == [*standard_content, missing_tool_call]
    # Check we auto-populate tool_calls
    standard_content = [
        {"type": "text", "text": "foo"},
        {
            "type": "tool_call",
            "name": "foo",
            "args": {"a": "b"},
            "id": "abc_123",
        },
        missing_tool_call,
    ]
    message = AIMessage(content_blocks=standard_content)
    assert message.tool_calls == [
        {"type": "tool_call", "name": "foo", "args": {"a": "b"}, "id": "abc_123"},
        missing_tool_call,
    ]
    # Chunks
    message = AIMessageChunk(
        content="",
        tool_call_chunks=[
            {
                "type": "tool_call_chunk",
                "name": "foo",
                "args": "",
                "id": "abc_123",
                "index": 0,
            }
        ],
    )
    assert len(message.content_blocks) == 1
    assert message.content_blocks[0]["type"] == "tool_call_chunk"
    assert message.content_blocks == [
        {
            "type": "tool_call_chunk",
            "name": "foo",
            "args": "",
            "id": "abc_123",
            "index": 0,
        }
    ]
    assert message.content == ""
    # Test we parse tool call chunks into tool calls for v1 content
    chunk_1 = AIMessageChunk(
        content="",
        tool_call_chunks=[
            {
                "type": "tool_call_chunk",
                "name": "foo",
                "args": '{"foo": "b',
                "id": "abc_123",
                "index": 0,
            }
        ],
    )
    chunk_2 = AIMessageChunk(
        content="",
        tool_call_chunks=[
            {
                "type": "tool_call_chunk",
                "name": "",
                "args": 'ar"}',
                "id": "abc_123",
                "index": 0,
            }
        ],
    )
    chunk_3 = AIMessageChunk(content="", chunk_position="last")
    chunk = chunk_1 + chunk_2 + chunk_3
    assert chunk.content == ""
    assert chunk.content_blocks == chunk.tool_calls
    # test v1 content
    chunk_1.content = cast("str | list[str | dict]", chunk_1.content_blocks)
    assert len(chunk_1.content) == 1
    chunk_1.content[0]["extras"] = {"baz": "qux"}  # type: ignore[index]
    chunk_1.response_metadata["output_version"] = "v1"
    chunk_2.content = cast("str | list[str | dict]", chunk_2.content_blocks)
    chunk = chunk_1 + chunk_2 + chunk_3
    # completed args string parses to a dict; extras survive the merge
    assert chunk.content == [
        {
            "type": "tool_call",
            "name": "foo",
            "args": {"foo": "bar"},
            "id": "abc_123",
            "extras": {"baz": "qux"},
        }
    ]
    # Non-standard
    standard_content_1: list[types.ContentBlock] = [
        {"type": "non_standard", "index": 0, "value": {"foo": "bar "}}
    ]
    standard_content_2: list[types.ContentBlock] = [
        {"type": "non_standard", "index": 0, "value": {"foo": "baz"}}
    ]
    chunk_1 = AIMessageChunk(content=cast("str | list[str | dict]", standard_content_1))
    chunk_2 = AIMessageChunk(content=cast("str | list[str | dict]", standard_content_2))
    merged_chunk = chunk_1 + chunk_2
    # string values at the same index are concatenated
    assert merged_chunk.content == [
        {"type": "non_standard", "index": 0, "value": {"foo": "bar baz"}},
    ]
    # Test server_tool_call_chunks
    chunk_1 = AIMessageChunk(
        content=[
            {
                "type": "server_tool_call_chunk",
                "index": 0,
                "name": "foo",
            }
        ]
    )
    chunk_2 = AIMessageChunk(
        content=[{"type": "server_tool_call_chunk", "index": 0, "args": '{"a'}]
    )
    chunk_3 = AIMessageChunk(
        content=[{"type": "server_tool_call_chunk", "index": 0, "args": '": 1}'}]
    )
    merged_chunk = chunk_1 + chunk_2 + chunk_3
    assert merged_chunk.content == [
        {
            "type": "server_tool_call_chunk",
            "name": "foo",
            "index": 0,
            "args": '{"a": 1}',
        }
    ]
    # closing the stream finalizes chunks into a server_tool_call with
    # parsed args
    full_chunk = merged_chunk + AIMessageChunk(
        content=[], chunk_position="last", response_metadata={"output_version": "v1"}
    )
    assert full_chunk.content == [
        {"type": "server_tool_call", "name": "foo", "index": 0, "args": {"a": 1}}
    ]
    # Test non-standard + non-standard
    chunk_1 = AIMessageChunk(
        content=[
            {
                "type": "non_standard",
                "index": 0,
                "value": {"type": "non_standard_tool", "foo": "bar"},
            }
        ]
    )
    chunk_2 = AIMessageChunk(
        content=[
            {
                "type": "non_standard",
                "index": 0,
                "value": {"type": "input_json_delta", "partial_json": "a"},
            }
        ]
    )
    chunk_3 = AIMessageChunk(
        content=[
            {
                "type": "non_standard",
                "index": 0,
                "value": {"type": "input_json_delta", "partial_json": "b"},
            }
        ]
    )
    merged_chunk = chunk_1 + chunk_2 + chunk_3
    # values merge key-wise; partial_json strings concatenate
    assert merged_chunk.content == [
        {
            "type": "non_standard",
            "index": 0,
            "value": {"type": "non_standard_tool", "foo": "bar", "partial_json": "ab"},
        }
    ]
    # Test standard + non-standard with same index
    standard_content_1 = [
        {
            "type": "server_tool_call",
            "name": "web_search",
            "id": "ws_123",
            "args": {"query": "web query"},
            "index": 0,
        }
    ]
    standard_content_2 = [{"type": "non_standard", "value": {"foo": "bar"}, "index": 0}]
    chunk_1 = AIMessageChunk(content=cast("str | list[str | dict]", standard_content_1))
    chunk_2 = AIMessageChunk(content=cast("str | list[str | dict]", standard_content_2))
    merged_chunk = chunk_1 + chunk_2
    # non-standard payload folds into the standard block's extras
    assert merged_chunk.content == [
        {
            "type": "server_tool_call",
            "name": "web_search",
            "id": "ws_123",
            "args": {"query": "web query"},
            "index": 0,
            "extras": {"foo": "bar"},
        }
    ]
def test_content_blocks_reasoning_extraction() -> None:
    """Test best-effort reasoning extraction from `additional_kwargs`."""
    # "reasoning_content" is surfaced as a reasoning block before the text
    message = AIMessage(
        content="The answer is 42.",
        additional_kwargs={"reasoning_content": "Let me think about this problem..."},
    )
    content_blocks = message.content_blocks
    assert len(content_blocks) == 2
    assert content_blocks[0]["type"] == "reasoning"
    assert content_blocks[0].get("reasoning") == "Let me think about this problem..."
    assert content_blocks[1]["type"] == "text"
    assert content_blocks[1]["text"] == "The answer is 42."
    # Test no reasoning extraction when no reasoning content
    message = AIMessage(
        content="The answer is 42.", additional_kwargs={"other_field": "some value"}
    )
    content_blocks = message.content_blocks
    assert len(content_blocks) == 1
    assert content_blocks[0]["type"] == "text"
|
python
|
github
|
https://github.com/langchain-ai/langchain
|
libs/core/tests/unit_tests/messages/test_ai.py
|
# Managing async data with signals using the Resources API
Now that you've learned [how to derive state with linked signals](/tutorials/signals/3-deriving-state-with-linked-signals), let's explore how to handle asynchronous data with the Resource API. The Resource API provides a powerful way to manage async operations using signals, with built-in loading states, error handling, and request management.
In this activity, you'll learn how to use the `resource()` function to load data asynchronously and how to handle different states of async operations by building a user profile loader that demonstrates the Resource API in action.
<hr />
<docs-workflow>
<docs-step title="Import resource function and API">
Add `resource` to your existing imports and import the fake API function.
```ts
// Add resource to existing imports
import {Component, signal, computed, resource, ChangeDetectionStrategy} from '@angular/core';
// Import mock API function
import {getUserData} from './user-api';
```
</docs-step>
<docs-step title="Create a resource for user data">
Add a property in the component class that creates a resource to load user data based on a user ID signal.
```ts
userId = signal(1);
userResource = resource({
params: () => ({id: this.userId()}),
loader: (params) => getUserData(params.params.id),
});
```
</docs-step>
<docs-step title="Add methods to interact with the resource">
Add methods to change the user ID and reload the resource.
```ts
loadUser(id: number) {
this.userId.set(id);
}
reloadUser() {
this.userResource.reload();
}
```
Changing the params signal automatically triggers a reload, or you can manually reload with `reload()`.
</docs-step>
<docs-step title="Create computed signals for resource states">
Add computed signals to access different states of the resource.
```ts
isLoading = computed(() => this.userResource.status() === 'loading');
hasError = computed(() => this.userResource.status() === 'error');
```
Resources provide a `status()` signal that can be 'loading', 'success', or 'error', a `value()` signal for the loaded data, and a `hasValue()` method that safely checks if data is available.
</docs-step>
<docs-step title="Wire up the buttons and display resource states">
The template structure is already provided. Now connect everything:
Part 1. **Add click handlers to the buttons:**
```html
<button (click)="loadUser(1)">Load User 1</button>
<button (click)="loadUser(2)">Load User 2</button>
<button (click)="loadUser(999)">Load Invalid User</button>
<button (click)="reloadUser()">Reload</button>
```
Part 2. **Replace the placeholder with resource state handling:**
```angular-html
@if (isLoading()) {
<p>Loading user...</p>
} @else if (hasError()) {
<p class="error">Error: {{ userResource.error()?.message }}</p>
} @else if (userResource.hasValue()) {
<div class="user-info">
<h3>{{ userResource.value().name }}</h3>
<p>{{ userResource.value().email }}</p>
</div>
}
```
The resource provides different methods to check its state:
- `isLoading()` - true when fetching data
- `hasError()` - true when an error occurred
- `userResource.hasValue()` - true when data is available
- `userResource.value()` - access the loaded data
- `userResource.error()` - access error information
</docs-step>
</docs-workflow>
Excellent! You've now learned how to use the Resource API with signals. Key concepts to remember:
- **Resources are reactive**: They automatically reload when params change
- **Built-in state management**: Resources provide `status()`, `value()`, and `error()` signals
- **Automatic cleanup**: Resources handle request cancellation and cleanup automatically
- **Manual control**: You can manually reload or abort requests when needed
In the next lesson, you'll learn [how to pass data to components with input signals](/tutorials/signals/5-component-communication-with-signals)!
|
unknown
|
github
|
https://github.com/angular/angular
|
adev/src/content/tutorials/signals/steps/4-managing-async-data-with-signals/README.md
|
import os
import re
def main():
    # Parse eg_states.h and emit C #define constants for each register-name
    # table found there.  Output goes to stdout (Python 2 print statements).
    # NOTE(review): original indentation was lost in transit; block nesting
    # below is reconstructed from the control flow — confirm against the
    # upstream file.
    fileIN = open('eg_states.h', 'r')
    line = fileIN.readline()
    next_is_reg = False
    count = 0
    print "/* This file is autogenerated from eg_states.h - do not edit directly */"
    print "/* autogenerating script is gen_eg_states.py */"
    print ""
    while line:
        if line[0:2] == "};":
            # end of a table: emit its size before resetting state
            if next_is_reg == True:
                print "#define " + name + "_SIZE\t\t", count
                print "#define " + name + "_PM4 128\t\t"
                next_is_reg = False
                count = 0
                print ""
        if line[0:6] == "static":
            # a new "static ..._names[...]" table starts; cline[4] is
            # presumably the array identifier — verify for this header
            name = line.rstrip("\n")
            cline = name.split()
            name = cline[4].split('[')
            name = name[0].replace("_names", "")
            print "/* " + name + " */"
            next_is_reg = True
        elif next_is_reg == True:
            # inside a table: each entry becomes one numbered #define
            reg = line.split();
            reg = reg[3].replace("},", "")
            reg = reg.replace("\"", "")
            print "#define " + name + "__" + reg + "\t\t", count
            count = count + 1
        line = fileIN.readline()

if __name__ == "__main__":
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
import nipy
import pandas as pd
import numpy as np
import argparse
import os
from collections import OrderedDict
import sys
sys.path.append( os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) )
from config import *
# Command-line interface for the VBM summary script.
# NOTE: the bare `print args` marks this file as Python 2.
parser = argparse.ArgumentParser(description='Script to create summary of VBM experiment.'
    'Summary table:'
    'Brain Region,Region size (#voxels),min p-value,# negative voxels,# positive voxels ')
parser.add_argument("-p",required=True, type=str, help="p-map image (with real p-values, not (1-p_value)!)")
parser.add_argument("-t",required=True, type=str, help="t-map or b-map image")
parser.add_argument("-a", type=str,default='Hammer',choices=['Hammer','FreeSurfer','Tracts'], help="Atlas name")
parser.add_argument("-o",required=True, type=str, help="output folder")
parser.add_argument("-n",required=True, type=str, help="result table name")
parser.add_argument("-th",required=True, type=float, help="p-value threshold")
parser.add_argument("-mask",default=None, type=str, help="path mask image")
parser.add_argument("-atlas",default=None, type=str, help="path atlas image")
parser.add_argument("-tract_th", type=float, default=0.0, help='tracts threshold for minimum probability to include voxel')
args = parser.parse_args()
print args
def get_atlas(atlas_name,atlas_path):
    # Load an atlas image plus its region-name table.
    # Returns a dict:
    #   'mask'    : {label: voxel-index tuple} for labelled atlases, or the
    #               whole (probabilistic) image object for 'Tracts'
    #   'regions' : {label: name} for labelled atlases, or a list of names
    Atlas={}
    Atlas['mask']={}
    Atlas['regions']={}
    if atlas_path is None:
        # fall back to the path configured in config.ATLAS
        Atlas_path=ATLAS[atlas_name]
    else:
        Atlas_path = atlas_path
    Atlas_table=INFO_TABLE[atlas_name]
    A=nipy.load_image(Atlas_path)
    Table=pd.read_csv(Atlas_table, sep=',', header=None)
    if atlas_name!='Tracts':
        u=np.unique(A._data)
        # keep only labels that actually occur in the atlas volume
        for j,i in enumerate(Table[0]):
            if i in u:
                Atlas['regions'][i]=Table[1][j]
                Atlas['mask'][i]=np.where(A._data==i)
        return Atlas
    else:
        # probabilistic tract atlas: keep the full image, threshold later
        Atlas['mask']=A
        Atlas['regions']=Table[1].tolist()
        return Atlas
if __name__=="__main__":
    # Build the per-region summary table from the p-map and t/b-map.
    if args.th<=0 or args.th>=1:
        raise ValueError('Threshold should be 0 < threshold < 1, not {}'.format(args.th))
    results=OrderedDict()
    Atlas=get_atlas(args.a, args.atlas)
    P=nipy.load_image(args.p)
    if args.mask is not None:
        # voxels outside the mask are forced to p=1 (never significant)
        M=nipy.load_image(args.mask)
        P._data[M._data==0]=1
    results['Brain Region']=[]
    results['Region size (#voxels)']=[]
    results['min p-value']=[]
    results['# negative voxels']=[]
    results['# positive voxels']=[]
    # voxels above the p threshold are treated as non-significant
    mask=np.where(P._data>args.th)
    P._data[mask]=1
    P._data[P._data==0]=1 #TODO change:check with mask, if zero outside, then it is significant
    # split the t/b map into negative-only and positive-only copies,
    # zeroed wherever p is non-significant
    T_neg=nipy.load_image(args.t)
    T_pos=nipy.load_image(args.t)
    T_neg._data[mask]=0
    T_pos._data[mask]=0
    T_pos[T_pos._data<0]=0
    T_neg[T_neg._data>0]=0
    if args.a!='Tracts':
        # labelled atlas: one row per region label
        for k in Atlas['mask']:
            results['Brain Region'].append(Atlas['regions'][k])
            results['Region size (#voxels)'].append( len(Atlas['mask'][k][0]) )
            results['min p-value'].append(np.min( P._data[Atlas['mask'][k]] ))
            results['# negative voxels'].append(len( np.where(T_neg._data[Atlas['mask'][k]]!=0)[0] ))
            results['# positive voxels'].append(len( np.where(T_pos._data[Atlas['mask'][k]]!=0)[0] ))
    else:
        # probabilistic tracts: threshold each 3D volume of the 4D atlas
        for j,i in enumerate(Atlas['regions']):
            results['Brain Region'].append(i)
            tract=Atlas['mask'][:,:,:,j+1] #Tract atlas starts from 0 dim with no info
            #print i, tract.shape, args.tract_th
            tract_mask=np.where(tract._data>args.tract_th)
            #print tract_mask[0]
            results['Region size (#voxels)'].append( len(tract_mask[0]) )
            results['min p-value'].append(np.min(P._data[tract_mask]))
            results['# negative voxels'].append(len(np.where(T_neg._data[tract_mask] != 0)[0]))
            results['# positive voxels'].append(len(np.where(T_pos._data[tract_mask] != 0)[0]))
    df=pd.DataFrame.from_dict(results)
    df.to_csv(os.path.join(args.o,args.n), sep=',', index=False)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from __future__ import absolute_import
from mock import patch
from django.test import TestCase
from track.backends.mongodb import MongoBackend
class TestMongoBackend(TestCase):
    """Unit tests for MongoBackend with MongoClient patched out."""

    def setUp(self):
        super(TestMongoBackend, self).setUp()
        # Patch MongoClient so no real database connection is attempted.
        self.mongo_patcher = patch('track.backends.mongodb.MongoClient')
        self.mongo_patcher.start()
        self.addCleanup(self.mongo_patcher.stop)
        self.backend = MongoBackend()

    def test_mongo_backend(self):
        events = [{'test': 1}, {'test': 2}]
        self.backend.send(events[0])
        self.backend.send(events[1])
        # Check if we inserted events into the database
        calls = self.backend.collection.insert.mock_calls
        self.assertEqual(len(calls), 2)
        # Unpack the arguments and check if the events were used
        # as the first argument to collection.insert
        def first_argument(call):
            _, args, _ = call
            return args[0]
        self.assertEqual(events[0], first_argument(calls[0]))
        self.assertEqual(events[1], first_argument(calls[1]))
|
unknown
|
codeparrot/codeparrot-clean
| ||
import antlr3
import testbase
import unittest
class t011lexer(testbase.ANTLRTest):
    # Tests for the t011 lexer grammar (identifiers and whitespace).
    # Python 2 syntax throughout (raise/except with commas).

    def setUp(self):
        self.compileGrammar()

    def lexerClass(self, base):
        # Wrap the generated lexer: silence error output and make
        # recognition errors raise instead of attempting recovery.
        class TLexer(base):
            def emitErrorMessage(self, msg):
                # report errors to /dev/null
                pass

            def reportError(self, re):
                # no error recovery yet, just crash!
                raise re
        return TLexer

    def testValid(self):
        # Valid input tokenizes as IDENTIFIER/WS alternation with exact
        # start/stop offsets and texts.
        stream = antlr3.StringStream('foobar _Ab98 \n A12sdf')
        lexer = self.getLexer(stream)
        token = lexer.nextToken()
        assert token.type == self.lexerModule.IDENTIFIER
        assert token.start == 0, token.start
        assert token.stop == 5, token.stop
        assert token.text == 'foobar', token.text
        token = lexer.nextToken()
        assert token.type == self.lexerModule.WS
        assert token.start == 6, token.start
        assert token.stop == 6, token.stop
        assert token.text == ' ', token.text
        token = lexer.nextToken()
        assert token.type == self.lexerModule.IDENTIFIER
        assert token.start == 7, token.start
        assert token.stop == 11, token.stop
        assert token.text == '_Ab98', token.text
        token = lexer.nextToken()
        assert token.type == self.lexerModule.WS
        assert token.start == 12, token.start
        assert token.stop == 14, token.stop
        assert token.text == ' \n ', token.text
        token = lexer.nextToken()
        assert token.type == self.lexerModule.IDENTIFIER
        assert token.start == 15, token.start
        assert token.stop == 20, token.stop
        assert token.text == 'A12sdf', token.text
        token = lexer.nextToken()
        assert token.type == self.lexerModule.EOF

    def testMalformedInput(self):
        # '-' cannot start any token; expect NoViableAltException at
        # line 1, column 1.
        stream = antlr3.StringStream('a-b')
        lexer = self.getLexer(stream)
        lexer.nextToken()
        try:
            token = lexer.nextToken()
            raise AssertionError, token
        except antlr3.NoViableAltException, exc:
            assert exc.unexpectedType == '-', repr(exc.unexpectedType)
            assert exc.charPositionInLine == 1, repr(exc.charPositionInLine)
            assert exc.line == 1, repr(exc.line)

if __name__ == '__main__':
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
import torch
import numpy as np
from models import SSD300
# Convert pretrained VGG16 weights from a PyTorch checkpoint into the
# Keras-style SSD300 base model and save them as HDF5.
weights_path = '../trained_models/vgg16_reducedfc.pth'
vgg_weights = torch.load(weights_path)
biases = []
weights = []
for name, torch_weights in vgg_weights.items():
    print(name)
    if 'bias' in name:
        biases.append(torch_weights.numpy())
    if 'weight' in name:
        print(torch_weights.numpy().shape)
        conv_weights = torch_weights.numpy()
        # Two rollaxis calls move axis 0 to the back twice, i.e.
        # (out, in, h, w) -> (h, w, in, out) — presumably the layout the
        # target layers expect; confirm against the model definition.
        conv_weights = np.rollaxis(conv_weights, 0, 4)
        conv_weights = np.rollaxis(conv_weights, 0, 3)
        weights.append(conv_weights)
# NOTE(review): pairing weights with biases by list position assumes the
# checkpoint yields weight/bias entries in matching order — confirm.
vgg_weights = list(zip(weights, biases))
base_model = SSD300(return_base=True)
pytorch_layer_arg = 0
for layer in base_model.layers[1:]:
    conv_weights = vgg_weights[pytorch_layer_arg][0]
    bias_weights = vgg_weights[pytorch_layer_arg][1]
    # only conv layers (and 'branch_2' layers) consume pretrained weights
    if ('conv2d' in layer.name) or ('branch_2' in layer.name):
        print(layer.name)
        # (typo "weigths" is a runtime string, kept as-is)
        print('pre-trained_weigths:', conv_weights.shape)
        print('model weights:', layer.get_weights()[0].shape)
        layer.set_weights([conv_weights, bias_weights])
        pytorch_layer_arg = pytorch_layer_arg + 1
base_model.save_weights('../trained_models/VGG16_weights.hdf5')
|
unknown
|
codeparrot/codeparrot-clean
| ||
from datetime import datetime
from operator import attrgetter
from django.test import TestCase
from .models import (
CustomMembership, Employee, Event, Friendship, Group, Ingredient,
Invitation, Membership, Person, PersonSelfRefM2M, Recipe, RecipeIngredient,
Relationship,
)
class M2mThroughTests(TestCase):
    """Many-to-many relations declared with an explicit ``through`` model.

    Because the intermediary model (Membership / CustomMembership) can
    carry extra fields, the direct mutators add()/create()/remove()/set()
    are expected to raise AttributeError; clear() and queryset traversal
    must keep working.
    """

    @classmethod
    def setUpTestData(cls):
        # Class-wide fixture: three people and two groups.
        cls.bob = Person.objects.create(name='Bob')
        cls.jim = Person.objects.create(name='Jim')
        cls.jane = Person.objects.create(name='Jane')
        cls.rock = Group.objects.create(name='Rock')
        cls.roll = Group.objects.create(name='Roll')

    def test_retrieve_intermediate_items(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jane, group=self.rock)
        expected = ['Jane', 'Jim']
        self.assertQuerysetEqual(
            self.rock.members.all(),
            expected,
            attrgetter("name")
        )

    def test_get_on_intermediate_model(self):
        Membership.objects.create(person=self.jane, group=self.rock)
        queryset = Membership.objects.get(person=self.jane, group=self.rock)
        self.assertEqual(
            repr(queryset),
            '<Membership: Jane is a member of Rock>'
        )

    def test_filter_on_intermediate_model(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jane, group=self.rock)
        queryset = Membership.objects.filter(group=self.rock)
        expected = [
            '<Membership: Jim is a member of Rock>',
            '<Membership: Jane is a member of Rock>',
        ]
        self.assertQuerysetEqual(
            queryset,
            expected
        )

    # --- direct mutation is forbidden on a through-model M2M -----------
    def test_cannot_use_add_on_m2m_with_intermediary_model(self):
        msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.rock.members.add(self.bob)
        self.assertQuerysetEqual(
            self.rock.members.all(),
            []
        )

    def test_cannot_use_create_on_m2m_with_intermediary_model(self):
        msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.rock.members.create(name='Annie')
        self.assertQuerysetEqual(
            self.rock.members.all(),
            []
        )

    def test_cannot_use_remove_on_m2m_with_intermediary_model(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.rock.members.remove(self.jim)
        self.assertQuerysetEqual(
            self.rock.members.all(),
            ['Jim', ],
            attrgetter("name")
        )

    def test_cannot_use_setattr_on_m2m_with_intermediary_model(self):
        msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
        members = list(Person.objects.filter(name__in=['Bob', 'Jim']))
        with self.assertRaisesMessage(AttributeError, msg):
            self.rock.members.set(members)
        self.assertQuerysetEqual(
            self.rock.members.all(),
            []
        )

    def test_clear_removes_all_the_m2m_relationships(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jane, group=self.rock)
        self.rock.members.clear()
        self.assertQuerysetEqual(
            self.rock.members.all(),
            []
        )

    # --- same restrictions apply from the reverse side -----------------
    def test_retrieve_reverse_intermediate_items(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jim, group=self.roll)
        expected = ['Rock', 'Roll']
        self.assertQuerysetEqual(
            self.jim.group_set.all(),
            expected,
            attrgetter("name")
        )

    def test_cannot_use_add_on_reverse_m2m_with_intermediary_model(self):
        msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.add(self.bob)
        self.assertQuerysetEqual(
            self.bob.group_set.all(),
            []
        )

    def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
        msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.create(name='Funk')
        self.assertQuerysetEqual(
            self.bob.group_set.all(),
            []
        )

    def test_cannot_use_remove_on_reverse_m2m_with_intermediary_model(self):
        Membership.objects.create(person=self.bob, group=self.rock)
        msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.remove(self.rock)
        self.assertQuerysetEqual(
            self.bob.group_set.all(),
            ['Rock', ],
            attrgetter('name')
        )

    def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
        msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
        members = list(Group.objects.filter(name__in=['Rock', 'Roll']))
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.set(members)
        self.assertQuerysetEqual(
            self.bob.group_set.all(),
            []
        )

    def test_clear_on_reverse_removes_all_the_m2m_relationships(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jim, group=self.roll)
        self.jim.group_set.clear()
        self.assertQuerysetEqual(
            self.jim.group_set.all(),
            []
        )

    # --- query traversal through the intermediary model ----------------
    def test_query_model_by_attribute_name_of_related_model(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jane, group=self.rock)
        Membership.objects.create(person=self.bob, group=self.roll)
        Membership.objects.create(person=self.jim, group=self.roll)
        Membership.objects.create(person=self.jane, group=self.roll)
        self.assertQuerysetEqual(
            Group.objects.filter(members__name='Bob'),
            ['Roll', ],
            attrgetter("name")
        )

    def test_order_by_relational_field_through_model(self):
        CustomMembership.objects.create(person=self.jim, group=self.rock)
        CustomMembership.objects.create(person=self.bob, group=self.rock)
        CustomMembership.objects.create(person=self.jane, group=self.roll)
        CustomMembership.objects.create(person=self.jim, group=self.roll)
        # Expected order follows the CustomMembership rows created above.
        self.assertSequenceEqual(
            self.rock.custom_members.order_by('custom_person_related_name'),
            [self.jim, self.bob]
        )
        self.assertSequenceEqual(
            self.roll.custom_members.order_by('custom_person_related_name'),
            [self.jane, self.jim]
        )

    def test_query_first_model_by_intermediate_model_attribute(self):
        Membership.objects.create(
            person=self.jane, group=self.roll,
            invite_reason="She was just awesome."
        )
        Membership.objects.create(
            person=self.jim, group=self.roll,
            invite_reason="He is good."
        )
        Membership.objects.create(person=self.bob, group=self.roll)
        qs = Group.objects.filter(
            membership__invite_reason="She was just awesome."
        )
        self.assertQuerysetEqual(
            qs,
            ['Roll'],
            attrgetter("name")
        )

    def test_query_second_model_by_intermediate_model_attribute(self):
        Membership.objects.create(
            person=self.jane, group=self.roll,
            invite_reason="She was just awesome."
        )
        Membership.objects.create(
            person=self.jim, group=self.roll,
            invite_reason="He is good."
        )
        Membership.objects.create(person=self.bob, group=self.roll)
        qs = Person.objects.filter(
            membership__invite_reason="She was just awesome."
        )
        self.assertQuerysetEqual(
            qs,
            ['Jane'],
            attrgetter("name")
        )

    def test_query_model_by_related_model_name(self):
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(person=self.jane, group=self.rock)
        Membership.objects.create(person=self.bob, group=self.roll)
        Membership.objects.create(person=self.jim, group=self.roll)
        Membership.objects.create(person=self.jane, group=self.roll)
        self.assertQuerysetEqual(
            Person.objects.filter(group__name="Rock"),
            ['Jane', 'Jim'],
            attrgetter("name")
        )

    def test_query_model_by_custom_related_name(self):
        CustomMembership.objects.create(person=self.bob, group=self.rock)
        CustomMembership.objects.create(person=self.jim, group=self.rock)
        self.assertQuerysetEqual(
            Person.objects.filter(custom__name="Rock"),
            ['Bob', 'Jim'],
            attrgetter("name")
        )

    def test_query_model_by_intermediate_can_return_non_unique_queryset(self):
        # Jim's two memberships both match the date filter, so his name
        # appears twice -- duplicates are expected here.
        Membership.objects.create(person=self.jim, group=self.rock)
        Membership.objects.create(
            person=self.jane, group=self.rock,
            date_joined=datetime(2006, 1, 1)
        )
        Membership.objects.create(
            person=self.bob, group=self.roll,
            date_joined=datetime(2004, 1, 1))
        Membership.objects.create(person=self.jim, group=self.roll)
        Membership.objects.create(
            person=self.jane, group=self.roll,
            date_joined=datetime(2004, 1, 1))
        qs = Person.objects.filter(
            membership__date_joined__gt=datetime(2004, 1, 1)
        )
        self.assertQuerysetEqual(
            qs,
            ['Jane', 'Jim', 'Jim'],
            attrgetter("name")
        )

    def test_custom_related_name_forward_empty_qs(self):
        self.assertQuerysetEqual(
            self.rock.custom_members.all(),
            []
        )

    def test_custom_related_name_reverse_empty_qs(self):
        self.assertQuerysetEqual(
            self.bob.custom.all(),
            []
        )

    def test_custom_related_name_forward_non_empty_qs(self):
        CustomMembership.objects.create(person=self.bob, group=self.rock)
        CustomMembership.objects.create(person=self.jim, group=self.rock)
        self.assertQuerysetEqual(
            self.rock.custom_members.all(),
            ['Bob', 'Jim'],
            attrgetter("name")
        )

    def test_custom_related_name_reverse_non_empty_qs(self):
        CustomMembership.objects.create(person=self.bob, group=self.rock)
        CustomMembership.objects.create(person=self.jim, group=self.rock)
        self.assertQuerysetEqual(
            self.bob.custom.all(),
            ['Rock'],
            attrgetter("name")
        )

    def test_custom_related_name_doesnt_conflict_with_fky_related_name(self):
        CustomMembership.objects.create(person=self.bob, group=self.rock)
        self.assertQuerysetEqual(
            self.bob.custom_person_related_name.all(),
            ['<CustomMembership: Bob is a member of Rock>']
        )

    def test_through_fields(self):
        """
        Relations with intermediary tables with multiple FKs
        to the M2M's ``to`` model are possible.
        """
        event = Event.objects.create(title='Rockwhale 2014')
        Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jim)
        Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jane)
        self.assertQuerysetEqual(
            event.invitees.all(),
            ['Jane', 'Jim'],
            attrgetter('name')
        )
class M2mThroughReferentialTests(TestCase):
    """Self-referential M2M via an intermediary model (Friendship).

    One Friendship row represents a single direction; the tests show the
    relation is non-symmetrical unless both directions are created.
    """

    def test_self_referential_empty_qs(self):
        tony = PersonSelfRefM2M.objects.create(name="Tony")
        self.assertQuerysetEqual(
            tony.friends.all(),
            []
        )

    def test_self_referential_non_symmetrical_first_side(self):
        tony = PersonSelfRefM2M.objects.create(name="Tony")
        chris = PersonSelfRefM2M.objects.create(name="Chris")
        Friendship.objects.create(
            first=tony, second=chris, date_friended=datetime.now()
        )
        self.assertQuerysetEqual(
            tony.friends.all(),
            ['Chris'],
            attrgetter("name")
        )

    def test_self_referential_non_symmetrical_second_side(self):
        # Only tony -> chris exists, so chris has no friends of his own.
        tony = PersonSelfRefM2M.objects.create(name="Tony")
        chris = PersonSelfRefM2M.objects.create(name="Chris")
        Friendship.objects.create(
            first=tony, second=chris, date_friended=datetime.now()
        )
        self.assertQuerysetEqual(
            chris.friends.all(),
            []
        )

    def test_self_referential_non_symmetrical_clear_first_side(self):
        tony = PersonSelfRefM2M.objects.create(name="Tony")
        chris = PersonSelfRefM2M.objects.create(name="Chris")
        Friendship.objects.create(
            first=tony, second=chris, date_friended=datetime.now()
        )
        chris.friends.clear()
        self.assertQuerysetEqual(
            chris.friends.all(),
            []
        )
        # Since this isn't a symmetrical relation, Tony's friend link still exists.
        self.assertQuerysetEqual(
            tony.friends.all(),
            ['Chris'],
            attrgetter("name")
        )

    def test_self_referential_symmetrical(self):
        # Symmetry has to be modelled explicitly: one row per direction.
        tony = PersonSelfRefM2M.objects.create(name="Tony")
        chris = PersonSelfRefM2M.objects.create(name="Chris")
        Friendship.objects.create(
            first=tony, second=chris, date_friended=datetime.now()
        )
        Friendship.objects.create(
            first=chris, second=tony, date_friended=datetime.now()
        )
        self.assertQuerysetEqual(
            tony.friends.all(),
            ['Chris'],
            attrgetter("name")
        )
        self.assertQuerysetEqual(
            chris.friends.all(),
            ['Tony'],
            attrgetter("name")
        )

    def test_through_fields_self_referential(self):
        # NOTE(review): presumably Employee.subordinates declares
        # through_fields=('source', 'target'), making the extra 'another'
        # FK irrelevant to the relation -- confirm in models.py.
        john = Employee.objects.create(name='john')
        peter = Employee.objects.create(name='peter')
        mary = Employee.objects.create(name='mary')
        harry = Employee.objects.create(name='harry')
        Relationship.objects.create(source=john, target=peter, another=None)
        Relationship.objects.create(source=john, target=mary, another=None)
        Relationship.objects.create(source=john, target=harry, another=peter)
        self.assertQuerysetEqual(
            john.subordinates.all(),
            ['peter', 'mary', 'harry'],
            attrgetter('name')
        )
class M2mThroughToFieldsTests(TestCase):
    """M2M through RecipeIngredient.

    The choices test yielding 'pea'/'potato'/'tomato' suggests the
    relation targets Ingredient.iname via to_field rather than the pk --
    confirm in models.py.
    """

    @classmethod
    def setUpTestData(cls):
        # One recipe containing all three ingredients.
        cls.pea = Ingredient.objects.create(iname='pea')
        cls.potato = Ingredient.objects.create(iname='potato')
        cls.tomato = Ingredient.objects.create(iname='tomato')
        cls.curry = Recipe.objects.create(rname='curry')
        RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.potato)
        RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.pea)
        RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.tomato)

    def test_retrieval(self):
        # Forward retrieval
        self.assertSequenceEqual(self.curry.ingredients.all(), [self.pea, self.potato, self.tomato])
        # Backward retrieval
        self.assertEqual(self.tomato.recipes.get(), self.curry)

    def test_choices(self):
        # Choice values are the ingredient names, not pks.
        field = Recipe._meta.get_field('ingredients')
        self.assertEqual(
            [choice[0] for choice in field.get_choices(include_blank=False)],
            ['pea', 'potato', 'tomato']
        )
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
import os
import sys
import logging
from six.moves import map
log = logging.getLogger("main")
from ..master_task import AlgCleanerTask
from ..master_job import Job
from ..utils import SeqGroup, GLOBALS, hascontent, DATATYPES, pjoin
from .. import db
__all__ = ["Trimal"]
class Trimal(AlgCleanerTask):
    """Alignment-cleaning task that runs *trimal* on a FASTA alignment.

    If trimming drops whole sequences, the cleaned alignment is rejected
    and the original (untrimmed) files are registered instead.
    """

    def __init__(self, nodeid, seqtype, alg_fasta_file, alg_phylip_file,
                 conf, confname):
        GLOBALS["citator"].add('trimal')

        self.confname = confname
        self.conf = conf
        self.seqtype = seqtype
        self.alg_fasta_file = alg_fasta_file
        self.alg_phylip_file = alg_phylip_file
        # '-in'/'-out' are filled by load_jobs(); '-colnumbering' makes
        # trimal report which alignment columns were kept.
        base_args = {
            '-in': None,
            '-out': None,
            '-fasta': "",
            '-colnumbering': "",
        }
        # Initialize task
        AlgCleanerTask.__init__(self, nodeid, "acleaner", "Trimal",
                                base_args,
                                self.conf[confname])
        self.init()

    def load_jobs(self):
        """Create the single trimal job that cleans the input alignment."""
        appname = self.conf[self.confname]["_app"]
        args = self.args.copy()
        args["-in"] = pjoin(GLOBALS["input_dir"], self.alg_fasta_file)
        args["-out"] = "clean.alg.fasta"
        job = Job(self.conf["app"][appname], args, parent_ids=[self.nodeid])
        job.add_input_file(self.alg_fasta_file)
        self.jobs.append(job)

    def finish(self):
        # Once executed, alignment is converted into relaxed
        # interleaved phylip format. Both files, fasta and phylip,
        # remain accessible.

        # Set Task specific attributes
        main_job = self.jobs[0]
        fasta_path = pjoin(main_job.jobdir, "clean.alg.fasta")
        alg = SeqGroup(fasta_path)
        if len(alg) != self.size:
            # Trimal removed entire sequences: fall back to the
            # original, untrimmed alignment files.
            log.warning("Trimming was too aggressive and it tried"
                        " to remove one or more sequences."
                        " Alignment trimming will be disabled for this dataset."
                        )
            self.clean_alg_fasta_file = db.register_task_data(self.taskid, DATATYPES.clean_alg_fasta, self.alg_fasta_file)
            self.clean_alg_phylip_file = db.register_task_data(self.taskid, DATATYPES.clean_alg_phylip, self.alg_phylip_file)
        else:
            # Recover the kept column indices from trimal's stdout.
            # Default to [] so a missing '#ColumnsMap' line cannot raise
            # NameError at store_data() below (bug in the original).
            kept_columns = []
            with open(self.jobs[0].stdout_file) as stdout_file:
                for line in stdout_file:
                    line = line.strip()
                    if line.startswith("#ColumnsMap"):
                        kept_columns = list(map(int, line.split("\t")[1].split(",")))
            fasta = alg.write(format="fasta")
            phylip = alg.write(format="iphylip_relaxed")
            AlgCleanerTask.store_data(self, fasta, phylip, kept_columns)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import base64
import urllib
def xunlei_url_encode(url):
    """Mask *url* as a Thunder (Xunlei) link: base64 of 'AA' + url + 'ZZ'."""
    payload = base64.encodestring('AA' + url + 'ZZ')
    return 'thunder://' + payload.replace('\n', '')
def xunlei_url_decode(url):
assert url.startswith('thunder://')
url = base64.decodestring(url[10:])
assert url.startswith('AA') and url.endswith('ZZ')
return url[2:-2]
def flashget_url_encode(url):
    """Mask *url* as a Flashget link: base64 of '[FLASHGET]url[FLASHGET]'."""
    payload = '[FLASHGET]' + url + '[FLASHGET]'
    return 'Flashget://' + base64.encodestring(payload).replace('\n', '')
def flashget_url_decode(url):
    """Inverse of flashget_url_encode(): drop every '[FLASHGET]' marker."""
    assert url.startswith('Flashget://')
    decoded = base64.decodestring(url[len('Flashget://'):])
    assert decoded.startswith('[FLASHGET]') and decoded.endswith('[FLASHGET]')
    return decoded.replace('[FLASHGET]', '')
def flashgetx_url_decode(url):
    """Decode a flashgetx:// link into its equivalent ed2k:// link.

    Expected shape: flashgetx://|mhts|<base64-name>|<size>|<hash>|/
    """
    assert url.startswith('flashgetx://|mhts|')
    fields = url.split('|')
    name, size, file_hash, tail = fields[2:]
    assert tail == '/'
    return 'ed2k://|file|' + base64.decodestring(name) + '|' + size + '|' + file_hash + '/'
def qqdl_url_encode(url):
    """Mask *url* as a qqdl link (plain base64, no wrapper string)."""
    payload = base64.encodestring(url)
    return 'qqdl://' + payload.replace('\n', '')
def qqdl_url_decode(url):
    """Inverse of qqdl_url_encode()."""
    assert url.startswith('qqdl://')
    return base64.decodestring(url[len('qqdl://'):])
def url_unmask(url):
    """Reveal the real URL behind a thunder/Flashget/flashgetx/qqdl link.

    Unrecognized schemes are returned unchanged.  Only xunlei results are
    additionally percent-escaped via normalize_unicode_link().
    """
    if url.startswith('thunder://'):
        return normalize_unicode_link(xunlei_url_decode(url))
    for prefix, decoder in (('Flashget://', flashget_url_decode),
                            ('flashgetx://', flashgetx_url_decode),
                            ('qqdl://', qqdl_url_decode)):
        if url.startswith(prefix):
            return decoder(url)
    return url
def normalize_unicode_link(url):
    """Percent-escape every non-ASCII character of *url*.

    unicode input is encoded to UTF-8 before quoting; plain byte strings
    are quoted byte-by-byte as-is (Python 2 semantics).
    """
    import re

    def escape_unicode(m):
        c = m.group()
        if ord(c) < 0x80:
            # ASCII passes through untouched.
            return c
        else:
            return urllib.quote(c.encode('utf-8'))

    def escape_str(m):
        c = m.group()
        if ord(c) < 0x80:
            return c
        else:
            return urllib.quote(c)

    # r'.' substitutes one character at a time (newlines are excluded).
    if type(url) == unicode:
        return re.sub(r'.', escape_unicode, url)
    else:
        return re.sub(r'.', escape_str, url)
def unquote_url(x):
    """Percent-decode *x*; decode the bytes as UTF-8, falling back to GBK.

    If unquoting already yielded unicode, it is returned unchanged.
    """
    x = urllib.unquote(x)
    if type(x) != str:
        return x
    try:
        return x.decode('utf-8')
    except UnicodeDecodeError:
        # NOTE(review): input that is neither UTF-8 nor GBK still raises
        # UnicodeDecodeError here, unhandled -- intentional? TODO confirm.
        return x.decode('gbk') # can't decode in utf-8 and gbk
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2010-2025 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.resolver
import org.jetbrains.kotlin.analysis.test.framework.projectStructure.KtTestModule
import org.jetbrains.kotlin.psi.KtElement
import org.jetbrains.kotlin.psi.KtFile
import org.jetbrains.kotlin.test.services.TestServices
abstract class AbstractResolveSymbolByFileTest : AbstractResolveSymbolTest() {
    /**
     * Resolves every [KtElement] of the given [file], instead of only the
     * elements marked in the test data.
     */
    override fun collectElementsToResolve(
        file: KtFile,
        module: KtTestModule,
        testServices: TestServices,
    ): Collection<ResolveTestCaseContext<KtElement>> {
        return collectAllKtElements(file)
    }
}
|
kotlin
|
github
|
https://github.com/JetBrains/kotlin
|
analysis/analysis-api-impl-base/testFixtures/org/jetbrains/kotlin/analysis/api/impl/base/test/cases/components/resolver/AbstractResolveSymbolByFileTest.kt
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" Commit hook for pylint """
import decimal
import os
import re
import sys
import subprocess
import collections
import ConfigParser
import json
import urllib2
ExecutionResult = collections.namedtuple('ExecutionResult',
'status, stdout, stderr')
def _execute(cmd):
process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = process.communicate()
status = process.poll()
return ExecutionResult(status, stdout, stderr)
def _current_commit():
    """Return 'HEAD' when it resolves, else git's well-known empty-tree SHA.

    In a repository with no commits yet, 'git rev-parse --verify HEAD'
    fails; diffing against the empty tree object is the conventional
    fallback for that case.
    """
    empty_tree = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
    head_ok = _execute('git rev-parse --verify HEAD'.split()).status == 0
    return 'HEAD' if head_ok else empty_tree
def _get_list_of_committed_python_files():
    """ Returns a list of files about to be commited. """
    # Each entry is a (path, score) pair; the score starts as None and is
    # filled in later by the pylint run.
    files = []
    # pylint: disable=E1103
    diff_index_cmd = 'git diff-index %s' % _current_commit()
    output = subprocess.check_output(diff_index_cmd.split())
    for result in output.split('\n'):
        if result != '':
            result = result.split()
            # diff-index fields: src mode, dst mode, src sha, dst sha,
            # status letter, path -- keep only Added/Modified files.
            if result[4] in ['A', 'M']:
                if _is_python_file(result[5]):
                    files.append((result[5], None)) # None is initial score
    return files
def _get_user():
    """
    Returns user
    """
    # First whitespace-separated token of 'git var GIT_AUTHOR_IDENT'.
    # NOTE(review): this definition is shadowed by a later one-argument
    # _get_user(commit) further down in this module, so the zero-argument
    # call in check_repo() fails at runtime -- needs a rename or a
    # default argument on the later definition.
    get_user_cmd = 'git var GIT_AUTHOR_IDENT '
    user = subprocess.check_output(get_user_cmd.split())
    return user.split()[0]
def _is_python_file(filename):
"""Check if the input file looks like a Python script
Returns True if the filename ends in ".py" or if the first line
contains "python" and "#!", returns False otherwise.
"""
if filename.endswith('.py'):
return True
else:
with open(filename, 'r') as file_handle:
first_line = file_handle.readline()
return 'python' in first_line and '#!' in first_line
_SCORE_REGEXP = \
re.compile(r'^Your\ code\ has\ been\ rated\ at\ (\-?[0-9\.]+)/10')
def _parse_score(pylint_output):
"""Parse the score out of pylint's output as a float
If the score is not found, return 0.0.
"""
for line in pylint_output.splitlines():
match = re.match(_SCORE_REGEXP, line)
if match:
return float(match.group(1))
return 0.0
def _get_git_previous_commit():
    """Return the abbreviated SHA of the most recent commit."""
    log_cmd = ['git', 'log', '-n', '1', '--pretty=format:%h']
    return subprocess.check_output(log_cmd)
# Floor score assigned when no previous score could be computed at all.
_GIT_PYLINT_MINIMUM_SCORE = 4


def _get_prev_score(pylint, python_files, commit_sha='HEAD~1'):
    """
    Getting prev commit file score
    """
    # Averages the pylint scores of *python_files* as they existed at
    # *commit_sha* (default: previous commit).  Empty files are skipped.
    total_score = 0
    checked_pylint_files = 0
    avg_score = 0
    for (python_file, score) in python_files:
        if is_empty_file(python_file):
            continue
        # Materialize the file's content at commit_sha into /tmp.
        git_commit_file = create_specfic_commit_git_file(python_file,
                                                         commit_sha)
        (out, _) = _run_pylint(pylint, git_commit_file)
        os.remove(git_commit_file)
        # NOTE(review): a legitimate 0.0 score is indistinguishable from
        # "no score" because _parse_score() returns 0.0 on failure.
        if _parse_score(out):
            score = _parse_score(out)
            total_score += score
            checked_pylint_files += 1
    if checked_pylint_files:
        avg_score = total_score / checked_pylint_files
    if avg_score == 0:
        # Fall back to the configured minimum so impact math stays sane.
        avg_score = _GIT_PYLINT_MINIMUM_SCORE
    return avg_score
def get_pylint_score(lint, git_commit_file):
    """Run pylint on *git_commit_file* and return its parsed score."""
    out, _ = _run_pylint(lint, git_commit_file)
    return _parse_score(out)
def _run_pylint(pylint, python_file, suppress_report=False):
    """
    Run pylint on python_file
    """
    # Returns the (stdout, stderr) of the pylint process.  The locale is
    # pinned so report parsing is not broken by translated output.
    try:
        command = [pylint]
        penv = os.environ.copy()
        penv['LANG'] = 'it_IT.UTF-8'
        penv['LC_CTYPE'] = 'it_IT.UTF-8'
        penv['LC_COLLATE'] = 'it_IT.UTF-8'
        command.append(python_file)
        if suppress_report:
            command.append('--reports=n')
        proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, env=penv)
        (out, _value) = proc.communicate()
    except OSError:
        # pylint binary missing or unlaunchable: abort the whole hook.
        print '\nAn error occurred. Is pylint installed?'
        sys.exit(1)
    return (out, _value)
def _process_git_log_data(git_log_data):
"""
parse and process git log data
"""
git_log_commit = []
for each_commit in git_log_data.split('\n'):
git_log_commit.append(json.loads(each_commit))
return git_log_commit
def _get_lint_type(filename):
"""
Getting type of lint e.g. pylint or golint
"""
file_ext = filename.split('.')[-1]
lint_type = {'py': 'pylint'}
return lint_type.get(file_ext, '')
def _get_lint_score(lint, git_commit_file):
    """Score *git_commit_file* with the scorer matching its extension.

    Returns 0.0 for extensions with no registered scorer.  (The original
    dispatch fell back to the string 'other' and then *called* it,
    raising TypeError for every non-Python file.)
    """
    file_ext = git_commit_file.split('.')[-1]
    scorers = {'py': get_pylint_score}  # 'go': _get_golint_score
    scorer = scorers.get(file_ext)
    if scorer is None:
        return 0.0
    return scorer(lint, git_commit_file)
def _get_file_score(lint, lint_file, commit_sha='HEAD~1'):
    """Score *lint_file* as it existed at *commit_sha*.

    A temporary snapshot of the file is written to /tmp and always
    deleted afterwards (the original leaked it if scoring raised).
    """
    snapshot = create_specfic_commit_git_file(lint_file, commit_sha)
    try:
        return _get_lint_score(lint, snapshot)
    finally:
        os.remove(snapshot)
def _get_status(commit_score):
"""
Get Status of file
"""
if commit_score >= 0:
return 'PASSED'
else:
return 'FAILED'
def _get_impact(commit_score):
"""
Get changed in score
"""
return str(commit_score * 10) + '%'
def _get_repo_name():
"""
Get repo name on which pylint is running
"""
return os.path.basename(os.getcwd())
def get_insertion_and_deletions(changed_file, commit, prev_commit):
    """
    Get number of insert and delete on a particular commit
    """
    # Scans 'git diff --stat' output for bare digit runs; the first two
    # matches are the file/changes counters, so insertions and deletions
    # are taken as the 3rd and 4th matches when present.
    # NOTE(review): digits inside the file name itself would shift these
    # indices -- fragile; verify against real diff --stat output.
    updates = \
        run_subprocess('git diff --stat {0}..{1} {2}'.format(commit,
                       prev_commit, changed_file))
    updates = re.findall(r'\d+', updates)
    insert = 0
    delete = 0
    if len(updates) > 2:
        insert = updates[2]
    if len(updates) > 3:
        delete = updates[3]
    # NOTE(review): values are returned as strings (regex matches), while
    # the 0 defaults are ints -- mixed types for callers.
    return (insert, delete)
def _get_user(commit=None):
    """Return the author e-mail of *commit*.

    With no argument, fall back to the identity git would use for a new
    commit ('git var GIT_AUTHOR_IDENT').  The optional parameter keeps
    this redefinition compatible with the earlier zero-argument
    _get_user() that check_repo() still calls (which this def shadows).
    """
    if commit is None:
        get_user_cmd = 'git var GIT_AUTHOR_IDENT '
    else:
        get_user_cmd = 'git log -1 %s ' % commit
        get_user_cmd += '--format=%ae'
    user = subprocess.check_output(get_user_cmd.split())
    return user.split()[0]
def get_commit_file_data(git_file, commit_sha='HEAD~1'):
    """Return the content of *git_file* as of *commit_sha*, via 'git show'."""
    show_cmd = 'git show {0}:{1}'.format(commit_sha, git_file)
    return run_subprocess(show_cmd)
def is_empty_file(python_file):
    """Return True when *python_file* exists as a regular file of zero size."""
    return os.path.isfile(python_file) and os.stat(python_file).st_size == 0
def create_specfic_commit_git_file(lint_file, commit_sha):
    """Write the content of *lint_file* at *commit_sha* to a /tmp snapshot.

    Returns the snapshot path ('/tmp/lint_<sha><basename>').  The caller
    is responsible for deleting the snapshot when done.
    """
    git_commit_file_name = 'lint_' + commit_sha + lint_file.split('/')[-1]
    git_commit_file = '/tmp/' + git_commit_file_name
    # Fix: the original tested the bare file name (relative to the CWD)
    # instead of the /tmp path, so stale snapshots were never removed.
    if os.path.isfile(git_commit_file):
        os.remove(git_commit_file)
    # 'with' guarantees the handle is closed even if the write fails.
    with open(git_commit_file, 'w') as snapshot:
        snapshot.write(get_commit_file_data(lint_file, commit_sha))
    return git_commit_file
def get_changed_files(base, commit):
    """List the paths touched between *base* and *commit*.

    A base of forty zeros marks a branch-creation push, where only the
    pushed commit itself can be inspected.
    """
    null_sha = '0000000000000000000000000000000000000000'
    if base == null_sha:
        cmd = 'git show --pretty=format: --no-commit-id --name-only %s' % commit
    else:
        cmd = 'git diff --numstat --name-only %s..%s' % (base, commit)
    results = run_subprocess(cmd)
    return results.strip().split('\n')
def run_subprocess(args):
"""
Run subprocess on git commands
"""
args = args.split(' ')
try:
environ = os.environ.copy()
process = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=environ)
(stdout, stderr) = process.communicate()
status = process.poll()
except Exception, e:
print str(e)
return ExecutionResult(status, stdout, stderr).stdout
def is_commit_already_exist(commit):
    """
    Checking commit is passed to git points or not
    """
    # Asks the score service whether *commit* was already recorded so the
    # hook does not re-process pushes.  Returns (exists, elapsed_seconds).
    # NOTE(review): the endpoint is a hard-coded LAN address; move it to
    # configuration.  The broad 'except' deliberately treats any network
    # failure as "not seen yet" (best effort).
    from timeit import default_timer as timer
    start_timer = timer()
    try:
        reponame = _get_repo_name()
        url_get_commit = 'http://10.70.210.192:4000/api/Commits/%s/%s/isExists' % (commit,reponame)
        request = urllib2.Request(url_get_commit)
        json_data = urllib2.urlopen(url_get_commit).read()
        commit_data = json.loads(json_data)
        commit_exists = commit_data.get('isExists', "")
        if commit_exists:
            return True, timer()-start_timer
    except Exception as e:
        pass
    return False, timer()-start_timer
def push_commit_score(
    limit,
    pylint='pylint',
    pylintrc='.pylintrc',
    pylint_params=None,
    suppress_report=False,
    datfile='/tmp/git.dat',
    scorefile='/tmp/scores.dat',
):
    """ Main function doing the checks
    :type limit: float
    :param limit: Minimum score to pass the commit
    :type pylint: str
    :param pylint: Path to pylint executable
    :type pylintrc: str
    :param pylintrc: Path to pylintrc file
    :type pylint_params: str
    :param pylint_params: Custom pylint parameters to add to the pylint command
    :type suppress_report: bool
    :param suppress_report: Suppress report if score is below limit
    """
    # Server-side hook entry point: reads "<base> <commit> <ref>" from
    # stdin, scores every changed lintable file at <commit>, and POSTs
    # one JSON record per file to the score service.
    line = sys.stdin.read()
    (base, commit, ref) = line.strip().split()
    commit_exist, time_taken_by_request = is_commit_already_exist(commit)
    if commit_exist:
        # Commit already recorded by the service: nothing to do.
        sys.exit(0)
    git_changed_file_name_list = get_changed_files(base, commit)
    for changed_file in git_changed_file_name_list:
        commit_info = {}
        lint = _get_lint_type(changed_file)
        if lint:
            sys.stdout.write('Processing start\n')
            sys.stdout.write('pylint on {} \t \n'.format(changed_file))
            sys.stdout.flush()
            file_score = _get_file_score(lint, changed_file, commit)
            # prev_file_score = _get_file_score(lint, changed_file, base)
            commit_score = file_score
            (insert, delete) = \
                get_insertion_and_deletions(changed_file, commit, base)
            user = _get_user(commit)
            commit_info = {
                'score': commit_score,
                'commitid': commit,
                'email': _get_user(commit),
                'status': _get_status(commit_score),
                'file': changed_file,
                'repo': _get_repo_name(),
                'insert': insert,
                'delete': delete,
            }
            jsondata = json.dumps(commit_info)
            url = 'http://10.70.210.192:4000/api/Commits'
            req = urllib2.Request(url, jsondata)
            req.add_header('Content-Type', 'application/json')
            urllib2.urlopen(req).read()
            # Add some output
            print 'Score : {:.2}/10.00'.format(decimal.Decimal(commit_score))
            with open(datfile, 'a+') as f:
                # NOTE(review): the format arguments below do not line up
                # with the placeholder text (commit/status/base appear in
                # a different order than the labels suggest) -- verify
                # the intended order.
                f.write('{:40s} COMMIT SCORE {:5.2f} IMPACT ON REPO AGAINST ref {} and base {} STATUS {}, time_taken_by_request by {} \n'.format(user,
                        commit_score, commit,
                        _get_status(commit_score), base, time_taken_by_request ))
def check_repo(
    limit,
    pylint='pylint',
    pylintrc='.pylintrc',
    pylint_params=None,
    suppress_report=False,
    datfile='/tmp/git.dat',
    scorefile='/tmp/scores.dat',
):
    """ Main function doing the checks
    :type limit: float
    :param limit: Minimum score to pass the commit
    :type pylint: str
    :param pylint: Path to pylint executable
    :type pylintrc: str
    :param pylintrc: Path to pylintrc file
    :type pylint_params: str
    :param pylint_params: Custom pylint parameters to add to the pylint command
    :type suppress_report: bool
    :param suppress_report: Suppress report if score is below limit
    """
    # List of checked files and their results
    python_files = _get_list_of_committed_python_files()
    # Set the exit code
    all_filed_passed = True
    total_score = 0.0
    # Don't do anything if there are no Python files
    if len(python_files) == 0:
        sys.exit(0)
    # Load any pre-commit-hooks options from a .pylintrc file (if there is one)
    if os.path.exists(pylintrc):
        conf = ConfigParser.SafeConfigParser()
        conf.read(pylintrc)
        if conf.has_option('pre-commit-hook', 'command'):
            pylint = conf.get('pre-commit-hook', 'command')
        if conf.has_option('pre-commit-hook', 'params'):
            # NOTE(review): TypeError when pylint_params is left at its
            # default of None -- needs a '' default or a guard here.
            pylint_params += ' ' + conf.get('pre-commit-hook', 'params')
        if conf.has_option('pre-commit-hook', 'limit'):
            limit = float(conf.get('pre-commit-hook', 'limit'))
    # Pylint Python files
    i = 1
    n_files = len(python_files)
    for (python_file, score) in python_files:
        # Allow __init__.py files to be completely empty
        if is_empty_file(python_file):
            print 'Skipping pylint on {} (empty __init__.py)..\tSKIPPED'.format(python_file)
            # Bump parsed files
            i += 1
            continue
        # Start pylinting
        sys.stdout.write('Running pylint on {} (file {}/{})..\t'.format(python_file,
                         i, n_files))
        sys.stdout.flush()
        (out, _) = _run_pylint(pylint, python_file)
        # Verify the score
        score = _parse_score(out)
        file_prev_score = _get_prev_score(pylint, [(python_file,
                                                    score)])
        # A file passes when it did not regress against its previous
        # score, or when it clears the absolute limit.
        if file_prev_score and score >= file_prev_score:
            status = 'PASSED'
        elif score >= float(limit):
            status = 'PASSED'
        else:
            status = 'FAILED'
            all_filed_passed = False
        total_score += score
        # Add some output
        print '{:.2}/10.00\t{}'.format(decimal.Decimal(score), status)
        if 'FAILED' in status:
            # Re-run without the full report for a compact failure dump.
            (out, _) = _run_pylint(pylint, python_file,
                                   suppress_report=True)
            print out
        # Bump parsed files
        i += 1
    # NOTE(review): _get_user() is shadowed by the one-argument
    # redefinition in this module, so this zero-argument call raises
    # TypeError at runtime.
    user = _get_user()
    prev_score = _get_prev_score(pylint, python_files)
    # NOTE(review): 'status' is unbound here when every file was skipped
    # as empty (the loop never assigned it) -- NameError risk.  It also
    # only reflects the LAST file's status, not the whole run.
    if 'FAILED' in status:
        new_score = total_score
    else:
        new_score = (total_score + prev_score) / (n_files + 1)
    impact = new_score - prev_score
    total_score = total_score / n_files
    print 'Total score ', str(total_score)
    print 'Your score made an impact of ', str(impact)
    return all_filed_passed
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Microsoft's RoboHornet Pro benchmark."""
import os
from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
class _RobohornetProMeasurement(page_measurement.PageMeasurement):
    """Drives one RoboHornet Pro run in the tab and records the total time."""

    def MeasurePage(self, _, tab, results):
        # Kick off the benchmark, then wait (up to 120 s) for the results
        # table to contain the 'Total' row before reading the timers.
        tab.ExecuteJavaScript('ToggleRoboHornet()')
        tab.WaitForJavaScriptExpression(
            'document.getElementById("results").innerHTML.indexOf("Total") != -1',
            120)
        # Elapsed time as reported by the page's own start/stop timers.
        result = int(tab.EvaluateJavaScript('stopTime - startTime'))
        results.Add('Total', 'ms', result)
class RobohornetPro(test.Test):
  """Telemetry test wrapper binding the RoboHornet Pro page to its measurement."""
  test = _RobohornetProMeasurement

  def CreatePageSet(self, options):
    # Measurement require use of real Date.now() for measurement.
    page_set_dict = {
        'archive_data_file': '../page_sets/data/robohornet_pro.json',
        'make_javascript_deterministic': False,
        'pages': [
            {'url':
             'http://ie.microsoft.com/testdrive/performance/robohornetpro/'},
        ],
    }
    return page_set.PageSet.FromDict(page_set_dict, os.path.abspath(__file__))
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package sort_test
import (
"fmt"
"sort"
)
// Person pairs a name with an age; it is the element type sorted below.
type Person struct {
	Name string
	Age int
}
// String renders a Person as "Name: Age", satisfying fmt.Stringer so
// that fmt.Println prints people readably.
func (p Person) String() string {
	formatted := fmt.Sprintf("%s: %d", p.Name, p.Age)
	return formatted
}
// ByAge implements sort.Interface for []Person based on
// the Age field.
type ByAge []Person
// Len, Swap and Less are the three methods sort.Sort requires.
func (a ByAge) Len() int { return len(a) }
func (a ByAge) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a ByAge) Less(i, j int) bool { return a[i].Age < a[j].Age }
// Example demonstrates both sorting styles; the // Output: block below is
// checked verbatim by the example test harness.
func Example() {
	people := []Person{
		{"Bob", 31},
		{"John", 42},
		{"Michael", 17},
		{"Jenny", 26},
	}
	fmt.Println(people)
	// There are two ways to sort a slice. First, one can define
	// a set of methods for the slice type, as with ByAge, and
	// call sort.Sort. In this first example we use that technique.
	sort.Sort(ByAge(people))
	fmt.Println(people)
	// The other way is to use sort.Slice with a custom Less
	// function, which can be provided as a closure. In this
	// case no methods are needed. (And if they exist, they
	// are ignored.) Here we re-sort in reverse order: compare
	// the closure with ByAge.Less.
	sort.Slice(people, func(i, j int) bool {
		return people[i].Age > people[j].Age
	})
	fmt.Println(people)
	// Output:
	// [Bob: 31 John: 42 Michael: 17 Jenny: 26]
	// [Michael: 17 Jenny: 26 Bob: 31 John: 42]
	// [John: 42 Bob: 31 Jenny: 26 Michael: 17]
}
|
go
|
github
|
https://github.com/golang/go
|
src/sort/example_interface_test.go
|
from lib.common import helpers
class Module:
    # Empire post-exploitation module: schedules a job on a Chronos
    # scheduler by POSTing JSON to its HTTP API from a Python agent.

    def __init__(self, mainMenu, params=[]):
        # metadata info about the module, not modified during runtime
        self.info = {
            # name for the module that will appear in module menus
            'Name': 'Chronos API Add Job',
            # list of one or more authors for the module
            'Author': ['@TweekFawkes'],
            # more verbose multi-line description of the module
            'Description': ('Add a Chronos job using the HTTP API service for the Chronos Framework'),
            # True if the module needs to run in the background
            'Background' : True,
            # File extension to save the file as
            'OutputExtension': "",
            # if the module needs administrative privileges
            'NeedsAdmin' : False,
            # True if the method doesn't touch disk/is reasonably opsec safe
            'OpsecSafe' : True,
            # the module language
            'Language' : 'python',
            # the minimum language version needed
            'MinLanguageVersion' : '2.6',
            # list of any references/other comments
            'Comments': ["Docs: https://mesos.github.io/chronos/docs/api.html"]
        }
        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                # The 'Agent' option is the only one that MUST be in a module
                'Description' : 'Agent to execute module on.',
                'Required' : True,
                'Value' : ''
            },
            'Target' : {
                # host to reach the Chronos scheduler on
                'Description' : 'FQDN, domain name, or hostname to lookup on the remote target.',
                'Required' : True,
                'Value' : 'chronos.mesos'
            },
            'Port' : {
                # Chronos HTTP API port (default 8080)
                'Description' : 'The port to connect to.',
                'Required' : True,
                'Value' : '8080'
            },
            'Name' : {
                # job identifier as it will appear in Chronos
                'Description' : 'The name of the chronos job.',
                'Required' : True,
                'Value' : 'scheduledJob001'
            },
            'Command' : {
                # shell command Chronos will run ("shell":true in the payload)
                'Description' : 'The command to run.',
                'Required' : True,
                'Value' : 'id'
            },
            'Owner' : {
                # owner email recorded on the job
                'Description' : 'The owner of the job.',
                'Required' : True,
                'Value' : 'admin@example.com'
            },
            'OwnerName' : {
                # human-readable owner name recorded on the job
                'Description' : 'The owner name of the job.',
                'Required' : True,
                'Value' : 'admin'
            },
            'Description' : {
                # free-text description recorded on the job
                'Description' : 'The description of the job.',
                'Required' : True,
                'Value' : 'Scheduled Job 001'
            },
            'Schedule' : {
                # ISO 8601 repeating interval (R/<start>/<period>)
                'Description' : 'The schedule for the job.',
                'Required' : True,
                'Value' : 'R/2016-07-15T00:08:35Z/PT24H'
            },
            'LastSuccess' : {
                # Example: 2016-07-16T18:58:25.173Z
                'Description' : 'The last successful run for the job (optional).',
                'Required' : False,
                'Value' : ''
            }
        }
        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu
        # During instantiation, any settable option parameters
        # are passed as an object set to the module and the
        # options dictionary is automatically set. This is mostly
        # in case options are passed on the command line
        if params:
            for param in params:
                option, value = param
                if option in self.options:
                    self.options[option]['Value'] = value

    def generate(self, obfuscate=False, obfuscationCommand=""):
        # Build the Python 2 payload the agent will execute; option values are
        # interpolated directly into the script template below.
        target = self.options['Target']['Value']
        port = self.options['Port']['Value']
        name = self.options['Name']['Value']
        command = self.options['Command']['Value']
        owner = self.options['Owner']['Value']
        ownerName = self.options['OwnerName']['Value']
        description = self.options['Description']['Value']
        schedule = self.options['Schedule']['Value']
        last = self.options['LastSuccess']['Value']
        # NOTE(review): option values are substituted into string literals
        # unescaped — quotes in a value would break the generated script.
        script = """
import urllib2
target = "%s"
port = "%s"
name = "%s"
command = "%s"
owner = "%s"
ownerName = "%s"
description = "%s"
schedule = "%s"
last = "%s"
url = "http://" + target + ":" + port + "/scheduler/iso8601"
try:
    data = '{"name":"'+name+'","command":"'+command+'","shell":true,"epsilon":"PT30M","executor":"","executorFlags":"","retries":2,"owner":"'+owner+'","ownerName":"'+ownerName+'","description":"'+description+'","async":false,"successCount":1,"errorCount":0,"lastSuccess":"'+last+'","lastError":"","cpus":0.1,"disk":256.0,"mem":128.0,"disabled":false,"softError":false,"dataProcessingJobType":false,"errorsSinceLastSuccess":0,"uris":[],"environmentVariables":[],"arguments":[],"highPriority":true,"runAsUser":"root","constraints":[],"schedule":"'+schedule+'","scheduleTimeZone":""}'
    request = urllib2.Request(url, data)
    request.add_header('User-Agent',
                       'Mozilla/6.0 (X11; Linux x86_64; rv:24.0) '
                       'Gecko/20140205 Firefox/27.0 Iceweasel/25.3.0')
    request.add_header('Content-Type', 'application/json')
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    content = opener.open(request).read()
    print str(content)
except Exception as e:
    print "Failure sending payload: " + str(e)
print "Finished"
""" %(target, port, name, command, owner, ownerName, description, schedule, last)
        return script
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Toggles
//
// Used in conjunction with global variables to enable certain theme features.
// Import order matters: vendor and helper mixins must be loaded before the
// component/skin mixins that build on them.

// Vendor
@import "vendor/rfs";

// Deprecate
@import "mixins/deprecate";

// Helpers
@import "mixins/breakpoints";
@import "mixins/color-mode";
@import "mixins/color-scheme";
@import "mixins/image";
@import "mixins/resize";
@import "mixins/visually-hidden";
@import "mixins/reset-text";
@import "mixins/text-truncate";

// Utilities
@import "mixins/utilities";

// Components
@import "mixins/backdrop";
@import "mixins/buttons";
@import "mixins/caret";
@import "mixins/pagination";
@import "mixins/lists";
@import "mixins/forms";
@import "mixins/table-variants";

// Skins
@import "mixins/border-radius";
@import "mixins/box-shadow";
@import "mixins/gradients";
@import "mixins/transition";

// Layout
@import "mixins/clearfix";
@import "mixins/container";
@import "mixins/grid";
unknown
|
github
|
https://github.com/twbs/bootstrap
|
scss/_mixins.scss
|
#! /usr/bin/env python
# Original code by Guido van Rossum; extensive changes by Sam Bayer,
# including code to check URL fragments.
"""Web tree checker.
This utility is handy to check a subweb of the world-wide web for
errors. A subweb is specified by giving one or more ``root URLs''; a
page belongs to the subweb if one of the root URLs is an initial
prefix of it.
File URL extension:
In order to easy the checking of subwebs via the local file system,
the interpretation of ``file:'' URLs is extended to mimic the behavior
of your average HTTP daemon: if a directory pathname is given, the
file index.html in that directory is returned if it exists, otherwise
a directory listing is returned. Now, you can point webchecker to the
document tree in the local file system of your HTTP daemon, and have
most of it checked. In fact the default works this way if your local
web tree is located at /usr/local/etc/httpd/htdpcs (the default for
the NCSA HTTP daemon and probably others).
Report printed:
When done, it reports pages with bad links within the subweb. When
interrupted, it reports for the pages that it has checked already.
In verbose mode, additional messages are printed during the
information gathering phase. By default, it prints a summary of its
work status every 50 URLs (adjustable with the -r option), and it
reports errors as they are encountered. Use the -q option to disable
this output.
Checkpoint feature:
Whether interrupted or not, it dumps its state (a Python pickle) to a
checkpoint file and the -R option allows it to restart from the
checkpoint (assuming that the pages on the subweb that were already
processed haven't changed). Even when it has run till completion, -R
can still be useful -- it will print the reports again, and -Rq prints
the errors only. In this case, the checkpoint file is not written
again. The checkpoint file can be set with the -d option.
The checkpoint file is written as a Python pickle. Remember that
Python's pickle module is currently quite slow. Give it the time it
needs to load and save the checkpoint file. When interrupted while
writing the checkpoint file, the old checkpoint file is not
overwritten, but all work done in the current run is lost.
Miscellaneous:
- You may find the (Tk-based) GUI version easier to use. See wcgui.py.
- Webchecker honors the "robots.txt" convention. Thanks to Skip
Montanaro for his robotparser.py module (included in this directory)!
The agent name is hardwired to "webchecker". URLs that are disallowed
by the robots.txt file are reported as external URLs.
- Because the SGML parser is a bit slow, very large SGML files are
skipped. The size limit can be set with the -m option.
- When the server or protocol does not tell us a file's type, we guess
it based on the URL's suffix. The mimetypes.py module (also in this
directory) has a built-in table mapping most currently known suffixes,
and in addition attempts to read the mime.types configuration files in
the default locations of Netscape and the NCSA HTTP daemon.
- We follow links indicated by <A>, <FRAME> and <IMG> tags. We also
honor the <BASE> tag.
- We now check internal NAME anchor links, as well as toplevel links.
- Checking external links is now done by default; use -x to *disable*
this feature. External links are now checked during normal
processing. (XXX The status of a checked link could be categorized
better. Later...)
- If external links are not checked, you can use the -t flag to
provide specific overrides to -x.
Usage: webchecker.py [option] ... [rooturl] ...
Options:
-R -- restart from checkpoint file
-d file -- checkpoint filename (default %(DUMPFILE)s)
-m bytes -- skip HTML pages larger than this size (default %(MAXPAGE)d)
-n -- reports only, no checking (use with -R)
-q -- quiet operation (also suppresses external links report)
-r number -- number of links processed per round (default %(ROUNDSIZE)d)
-t root -- specify root dir which should be treated as internal (can repeat)
-v -- verbose operation; repeating -v will increase verbosity
-x -- don't check external links (these are often slow to check)
-a -- don't check name anchors
Arguments:
rooturl -- URL to start checking
(default %(DEFROOT)s)
"""
__version__ = "$Revision$"
import sys
import os
from types import *
import StringIO
import getopt
import pickle
import urllib
import urlparse
import sgmllib
import cgi
import mimetypes
import robotparser
# Extract real version number if necessary
# (__version__ is an RCS "$Revision$" keyword; when expanded it has the
# form "$Revision: N $" and we keep just the middle token)
if __version__[0] == '$':
    _v = __version__.split()
    if len(_v) == 3:
        __version__ = _v[1]
# Tunable parameters (each doubles as a Checker class default and as the
# baseline for the corresponding command-line option)
DEFROOT = "file:/usr/local/etc/httpd/htdocs/" # Default root URL
CHECKEXT = 1 # Check external references (1 deep)
VERBOSE = 1 # Verbosity level (0-3)
MAXPAGE = 150000 # Ignore files bigger than this
ROUNDSIZE = 50 # Number of links processed per round
DUMPFILE = "@webchecker.pickle" # Pickled checkpoint
AGENTNAME = "webchecker" # Agent name for robots.txt parser
NONAMES = 0 # Force name anchor checking
# Global variables
def main():
    """Command-line entry point: parse options, run the crawl, report,
    and always attempt to save a checkpoint (even on interrupt)."""
    checkext = CHECKEXT
    verbose = VERBOSE
    maxpage = MAXPAGE
    roundsize = ROUNDSIZE
    dumpfile = DUMPFILE
    restart = 0
    norun = 0
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'Rd:m:nqr:t:vxa')
    except getopt.error, msg:
        # Usage error: route the message + docstring to stderr and exit.
        sys.stdout = sys.stderr
        print msg
        print __doc__%globals()
        sys.exit(2)
    # The extra_roots variable collects extra roots.
    extra_roots = []
    nonames = NONAMES
    # Note: independent `if`s (not elif) — each option is checked in turn,
    # and -v/-x/-a toggle relative to the current value.
    for o, a in opts:
        if o == '-R':
            restart = 1
        if o == '-d':
            dumpfile = a
        if o == '-m':
            maxpage = int(a)
        if o == '-n':
            norun = 1
        if o == '-q':
            verbose = 0
        if o == '-r':
            roundsize = int(a)
        if o == '-t':
            extra_roots.append(a)
        if o == '-a':
            nonames = not nonames
        if o == '-v':
            verbose = verbose + 1
        if o == '-x':
            checkext = not checkext
    if verbose > 0:
        print AGENTNAME, "version", __version__
    # Either resume from a pickled checkpoint or start a fresh Checker;
    # flags are (re)applied in both cases.
    if restart:
        c = load_pickle(dumpfile=dumpfile, verbose=verbose)
    else:
        c = Checker()
    c.setflags(checkext=checkext, verbose=verbose,
               maxpage=maxpage, roundsize=roundsize,
               nonames=nonames
               )
    if not restart and not args:
        args.append(DEFROOT)
    for arg in args:
        c.addroot(arg)
    # The -t flag is only needed if external links are not to be
    # checked. So -t values are ignored unless -x was specified.
    if not checkext:
        for root in extra_roots:
            # Make sure it's terminated by a slash,
            # so that addroot doesn't discard the last
            # directory component.
            if root[-1] != "/":
                root = root + "/"
            c.addroot(root, add_to_do = 0)
    try:
        if not norun:
            try:
                c.run()
            except KeyboardInterrupt:
                if verbose > 0:
                    print "[run interrupted]"
        try:
            c.report()
        except KeyboardInterrupt:
            if verbose > 0:
                print "[report interrupted]"
    finally:
        # Always offer a restart hint when a checkpoint was written.
        if c.save_pickle(dumpfile):
            if dumpfile == DUMPFILE:
                print "Use ``%s -R'' to restart." % sys.argv[0]
            else:
                print "Use ``%s -R -d %s'' to restart." % (sys.argv[0],
                                                           dumpfile)
def load_pickle(dumpfile=DUMPFILE, verbose=VERBOSE):
    """Restore a pickled Checker checkpoint from `dumpfile` and return it."""
    if verbose > 0:
        print "Loading checkpoint from %s ..." % dumpfile
    f = open(dumpfile, "rb")
    c = pickle.load(f)
    f.close()
    if verbose > 0:
        print "Done."
        print "Root:", "\n ".join(c.roots)
    return c
class Checker:
    """Crawler state machine: walks a subweb, tracking to-do/done/bad URLs.

    Class attributes double as default flag values; validflags captures
    (via dir() at class-creation time) exactly those names, so setflags()
    can reject unknown keywords.
    """
    checkext = CHECKEXT
    verbose = VERBOSE
    maxpage = MAXPAGE
    roundsize = ROUNDSIZE
    nonames = NONAMES
    validflags = tuple(dir())
    def __init__(self):
        self.reset()
    def setflags(self, **kw):
        """Set crawl flags; raises NameError for any name not in validflags."""
        for key in kw.keys():
            if key not in self.validflags:
                raise NameError, "invalid keyword argument: %s" % str(key)
        for key, value in kw.items():
            setattr(self, key, value)
    def reset(self):
        """(Re)initialize all crawl state; also used by __setstate__."""
        self.roots = []
        self.todo = {}
        self.done = {}
        self.bad = {}
        # Add a name table, so that the name URLs can be checked. Also
        # serves as an implicit cache for which URLs are done.
        self.name_table = {}
        self.round = 0
        # The following are not pickled:
        self.robots = {}
        self.errors = {}
        self.urlopener = MyURLopener()
        self.changed = 0
    def note(self, level, format, *args):
        # Print only when verbosity exceeds `level`.
        if self.verbose > level:
            if args:
                format = format%args
            self.message(format)
    def message(self, format, *args):
        # Unconditional formatted print.
        if args:
            format = format%args
        print format
    def __getstate__(self):
        # Pickle only durable crawl state (see reset() for what is excluded).
        return (self.roots, self.todo, self.done, self.bad, self.round)
    def __setstate__(self, state):
        self.reset()
        (self.roots, self.todo, self.done, self.bad, self.round) = state
        # Rebuild the unpickled robots.txt parsers and error index.
        for root in self.roots:
            self.addrobot(root)
        for url in self.bad.keys():
            self.markerror(url)
    def addroot(self, root, add_to_do = 1):
        """Register a root URL (trimmed to its directory) and queue it."""
        if root not in self.roots:
            troot = root
            scheme, netloc, path, params, query, fragment = \
                    urlparse.urlparse(root)
            i = path.rfind("/") + 1
            if 0 < i < len(path):
                path = path[:i]
                troot = urlparse.urlunparse((scheme, netloc, path,
                                             params, query, fragment))
            self.roots.append(troot)
            self.addrobot(root)
            if add_to_do:
                self.newlink((root, ""), ("<root>", root))
    def addrobot(self, root):
        """Fetch and cache the robots.txt parser for a root's site."""
        root = urlparse.urljoin(root, "/")
        if self.robots.has_key(root): return
        url = urlparse.urljoin(root, "/robots.txt")
        self.robots[root] = rp = robotparser.RobotFileParser()
        self.note(2, "Parsing %s", url)
        rp.debug = self.verbose > 3
        rp.set_url(url)
        try:
            rp.read()
        except (OSError, IOError), msg:
            self.note(1, "I/O error parsing %s: %s", url, msg)
    def run(self):
        """Process the to-do queue in rounds of at most `roundsize` URLs."""
        while self.todo:
            self.round = self.round + 1
            self.note(0, "\nRound %d (%s)\n", self.round, self.status())
            urls = self.todo.keys()
            urls.sort()
            del urls[self.roundsize:]
            for url in urls:
                self.dopage(url)
    def status(self):
        return "%d total, %d to do, %d done, %d bad" % (
            len(self.todo)+len(self.done),
            len(self.todo), len(self.done),
            len(self.bad))
    def report(self):
        self.message("")
        if not self.todo: s = "Final"
        else: s = "Interim"
        self.message("%s Report (%s)", s, self.status())
        self.report_errors()
    def report_errors(self):
        if not self.bad:
            self.message("\nNo errors")
            return
        self.message("\nError Report:")
        sources = self.errors.keys()
        sources.sort()
        for source in sources:
            triples = self.errors[source]
            self.message("")
            if len(triples) > 1:
                self.message("%d Errors in %s", len(triples), source)
            else:
                self.message("Error in %s", source)
            # Call self.format_url() instead of referring
            # to the URL directly, since the URLs in these
            # triples is now a (URL, fragment) pair. The value
            # of the "source" variable comes from the list of
            # origins, and is a URL, not a pair.
            for url, rawlink, msg in triples:
                if rawlink != self.format_url(url): s = " (%s)" % rawlink
                else: s = ""
                self.message(" HREF %s%s\n msg %s",
                             self.format_url(url), s, msg)
    def dopage(self, url_pair):
        """Fetch/parse one queued (URL, fragment) pair and enqueue its links."""
        # All printing of URLs uses format_url(); argument changed to
        # url_pair for clarity.
        if self.verbose > 1:
            if self.verbose > 2:
                self.show("Check ", self.format_url(url_pair),
                          " from", self.todo[url_pair])
            else:
                self.message("Check %s", self.format_url(url_pair))
        url, local_fragment = url_pair
        if local_fragment and self.nonames:
            self.markdone(url_pair)
            return
        try:
            page = self.getpage(url_pair)
        except sgmllib.SGMLParseError, msg:
            msg = self.sanitize(msg)
            self.note(0, "Error parsing %s: %s",
                      self.format_url(url_pair), msg)
            # Dont actually mark the URL as bad - it exists, just
            # we can't parse it!
            page = None
        if page:
            # Store the page which corresponds to this URL.
            self.name_table[url] = page
            # If there is a fragment in this url_pair, and it's not
            # in the list of names for the page, call setbad(), since
            # it's a missing anchor.
            if local_fragment and local_fragment not in page.getnames():
                self.setbad(url_pair, ("Missing name anchor `%s'" % local_fragment))
            for info in page.getlinkinfos():
                # getlinkinfos() now returns the fragment as well,
                # and we store that fragment here in the "todo" dictionary.
                link, rawlink, fragment = info
                # However, we don't want the fragment as the origin, since
                # the origin is logically a page.
                origin = url, rawlink
                self.newlink((link, fragment), origin)
        else:
            # If no page has been created yet, we want to
            # record that fact.
            self.name_table[url_pair[0]] = None
        self.markdone(url_pair)
    def newlink(self, url, origin):
        # Route a discovered link to the done- or todo-bookkeeping path.
        if self.done.has_key(url):
            self.newdonelink(url, origin)
        else:
            self.newtodolink(url, origin)
    def newdonelink(self, url, origin):
        if origin not in self.done[url]:
            self.done[url].append(origin)
        # Call self.format_url(), since the URL here
        # is now a (URL, fragment) pair.
        self.note(3, " Done link %s", self.format_url(url))
        # Make sure that if it's bad, that the origin gets added.
        if self.bad.has_key(url):
            source, rawlink = origin
            triple = url, rawlink, self.bad[url]
            self.seterror(source, triple)
    def newtodolink(self, url, origin):
        # Call self.format_url(), since the URL here
        # is now a (URL, fragment) pair.
        if self.todo.has_key(url):
            if origin not in self.todo[url]:
                self.todo[url].append(origin)
            self.note(3, " Seen todo link %s", self.format_url(url))
        else:
            self.todo[url] = [origin]
            self.note(3, " New todo link %s", self.format_url(url))
    def format_url(self, url):
        # Render a (URL, fragment) pair back into display form.
        link, fragment = url
        if fragment: return link + "#" + fragment
        else: return link
    def markdone(self, url):
        self.done[url] = self.todo[url]
        del self.todo[url]
        self.changed = 1
    def inroots(self, url):
        # A URL is internal iff it extends some root AND robots.txt allows it.
        for root in self.roots:
            if url[:len(root)] == root:
                return self.isallowed(root, url)
        return 0
    def isallowed(self, root, url):
        root = urlparse.urljoin(root, "/")
        return self.robots[root].can_fetch(AGENTNAME, url)
    def getpage(self, url_pair):
        """Return a Page for an internal URL, None for external/unfetchable."""
        # Incoming argument name is a (URL, fragment) pair.
        # The page may have been cached in the name_table variable.
        url, fragment = url_pair
        if self.name_table.has_key(url):
            return self.name_table[url]
        scheme, path = urllib.splittype(url)
        if scheme in ('mailto', 'news', 'javascript', 'telnet'):
            self.note(1, " Not checking %s URL" % scheme)
            return None
        isint = self.inroots(url)
        # Ensure that openpage gets the URL pair to
        # print out its error message and record the error pair
        # correctly.
        if not isint:
            if not self.checkext:
                self.note(1, " Not checking ext link")
                return None
            f = self.openpage(url_pair)
            if f:
                self.safeclose(f)
            return None
        text, nurl = self.readhtml(url_pair)
        if nurl != url:
            self.note(1, " Redirected to %s", nurl)
            url = nurl
        if text:
            return Page(text, url, maxpage=self.maxpage, checker=self)
    # These next three functions take (URL, fragment) pairs as
    # arguments, so that openpage() receives the appropriate tuple to
    # record error messages.
    def readhtml(self, url_pair):
        # Returns (text, final_url); text is None for non-HTML or failures.
        url, fragment = url_pair
        text = None
        f, url = self.openhtml(url_pair)
        if f:
            text = f.read()
            f.close()
        return text, url
    def openhtml(self, url_pair):
        # Open the URL; drop the stream unless the content looks like HTML.
        url, fragment = url_pair
        f = self.openpage(url_pair)
        if f:
            url = f.geturl()
            info = f.info()
            if not self.checkforhtml(info, url):
                self.safeclose(f)
                f = None
        return f, url
    def openpage(self, url_pair):
        # Open the URL, recording any I/O error against the pair.
        url, fragment = url_pair
        try:
            return self.urlopener.open(url)
        except (OSError, IOError), msg:
            msg = self.sanitize(msg)
            self.note(0, "Error %s", msg)
            if self.verbose > 0:
                self.show(" HREF ", url, " from", self.todo[url_pair])
            self.setbad(url_pair, msg)
            return None
    def checkforhtml(self, info, url):
        # Decide HTML-ness from the Content-Type header, falling back to
        # guessing from the URL suffix (trailing "/" counts as HTML).
        if info.has_key('content-type'):
            ctype = cgi.parse_header(info['content-type'])[0].lower()
            if ';' in ctype:
                # handle content-type: text/html; charset=iso8859-1 :
                ctype = ctype.split(';', 1)[0].strip()
        else:
            if url[-1:] == "/":
                return 1
            ctype, encoding = mimetypes.guess_type(url)
        if ctype == 'text/html':
            return 1
        else:
            self.note(1, " Not HTML, mime type %s", ctype)
            return 0
    def setgood(self, url):
        if self.bad.has_key(url):
            del self.bad[url]
            self.changed = 1
            self.note(0, "(Clear previously seen error)")
    def setbad(self, url, msg):
        if self.bad.has_key(url) and self.bad[url] == msg:
            self.note(0, "(Seen this error before)")
            return
        self.bad[url] = msg
        self.changed = 1
        self.markerror(url)
    def markerror(self, url):
        # Attribute the error to each origin page that referenced the URL.
        try:
            origins = self.todo[url]
        except KeyError:
            origins = self.done[url]
        for source, rawlink in origins:
            triple = url, rawlink, self.bad[url]
            self.seterror(source, triple)
    def seterror(self, url, triple):
        try:
            # Because of the way the URLs are now processed, I need to
            # check to make sure the URL hasn't been entered in the
            # error list. The first element of the triple here is a
            # (URL, fragment) pair, but the URL key is not, since it's
            # from the list of origins.
            if triple not in self.errors[url]:
                self.errors[url].append(triple)
        except KeyError:
            self.errors[url] = [triple]
    # The following used to be toplevel functions; they have been
    # changed into methods so they can be overridden in subclasses.
    def show(self, p1, link, p2, origins):
        self.message("%s %s", p1, link)
        i = 0
        for source, rawlink in origins:
            i = i+1
            if i == 2:
                p2 = ' '*len(p2)
            if rawlink != link: s = " (%s)" % rawlink
            else: s = ""
            self.message("%s %s%s", p2, source, s)
    def sanitize(self, msg):
        # Strip unpicklable parts (e.g. embedded file objects) from an error.
        if isinstance(IOError, ClassType) and isinstance(msg, IOError):
            # Do the other branch recursively
            msg.args = self.sanitize(msg.args)
        elif isinstance(msg, TupleType):
            if len(msg) >= 4 and msg[0] == 'http error' and \
               isinstance(msg[3], InstanceType):
                # Remove the Message instance -- it may contain
                # a file object which prevents pickling.
                msg = msg[:3] + msg[4:]
        return msg
    def safeclose(self, f):
        try:
            url = f.geturl()
        except AttributeError:
            pass
        else:
            if url[:4] == 'ftp:' or url[:7] == 'file://':
                # Apparently ftp connections don't like to be closed
                # prematurely...
                text = f.read()
        f.close()
    def save_pickle(self, dumpfile=DUMPFILE):
        """Write a checkpoint atomically (dump to .new, then rename).

        Returns 1 only when a checkpoint was actually written."""
        if not self.changed:
            self.note(0, "\nNo need to save checkpoint")
        elif not dumpfile:
            self.note(0, "No dumpfile, won't save checkpoint")
        else:
            self.note(0, "\nSaving checkpoint to %s ...", dumpfile)
            newfile = dumpfile + ".new"
            f = open(newfile, "wb")
            pickle.dump(self, f)
            f.close()
            try:
                os.unlink(dumpfile)
            except os.error:
                pass
            os.rename(newfile, dumpfile)
            self.note(0, "Done.")
            return 1
class Page:
    """Wraps one fetched HTML document; parses it eagerly on construction."""
    def __init__(self, text, url, verbose=VERBOSE, maxpage=MAXPAGE, checker=None):
        self.text = text
        self.url = url
        self.verbose = verbose
        self.maxpage = maxpage
        self.checker = checker
        # The parsing of the page is done in the __init__() routine in
        # order to initialize the list of names the file
        # contains. Stored the parser in an instance variable. Passed
        # the URL to MyHTMLParser().
        size = len(self.text)
        if size > self.maxpage:
            # Oversized pages are skipped entirely; parser=None flags this.
            self.note(0, "Skip huge file %s (%.0f Kbytes)", self.url, (size*0.001))
            self.parser = None
            return
        self.checker.note(2, " Parsing %s (%d bytes)", self.url, size)
        self.parser = MyHTMLParser(url, verbose=self.verbose,
                                   checker=self.checker)
        self.parser.feed(self.text)
        self.parser.close()
    def note(self, level, msg, *args):
        # Delegate to the checker's note() when available.
        if self.checker:
            apply(self.checker.note, (level, msg) + args)
        else:
            if self.verbose >= level:
                if args:
                    msg = msg%args
                print msg
    # Method to retrieve names.
    def getnames(self):
        if self.parser:
            return self.parser.names
        else:
            return []
    def getlinkinfos(self):
        """Return [(absolute_link, rawlink, fragment), ...] for the page."""
        # File reading is done in __init__() routine. Store parser in
        # local variable to indicate success of parsing.
        # If no parser was stored, fail.
        if not self.parser: return []
        rawlinks = self.parser.getlinks()
        base = urlparse.urljoin(self.url, self.parser.getbase() or "")
        infos = []
        for rawlink in rawlinks:
            t = urlparse.urlparse(rawlink)
            # DON'T DISCARD THE FRAGMENT! Instead, include
            # it in the tuples which are returned. See Checker.dopage().
            fragment = t[-1]
            t = t[:-1] + ('',)
            rawlink = urlparse.urlunparse(t)
            link = urlparse.urljoin(base, rawlink)
            infos.append((link, rawlink, fragment))
        return infos
class MyStringIO(StringIO.StringIO):
    """StringIO that also carries a URL and header dict, mimicking the
    file-like objects urllib returns (geturl()/info())."""
    def __init__(self, url, info):
        self.__url = url
        self.__info = info
        StringIO.StringIO.__init__(self)
    def info(self):
        return self.__info
    def geturl(self):
        return self.__url
class MyURLopener(urllib.FancyURLopener):
    """URL opener with a custom agent string and directory-listing support
    for file: URLs (serves index.html when present, else a generated list)."""
    # Use the plain (non-fancy) HTTP error handler so errors raise.
    http_error_default = urllib.URLopener.http_error_default
    def __init__(*args):
        self = args[0]
        apply(urllib.FancyURLopener.__init__, args)
        self.addheaders = [
            ('User-agent', 'Python-webchecker/%s' % __version__),
            ]
    def http_error_401(self, url, fp, errcode, errmsg, headers):
        # Don't prompt for credentials; treat 401 as an open failure.
        return None
    def open_file(self, url):
        path = urllib.url2pathname(urllib.unquote(url))
        if os.path.isdir(path):
            if path[-1] != os.sep:
                url = url + '/'
            indexpath = os.path.join(path, "index.html")
            if os.path.exists(indexpath):
                return self.open_file(url + "index.html")
            try:
                names = os.listdir(path)
            except os.error, msg:
                exc_type, exc_value, exc_tb = sys.exc_info()
                raise IOError, msg, exc_tb
            names.sort()
            # Synthesize an HTML directory listing with a <BASE> tag so
            # relative links resolve inside the directory.
            s = MyStringIO("file:"+url, {'content-type': 'text/html'})
            s.write('<BASE HREF="file:%s">\n' %
                    urllib.quote(os.path.join(path, "")))
            for name in names:
                q = urllib.quote(name)
                s.write('<A HREF="%s">%s</A>\n' % (q, q))
            s.seek(0)
            return s
        return urllib.FancyURLopener.open_file(self, url)
class MyHTMLParser(sgmllib.SGMLParser):
    """SGML parser that collects outgoing links (self.links), anchor
    names/ids (self.names), and the <BASE> href (self.base)."""
    def __init__(self, url, verbose=VERBOSE, checker=None):
        self.myverbose = verbose # now unused
        self.checker = checker
        self.base = None
        self.links = {}
        self.names = []
        self.url = url
        sgmllib.SGMLParser.__init__(self)
    def check_name_id(self, attributes):
        """ Check the name or id attributes on an element.
        """
        # We must rescue the NAME or id (name is deprecated in XHTML)
        # attributes from the anchor, in order to
        # cache the internal anchors which are made
        # available in the page.
        for name, value in attributes:
            if name == "name" or name == "id":
                if value in self.names:
                    self.checker.message("WARNING: duplicate ID name %s in %s",
                                         value, self.url)
                else: self.names.append(value)
                break
    def unknown_starttag(self, tag, attributes):
        """ In XHTML, you can have id attributes on any element.
        """
        self.check_name_id(attributes)
    def start_a(self, attributes):
        self.link_attr(attributes, 'href')
        self.check_name_id(attributes)
    def end_a(self): pass
    def do_area(self, attributes):
        self.link_attr(attributes, 'href')
        self.check_name_id(attributes)
    def do_body(self, attributes):
        self.link_attr(attributes, 'background', 'bgsound')
        self.check_name_id(attributes)
    def do_img(self, attributes):
        self.link_attr(attributes, 'src', 'lowsrc')
        self.check_name_id(attributes)
    def do_frame(self, attributes):
        self.link_attr(attributes, 'src', 'longdesc')
        self.check_name_id(attributes)
    def do_iframe(self, attributes):
        self.link_attr(attributes, 'src', 'longdesc')
        self.check_name_id(attributes)
    def do_link(self, attributes):
        # Only stylesheet <link> elements contribute checkable links.
        for name, value in attributes:
            if name == "rel":
                parts = value.lower().split()
                if ( parts == ["stylesheet"]
                     or parts == ["alternate", "stylesheet"]):
                    self.link_attr(attributes, "href")
                    break
        self.check_name_id(attributes)
    def do_object(self, attributes):
        self.link_attr(attributes, 'data', 'usemap')
        self.check_name_id(attributes)
    def do_script(self, attributes):
        self.link_attr(attributes, 'src')
        self.check_name_id(attributes)
    def do_table(self, attributes):
        self.link_attr(attributes, 'background')
        self.check_name_id(attributes)
    def do_td(self, attributes):
        self.link_attr(attributes, 'background')
        self.check_name_id(attributes)
    def do_th(self, attributes):
        self.link_attr(attributes, 'background')
        self.check_name_id(attributes)
    def do_tr(self, attributes):
        self.link_attr(attributes, 'background')
        self.check_name_id(attributes)
    def link_attr(self, attributes, *args):
        # Record the value of the first matching link-bearing attribute.
        for name, value in attributes:
            if name in args:
                if value: value = value.strip()
                if value: self.links[value] = None
    def do_base(self, attributes):
        for name, value in attributes:
            if name == 'href':
                if value: value = value.strip()
                if value:
                    if self.checker:
                        self.checker.note(1, " Base %s", value)
                    self.base = value
        self.check_name_id(attributes)
    def getlinks(self):
        return self.links.keys()
    def getbase(self):
        return self.base
# Allow use both as a script and as an importable module (wcgui.py imports it).
if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Tests of disco_acm
"""
from datetime import datetime, timedelta
from unittest import TestCase
from mock import MagicMock
from disco_aws_automation import DiscoACM
from disco_aws_automation.disco_acm import (
CERT_SUMMARY_LIST_KEY,
CERT_ARN_KEY,
DOMAIN_NAME_KEY,
CERT_ALT_NAMES_KEY,
CERT_KEY,
CERT_ISSUED_DATE_KEY
)
# Fixture domains: an exact-match host, an alternate host matched only by the
# wildcard cert, a deeper subdomain, and the wildcard pattern itself.
TEST_DOMAIN_NAME = 'test.example.com'
TEST_ALT_DOMAIN_NAME = 'test2.example.com'
TEST_DEEP_DOMAIN_NAME = 'a.deeper.test.example.com'
TEST_WILDCARD_DOMAIN_NAME = '*.example.com'
# Fixture certificate ARNs (exact match, an older duplicate, wildcard, multi-SAN).
TEST_CERTIFICATE_ARN_ACM_EXACT = 'arn:aws:acm::123:exact'
TEST_CERTIFICATE_ARN_ACM_EXACT_OLDER = 'arn:aws:acm::123:exact_older'
TEST_CERTIFICATE_ARN_ACM_WILDCARD = 'arn:aws:acm::123:wildcard'
TEST_MULTI_CERT_ARN_ACM = 'arn:aws:acm::123:multi'
TEST_MULTI_DOMAIN_NAME = 'multi.foo.com'
TEST_MULTI_ALT_DOMAIN_NAME = 'foo.com'
# Certificate summary dicts as returned by ACM list_certificates.
TEST_CERT = {CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_EXACT, DOMAIN_NAME_KEY: TEST_DOMAIN_NAME}
TEST_CERT_OLDER = {CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_EXACT_OLDER, DOMAIN_NAME_KEY: TEST_DOMAIN_NAME}
TEST_WILDCARD_CERT = {CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_WILDCARD,
                      DOMAIN_NAME_KEY: TEST_WILDCARD_DOMAIN_NAME}
TEST_MULTI_CERT = {CERT_ARN_KEY: TEST_MULTI_CERT_ARN_ACM, DOMAIN_NAME_KEY: TEST_MULTI_DOMAIN_NAME}
class DiscoACMTests(TestCase):
    """Test disco_acm.py"""

    def setUp(self):
        # DiscoACM is exercised against a fully mocked ACM client; no AWS
        # calls are made.
        self._acm = MagicMock()
        self.disco_acm = DiscoACM(self._acm)
        self._acm.list_certificates.return_value = {
            CERT_SUMMARY_LIST_KEY: [TEST_CERT_OLDER, TEST_CERT, TEST_WILDCARD_CERT, TEST_MULTI_CERT]
        }

        # pylint: disable=invalid-name
        def _describe_cert(CertificateArn):
            # Fake describe_certificate: maps ARN -> certificate detail.
            # Issued dates differentiate the "older" exact cert so the
            # newest matching cert can be preferred.
            cert_data = {
                TEST_CERTIFICATE_ARN_ACM_WILDCARD: {
                    CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_WILDCARD,
                    CERT_ALT_NAMES_KEY: [TEST_WILDCARD_DOMAIN_NAME],
                    CERT_ISSUED_DATE_KEY: datetime.now(),
                },
                TEST_CERTIFICATE_ARN_ACM_EXACT_OLDER: {
                    CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_EXACT_OLDER,
                    CERT_ALT_NAMES_KEY: [TEST_DOMAIN_NAME],
                    CERT_ISSUED_DATE_KEY: datetime.now() - timedelta(days=1),
                },
                TEST_CERTIFICATE_ARN_ACM_EXACT: {
                    CERT_ARN_KEY: TEST_CERTIFICATE_ARN_ACM_EXACT,
                    CERT_ALT_NAMES_KEY: [TEST_DOMAIN_NAME],
                    CERT_ISSUED_DATE_KEY: datetime.now(),
                },
                TEST_MULTI_CERT_ARN_ACM: {
                    CERT_ARN_KEY: TEST_MULTI_CERT_ARN_ACM,
                    CERT_ALT_NAMES_KEY: [TEST_MULTI_DOMAIN_NAME, TEST_MULTI_ALT_DOMAIN_NAME],
                    CERT_ISSUED_DATE_KEY: datetime.now(),
                }
            }
            return {CERT_KEY: cert_data.get(CertificateArn)}

        self._acm.describe_certificate.side_effect = _describe_cert

    def test_get_certificate_arn_exact_match(self):
        """
        exact match between the host and cert work
        e.g. a.b.c matches a.b.c
        """
        self.assertEqual(TEST_CERTIFICATE_ARN_ACM_EXACT,
                         self.disco_acm.get_certificate_arn(TEST_DOMAIN_NAME),
                         'Exact matching of host domain name to cert domain needs to be fixed.')

    def test_get_certificate_arn_wildcard_match(self):
        """
        wildcard match between the host and cert work
        e.g. a.b.c matches *.b.c
        """
        self.assertEqual(TEST_CERTIFICATE_ARN_ACM_WILDCARD,
                         self.disco_acm.get_certificate_arn(TEST_ALT_DOMAIN_NAME),
                         'Exact matching of host domain name to cert domain needs to be fixed.')

    def test_get_certificate_arn_bad_left_label(self):
        """
        host name starting with *. is invalid and should not return a cert match
        e.g. *.b.c does not match a.b.c or *.b.c
        """
        self.assertFalse(self.disco_acm.get_certificate_arn(TEST_WILDCARD_DOMAIN_NAME),
                         'An FQDN with an invalid left-most label should not match.')

    def test_get_certificate_arn_empty(self):
        """
        empty string should not should NOT return a cert
        e.g. '' does not match a.b.c or *.b.c
        """
        self.assertFalse(self.disco_acm.get_certificate_arn(''), 'An empty string should not match certs.')

    def test_get_certificate_arn_no_hostname(self):
        """
        dns names beginning with . should NOT return a cert
        e.g. .b.c does not match a.b.c or *.b.c
        """
        self.assertFalse(self.disco_acm.get_certificate_arn('.example.com'),
                         'A missing host name should not match cert domains.')

    def test_get_certificate_arn_no_match(self):
        """
        host that does not match cert domains should NOT return a cert
        e.f.g.h does not match a.b.c or *.b.c
        """
        self.assertFalse(self.disco_acm.get_certificate_arn('non.existent.cert.domain'),
                         'Matching of host domain name to cert domain is generating false positives.')

    def test_get_certificate_arn_substring(self):
        """
        host that is a only a substring of a domain should NOT return a cert
        a.b does not match a.b.c or *.b.c
        """
        self.assertFalse(self.disco_acm.get_certificate_arn('test.example'),
                         'a.b should not match a.b.c or *.b.c.')

    def test_get_cert_arn_match_most_specific(self):
        """
        test both orderings of exact and wildcard matching cert domains
        to ensure the host domain matches the most specific cert domain in both cases
        a.b.c will match a.b.c in preference to *.b.c
        """
        # Both list orders must yield the exact (most specific) cert.
        self._acm.list_certificates.return_value = {CERT_SUMMARY_LIST_KEY: [TEST_CERT, TEST_WILDCARD_CERT]}
        self.assertEqual(TEST_CERTIFICATE_ARN_ACM_EXACT,
                         self.disco_acm.get_certificate_arn(TEST_DOMAIN_NAME),
                         'Failed to match most specific cert domain.')
        self._acm.list_certificates.return_value = {CERT_SUMMARY_LIST_KEY: [TEST_WILDCARD_CERT, TEST_CERT]}
        self.assertEqual(TEST_CERTIFICATE_ARN_ACM_EXACT,
                         self.disco_acm.get_certificate_arn(TEST_DOMAIN_NAME),
                         'Failed to match most specific cert domain.')

    def test_get_cert_with_alt_names(self):
        """
        test that the correct cert is returned for a domain in the cert's alternative domains
        """
        self.assertEqual(TEST_MULTI_CERT_ARN_ACM,
                         self.disco_acm.get_certificate_arn(TEST_MULTI_ALT_DOMAIN_NAME))
        self.assertEqual(TEST_MULTI_CERT_ARN_ACM,
                         self.disco_acm.get_certificate_arn(TEST_MULTI_DOMAIN_NAME),
                         'Matching of domains with alt names needs to be fixed.')
|
unknown
|
codeparrot/codeparrot-clean
| ||
import nltk
class DecisionTreeClassifierHelper :
    """ Decorator object for DecisionTreeClassifier instances """

    def __init__(self, classifier) :
        # Monkey-patch the wrapped classifier so it gains the two
        # code-generation methods defined below.
        self.classifier = classifier
        classifier.pythoncode = self.pythoncode
        classifier.pythonclasscode = self.pythonclasscode

    def pythoncode(self_, prefix='', depth=4) :
        """
        Return python code implementing the decision tree
        """
        # NOTE(review): 'self_' is the helper instance; the local 'self'
        # below is the wrapped DecisionTreeClassifier, whose private
        # fields (_fname, _label, _decisions, _default) are read directly.
        self = self_.classifier
        # Leaf node: emit an unconditional return of the label.
        if self._fname is None:
            return "%sreturn %r\n" % (prefix, self._label)
        s = ''
        first = True
        keychecked = False
        for (fval, result) in sorted(self._decisions.items()):
            # None is always the first
            if fval == None :
                # Missing/None feature branch: test membership and None-ness.
                s += '%s%s "%s" not in featureset or featureset["%s"] == None: ' % (prefix, ('elif', 'if')[first], self._fname, self._fname)
            else :
                if not keychecked :
                    # Guard all concrete-value tests behind a single
                    # key-presence check; subsequent code is indented under it.
                    s += '%sif "%s" in featureset:\n' % (prefix, self._fname)
                    prefix += ' '
                    keychecked = True
                s += '%s%s featureset["%s"] == %r: ' % (prefix, ('elif', 'if')[first], self._fname, fval)
            if result._fname is not None and depth>1:
                # Recurse into the subtree; wrapping first ensures the child
                # also has a pythoncode method patched onto it.
                DecisionTreeClassifierHelper(result)
                s += '\n'+result.pythoncode(prefix+' ', depth-1)
            else:
                s += 'return %r\n' % result._label
            if first :
                if fval == None :
                    #s += '%selif "%s" in featureset:\n' % (prefix, self._fname)
                    #s += '%selse:\n' % (prefix,)
                    #prefix += ' '
                    #first = True
                    first = False
                    keychecked = True
                else :
                    first = False
        if self._default is not None:
            if len(self._decisions) == 1:
                # NOTE(review): dict.keys()[0] only works on Python 2; on
                # Python 3 this would need list(self._decisions)[0] — confirm
                # the target interpreter.
                s += '%sif "%s" not in featureset or featureset["%s"] != %r: '% (prefix, self._fname, self._fname,
                    self._decisions.keys()[0])
            else:
                s += '%selse: ' % (prefix,)
            if self._default._fname is not None and depth>1:
                DecisionTreeClassifierHelper(self._default)
                s += '\n'+self._default.pythoncode(prefix+' ', depth-1)
            else:
                s += 'return %r\n' % self._default._label
        return s

    def pythonclasscode(self_, classname, depth=1) :
        # Emit a full class definition whose classify() method embeds the
        # generated decision-tree code.
        self = self_.classifier
        code = "class "+classname+"(nltk.DecisionTreeClassifier) :\n"
        code += " def classify(self, featureset) :\n"
        code += self.pythoncode(prefix=' ', depth=depth)
        code += "\n"
        return code
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import shutil
import subprocess
import tarfile
from testrunner.local import testsuite
from testrunner.objects import testcase
class BenchmarksTestSuite(testsuite.TestSuite):
  # Test suite that runs the Kraken, Octane and SunSpider JS benchmarks
  # through the V8 test runner, downloading the benchmark sources on demand.

  def __init__(self, name, root):
    super(BenchmarksTestSuite, self).__init__(name, root)
    self.testroot = root

  def ListTests(self, context):
    # One TestCase per benchmark; the hard-coded list pins the exact set
    # of benchmarks run.
    tests = []
    for test in [
        "kraken/ai-astar",
        "kraken/audio-beat-detection",
        "kraken/audio-dft",
        "kraken/audio-fft",
        "kraken/audio-oscillator",
        "kraken/imaging-darkroom",
        "kraken/imaging-desaturate",
        "kraken/imaging-gaussian-blur",
        "kraken/json-parse-financial",
        "kraken/json-stringify-tinderbox",
        "kraken/stanford-crypto-aes",
        "kraken/stanford-crypto-ccm",
        "kraken/stanford-crypto-pbkdf2",
        "kraken/stanford-crypto-sha256-iterative",
        "octane/box2d",
        "octane/code-load",
        "octane/crypto",
        "octane/deltablue",
        "octane/earley-boyer",
        "octane/gbemu-part1",
        "octane/mandreel",
        "octane/navier-stokes",
        "octane/pdfjs",
        "octane/raytrace",
        "octane/regexp",
        "octane/richards",
        "octane/splay",
        "octane/typescript",
        "octane/zlib",
        "sunspider/3d-cube",
        "sunspider/3d-morph",
        "sunspider/3d-raytrace",
        "sunspider/access-binary-trees",
        "sunspider/access-fannkuch",
        "sunspider/access-nbody",
        "sunspider/access-nsieve",
        "sunspider/bitops-3bit-bits-in-byte",
        "sunspider/bitops-bits-in-byte",
        "sunspider/bitops-bitwise-and",
        "sunspider/bitops-nsieve-bits",
        "sunspider/controlflow-recursive",
        "sunspider/crypto-aes",
        "sunspider/crypto-md5",
        "sunspider/crypto-sha1",
        "sunspider/date-format-tofte",
        "sunspider/date-format-xparb",
        "sunspider/math-cordic",
        "sunspider/math-partial-sums",
        "sunspider/math-spectral-norm",
        "sunspider/regexp-dna",
        "sunspider/string-base64",
        "sunspider/string-fasta",
        "sunspider/string-tagcloud",
        "sunspider/string-unpack-code",
        "sunspider/string-validate-input"]:
      tests.append(testcase.TestCase(self, test))
    return tests

  def GetFlagsForTestCase(self, testcase, context):
    # Assemble the d8 command line; each benchmark family needs its own
    # data/driver files in addition to the benchmark script itself.
    result = []
    result += context.mode_flags
    if testcase.path.startswith("kraken"):
      result.append(os.path.join(self.testroot, "%s-data.js" % testcase.path))
      result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
    elif testcase.path.startswith("octane"):
      result.append(os.path.join(self.testroot, "octane/base.js"))
      result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
      # Some octane benchmarks are split across extra source/data files.
      if testcase.path.startswith("octane/gbemu"):
        result.append(os.path.join(self.testroot, "octane/gbemu-part2.js"))
      elif testcase.path.startswith("octane/typescript"):
        result.append(os.path.join(self.testroot,
                                   "octane/typescript-compiler.js"))
        result.append(os.path.join(self.testroot, "octane/typescript-input.js"))
      elif testcase.path.startswith("octane/zlib"):
        result.append(os.path.join(self.testroot, "octane/zlib-data.js"))
      result += ["-e", "BenchmarkSuite.RunSuites({});"]
    elif testcase.path.startswith("sunspider"):
      result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
    return testcase.flags + result

  def GetSourceForTest(self, testcase):
    filename = os.path.join(self.testroot, testcase.path + ".js")
    with open(filename) as f:
      return f.read()

  def _DownloadIfNecessary(self, url, revision, target_dir):
    # Fetch benchmark sources at a pinned SVN revision, caching them as a
    # local tarball keyed by revision.
    # Maybe we're still up to date?
    revision_file = "CHECKED_OUT_%s" % target_dir
    checked_out_revision = None
    if os.path.exists(revision_file):
      with open(revision_file) as f:
        checked_out_revision = f.read()
    if checked_out_revision == revision:
      return

    # If we have a local archive file with the test data, extract it.
    if os.path.exists(target_dir):
      shutil.rmtree(target_dir)
    archive_file = "downloaded_%s_%s.tar.gz" % (target_dir, revision)
    if os.path.exists(archive_file):
      with tarfile.open(archive_file, "r:gz") as tar:
        # SECURITY NOTE(review): extractall() on an untrusted archive can
        # write outside the target directory (path traversal); acceptable
        # here only because the archive is created locally below.
        tar.extractall()
      with open(revision_file, "w") as f:
        f.write(revision)
      return

    # No cached copy. Check out via SVN, and pack as .tar.gz for later use.
    # NOTE(review): shell=True with interpolated url/revision — safe only
    # while these come from the hard-coded calls in DownloadData.
    command = "svn co %s -r %s %s" % (url, revision, target_dir)
    code = subprocess.call(command, shell=True)
    if code != 0:
      raise Exception("Error checking out %s benchmark" % target_dir)
    with tarfile.open(archive_file, "w:gz") as tar:
      tar.add("%s" % target_dir)
    with open(revision_file, "w") as f:
      f.write(revision)

  def DownloadData(self):
    # Download all three benchmark suites into self.root, restoring the
    # caller's working directory afterwards.
    old_cwd = os.getcwd()
    os.chdir(os.path.abspath(self.root))
    self._DownloadIfNecessary(
        ("http://svn.webkit.org/repository/webkit/trunk/PerformanceTests/"
         "SunSpider/tests/sunspider-1.0.2/"),
        "159499", "sunspider")
    self._DownloadIfNecessary(
        ("http://kraken-mirror.googlecode.com/svn/trunk/kraken/tests/"
         "kraken-1.1/"),
        "8", "kraken")
    self._DownloadIfNecessary(
        "http://octane-benchmark.googlecode.com/svn/trunk/",
        "26", "octane")
    os.chdir(old_cwd)

  def VariantFlags(self, testcase, default_flags):
    # Both --nocrankshaft and --stressopt are very slow. Add TF but without
    # always opt to match the way the benchmarks are run for performance
    # testing.
    return [[], ["--turbo-asm", "--turbo-filter=*"]]
def GetSuite(name, root):
  # Entry point used by the V8 test runner to instantiate this suite.
  return BenchmarksTestSuite(name, root)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python2.4
# Copyright 2007 John Kasunich and Jeff Epler
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from vismach import *
import hal
import math
import sys
# Create the HAL component with one float input pin per joint so the GUI
# model can track the machine's joint positions.
c = hal.component("scaragui")
c.newpin("joint0", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint1", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint2", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint3", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint4", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint5", hal.HAL_FLOAT, hal.HAL_IN)
c.ready()

# parameters that define the geometry see scarakins.c for definitions these
# numbers match the defaults there, and will need to be changed or specified on
# the commandline if you are not using the defaults.
d1 = 490.0
d2 = 340.0
d3 = 50.0
d4 = 250.0
d5 = 50.0
d6 = 50.0
j3min = 40.0
j3max = 270.0

# SECURITY NOTE(review): each command-line argument is exec'd verbatim so the
# geometry can be overridden (e.g. "d1=500.0") — do not pass untrusted input.
# (Python 2 'exec' statement syntax.)
for setting in sys.argv[1:]: exec setting

# calculate a bunch of other dimensions that are used
# to scale the model of the machine
# most of these scale factors are arbitrary, to give
# a nicely proportioned machine. If you know specifics
# for the machine you are modeling, feel free to change
# these numbers
tool_len = math.sqrt(d5*d5+d6*d6) # don't change
tool_dia = tool_len / 6.0
# diameters of the arms
l1_dia = d2 / 5.0
l2_dia = d4 / 5.0
l3_dia = l2_dia * 0.8
# diameters of the "lumps" at the joints
j0_dia = l1_dia * 1.5
j1_dia = max(l1_dia * 1.25, l2_dia * 1.5)
j2_dia = l2_dia * 1.25
# other dims
j0_hi = l1_dia * 1.2
j1_hi1 = l1_dia * 1.1
j1_hi2 = l2_dia * 1.2
j2_hi = l2_dia * 1.3
# don't change these
tool_angle = math.degrees(math.atan2(d6,d5))
tool_radius = tool_dia / 2.0
l1_rad = l1_dia / 2.0
l2_rad = l2_dia / 2.0
l3_len = j3max + j2_hi * 0.7
l3_rad = l3_dia / 2.0
# NOTE: the *_hi values are halved in place below — from here on they are
# half-heights, and the *_rad values are radii derived from the diameters.
j0_hi = j0_hi / 2.0
j0_rad = j0_dia / 2.0
j1_hi1 = j1_hi1 / 2.0
j1_hi2 = j1_hi2 / 2.0
j1_rad = j1_dia / 2.0
j2_hi = j2_hi / 2.0
j2_rad = j2_dia / 2.0
size = max(d1+d3+l3_len,d2+d4+d6)

# tool - cylinder with a point, and a ball to hide the blunt back end
# the origin starts out at the tool tip, and we want to capture this
# "tooltip" coordinate system
tooltip = Capture()
tool = Collection([
    tooltip,
    Sphere(0.0, 0.0, tool_len, tool_dia),
    CylinderZ(tool_len, tool_radius, tool_dia, tool_radius),
    CylinderZ(tool_dia, tool_radius, 0.0, 0.0)])
# translate so origin is at base of tool, not the tip
tool = Translate([tool],0.0,0.0,-tool_len)
# the tool might not be pointing straight down
tool = Rotate([tool],tool_angle,0.0,-1.0,0.0)
# make joint 3 rotate
tool = HalRotate([tool],c,"joint3",1,0,0,1)
link3 = CylinderZ(0.0, l3_rad, l3_len, l3_rad)
# attach tool to end
link3 = Collection([tool,link3])
# make joint 2 go up and down
link3 = HalTranslate([link3],c,"joint2",0,0,-1)
# outer arm
# start with link3 and the cylinder it slides in
link2 = Collection([
    link3,
    CylinderZ(-j2_hi, j2_rad, j2_hi, j2_rad)])
# move to end of arm
link2 = Translate([link2], d4, 0.0, 0.0)
# add the arm itself
link2 = Collection([
    link2,
    CylinderX(d4, l2_rad, 1.5*j1_rad, l2_rad)])
# the joint gets interesting, because link2 can be above or below link1
if d3 > 0:
    flip = 1
else:
    flip = -1
# add the joint
link2 = Collection([
    link2,
    Box(1.5*j1_rad, -0.9*j1_rad, -j1_hi2, 1.15*j1_rad, 0.9*j1_rad, j1_hi2),
    Box(1.15*j1_rad, -0.9*j1_rad, -0.4*d3, 0.0, 0.9*j1_rad, flip*j1_hi2),
    CylinderZ(-0.4*d3, j1_rad, flip*1.2*j1_hi2, j1_rad)])
# make the joint work
link2 = HalRotate([link2],c,"joint1",1,0,0,1)
# inner arm
# the outer arm and the joint
link1 = Collection([
    Translate([link2],0.0,0.0,d3),
    Box(-1.5*j1_rad, -0.9*j1_rad, -j1_hi1, -1.15*j1_rad, 0.9*j1_rad, j1_hi1),
    Box(-1.15*j1_rad, -0.9*j1_rad, 0.4*d3, 0.0, 0.9*j1_rad, -flip*j1_hi1),
    CylinderZ(0.4*d3, j1_rad, flip*-1.2*j1_hi1, j1_rad),
    CylinderZ(0.6*d3, 0.8*j1_rad, 0.4*d3, 0.8*j1_rad)])
# move to end of arm
link1 = Translate([link1], d2, 0.0, 0.0)
# add the arm itself, and the inner joint
link1 = Collection([
    link1,
    CylinderX(d2-1.5*j1_rad, l1_rad, 1.5*j0_rad, l1_rad),
    Box(1.5*j0_rad, -0.9*j0_rad, -j0_hi, 0.0, 0.9*j0_rad, j0_hi),
    CylinderZ(-1.2*j0_hi, j0_rad, 1.2*j0_hi, j0_rad)])
# make the joint work
link1 = HalRotate([link1],c,"joint0",1,0,0,1)
#stationary base
link0 = Collection([
    CylinderZ(d1-j0_hi, 0.8*j0_rad, d1-1.5*j0_hi, 0.8*j0_rad),
    CylinderZ(d1-1.5*j0_hi, 0.8*j0_rad, 0.07*d1, 1.3*j0_rad),
    CylinderZ(0.07*d1, 2.0*j0_rad, 0.0, 2.0*j0_rad)])
# slap the arm on top
link0 = Collection([
    link0,
    Translate([link1],0,0,d1)])
# add a floor
floor = Box(-0.5*size,-0.5*size,-0.02*size,0.5*size,0.5*size,0.0)
# and a table for the workpiece - define in workpiece coords
reach = d2+d4-d6
table_height = d1+d3-j3max-d5
work = Capture()
table = Collection([
    work,
    Box(-0.35*reach,-0.5*reach, -0.1*d1, 0.35*reach, 0.5*reach, 0.0)])
# make the table moveable (tilting)
table = HalRotate([table],c,"joint4",1,0,1,0)
table = HalRotate([table],c,"joint5",1,1,0,0)
# put the table into its proper place
table = Translate([table],0.5*reach,0.0,table_height)
model = Collection([link0, floor, table])
# Hand the assembled kinematic model to vismach's display loop.
main(model, tooltip, work, size)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
"""QGIS Unit tests for postgres transaction groups.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '11/06/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
import qgis # NOQA
import os
from qgis.core import (
QgsVectorLayer,
QgsProject,
QgsTransaction,
QgsDataSourceUri
)
from qgis.testing import start_app, unittest
start_app()
class TestQgsPostgresTransaction(unittest.TestCase):
    # Exercises QgsProject.transactionGroup() behaviour with and without
    # auto-transaction mode against a live PostgreSQL test database.

    @classmethod
    def setUpClass(cls):
        """
        Setup the involved layers and relations for a n:m relation
        :return:
        """
        # Connection comes from the 'qgis_test' service unless overridden.
        cls.dbconn = 'service=qgis_test'
        if 'QGIS_PGTEST_DB' in os.environ:
            cls.dbconn = os.environ['QGIS_PGTEST_DB']
        # Create test layer
        cls.vl_b = QgsVectorLayer(cls.dbconn + ' sslmode=disable key=\'pk\' table="qgis_test"."books" sql=', 'books',
                                  'postgres')
        cls.vl_a = QgsVectorLayer(cls.dbconn + ' sslmode=disable key=\'pk\' table="qgis_test"."authors" sql=',
                                  'authors', 'postgres')

        QgsProject.instance().addMapLayer(cls.vl_b)
        QgsProject.instance().addMapLayer(cls.vl_a)

        cls.relMgr = QgsProject.instance().relationManager()

        assert (cls.vl_a.isValid())
        assert (cls.vl_b.isValid())

    def startTransaction(self):
        """
        Start a new transaction and set all layers into transaction mode.
        :return: None
        """
        lyrs = [self.vl_a, self.vl_b]
        self.transaction = QgsTransaction.create(lyrs)
        self.transaction.begin()
        for l in lyrs:
            l.startEditing()

    def rollbackTransaction(self):
        """
        Rollback all changes done in this transaction.
        We always rollback and never commit to have the database in a pristine
        state at the end of each test.
        :return: None
        """
        lyrs = [self.vl_a, self.vl_b]
        for l in lyrs:
            l.commitChanges()
        self.transaction.rollback()

    def test_transactionsGroup(self):
        conn_string = QgsDataSourceUri(self.vl_b.source()).connectionInfo()

        # No transaction group.
        QgsProject.instance().setAutoTransaction(False)
        noTg = QgsProject.instance().transactionGroup("postgres", conn_string)
        self.assertIsNone(noTg)

        # start transaction - no auto transaction
        self.startTransaction()
        noTg = QgsProject.instance().transactionGroup("postgres", conn_string)
        self.assertIsNone(noTg)
        self.rollbackTransaction()

        # with auto transactions
        QgsProject.instance().setAutoTransaction(True)
        self.startTransaction()
        noTg = QgsProject.instance().transactionGroup("postgres", conn_string)
        self.assertIsNotNone(noTg)
        self.rollbackTransaction()

        # bad provider key
        self.startTransaction()
        noTg = QgsProject.instance().transactionGroup("xxxpostgres", conn_string)
        self.assertIsNone(noTg)
        self.rollbackTransaction()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/bin/sh

test_description='cherry-pick should rerere for conflicts'

GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME

. ./test-lib.sh

# Build two diverging branches (main and dev) that conflict on files
# "foo" and "bar", with rerere enabled.
test_expect_success setup '
test_commit foo &&
test_commit foo-main foo &&
test_commit bar-main bar &&
git checkout -b dev foo &&
test_commit foo-dev foo &&
test_commit bar-dev bar &&
git config rerere.enabled true
'

test_expect_success 'conflicting merge' '
test_must_fail git merge main
'

# Record conflict resolutions in the rerere database, then reset so the
# later cherry-picks hit the same conflicts again.
test_expect_success 'fixup' '
echo foo-resolved >foo &&
echo bar-resolved >bar &&
git commit -am resolved &&
cp foo foo-expect &&
cp bar bar-expect &&
git reset --hard HEAD^
'

# With autoupdate, rerere stages the recorded resolution automatically
# (diff-files --quiet succeeds = no unstaged changes).
test_expect_success 'cherry-pick conflict with --rerere-autoupdate' '
test_must_fail git cherry-pick --rerere-autoupdate foo..bar-main &&
test_cmp foo-expect foo &&
git diff-files --quiet &&
test_must_fail git cherry-pick --continue &&
test_cmp bar-expect bar &&
git diff-files --quiet &&
git cherry-pick --continue &&
git reset --hard bar-dev
'

test_expect_success 'cherry-pick conflict respects rerere.autoUpdate' '
test_config rerere.autoUpdate true &&
test_must_fail git cherry-pick foo..bar-main &&
test_cmp foo-expect foo &&
git diff-files --quiet &&
test_must_fail git cherry-pick --continue &&
test_cmp bar-expect bar &&
git diff-files --quiet &&
git cherry-pick --continue &&
git reset --hard bar-dev
'

# --no-rerere-autoupdate must override the config: resolutions are applied
# to the worktree but left unstaged, so we must "git add" manually.
test_expect_success 'cherry-pick conflict with --no-rerere-autoupdate' '
test_config rerere.autoUpdate true &&
test_must_fail git cherry-pick --no-rerere-autoupdate foo..bar-main &&
test_cmp foo-expect foo &&
test_must_fail git diff-files --quiet &&
git add foo &&
test_must_fail git cherry-pick --continue &&
test_cmp bar-expect bar &&
test_must_fail git diff-files --quiet &&
git add bar &&
git cherry-pick --continue &&
git reset --hard bar-dev
'

test_expect_success 'cherry-pick --continue rejects --rerere-autoupdate' '
test_must_fail git cherry-pick --rerere-autoupdate foo..bar-main &&
test_cmp foo-expect foo &&
git diff-files --quiet &&
test_must_fail git cherry-pick --continue --rerere-autoupdate >actual 2>&1 &&
echo "fatal: cherry-pick: --rerere-autoupdate cannot be used with --continue" >expect &&
test_cmp expect actual &&
test_must_fail git cherry-pick --continue --no-rerere-autoupdate >actual 2>&1 &&
echo "fatal: cherry-pick: --no-rerere-autoupdate cannot be used with --continue" >expect &&
test_cmp expect actual &&
git cherry-pick --abort
'

# When the option is given repeatedly, the last occurrence wins.
test_expect_success 'cherry-pick --rerere-autoupdate more than once' '
test_must_fail git cherry-pick --rerere-autoupdate --rerere-autoupdate foo..bar-main &&
test_cmp foo-expect foo &&
git diff-files --quiet &&
git cherry-pick --abort &&
test_must_fail git cherry-pick --rerere-autoupdate --no-rerere-autoupdate --rerere-autoupdate foo..bar-main &&
test_cmp foo-expect foo &&
git diff-files --quiet &&
git cherry-pick --abort &&
test_must_fail git cherry-pick --rerere-autoupdate --no-rerere-autoupdate foo..bar-main &&
test_must_fail git diff-files --quiet &&
git cherry-pick --abort
'

# With rerere disabled the raw conflict markers must remain in the file.
test_expect_success 'cherry-pick conflict without rerere' '
test_config rerere.enabled false &&
test_must_fail git cherry-pick foo-main &&
grep ===== foo &&
grep foo-dev foo &&
grep foo-main foo
'

test_done
|
unknown
|
github
|
https://github.com/git/git
|
t/t3504-cherry-pick-rerere.sh
|
""" Automate Commands """
from paver.easy import *
import paver.doctools
from paver.tasks import task
from pip.req import parse_requirements
from paver.setuputils import (
install_distutils_tasks
)
# -- REQUIRED-FOR: setup, sdist, ...
# NOTE: Adds a lot more python-project related tasks.
install_distutils_tasks()
# ----------------------------------------------------------------------------
# PROJECT CONFIGURATION (for sdist/setup mostly):
# ----------------------------------------------------------------------------
# Project metadata used by the setup() options below.
NAME = "MemSim"
# Single source of truth for the version string.
VERSION = open("VERSION.txt").read().strip()
DESCRIPTION = "Memory Mangement Simulator"
CLASSIFIERS = """\
Development Status :: 4 - Beta
Environment :: Console
Framework :: behave
Intended Audience :: Developers
License :: OSI Approved :: New BSD License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Software Development
Topic :: Documentation
Topic :: Education
"""
# ----------------------------------------------------------------------------
# TASK CONFIGURATION:
# ----------------------------------------------------------------------------
# Paver option bundles: 'setup' feeds distutils/setuptools; the rest
# configure individual tasks.
options(
    setup=dict(
        name=NAME,
        version=VERSION,
        url="http://pypi.python.org/pypi/%s/" % NAME,
        author="Kyle Grierson",
        author_email="grierson@openmailbox.net",
        license="BSD",
        description=DESCRIPTION,
        keywords="utility",
        platforms=['any'],
        classifiers=CLASSIFIERS.splitlines(),
        include_package_data=True,
    ),
    minilib=Bunch(
        extra_files=['doctools', 'virtual']
    ),
    pip=Bunch(
        requirements_files=["requirements.txt"],
    ),
    test=Bunch(
        default_args=["features/"],
        behave_formatter="progress",
    ),
    pylint=Bunch(default_args=NAME),
)
# Commands
# Commands — thin wrappers that shell out to the underlying tools.
@task
def run():
    """run"""
    sh('python3 source/main.py')

@task
def unit():
    """unit_test"""
    sh('py.test test/')

@task
def acceptance():
    """bdd"""
    sh('behave')

@task
def pylint():
    """pylint"""
    sh('pylint source/')

@task
def cov():
    """cov"""
    sh('py.test --cov=source/')
# Generate Reports
# Generate Reports — each task writes its output under reports/.
@task
def report_pylint():
    """lint"""
    #sh('pylint --msg-template="{path}:{line}:[{msg_id}({symbol}), {obj}] {msg}" memsim/ > reports/pylint.txt')
    sh('pylint source/ --output-format=html > reports/pylint.html')

@task
def report_cov():
    """cov"""
    sh('py.test --cov-report xml --cov=source/')
    sh('mv coverage.xml reports/.')

@task
def report_unit():
    """cov"""
    sh('py.test --junitxml=reports/junit_unit.xml')

@task
def report_acceptance():
    """report_sbe"""
    sh('behave --junit --junit-directory=reports/')

# NOTE(review): unlike the tasks above, 'report' has @needs but no @task
# decorator — confirm paver still registers it as a runnable task; if not,
# add @task above @needs.
@needs('report_acceptance', 'report_unit', 'report_cov', 'report_pylint')
def report():
    """report"""
    pass
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""Bisection algorithms."""
def insort_right(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the right of the rightmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.

    Raises ValueError if lo is negative.

    Fix: midpoint uses floor division (//) instead of /, so the code also
    works on Python 3 where / is true division and a float mid would raise
    TypeError when indexing.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    while lo < hi:
        mid = (lo + hi) // 2
        # Equal elements go to the right partition, giving the rightmost slot.
        if x < a[mid]:
            hi = mid
        else:
            lo = mid + 1
    a.insert(lo, x)
insort = insort_right   # backward compatibility
def bisect_right(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e <= x, and all e in
    a[i:] have e > x. So if x already appears in the list, i points just
    beyond the rightmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.

    Raises ValueError if lo is negative.

    Fix: midpoint uses floor division (//) instead of /, so the code also
    works on Python 3 where / is true division and a float mid would raise
    TypeError when indexing.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    while lo < hi:
        mid = (lo + hi) // 2
        # x < a[mid] keeps equal elements on the left, so the final index
        # lands just past the rightmost run of x.
        if x < a[mid]:
            hi = mid
        else:
            lo = mid + 1
    return lo
bisect = bisect_right   # backward compatibility
def insort_left(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the left of the leftmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.

    Raises ValueError if lo is negative.

    Fix: midpoint uses floor division (//) instead of /, so the code also
    works on Python 3 where / is true division and a float mid would raise
    TypeError when indexing.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    while lo < hi:
        mid = (lo + hi) // 2
        # Equal elements go to the right partition, giving the leftmost slot.
        if a[mid] < x:
            lo = mid + 1
        else:
            hi = mid
    a.insert(lo, x)
def bisect_left(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e < x, and all e in
    a[i:] have e >= x. So if x already appears in the list, i points just
    before the leftmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.

    Raises ValueError if lo is negative.

    Fix: midpoint uses floor division (//) instead of /, so the code also
    works on Python 3 where / is true division and a float mid would raise
    TypeError when indexing.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    while lo < hi:
        mid = (lo + hi) // 2
        # a[mid] < x keeps equal elements on the right, so the final index
        # lands just before the leftmost run of x.
        if a[mid] < x:
            lo = mid + 1
        else:
            hi = mid
    return lo
|
unknown
|
codeparrot/codeparrot-clean
| ||
import pytest
import random
import string
import functools
from unittest import mock
from tests.utils import async
from waterbutler.core import streams
@pytest.fixture
def blob():
    # 50 distinct printable characters (random.sample draws without
    # replacement), UTF-8 encoded — a fresh random payload per test.
    return ''.join(random.sample(string.printable, 50)).encode('utf-8')
class TestMultiStream:
    # Verifies that streams.MultiStream concatenates its member streams and
    # honors partial reads across stream boundaries. Uses pre-async/await
    # generator-based coroutines ('yield from' with the @async wrapper).

    @async
    def test_single_stream(self, blob):
        # A MultiStream over one stream reads back the stream verbatim.
        stream = streams.MultiStream(streams.StringStream(blob))
        data = yield from stream.read()
        assert data == blob

    @async
    def test_double_same_stream(self, blob):
        # Two member streams are concatenated in order.
        stream = streams.MultiStream(
            streams.StringStream(blob),
            streams.StringStream(blob)
        )
        data = yield from stream.read()
        assert data == (blob * 2)

    @async
    def test_1_at_a_time_single_stream(self, blob):
        # Byte-by-byte reads return the payload in order.
        stream = streams.MultiStream(streams.StringStream(blob))
        for i in range(len(blob)):
            assert blob[i:i + 1] == (yield from stream.read(1))

    @async
    def test_1_at_a_time_many_stream(self, blob):
        # Byte-by-byte reads cross member-stream boundaries seamlessly.
        count = 4
        stream = streams.MultiStream(*[streams.StringStream(blob) for _ in range(count)])
        for _ in range(count):
            for i in range(len(blob)):
                assert blob[i:i + 1] == (yield from stream.read(1))
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
#
# Program Parameters
#
import os
import sys
import subprocess
from os import access, getenv, X_OK
# Name of the bundled jar, resolved relative to this wrapper script's dir.
jar_file = 'DeDup-0.12.5.jar'
# Heap defaults applied only when the caller passes no -Xm* option and
# _JAVA_OPTIONS is unset (see jvm_opts).
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path.

    The path is first canonicalized (symlinks resolved, made absolute),
    then its final component is dropped.
    """
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the executable name of the Java interpreter.

    Prefers $JAVA_HOME/bin/java when it exists and is executable;
    otherwise falls back to bare 'java' from the PATH.
    """
    java_home = getenv('JAVA_HOME')
    java_bin = os.path.join('bin', 'java')
    if java_home:
        candidate = os.path.join(java_home, java_bin)
        if access(candidate, X_OK):
            return candidate
    return 'java'
def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 3-tuple of lists of strings of the form:
      (memory_options, prop_options, passthrough_options)

    Improvements over the original: the duplicated -D/-XX branches are
    merged, `== None` is replaced with the idiomatic `is None` (identical
    semantics for str-or-None), and `mem_opts == []` with `not mem_opts`.
    """
    mem_opts = []
    prop_opts = []
    pass_args = []

    for arg in argv:
        if arg.startswith('-D') or arg.startswith('-XX'):
            # JVM system properties and advanced (-XX) options.
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            # Heap options such as -Xms / -Xmx.
            mem_opts.append(arg)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # Apply the heap defaults only when the caller supplied no -Xm* option
    # AND _JAVA_OPTIONS is not set at all; an empty (but set) value still
    # suppresses the defaults, matching the original behaviour.
    if not mem_opts and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args)
def main():
    # Resolve the Java interpreter and the directory this wrapper lives in;
    # the jar is expected to sit next to the (symlink-resolved) wrapper.
    java = java_executable()
    jar_dir = real_dirname(sys.argv[0])
    (mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])
    # When the first passthrough argument looks like a fully qualified class
    # name (package prefix 'eu'), run it off the classpath instead of the
    # jar's default main class.
    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    jar_path = os.path.join(jar_dir, jar_file)
    java_args = [java]+ mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
    # --jar_dir is a wrapper-only query: print the jar location and exit
    # without launching Java. Note that java_args is still built above (and
    # would include '--jar_dir' in pass_args) — harmless, since it is only
    # executed in the else branch.
    if '--jar_dir' in sys.argv[1:]:
        print(jar_path)
    else:
        # Exit with the JVM's own exit status.
        sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
import time
import inspect
# C-level "method-wrapper" type (e.g. type(object().__str__)); used in
# Proxy.__getattr__ to recognize built-in method slots returned by getattr().
method_wrapper = type(object().__str__)
# NOTE(review): this enumeration uses the Python 2 `types` module surface
# (BooleanType, StringTypes, XRangeType, ...) reached through `inspect.types`;
# it raises AttributeError on Python 3. Values listed here are treated as
# plain data and are NOT wrapped by Proxy (see is_wrapping below).
base_types = [inspect.types.BooleanType,
              inspect.types.BufferType,
              inspect.types.CodeType,
              inspect.types.ComplexType,
              inspect.types.DictProxyType,
              inspect.types.DictType,
              inspect.types.DictionaryType,
              inspect.types.EllipsisType,
              inspect.types.FileType,
              inspect.types.FloatType,
              inspect.types.FrameType,
              inspect.types.GeneratorType,
              inspect.types.GetSetDescriptorType,
              inspect.types.IntType,
              inspect.types.ListType,
              inspect.types.LongType,
              inspect.types.MemberDescriptorType,
              inspect.types.ModuleType,
              inspect.types.NoneType,
              inspect.types.NotImplementedType,
              inspect.types.SliceType,
              inspect.types.StringType,
              inspect.types.StringTypes,
              inspect.types.TracebackType,
              inspect.types.TupleType,
              inspect.types.TypeType,
              inspect.types.UnicodeType,
              inspect.types.XRangeType]
def is_wrapping(x):
    """Return True when type(x) is not one of the known base types."""
    kind = type(x)
    return all(kind is not base for base in base_types)
class Proxy:
    """Retrying wrapper around a client object.

    Calls through the proxy that raise are retried up to `retry` extra
    times, sleeping `wait_time` seconds between attempts. Attribute access
    returns further Proxy instances for method-like or non-primitive
    attributes, so retries apply down the whole attribute chain.
    """
    def __init__(self, client, retry, wait_time):
        # client: wrapped callable/object; retry: max extra attempts;
        # wait_time: seconds to sleep between attempts.
        self.client = client
        self.retry = retry
        self.wait_time = wait_time
    def wait(self):
        # Fixed-interval back-off between retries.
        time.sleep(self.wait_time)
    def __call__(self, *args, **kwargs):
        c = 0
        result = None
        is_retry = True
        while is_retry:
            try:
                result = self.client(*args, **kwargs)
                is_retry = False
            except Exception as e:
                if c < self.retry:
                    c += 1
                    self.wait()
                else:
                    # Attempts exhausted: surface the last exception.
                    raise e
        return result
    def __getattr__(self, name):
        # Wrap bound methods, C-level method wrappers, and any attribute
        # whose type is not in base_types, so that nested calls are also
        # retried; primitive values pass through untouched.
        attr = getattr(self.client, name)
        if inspect.ismethod(attr) or (type(attr) is method_wrapper) or is_wrapping(attr):
            return Proxy(attr, self.retry, self.wait_time)
        return attr
|
unknown
|
codeparrot/codeparrot-clean
| ||
//go:build !linux
package vfs
import (
"github.com/moby/go-archive/chrootarchive"
"github.com/moby/sys/user"
)
// dirCopy copies the contents of srcDir into dstDir by streaming a tar
// archive through a chroot-confined archiver, using an empty identity
// mapping (no uid/gid remapping). This is the non-Linux fallback — see
// the build constraint at the top of the file.
func dirCopy(srcDir, dstDir string) error {
	return chrootarchive.NewArchiver(user.IdentityMapping{}).CopyWithTar(srcDir, dstDir)
}
|
go
|
github
|
https://github.com/moby/moby
|
daemon/graphdriver/vfs/copy_unsupported.go
|
#! /usr/bin/python
import unittest
import sys
import os
import json
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
from nysa.ibuilder.lib import verilog_utils as vutils
TEST_MODULE_LOCATION = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
"fake",
"test_wb_slave.v"))
#print "test module location: %s" % TEST_MODULE_LOCATION
GPIO_FILENAME = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
"mock",
"gpio_module_tags.txt"))
CAMERA_FILENAME = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
"mock",
"sf_camera_module_tags.txt"))
class Test (unittest.TestCase):
    """Unit tests for the ibuilder verilog_utils module (vutils)."""
    def setUp(self):
        # Repository base is one directory up from this test file.
        base = os.path.join(os.path.dirname(__file__), os.pardir)
        self.nysa_base = os.path.abspath(base)
        # Load the mock module-tag dictionaries used by the port/signal tests.
        with open(GPIO_FILENAME, "r") as f:
            self.gpio_tags = json.load(f)
        with open(CAMERA_FILENAME, "r") as f:
            self.camera_tags = json.load(f)
        self.dbg = False
    def test_get_module_buffer_tags(self):
        # Tags parsed from an in-memory buffer of the test slave module.
        with open(TEST_MODULE_LOCATION, "r") as f:
            module_buffer = f.read()
        tags = vutils.get_module_buffer_tags(module_buffer)
        assert "parameters" in tags
        assert "keywords" in tags
        assert "arbiter_masters" in tags
        assert "module" in tags
        assert tags["module"] == "wb_test"
        assert "ports" in tags
    def test_get_module_tags(self):
        # Same expectations as above, parsing straight from the file path.
        tags = vutils.get_module_tags(TEST_MODULE_LOCATION)
        assert "parameters" in tags
        assert "keywords" in tags
        assert "arbiter_masters" in tags
        assert "module" in tags
        assert tags["module"] == "wb_test"
        assert "ports" in tags
    def test_remove_comments(self):
        """try and remove all comments from a buffer"""
        bufin = "not comment/*comment\n*/\n//comment\n/*\nabc\n*/something//comment"
        output_buffer = vutils.remove_comments(bufin)
        good = "not comment\n\nsomething\n"
        self.assertEqual(output_buffer, good)
    def test_generate_module_port_signals(self):
        # The generated instantiation text must contain this exact block.
        buf = vutils.generate_module_port_signals(invert_reset = False,
                                                  name = "gpio_device",
                                                  prename = "test",
                                                  module_tags = self.gpio_tags)
        test_str = ""\
        "wb_gpio #(\n"\
        "  .DEFAULT_INTERRUPT_MASK(0 ),\n"\
        "  .DEFAULT_INTERRUPT_EDGE(0 )\n"\
        ")gpio_device (\n"\
        "  .clk (clk ),\n"\
        "  .rst (rst ),\n"\
        "\n" \
        "  //inputs\n"\
        "  .gpio_in (test_gpio_in ),\n"\
        "  .i_wbs_adr (test_i_wbs_adr ),\n"\
        "  .i_wbs_cyc (test_i_wbs_cyc ),\n"\
        "  .i_wbs_dat (test_i_wbs_dat ),\n"\
        "  .i_wbs_sel (test_i_wbs_sel ),\n"\
        "  .i_wbs_stb (test_i_wbs_stb ),\n"\
        "  .i_wbs_we (test_i_wbs_we ),\n"\
        "\n" \
        "  //outputs\n"\
        "  .debug (test_debug ),\n"\
        "  .gpio_out (test_gpio_out ),\n"\
        "  .o_wbs_ack (test_o_wbs_ack ),\n"\
        "  .o_wbs_dat (test_o_wbs_dat ),\n"\
        "  .o_wbs_int (test_o_wbs_int )\n"\
        ");"
        assert test_str in buf
    def test_get_port_count(self):
        port_count = vutils.get_port_count(self.gpio_tags)
        assert port_count == 14
    def test_create_reg_buf_from_dict_single(self):
        buf = vutils.create_reg_buf_from_dict("test", {"size": 1})
        assert "reg test;" in buf
    def test_create_reg_buf_from_dict_array(self):
        d = {"size": 2, "max_val": 1, "min_val": 0}
        buf = vutils.create_reg_buf_from_dict("test", d)
        assert "reg [1:0] test;" in buf
    def test_create_reg_buf_single(self):
        buf = vutils.create_reg_buf("test", size = 1, max_val = 0, min_val = 0)
        assert "reg test;" in buf
    def test_create_reg_buf_array(self):
        buf = vutils.create_reg_buf("test", size = 2, max_val = 1, min_val = 0)
        assert "reg [1:0] test;" in buf
    def test_create_wire_buf_from_dict_single(self):
        buf = vutils.create_wire_buf_from_dict("test", {"size": 1})
        assert "wire test;" in buf
    def test_create_wire_buf_from_dict_array(self):
        d = {"size": 2, "max_val": 1, "min_val": 0}
        buf = vutils.create_wire_buf_from_dict("test", d)
        assert "wire [1:0] test;" in buf
    def test_create_wire_buf_single(self):
        # BUG FIX: this method was previously defined twice; the second
        # definition (the array case) silently shadowed this one, so the
        # single-wire case never ran.
        buf = vutils.create_wire_buf("test", size = 1, max_val = 0, min_val = 0)
        assert "wire test;" in buf
    def test_create_wire_buf_array(self):
        # Renamed from the duplicate test_create_wire_buf_single definition.
        buf = vutils.create_wire_buf("test", size = 2, max_val = 1, min_val = 0)
        assert "wire [1:0] test;" in buf
    def test_generate_assigns_buffer(self):
        bind = {"test1":{"direction":"input", "loc":"clk"}, "test2":{"direction":"input","loc":"rst"}}
        ibind = {"a":{"signal":"test1"}}
        buf = vutils.generate_assigns_buffer(invert_reset = False,
                                             bindings = bind,
                                             internal_bindings = ibind)
        bind_str = ""\
        "//Internal Bindings\n"\
        "assign a = test1;\n"\
        "\n"\
        "\n"\
        "//Bindings\n"\
        "assign test1 = clk;\n"\
        "assign test2 = rst;\n"
        assert bind_str in buf
    def test_port_cmp(self):
        # NOTE(review): "p[10]" vs "p[1]" is expected to compare as -1 here,
        # which suggests a lexicographic (not numeric) ordering — confirm
        # this is the intended contract of port_cmp.
        assert vutils.port_cmp("p[0]", "p[1]") == -1
        assert vutils.port_cmp("p[1]", "p[1]") == 0
        assert vutils.port_cmp("p[10]", "p[1]") == -1
if __name__ == "__main__":
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jmx.export.metadata;
import org.jspecify.annotations.Nullable;
/**
* Metadata that indicates to expose a given bean property as JMX attribute.
* Only valid when used on a JavaBean getter or setter.
*
* @author Rob Harrop
* @since 1.2
* @see org.springframework.jmx.export.assembler.MetadataMBeanInfoAssembler
* @see org.springframework.jmx.export.MBeanExporter
*/
public class ManagedAttribute extends AbstractJmxAttribute {
	/**
	 * Empty attributes.
	 */
	public static final ManagedAttribute EMPTY = new ManagedAttribute();
	private @Nullable Object defaultValue;
	private @Nullable String persistPolicy;
	// -1: presumably "no persist period configured" — confirm against
	// MetadataMBeanInfoAssembler's handling of negative values.
	private int persistPeriod = -1;
	/**
	 * Set the default value of this attribute.
	 */
	public void setDefaultValue(@Nullable Object defaultValue) {
		this.defaultValue = defaultValue;
	}
	/**
	 * Return the default value of this attribute.
	 */
	public @Nullable Object getDefaultValue() {
		return this.defaultValue;
	}
	/**
	 * Set the persist policy for this attribute.
	 */
	public void setPersistPolicy(@Nullable String persistPolicy) {
		this.persistPolicy = persistPolicy;
	}
	/**
	 * Return the persist policy for this attribute.
	 */
	public @Nullable String getPersistPolicy() {
		return this.persistPolicy;
	}
	/**
	 * Set the persist period for this attribute.
	 */
	public void setPersistPeriod(int persistPeriod) {
		this.persistPeriod = persistPeriod;
	}
	/**
	 * Return the persist period for this attribute.
	 */
	public int getPersistPeriod() {
		return this.persistPeriod;
	}
}
|
java
|
github
|
https://github.com/spring-projects/spring-framework
|
spring-context/src/main/java/org/springframework/jmx/export/metadata/ManagedAttribute.java
|
"""Test OpenBabel executables from Python
Note: Python bindings not used
On Windows or Linux, you can run these tests at the commandline
in the build folder with:
"C:\Program Files\CMake 2.6\bin\ctest.exe" -C CTestTestfile.cmake
-R pytest -VV
You could also "chdir" into build/test and run the test file directly:
python ../../../test/testfastsearch.py
In both cases, the test file is run directly from the source folder,
and so you can quickly develop the tests and try them out.
"""
import unittest
from testbabel import run_exec, BaseTest
class TestSym(BaseTest):
    """A series of tests relating to fastsearch functionality"""
    def setUp(self):
        # These tests shell out to the OpenBabel 'babel' executable; bail
        # early if it cannot be found.
        self.canFindExecutable("babel")
    def testSingleHit(self):
        """PR#2955101 - Difficulty reading from a fastsearch index"""
        # Ten SMILES strings used to build a small fastsearch index
        # (ten.smi -> ten.fs). The literal below is data — do not reformat.
        smiles = """C12(C(N(C(=O)C)c3c2cccc3)=O)Nc2c(ccc(c2N1)OCCCC)OCCCC
n1c([nH]c(cc1c1ccccc1)=O)c1ccc(cc1)Br
n1c(nc2c(c1N(C)C)cccc2)c1c(O)cccc1
C1(/[CH]2[CH]3\C(=C4/CC(C)(C)NC(C4)(C)C)C=C[CH]3[CH]1C=C2)=C1/CC(C)(C)NC(C1)(C)C
n1c(c2ccc(C(=O)O)cc2)ccc(c1)CCCCC
N1(C(CN(CC1=O)C(=O)C1CCCCC1)=O)CCc1ccccc1
S(N1[CH](c2ccccc2C=C1)C#N)(c1ccc(cc1)C)(=O)=O
c12c(c(OC)c3c(c1OC)occ3)ccc(o2)=O
c12c(O[CH](C1=O)C(C)C)cc1c(c2)ccc(=O)o1
c12[C]3([C@H]4([N@@](CCc1c1ccccc1[nH]2)C[C@H](C=C4CC)C3))C(=O)OC"""
        outputfile = open("ten.smi", "w")
        outputfile.write(smiles)
        outputfile.close()
        # Build the fastsearch index and check all ten molecules converted.
        output, error = run_exec("babel ten.smi ten.fs")
        self.canFindFile("ten.fs")
        self.assertConverted(error, 10)
        # Substructure search should match exactly one of the ten entries.
        query = "Nc2nc(c1ccccc1)nc3ccccc23"
        output, error = run_exec("babel ten.fs -ifs -s %s -osmi" % query)
        self.assertConverted(error, 1)
        # Same query with additional flags (-at 0.5 -aa); assumed to be a
        # similarity-threshold search — confirm against OpenBabel CLI docs.
        output, error = run_exec("babel ten.fs -ifs -s %s -at 0.5 -aa -osmi" % query)
        self.assertConverted(error, 1)
if __name__ == "__main__":
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#! /usr/bin/env python
# -*- mode: python; indent-tabs-mode: nil; -*-
# vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
#
# Copyright (C) 2011 Patrick Crews
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
import tarfile
from lib.util.mysqlBaseTestCase import mysqlBaseTestCase
server_requirements = [['--innodb-file-per-table']]
servers = []
server_manager = None
test_executor = None
# we explicitly use the --no-timestamp option
# here. We will be using a generic / vanilla backup dir
backup_path = None
class basicTest(mysqlBaseTestCase):
    """Backup/restore round-trip through innobackupex tar streaming with
    symlinked .ibd files (regression test for bug #387587).

    NOTE(review): this suite targets Python 2 — it uses long literals
    (10L, 100L) in the expected query output below.
    """
    def setUp(self):
        master_server = servers[0] # assumption that this is 'master'
        # Local variable only: the module-level backup_path global is NOT
        # updated here; this just computes the path for cleanup.
        backup_path = os.path.join(master_server.vardir, '_xtrabackup')
        # remove backup paths
        for del_path in [backup_path]:
            if os.path.exists(del_path):
                shutil.rmtree(del_path)
    def test_ib_stream(self):
        """Tests for tar4ibd + symlinks (bug #387587)"""
        self.servers = servers
        innobackupex = test_executor.system_manager.innobackupex_path
        xtrabackup = test_executor.system_manager.xtrabackup_path
        master_server = servers[0] # assumption that this is 'master'
        backup_path = os.path.join(master_server.vardir, '_xtrabackup')
        tar_file_path = os.path.join(backup_path,'out.tar')
        output_path = os.path.join(master_server.vardir, 'innobackupex.out')
        exec_path = os.path.dirname(innobackupex)
        # populate our server with a test bed
        test_cmd = "./gentest.pl --gendata=conf/percona/percona.zz"
        retcode, output = self.execute_randgen(test_cmd, test_executor, master_server)
        # Move some of our .ibd files and make symlinks
        # (CC.ibd: single symlink; BB.ibd: chained symlink -> symlink)
        shutil.move( os.path.join(master_server.datadir,'test/CC.ibd')
                   , master_server.vardir)
        os.symlink(os.path.join(master_server.vardir,'CC.ibd')
                  ,os.path.join(master_server.datadir,'test/CC.ibd'))
        shutil.move( os.path.join(master_server.datadir,'test/BB.ibd')
                   , master_server.vardir)
        os.symlink(os.path.join(master_server.vardir,'BB.ibd')
                  ,os.path.join(master_server.vardir,'BB_link.ibd'))
        os.symlink(os.path.join(master_server.vardir,'BB_link.ibd')
                  ,os.path.join(master_server.datadir,'test/BB.ibd'))
        # take a backup
        try:
            os.mkdir(backup_path)
        except OSError:
            # directory may already exist from a previous run
            pass
        cmd = [ innobackupex
              , "--defaults-file=%s" %master_server.cnf_file
              , "--stream=tar"
              , "--user=root"
              , "--port=%d" %master_server.master_port
              , "--host=127.0.0.1"
              , "--no-timestamp"
              , "--ibbackup=%s" %xtrabackup
              , "%s > %s" %(backup_path,tar_file_path)
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertTrue(retcode==0,output)
        # stop the server
        master_server.stop()
        # extract our backup tarball
        cmd = "tar -ivxf %s" %tar_file_path
        retcode, output = self.execute_cmd(cmd, output_path, backup_path, True)
        self.assertEqual(retcode,0,output)
        # Check for Bug 723318 - seems quicker than separate test case
        self.assertTrue('xtrabackup_binary' in os.listdir(backup_path)
                       , msg = "Bug723318: xtrabackup_binary not included in tar archive when streaming")
        # do prepare on backup
        cmd = [ innobackupex
              , "--apply-log"
              , "--no-timestamp"
              , "--use-memory=500M"
              , "--ibbackup=%s" %xtrabackup
              , backup_path
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertEqual(retcode,0,output)
        # remove old datadir
        shutil.rmtree(master_server.datadir)
        os.mkdir(master_server.datadir)
        # Remove our symlinks
        os.remove(os.path.join(master_server.vardir,'CC.ibd'))
        os.remove(os.path.join(master_server.vardir,'BB.ibd'))
        os.remove(os.path.join(master_server.vardir,'BB_link.ibd'))
        # restore from backup
        cmd = [ innobackupex
              , "--defaults-file=%s" %master_server.cnf_file
              , "--copy-back"
              , "--ibbackup=%s" %(xtrabackup)
              , backup_path
              ]
        cmd = " ".join(cmd)
        retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
        self.assertEqual(retcode,0, output)
        # restart server (and ensure it doesn't crash)
        master_server.start()
        self.assertEqual(master_server.status,1, 'Server failed restart from restored datadir...')
        # Check the server is ok: row counts must survive the round trip
        query = "SELECT COUNT(*) FROM test.BB"
        expected_output = ((10L,),)
        retcode, output = self.execute_query(query, master_server)
        self.assertEqual(output, expected_output, msg = "%s || %s" %(output, expected_output))
        query = "SELECT COUNT(*) FROM test.CC"
        expected_output = ((100L,),)
        retcode, output = self.execute_query(query, master_server)
        self.assertEqual(output, expected_output, msg = "%s || %s" %(output, expected_output))
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""A module containing the `_NestedSequence` protocol."""
from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
if TYPE_CHECKING:
from collections.abc import Iterator
__all__ = ["_NestedSequence"]
@runtime_checkable
class _NestedSequence[T](Protocol):
    """A protocol for representing nested sequences.

    Warning
    -------
    `_NestedSequence` currently does not work in combination with typevars,
    *e.g.* ``def func(a: _NestedSequence[T]) -> T: ...``.

    See Also
    --------
    collections.abc.Sequence
        ABCs for read-only and mutable :term:`sequences`.

    Examples
    --------
    .. code-block:: python

        >>> from typing import TYPE_CHECKING
        >>> import numpy as np
        >>> from numpy._typing import _NestedSequence

        >>> def get_dtype(seq: _NestedSequence[float]) -> np.dtype[np.float64]:
        ...     return np.asarray(seq).dtype

        >>> a = get_dtype([1.0])
        >>> b = get_dtype([[1.0]])
        >>> c = get_dtype([[[1.0]]])
        >>> d = get_dtype([[[[1.0]]]])

        >>> if TYPE_CHECKING:
        ...     reveal_locals()
        ...     # note: Revealed local types are:
        ...     # note:     a: numpy.dtype[numpy.floating[numpy._typing._64Bit]]
        ...     # note:     b: numpy.dtype[numpy.floating[numpy._typing._64Bit]]
        ...     # note:     c: numpy.dtype[numpy.floating[numpy._typing._64Bit]]
        ...     # note:     d: numpy.dtype[numpy.floating[numpy._typing._64Bit]]

    """

    # The bodies below are placeholders: the protocol is structural, so
    # these methods are never invoked — implementers supply their own.
    def __len__(self, /) -> int:
        """Implement ``len(self)``."""
        raise NotImplementedError

    def __getitem__(self, index: int, /) -> "T | _NestedSequence[T]":
        """Implement ``self[x]``."""
        raise NotImplementedError

    def __contains__(self, x: object, /) -> bool:
        """Implement ``x in self``."""
        raise NotImplementedError

    def __iter__(self, /) -> "Iterator[T | _NestedSequence[T]]":
        """Implement ``iter(self)``."""
        raise NotImplementedError

    def __reversed__(self, /) -> "Iterator[T | _NestedSequence[T]]":
        """Implement ``reversed(self)``."""
        raise NotImplementedError

    def count(self, value: Any, /) -> int:
        """Return the number of occurrences of `value`."""
        raise NotImplementedError

    def index(self, value: Any, /) -> int:
        """Return the first index of `value`."""
        raise NotImplementedError
|
python
|
github
|
https://github.com/numpy/numpy
|
numpy/_typing/_nested_sequence.py
|
#!/usr/bin/env python
#
# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
YCM_EXTRA_CONF_FILENAME = '.ycm_extra_conf.py'
CONFIRM_CONF_FILE_MESSAGE = ('Found {0}. Load? \n\n(Question can be turned '
'off with options, see YCM docs)')
NO_EXTRA_CONF_FILENAME_MESSAGE = ( 'No {0} file detected, so no compile flags '
'are available. Thus no semantic support for C/C++/ObjC/ObjC++. Go READ THE '
'DOCS *NOW*, DON\'T file a bug report.' ).format( YCM_EXTRA_CONF_FILENAME )
NO_DIAGNOSTIC_SUPPORT_MESSAGE = ( 'YCM has no diagnostics support for this '
'filetype; refer to Syntastic docs if using Syntastic.')
class ServerError( Exception ):
  """Base class for errors the server reports back to the client."""
  def __init__( self, message ):
    Exception.__init__( self, message )
class UnknownExtraConf( ServerError ):
  """Raised when an extra conf file exists but has not been confirmed."""
  def __init__( self, extra_conf_file ):
    self.extra_conf_file = extra_conf_file
    super( UnknownExtraConf, self ).__init__(
        CONFIRM_CONF_FILE_MESSAGE.format( extra_conf_file ) )
class NoExtraConfDetected( ServerError ):
  """Raised when no extra conf file could be located at all."""
  def __init__( self ):
    super( NoExtraConfDetected, self ).__init__(
        NO_EXTRA_CONF_FILENAME_MESSAGE )
class NoDiagnosticSupport( ServerError ):
  """Raised when the current filetype has no diagnostics support."""
  def __init__( self ):
    super( NoDiagnosticSupport, self ).__init__(
        NO_DIAGNOSTIC_SUPPORT_MESSAGE )
def BuildGoToResponse( filepath, line_num, column_num, description = None ):
  """Build a GoTo response dict with a canonicalized filepath; the
  'description' key is included only when the argument is truthy."""
  response = dict( filepath = os.path.realpath( filepath ),
                   line_num = line_num,
                   column_num = column_num )
  if description:
    response.update( description = description )
  return response
def BuildDescriptionOnlyGoToResponse( text ):
  """GoTo response carrying only a human-readable description."""
  return dict( description = text )
# TODO: Look at all the callers and ensure they are not using this instead of an
# exception.
def BuildDisplayMessageResponse( text ):
  """Wrap a plain message for display in the client."""
  return dict( message = text )
def BuildCompletionData( insertion_text,
                         extra_menu_info = None,
                         detailed_info = None,
                         menu_text = None,
                         kind = None ):
  """Assemble a completion entry dict; every optional field is added only
  when its value is truthy."""
  completion_data = { 'insertion_text': insertion_text }
  optional_fields = ( ( 'extra_menu_info', extra_menu_info ),
                      ( 'menu_text', menu_text ),
                      ( 'detailed_info', detailed_info ),
                      ( 'kind', kind ) )
  for key, value in optional_fields:
    if value:
      completion_data[ key ] = value
  return completion_data
def BuildDiagnosticData( diagnostic ):
  """Convert a diagnostic object into its wire-format dict."""
  def BuildLocationData( location ):
    # The client protocol uses 0-based line and column numbers.
    return {
      'line_num': location.line_number_ - 1,
      'column_num': location.column_number_ - 1,
      'filepath': location.filename_,
    }
  def BuildRangeData( source_range ):
    return {
      'start': BuildLocationData( source_range.start_ ),
      'end': BuildLocationData( source_range.end_ ),
    }
  return {
    'ranges': list( map( BuildRangeData, diagnostic.ranges_ ) ),
    'location': BuildLocationData( diagnostic.location_ ),
    'location_extent': BuildRangeData( diagnostic.location_extent_ ),
    'text': diagnostic.text_,
    'kind': diagnostic.kind_
  }
def BuildExceptionResponse( exception, traceback ):
  """Serialize an exception and its pre-formatted traceback for the client."""
  return dict( exception = exception,
               message = str( exception ),
               traceback = traceback )
|
unknown
|
codeparrot/codeparrot-clean
| ||
import Link from "next/link";
import Avatar from "./avatar";
import DateComponent from "./date";
import CoverImage from "./cover-image";
type PostPreviewProps = {
  title: string;
  coverImage: any;
  date: string;
  excerpt: string;
  author: any;
  slug: string;
};

// Card-style preview for a single post: cover image, linked title,
// publication date, excerpt and (when present) the author's avatar.
function PostPreview(props: PostPreviewProps) {
  const { title, coverImage, date, excerpt, author, slug } = props;
  const postHref = `/posts/${slug}`;
  return (
    <div>
      <div className="mb-5">
        <CoverImage title={title} slug={slug} url={coverImage.url} />
      </div>
      <h3 className="text-3xl mb-3 leading-snug">
        <Link href={postHref} className="hover:underline">
          {title}
        </Link>
      </h3>
      <div className="text-lg mb-4">
        <DateComponent dateString={date} />
      </div>
      <p className="text-lg leading-relaxed mb-4">{excerpt}</p>
      {author && <Avatar name={author.name} picture={author.picture} />}
    </div>
  );
}
// Responsive grid of PostPreview cards under a "More Stories" heading.
export default function MoreStories({ morePosts }: { morePosts: any[] }) {
  const previews = morePosts.map((post) => (
    <PostPreview
      key={post.slug}
      title={post.title}
      coverImage={post.coverImage}
      date={post.date}
      author={post.author}
      slug={post.slug}
      excerpt={post.excerpt}
    />
  ));
  return (
    <section>
      <h2 className="mb-8 text-6xl md:text-7xl font-bold tracking-tighter leading-tight">
        More Stories
      </h2>
      <div className="grid grid-cols-1 md:grid-cols-2 md:gap-x-16 lg:gap-x-32 gap-y-20 md:gap-y-32 mb-32">
        {previews}
      </div>
    </section>
  );
}
|
typescript
|
github
|
https://github.com/vercel/next.js
|
examples/cms-contentful/app/more-stories.tsx
|
/*
* Copyright (C) 2016 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.graph;
import java.util.Optional;
import java.util.Set;
import org.jspecify.annotations.Nullable;
/**
* A class to allow {@link Network} implementations to be backed by a provided delegate. This is not
* currently planned to be released as a general-purpose forwarding class.
*
* @author James Sexton
* @author Joshua O'Madadhain
*/
abstract class ForwardingNetwork<N, E> extends AbstractNetwork<N, E> {
  /** Returns the backing {@link Network} to which every call is forwarded. */
  abstract Network<N, E> delegate();
  // Every override below forwards verbatim to delegate(); no caching,
  // validation, or other behavior is added at this layer.
  @Override
  public Set<N> nodes() {
    return delegate().nodes();
  }
  @Override
  public Set<E> edges() {
    return delegate().edges();
  }
  @Override
  public boolean isDirected() {
    return delegate().isDirected();
  }
  @Override
  public boolean allowsParallelEdges() {
    return delegate().allowsParallelEdges();
  }
  @Override
  public boolean allowsSelfLoops() {
    return delegate().allowsSelfLoops();
  }
  @Override
  public ElementOrder<N> nodeOrder() {
    return delegate().nodeOrder();
  }
  @Override
  public ElementOrder<E> edgeOrder() {
    return delegate().edgeOrder();
  }
  @Override
  public Set<N> adjacentNodes(N node) {
    return delegate().adjacentNodes(node);
  }
  @Override
  public Set<N> predecessors(N node) {
    return delegate().predecessors(node);
  }
  @Override
  public Set<N> successors(N node) {
    return delegate().successors(node);
  }
  @Override
  public Set<E> incidentEdges(N node) {
    return delegate().incidentEdges(node);
  }
  @Override
  public Set<E> inEdges(N node) {
    return delegate().inEdges(node);
  }
  @Override
  public Set<E> outEdges(N node) {
    return delegate().outEdges(node);
  }
  @Override
  public EndpointPair<N> incidentNodes(E edge) {
    return delegate().incidentNodes(edge);
  }
  @Override
  public Set<E> adjacentEdges(E edge) {
    return delegate().adjacentEdges(edge);
  }
  @Override
  public int degree(N node) {
    return delegate().degree(node);
  }
  @Override
  public int inDegree(N node) {
    return delegate().inDegree(node);
  }
  @Override
  public int outDegree(N node) {
    return delegate().outDegree(node);
  }
  @Override
  public Set<E> edgesConnecting(N nodeU, N nodeV) {
    return delegate().edgesConnecting(nodeU, nodeV);
  }
  @Override
  public Set<E> edgesConnecting(EndpointPair<N> endpoints) {
    return delegate().edgesConnecting(endpoints);
  }
  @Override
  public Optional<E> edgeConnecting(N nodeU, N nodeV) {
    return delegate().edgeConnecting(nodeU, nodeV);
  }
  @Override
  public Optional<E> edgeConnecting(EndpointPair<N> endpoints) {
    return delegate().edgeConnecting(endpoints);
  }
  @Override
  public @Nullable E edgeConnectingOrNull(N nodeU, N nodeV) {
    return delegate().edgeConnectingOrNull(nodeU, nodeV);
  }
  @Override
  public @Nullable E edgeConnectingOrNull(EndpointPair<N> endpoints) {
    return delegate().edgeConnectingOrNull(endpoints);
  }
  @Override
  public boolean hasEdgeConnecting(N nodeU, N nodeV) {
    return delegate().hasEdgeConnecting(nodeU, nodeV);
  }
  @Override
  public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
    return delegate().hasEdgeConnecting(endpoints);
  }
}
|
java
|
github
|
https://github.com/google/guava
|
guava/src/com/google/common/graph/ForwardingNetwork.java
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class SharedAccessAuthorizationRuleCreateOrUpdateParameters(Resource):
    """Parameters supplied to the CreateOrUpdate Namespace AuthorizationRules.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id
    :vartype id: str
    :ivar name: Resource name
    :vartype name: str
    :ivar type: Resource type
    :vartype type: str
    :param location: Resource location
    :type location: str
    :param tags: Resource tags
    :type tags: dict
    :param sku: The sku of the created namespace
    :type sku: :class:`Sku <azure.mgmt.notificationhubs.models.Sku>`
    :param properties: Properties of the Namespace AuthorizationRules.
    :type properties: :class:`SharedAccessAuthorizationRuleProperties
     <azure.mgmt.notificationhubs.models.SharedAccessAuthorizationRuleProperties>`
    """
    # Serializer constraints: 'readonly' fields are server-populated and
    # rejected on input; 'location' and 'properties' must be supplied.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'properties': {'required': True},
    }
    # Maps Python attribute names to wire-format keys and msrest type names;
    # '{str}' denotes a string-to-string dict.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'properties': {'key': 'properties', 'type': 'SharedAccessAuthorizationRuleProperties'},
    }
    def __init__(self, location, properties, tags=None, sku=None):
        # Resource-level fields (location/tags/sku) are handled by the base
        # class; only the AuthorizationRule properties payload lives here.
        super(SharedAccessAuthorizationRuleCreateOrUpdateParameters, self).__init__(location=location, tags=tags, sku=sku)
        self.properties = properties
|
unknown
|
codeparrot/codeparrot-clean
| ||
#ifndef _NPY_COMMON_TAG_H_
#define _NPY_COMMON_TAG_H_
#include "../npysort/npysort_common.h"
namespace npy {
template<typename... tags>
struct taglist {
static constexpr unsigned size = sizeof...(tags);
};
struct integral_tag {
};
struct floating_point_tag {
};
struct complex_tag {
};
struct date_tag {
};
struct bool_tag : integral_tag {
using type = npy_bool;
static constexpr NPY_TYPES type_value = NPY_BOOL;
static int less(type const& a, type const& b) {
return BOOL_LT(a, b);
}
static int less_equal(type const& a, type const& b) {
return !less(b, a);
}
};
// Per-dtype trait tags for the signed/unsigned integral NumPy scalars.
// Each tag binds the NumPy C type (`type`), the corresponding NPY_TYPES
// enum constant (`type_value`), and comparison helpers implemented on
// top of the npysort *_LT macros. less_equal is derived as !less(b, a),
// i.e. a <= b iff !(b < a) — valid assuming the *_LT macro is a strict
// total order (true for the integral types here).
struct byte_tag : integral_tag {
  using type = npy_byte;
  static constexpr NPY_TYPES type_value = NPY_BYTE;
  static int less(type const& a, type const& b) {
    return BYTE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct ubyte_tag : integral_tag {
  using type = npy_ubyte;
  static constexpr NPY_TYPES type_value = NPY_UBYTE;
  static int less(type const& a, type const& b) {
    return UBYTE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct short_tag : integral_tag {
  using type = npy_short;
  static constexpr NPY_TYPES type_value = NPY_SHORT;
  static int less(type const& a, type const& b) {
    return SHORT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct ushort_tag : integral_tag {
  using type = npy_ushort;
  static constexpr NPY_TYPES type_value = NPY_USHORT;
  static int less(type const& a, type const& b) {
    return USHORT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct int_tag : integral_tag {
  using type = npy_int;
  static constexpr NPY_TYPES type_value = NPY_INT;
  static int less(type const& a, type const& b) {
    return INT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct uint_tag : integral_tag {
  using type = npy_uint;
  static constexpr NPY_TYPES type_value = NPY_UINT;
  static int less(type const& a, type const& b) {
    return UINT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct long_tag : integral_tag {
  using type = npy_long;
  static constexpr NPY_TYPES type_value = NPY_LONG;
  static int less(type const& a, type const& b) {
    return LONG_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct ulong_tag : integral_tag {
  using type = npy_ulong;
  static constexpr NPY_TYPES type_value = NPY_ULONG;
  static int less(type const& a, type const& b) {
    return ULONG_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct longlong_tag : integral_tag {
  using type = npy_longlong;
  static constexpr NPY_TYPES type_value = NPY_LONGLONG;
  static int less(type const& a, type const& b) {
    return LONGLONG_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct ulonglong_tag : integral_tag {
  using type = npy_ulonglong;
  static constexpr NPY_TYPES type_value = NPY_ULONGLONG;
  static int less(type const& a, type const& b) {
    return ULONGLONG_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
// Tag for npy_half (IEEE 754 half precision stored as a 16-bit value).
// NOTE(review): unlike the other scalar tags, half_tag inherits no
// category base tag (integral/floating_point) — presumably because
// npy_half is represented as a raw bit pattern rather than a native
// float type; confirm against the tag hierarchy defined earlier in
// this header.
struct half_tag {
  using type = npy_half;
  static constexpr NPY_TYPES type_value = NPY_HALF;
  static int less(type const& a, type const& b) {
    return HALF_LT(a, b);
  }
  // a <= b iff !(b < a); relies on HALF_LT's ordering of NaN payloads
  // being consistent — TODO confirm NaN handling matches the other
  // floating tags.
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
// Trait tags for the native floating point scalars. Comparison defers
// to the npysort FLOAT_LT/DOUBLE_LT/LONGDOUBLE_LT macros; less_equal
// is derived as !less(b, a).
struct float_tag : floating_point_tag {
  using type = npy_float;
  static constexpr NPY_TYPES type_value = NPY_FLOAT;
  static int less(type const& a, type const& b) {
    return FLOAT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct double_tag : floating_point_tag {
  using type = npy_double;
  static constexpr NPY_TYPES type_value = NPY_DOUBLE;
  static int less(type const& a, type const& b) {
    return DOUBLE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct longdouble_tag : floating_point_tag {
  using type = npy_longdouble;
  static constexpr NPY_TYPES type_value = NPY_LONGDOUBLE;
  static int less(type const& a, type const& b) {
    return LONGDOUBLE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
// Trait tags for the complex scalars. Ordering semantics come from the
// CFLOAT_LT/CDOUBLE_LT/CLONGDOUBLE_LT macros (defined elsewhere in
// npysort); less_equal is derived as !less(b, a).
struct cfloat_tag : complex_tag {
  using type = npy_cfloat;
  static constexpr NPY_TYPES type_value = NPY_CFLOAT;
  static int less(type const& a, type const& b) {
    return CFLOAT_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct cdouble_tag : complex_tag {
  using type = npy_cdouble;
  static constexpr NPY_TYPES type_value = NPY_CDOUBLE;
  static int less(type const& a, type const& b) {
    return CDOUBLE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct clongdouble_tag : complex_tag {
  using type = npy_clongdouble;
  static constexpr NPY_TYPES type_value = NPY_CLONGDOUBLE;
  static int less(type const& a, type const& b) {
    return CLONGDOUBLE_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
// Trait tags for the datetime64/timedelta64 scalars, both under the
// date_tag category. Comparison defers to DATETIME_LT/TIMEDELTA_LT.
struct datetime_tag : date_tag {
  using type = npy_datetime;
  static constexpr NPY_TYPES type_value = NPY_DATETIME;
  static int less(type const& a, type const& b) {
    return DATETIME_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
struct timedelta_tag : date_tag {
  using type = npy_timedelta;
  static constexpr NPY_TYPES type_value = NPY_TIMEDELTA;
  static int less(type const& a, type const& b) {
    return TIMEDELTA_LT(a, b);
  }
  static int less_equal(type const& a, type const& b) {
    return !less(b, a);
  }
};
// Trait tag for fixed-width byte strings (NPY_STRING). Unlike the
// scalar tags above, values are element arrays, so every helper takes
// an explicit element count `len` and works on pointers; swap/copy
// helpers are provided because string elements cannot be moved by
// simple assignment.
struct string_tag {
  using type = npy_char;
  static constexpr NPY_TYPES type_value = NPY_STRING;
  static int less(type const* a, type const* b, size_t len) {
    return STRING_LT(a, b, len);
  }
  // a <= b iff !(b < a), per the STRING_LT lexicographic order.
  static int less_equal(type const* a, type const* b, size_t len) {
    return !less(b, a, len);
  }
  static void swap(type* a, type* b, size_t len) {
    STRING_SWAP(a, b, len);
  }
  static void copy(type * a, type const* b, size_t len) {
    STRING_COPY(a, b, len);
  }
};
// Trait tag for fixed-width UCS4 unicode strings (NPY_UNICODE).
// Mirrors string_tag: pointer-based comparisons with an explicit
// element count, plus swap/copy helpers for array elements.
struct unicode_tag {
  using type = npy_ucs4;
  static constexpr NPY_TYPES type_value = NPY_UNICODE;
  static int less(type const* a, type const* b, size_t len) {
    return UNICODE_LT(a, b, len);
  }
  // a <= b iff !(b < a), per the UNICODE_LT code-point order.
  static int less_equal(type const* a, type const* b, size_t len) {
    return !less(b, a, len);
  }
  static void swap(type* a, type* b, size_t len) {
    UNICODE_SWAP(a, b, len);
  }
  static void copy(type * a, type const* b, size_t len) {
    UNICODE_COPY(a, b, len);
  }
};
} // namespace npy
#endif
|
c
|
github
|
https://github.com/numpy/numpy
|
numpy/_core/src/common/numpy_tag.h
|
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.base;
import static com.google.common.base.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.testing.NullPointerTester;
import junit.framework.TestCase;
import org.jspecify.annotations.NullMarked;
/**
* Unit test for {@link Strings}.
*
* @author Kevin Bourrillion
*/
@NullMarked
@GwtCompatible
public class StringsTest extends TestCase {
  // --- null/empty conversions ---
  public void testNullToEmpty() {
    assertEquals("", Strings.nullToEmpty(null));
    assertEquals("", Strings.nullToEmpty(""));
    assertEquals("a", Strings.nullToEmpty("a"));
  }
  public void testEmptyToNull() {
    assertThat(Strings.emptyToNull(null)).isNull();
    assertThat(Strings.emptyToNull("")).isNull();
    assertEquals("a", Strings.emptyToNull("a"));
  }
  public void testIsNullOrEmpty() {
    assertTrue(Strings.isNullOrEmpty(null));
    assertTrue(Strings.isNullOrEmpty(""));
    assertFalse(Strings.isNullOrEmpty("a"));
  }
  // --- padStart/padEnd ---
  // assertSame (not assertEquals) is deliberate here: when no padding is
  // needed, the contract is that the very same String instance is returned.
  public void testPadStart_noPadding() {
    assertSame("", Strings.padStart("", 0, '-'));
    assertSame("x", Strings.padStart("x", 0, '-'));
    assertSame("x", Strings.padStart("x", 1, '-'));
    assertSame("xx", Strings.padStart("xx", 0, '-'));
    assertSame("xx", Strings.padStart("xx", 2, '-'));
  }
  public void testPadStart_somePadding() {
    assertEquals("-", Strings.padStart("", 1, '-'));
    assertEquals("--", Strings.padStart("", 2, '-'));
    assertEquals("-x", Strings.padStart("x", 2, '-'));
    assertEquals("--x", Strings.padStart("x", 3, '-'));
    assertEquals("-xx", Strings.padStart("xx", 3, '-'));
  }
  public void testPadStart_negativeMinLength() {
    // A negative minLength is treated as "no padding needed".
    assertSame("x", Strings.padStart("x", -1, '-'));
  }
  // TODO: could remove if we got NPT working in GWT somehow
  public void testPadStart_null() {
    assertThrows(NullPointerException.class, () -> Strings.padStart(null, 5, '0'));
  }
  public void testPadEnd_noPadding() {
    assertSame("", Strings.padEnd("", 0, '-'));
    assertSame("x", Strings.padEnd("x", 0, '-'));
    assertSame("x", Strings.padEnd("x", 1, '-'));
    assertSame("xx", Strings.padEnd("xx", 0, '-'));
    assertSame("xx", Strings.padEnd("xx", 2, '-'));
  }
  public void testPadEnd_somePadding() {
    assertEquals("-", Strings.padEnd("", 1, '-'));
    assertEquals("--", Strings.padEnd("", 2, '-'));
    assertEquals("x-", Strings.padEnd("x", 2, '-'));
    assertEquals("x--", Strings.padEnd("x", 3, '-'));
    assertEquals("xx-", Strings.padEnd("xx", 3, '-'));
  }
  public void testPadEnd_negativeMinLength() {
    assertSame("x", Strings.padEnd("x", -1, '-'));
  }
  public void testPadEnd_null() {
    assertThrows(NullPointerException.class, () -> Strings.padEnd(null, 5, '0'));
  }
  // --- repeat ---
  @SuppressWarnings("InlineMeInliner") // test of method that doesn't just delegate
  public void testRepeat() {
    String input = "20";
    assertEquals("", Strings.repeat(input, 0));
    assertEquals("20", Strings.repeat(input, 1));
    assertEquals("2020", Strings.repeat(input, 2));
    assertEquals("202020", Strings.repeat(input, 3));
    assertEquals("", Strings.repeat("", 4));
    for (int i = 0; i < 100; ++i) {
      assertEquals(2 * i, Strings.repeat(input, i).length());
    }
    assertThrows(IllegalArgumentException.class, () -> Strings.repeat("x", -1));
    // 8 * ((1 << 30) + 3) overflows int; the implementation surfaces this
    // as an ArrayIndexOutOfBoundsException rather than silently truncating.
    assertThrows(
        ArrayIndexOutOfBoundsException.class, () -> Strings.repeat("12345678", (1 << 30) + 3));
  }
  @SuppressWarnings("InlineMeInliner") // test of method that doesn't just delegate
  public void testRepeat_null() {
    assertThrows(NullPointerException.class, () -> Strings.repeat(null, 5));
  }
  // --- commonPrefix/commonSuffix ---
  // The surrogate-pair cases pin down the contract that a shared prefix/suffix
  // never ends between the halves of a valid surrogate pair.
  @SuppressWarnings("UnnecessaryStringBuilder") // We want to test a non-String CharSequence
  public void testCommonPrefix() {
    assertEquals("", Strings.commonPrefix("", ""));
    assertEquals("", Strings.commonPrefix("abc", ""));
    assertEquals("", Strings.commonPrefix("", "abc"));
    assertEquals("", Strings.commonPrefix("abcde", "xyz"));
    assertEquals("", Strings.commonPrefix("xyz", "abcde"));
    assertEquals("", Strings.commonPrefix("xyz", "abcxyz"));
    assertEquals("a", Strings.commonPrefix("abc", "aaaaa"));
    assertEquals("aa", Strings.commonPrefix("aa", "aaaaa"));
    assertEquals("abc", Strings.commonPrefix(new StringBuilder("abcdef"), "abcxyz"));
    // Identical valid surrogate pairs.
    assertEquals(
        "abc\uD8AB\uDCAB", Strings.commonPrefix("abc\uD8AB\uDCABdef", "abc\uD8AB\uDCABxyz"));
    // Differing valid surrogate pairs.
    assertEquals("abc", Strings.commonPrefix("abc\uD8AB\uDCABdef", "abc\uD8AB\uDCACxyz"));
    // One invalid pair.
    assertEquals("abc", Strings.commonPrefix("abc\uD8AB\uDCABdef", "abc\uD8AB\uD8ABxyz"));
    // Two identical invalid pairs.
    assertEquals(
        "abc\uD8AB\uD8AC", Strings.commonPrefix("abc\uD8AB\uD8ACdef", "abc\uD8AB\uD8ACxyz"));
    // Two differing invalid pairs.
    assertEquals("abc\uD8AB", Strings.commonPrefix("abc\uD8AB\uD8ABdef", "abc\uD8AB\uD8ACxyz"));
    // One orphan high surrogate.
    assertEquals("", Strings.commonPrefix("\uD8AB\uDCAB", "\uD8AB"));
    // Two orphan high surrogates.
    assertEquals("\uD8AB", Strings.commonPrefix("\uD8AB", "\uD8AB"));
  }
  @SuppressWarnings("UnnecessaryStringBuilder") // We want to test a non-String CharSequence
  public void testCommonSuffix() {
    assertEquals("", Strings.commonSuffix("", ""));
    assertEquals("", Strings.commonSuffix("abc", ""));
    assertEquals("", Strings.commonSuffix("", "abc"));
    assertEquals("", Strings.commonSuffix("abcde", "xyz"));
    assertEquals("", Strings.commonSuffix("xyz", "abcde"));
    assertEquals("", Strings.commonSuffix("xyz", "xyzabc"));
    assertEquals("c", Strings.commonSuffix("abc", "ccccc"));
    assertEquals("aa", Strings.commonSuffix("aa", "aaaaa"));
    assertEquals("abc", Strings.commonSuffix(new StringBuilder("xyzabc"), "xxxabc"));
    // Identical valid surrogate pairs.
    assertEquals(
        "\uD8AB\uDCABdef", Strings.commonSuffix("abc\uD8AB\uDCABdef", "xyz\uD8AB\uDCABdef"));
    // Differing valid surrogate pairs.
    assertEquals("def", Strings.commonSuffix("abc\uD8AB\uDCABdef", "abc\uD8AC\uDCABdef"));
    // One invalid pair.
    assertEquals("def", Strings.commonSuffix("abc\uD8AB\uDCABdef", "xyz\uDCAB\uDCABdef"));
    // Two identical invalid pairs.
    assertEquals(
        "\uD8AB\uD8ABdef", Strings.commonSuffix("abc\uD8AB\uD8ABdef", "xyz\uD8AB\uD8ABdef"));
    // Two differing invalid pairs.
    assertEquals("\uDCABdef", Strings.commonSuffix("abc\uDCAB\uDCABdef", "abc\uDCAC\uDCABdef"));
    // One orphan low surrogate.
    assertEquals("", Strings.commonSuffix("x\uD8AB\uDCAB", "\uDCAB"));
    // Two orphan low surrogates.
    assertEquals("\uDCAB", Strings.commonSuffix("\uDCAB", "\uDCAB"));
  }
  // --- surrogate-pair validity predicate ---
  public void testValidSurrogatePairAt() {
    assertTrue(Strings.validSurrogatePairAt("\uD8AB\uDCAB", 0));
    assertTrue(Strings.validSurrogatePairAt("abc\uD8AB\uDCAB", 3));
    assertTrue(Strings.validSurrogatePairAt("abc\uD8AB\uDCABxyz", 3));
    assertFalse(Strings.validSurrogatePairAt("\uD8AB\uD8AB", 0));
    assertFalse(Strings.validSurrogatePairAt("\uDCAB\uDCAB", 0));
    assertFalse(Strings.validSurrogatePairAt("\uD8AB\uDCAB", -1));
    assertFalse(Strings.validSurrogatePairAt("\uD8AB\uDCAB", 1));
    assertFalse(Strings.validSurrogatePairAt("\uD8AB\uDCAB", -2));
    assertFalse(Strings.validSurrogatePairAt("\uD8AB\uDCAB", 2));
    assertFalse(Strings.validSurrogatePairAt("x\uDCAB", 0));
    assertFalse(Strings.validSurrogatePairAt("\uD8ABx", 0));
  }
  // --- lenientFormat ---
  // lenientFormat never throws on mismatched placeholder counts: extra args
  // are appended as " [a, b]" and extra %s placeholders are left verbatim.
  @SuppressWarnings("LenientFormatStringValidation") // Intentional for testing.
  public void testLenientFormat() {
    assertEquals("%s", Strings.lenientFormat("%s"));
    assertEquals("5", Strings.lenientFormat("%s", 5));
    assertEquals("foo [5]", Strings.lenientFormat("foo", 5));
    assertEquals("foo [5, 6, 7]", Strings.lenientFormat("foo", 5, 6, 7));
    assertEquals("%s 1 2", Strings.lenientFormat("%s %s %s", "%s", 1, 2));
    assertEquals(" [5, 6]", Strings.lenientFormat("", 5, 6));
    assertEquals("123", Strings.lenientFormat("%s%s%s", 1, 2, 3));
    assertEquals("1%s%s", Strings.lenientFormat("%s%s%s", 1));
    assertEquals("5 + 6 = 11", Strings.lenientFormat("%s + 6 = 11", 5));
    assertEquals("5 + 6 = 11", Strings.lenientFormat("5 + %s = 11", 6));
    assertEquals("5 + 6 = 11", Strings.lenientFormat("5 + 6 = %s", 11));
    assertEquals("5 + 6 = 11", Strings.lenientFormat("%s + %s = %s", 5, 6, 11));
    assertEquals(
        "5 + 6 = 11", Strings.lenientFormat("%s + %s = %s", (Object[]) new Integer[] {5, 6, 11}));
    assertEquals("null [null, null]", Strings.lenientFormat("%s", null, null, null));
    assertEquals("null [5, 6]", Strings.lenientFormat(null, 5, 6));
    assertEquals("null", Strings.lenientFormat("%s", (Object) null));
  }
  @J2ktIncompatible // TODO(b/319404022): Allow passing null array as varargs
  public void testLenientFormat_nullArrayVarargs() {
    assertEquals("(Object[])null", Strings.lenientFormat("%s", (Object[]) null));
  }
  @GwtIncompatible // GWT reflection includes less data
  public void testLenientFormat_badArgumentToString() {
    assertThat(Strings.lenientFormat("boiler %s plate", new ThrowsOnToString()))
        .matches(
            // J2kt nested class name does not use "$"
            "boiler <com\\.google\\.common\\.base\\.StringsTest[.$]ThrowsOnToString@[0-9a-f]+ "
                + "threw java\\.lang\\.UnsupportedOperationException> plate");
  }
  public void testLenientFormat_badArgumentToString_gwtFriendly() {
    assertThat(Strings.lenientFormat("boiler %s plate", new ThrowsOnToString()))
        .matches("boiler <.*> plate");
  }
  // Fixture whose toString() always throws, to exercise lenientFormat's
  // graceful handling of misbehaving arguments.
  private static class ThrowsOnToString {
    @Override
    public String toString() {
      throw new UnsupportedOperationException();
    }
  }
  @J2ktIncompatible
  @GwtIncompatible // NullPointerTester
  public void testNullPointers() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(Strings.class);
  }
}
|
java
|
github
|
https://github.com/google/guava
|
android/guava-tests/test/com/google/common/base/StringsTest.java
|
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v2store
import (
"encoding/json"
"fmt"
"path"
"strconv"
"strings"
"sync"
"time"
"github.com/jonboulle/clockwork"
"go.etcd.io/etcd/client/pkg/v3/types"
"go.etcd.io/etcd/server/v3/etcdserver/api/v2error"
)
// The default version to set when the store is first initialized.
const defaultVersion = 2
var minExpireTime time.Time
func init() {
	// Parse error intentionally discarded: the layout and value are
	// compile-time constants known to parse; minExpireTime would stay the
	// zero time only if that invariant were broken.
	minExpireTime, _ = time.Parse(time.RFC3339, "2000-01-01T00:00:00Z")
}
// Store is the public interface of the etcd v2 key-value store: a
// hierarchical tree of nodes supporting CRUD, compare-and-X primitives,
// watches, TTL expiry, and snapshot save/recovery.
type Store interface {
	Version() int
	Index() uint64
	// Read operations.
	Get(nodePath string, recursive, sorted bool) (*Event, error)
	// Write operations; each returns the Event describing the mutation.
	Set(nodePath string, dir bool, value string, expireOpts TTLOptionSet) (*Event, error)
	Update(nodePath string, newValue string, expireOpts TTLOptionSet) (*Event, error)
	Create(nodePath string, dir bool, value string, unique bool,
		expireOpts TTLOptionSet) (*Event, error)
	CompareAndSwap(nodePath string, prevValue string, prevIndex uint64,
		value string, expireOpts TTLOptionSet) (*Event, error)
	Delete(nodePath string, dir, recursive bool) (*Event, error)
	CompareAndDelete(nodePath string, prevValue string, prevIndex uint64) (*Event, error)
	// Watch registers a watcher at prefix starting from sinceIndex.
	Watch(prefix string, recursive, stream bool, sinceIndex uint64) (Watcher, error)
	// Snapshot/recovery support.
	Save() ([]byte, error)
	Recovery(state []byte) error
	Clone() Store
	SaveNoCopy() ([]byte, error)
	JsonStats() []byte
	// TTL maintenance.
	DeleteExpiredKeys(cutoff time.Time)
	HasTTLKeys() bool
}
// TTLOptionSet carries expiration options for write operations.
type TTLOptionSet struct {
	// ExpireTime is the absolute expiry; the zero value (or anything
	// before minExpireTime) means the node is permanent.
	ExpireTime time.Time
	// Refresh, when true, renews the TTL while keeping the existing value
	// and suppressing normal watcher notification.
	Refresh bool
}
// store is the concrete Store implementation. Exported fields are
// serialized by Save/Recovery; unexported fields must be rebuilt after
// recovery.
type store struct {
	Root           *node        // root of the node tree
	WatcherHub     *watcherHub  // fan-out of events to watchers
	CurrentIndex   uint64       // monotonically increasing modification index
	Stats          *Stats       // operation success/failure counters
	CurrentVersion int          // store format version (defaultVersion)
	ttlKeyHeap     *ttlKeyHeap  // need to recovery manually
	worldLock      sync.RWMutex // stop the world lock
	clock          clockwork.Clock
	readonlySet    types.Set // paths (namespaces and "/") that may not be written
}
// New creates a store where the given namespaces will be created as initial directories.
func New(namespaces ...string) Store {
	s := newStore(namespaces...)
	// Production stores tick with the real wall clock; tests substitute a
	// fake clock via newStore directly.
	s.clock = clockwork.NewRealClock()
	return s
}
// newStore builds a store with the given namespaces as read-only root
// directories; the caller is responsible for assigning a clock.
func newStore(namespaces ...string) *store {
	st := &store{CurrentVersion: defaultVersion}
	st.Root = newDir(st, "/", st.CurrentIndex, nil, Permanent)
	for _, ns := range namespaces {
		st.Root.Add(newDir(st, ns, st.CurrentIndex, st.Root, Permanent))
	}
	st.Stats = newStats()
	st.WatcherHub = newWatchHub(1000)
	st.ttlKeyHeap = newTTLKeyHeap()
	// "/" and every namespace root are protected from modification.
	st.readonlySet = types.NewUnsafeSet(append(namespaces, "/")...)
	return st
}
// Version retrieves current version of the store.
// No lock is taken: CurrentVersion is set at construction/recovery only.
func (s *store) Version() int {
	return s.CurrentVersion
}
// Index retrieves the current index of the store.
func (s *store) Index() uint64 {
	// Read lock guards against a concurrent writer bumping CurrentIndex.
	s.worldLock.RLock()
	defer s.worldLock.RUnlock()
	return s.CurrentIndex
}
// Get returns a get event.
// If recursive is true, it will return all the content under the node path.
// If sorted is true, it will sort the content by keys.
func (s *store) Get(nodePath string, recursive, sorted bool) (*Event, error) {
	var err *v2error.Error
	s.worldLock.RLock()
	defer s.worldLock.RUnlock()
	// Deferred stats reporting reads the FINAL value of err: note that
	// `n, err :=` below reuses this err (n is the only new variable), so
	// the closure sees the outcome of internalGet.
	defer func() {
		if err == nil {
			s.Stats.Inc(GetSuccess)
			if recursive {
				reportReadSuccess(GetRecursive)
			} else {
				reportReadSuccess(Get)
			}
			return
		}
		s.Stats.Inc(GetFail)
		if recursive {
			reportReadFailure(GetRecursive)
		} else {
			reportReadFailure(Get)
		}
	}()
	n, err := s.internalGet(nodePath)
	if err != nil {
		return nil, err
	}
	e := newEvent(Get, nodePath, n.ModifiedIndex, n.CreatedIndex)
	e.EtcdIndex = s.CurrentIndex
	// Materialize the node (and optionally its subtree) into the event.
	e.Node.loadInternalNode(n, recursive, sorted, s.clock)
	return e, nil
}
// Create creates the node at nodePath. Create will help to create intermediate directories with no ttl.
// If the node has already existed, create will fail.
// If any node on the path is a file, create will fail.
func (s *store) Create(nodePath string, dir bool, value string, unique bool, expireOpts TTLOptionSet) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	// Deferred stats reporting keyed on the final value of err, which is
	// reused by the `e, err :=` below.
	defer func() {
		if err == nil {
			s.Stats.Inc(CreateSuccess)
			reportWriteSuccess(Create)
			return
		}
		s.Stats.Inc(CreateFail)
		reportWriteFailure(Create)
	}()
	// replace=false: creating over an existing node is an error.
	e, err := s.internalCreate(nodePath, dir, value, unique, false, expireOpts.ExpireTime, Create)
	if err != nil {
		return nil, err
	}
	e.EtcdIndex = s.CurrentIndex
	s.WatcherHub.notify(e)
	return e, nil
}
// Set creates or replace the node at nodePath.
func (s *store) Set(nodePath string, dir bool, value string, expireOpts TTLOptionSet) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	defer func() {
		if err == nil {
			s.Stats.Inc(SetSuccess)
			reportWriteSuccess(Set)
			return
		}
		s.Stats.Inc(SetFail)
		reportWriteFailure(Set)
	}()
	// Get prevNode value
	n, getErr := s.internalGet(nodePath)
	// "not found" is acceptable for a plain Set (it becomes a create);
	// any other lookup error aborts.
	if getErr != nil && getErr.ErrorCode != v2error.EcodeKeyNotFound {
		err = getErr
		return nil, err
	}
	if expireOpts.Refresh {
		// Refresh requires an existing node: keep its current value.
		if getErr != nil {
			err = getErr
			return nil, err
		}
		value = n.Value
	}
	// Set new value
	e, err := s.internalCreate(nodePath, dir, value, false, true, expireOpts.ExpireTime, Set)
	if err != nil {
		return nil, err
	}
	e.EtcdIndex = s.CurrentIndex
	// Put prevNode into event
	if getErr == nil {
		prev := newEvent(Get, nodePath, n.ModifiedIndex, n.CreatedIndex)
		prev.Node.loadInternalNode(n, false, false, s.clock)
		e.PrevNode = prev.Node
	}
	// Refresh events are recorded in the hub but not broadcast to watchers.
	if !expireOpts.Refresh {
		s.WatcherHub.notify(e)
	} else {
		e.SetRefresh()
		s.WatcherHub.add(e)
	}
	return e, nil
}
// getCompareFailCause renders a user-readable explanation of why a
// compare-and-X precondition did not match the node's current state.
func getCompareFailCause(n *node, which int, prevValue string, prevIndex uint64) string {
	indexCause := fmt.Sprintf("[%v != %v]", prevIndex, n.ModifiedIndex)
	valueCause := fmt.Sprintf("[%v != %v]", prevValue, n.Value)
	switch which {
	case CompareIndexNotMatch:
		return indexCause
	case CompareValueNotMatch:
		return valueCause
	default:
		// Both comparisons failed: report value mismatch first, then index.
		return valueCause + " " + indexCause
	}
}
// CompareAndSwap atomically replaces the value of the file at nodePath,
// but only if prevValue and/or prevIndex match the node's current state.
func (s *store) CompareAndSwap(nodePath string, prevValue string, prevIndex uint64,
	value string, expireOpts TTLOptionSet,
) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	defer func() {
		if err == nil {
			s.Stats.Inc(CompareAndSwapSuccess)
			reportWriteSuccess(CompareAndSwap)
			return
		}
		s.Stats.Inc(CompareAndSwapFail)
		reportWriteFailure(CompareAndSwap)
	}()
	nodePath = path.Clean(path.Join("/", nodePath))
	// we do not allow the user to change "/"
	if s.readonlySet.Contains(nodePath) {
		return nil, v2error.NewError(v2error.EcodeRootROnly, "/", s.CurrentIndex)
	}
	n, err := s.internalGet(nodePath)
	if err != nil {
		return nil, err
	}
	if n.IsDir() { // can only compare and swap file
		err = v2error.NewError(v2error.EcodeNotFile, nodePath, s.CurrentIndex)
		return nil, err
	}
	// If both of the prevValue and prevIndex are given, we will test both of them.
	// Command will be executed, only if both of the tests are successful.
	if ok, which := n.Compare(prevValue, prevIndex); !ok {
		cause := getCompareFailCause(n, which, prevValue, prevIndex)
		err = v2error.NewError(v2error.EcodeTestFailed, cause, s.CurrentIndex)
		return nil, err
	}
	if expireOpts.Refresh {
		value = n.Value
	}
	// update etcd index
	s.CurrentIndex++
	e := newEvent(CompareAndSwap, nodePath, s.CurrentIndex, n.CreatedIndex)
	e.EtcdIndex = s.CurrentIndex
	e.PrevNode = n.Repr(false, false, s.clock)
	eNode := e.Node
	// if test succeed, write the value
	// NOTE(review): this `err :=` shadows the outer err, so on a write
	// failure the deferred closure still records CompareAndSwapSuccess —
	// confirm whether this stats skew is intentional.
	if err := n.Write(value, s.CurrentIndex); err != nil {
		return nil, err
	}
	n.UpdateTTL(expireOpts.ExpireTime)
	// copy the value for safety
	valueCopy := value
	eNode.Value = &valueCopy
	eNode.Expiration, eNode.TTL = n.expirationAndTTL(s.clock)
	if !expireOpts.Refresh {
		s.WatcherHub.notify(e)
	} else {
		e.SetRefresh()
		s.WatcherHub.add(e)
	}
	return e, nil
}
// Delete deletes the node at the given path.
// If the node is a directory, recursive must be true to delete it.
func (s *store) Delete(nodePath string, dir, recursive bool) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	defer func() {
		if err == nil {
			s.Stats.Inc(DeleteSuccess)
			reportWriteSuccess(Delete)
			return
		}
		s.Stats.Inc(DeleteFail)
		reportWriteFailure(Delete)
	}()
	nodePath = path.Clean(path.Join("/", nodePath))
	// we do not allow the user to change "/"
	if s.readonlySet.Contains(nodePath) {
		return nil, v2error.NewError(v2error.EcodeRootROnly, "/", s.CurrentIndex)
	}
	// recursive implies dir
	if recursive {
		dir = true
	}
	n, err := s.internalGet(nodePath)
	if err != nil { // if the node does not exist, return error
		return nil, err
	}
	// The event is stamped with the index the store WILL have after the
	// delete succeeds; s.CurrentIndex itself is bumped only on success.
	nextIndex := s.CurrentIndex + 1
	e := newEvent(Delete, nodePath, nextIndex, n.CreatedIndex)
	e.EtcdIndex = nextIndex
	e.PrevNode = n.Repr(false, false, s.clock)
	eNode := e.Node
	if n.IsDir() {
		eNode.Dir = true
	}
	callback := func(path string) { // notify function
		// notify the watchers with deleted set true
		s.WatcherHub.notifyWatchers(e, path, true)
	}
	err = n.Remove(dir, recursive, callback)
	if err != nil {
		return nil, err
	}
	// update etcd index
	s.CurrentIndex++
	s.WatcherHub.notify(e)
	return e, nil
}
// CompareAndDelete removes the file at nodePath, but only if prevValue
// and/or prevIndex match the node's current state.
func (s *store) CompareAndDelete(nodePath string, prevValue string, prevIndex uint64) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	defer func() {
		if err == nil {
			s.Stats.Inc(CompareAndDeleteSuccess)
			reportWriteSuccess(CompareAndDelete)
			return
		}
		s.Stats.Inc(CompareAndDeleteFail)
		reportWriteFailure(CompareAndDelete)
	}()
	nodePath = path.Clean(path.Join("/", nodePath))
	n, err := s.internalGet(nodePath)
	if err != nil { // if the node does not exist, return error
		return nil, err
	}
	if n.IsDir() { // can only compare and delete file
		return nil, v2error.NewError(v2error.EcodeNotFile, nodePath, s.CurrentIndex)
	}
	// If both of the prevValue and prevIndex are given, we will test both of them.
	// Command will be executed, only if both of the tests are successful.
	if ok, which := n.Compare(prevValue, prevIndex); !ok {
		cause := getCompareFailCause(n, which, prevValue, prevIndex)
		return nil, v2error.NewError(v2error.EcodeTestFailed, cause, s.CurrentIndex)
	}
	// update etcd index
	s.CurrentIndex++
	e := newEvent(CompareAndDelete, nodePath, s.CurrentIndex, n.CreatedIndex)
	e.EtcdIndex = s.CurrentIndex
	e.PrevNode = n.Repr(false, false, s.clock)
	callback := func(path string) { // notify function
		// notify the watchers with deleted set true
		s.WatcherHub.notifyWatchers(e, path, true)
	}
	err = n.Remove(false, false, callback)
	if err != nil {
		return nil, err
	}
	s.WatcherHub.notify(e)
	return e, nil
}
// Watch registers a watcher on key (recursively covering the subtree if
// recursive is set). A sinceIndex of 0 means "only future events".
func (s *store) Watch(key string, recursive, stream bool, sinceIndex uint64) (Watcher, error) {
	s.worldLock.RLock()
	defer s.worldLock.RUnlock()
	key = path.Clean(path.Join("/", key))
	if sinceIndex == 0 {
		// Start strictly after the current index: past events are skipped.
		sinceIndex = s.CurrentIndex + 1
	}
	// WatcherHub does not know about the current index, so we need to pass it in
	watcher, err := s.WatcherHub.watch(key, recursive, stream, sinceIndex, s.CurrentIndex)
	if err != nil {
		return nil, err
	}
	return watcher, nil
}
// walk descends the tree along nodePath, applying walkFunc to each path
// component in turn and returning the node reached. An empty component
// (e.g. from a trailing slash) ends the walk at the current node.
func (s *store) walk(nodePath string, walkFunc func(prev *node, component string) (*node, *v2error.Error)) (*node, *v2error.Error) {
	curr := s.Root
	// Skip element 0: splitting a rooted path always yields a leading "".
	for _, component := range strings.Split(nodePath, "/")[1:] {
		if component == "" {
			return curr, nil
		}
		next, err := walkFunc(curr, component)
		if err != nil {
			return nil, err
		}
		curr = next
	}
	return curr, nil
}
// Update updates the value/ttl of the node.
// If the node is a file, the value and the ttl can be updated.
// If the node is a directory, only the ttl can be updated.
func (s *store) Update(nodePath string, newValue string, expireOpts TTLOptionSet) (*Event, error) {
	var err *v2error.Error
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	defer func() {
		if err == nil {
			s.Stats.Inc(UpdateSuccess)
			reportWriteSuccess(Update)
			return
		}
		s.Stats.Inc(UpdateFail)
		reportWriteFailure(Update)
	}()
	nodePath = path.Clean(path.Join("/", nodePath))
	// we do not allow the user to change "/"
	if s.readonlySet.Contains(nodePath) {
		return nil, v2error.NewError(v2error.EcodeRootROnly, "/", s.CurrentIndex)
	}
	currIndex, nextIndex := s.CurrentIndex, s.CurrentIndex+1
	n, err := s.internalGet(nodePath)
	if err != nil { // if the node does not exist, return error
		return nil, err
	}
	if n.IsDir() && len(newValue) != 0 {
		// if the node is a directory, we cannot update value to non-empty
		return nil, v2error.NewError(v2error.EcodeNotFile, nodePath, currIndex)
	}
	if expireOpts.Refresh {
		// Refresh keeps the existing value and only renews the TTL.
		newValue = n.Value
	}
	e := newEvent(Update, nodePath, nextIndex, n.CreatedIndex)
	e.EtcdIndex = nextIndex
	e.PrevNode = n.Repr(false, false, s.clock)
	eNode := e.Node
	// NOTE(review): this `err :=` shadows the outer *v2error.Error (the
	// wrapped error is a plain error), so a write failure still records
	// UpdateSuccess in the deferred stats closure — confirm intent.
	if err := n.Write(newValue, nextIndex); err != nil {
		return nil, fmt.Errorf("nodePath %v : %w", nodePath, err)
	}
	if n.IsDir() {
		eNode.Dir = true
	} else {
		// copy the value for safety
		newValueCopy := newValue
		eNode.Value = &newValueCopy
	}
	// update ttl
	n.UpdateTTL(expireOpts.ExpireTime)
	eNode.Expiration, eNode.TTL = n.expirationAndTTL(s.clock)
	if !expireOpts.Refresh {
		s.WatcherHub.notify(e)
	} else {
		e.SetRefresh()
		s.WatcherHub.add(e)
	}
	// Bump the store index only after the write succeeded.
	s.CurrentIndex = nextIndex
	return e, nil
}
// internalCreate performs the shared create/set machinery: it resolves
// (and auto-creates) intermediate directories, then creates the leaf as
// a file or directory. With unique it appends a zero-padded index-based
// name; with replace it overwrites an existing file (never a directory).
// The caller must hold worldLock.
func (s *store) internalCreate(nodePath string, dir bool, value string, unique, replace bool,
	expireTime time.Time, action string,
) (*Event, *v2error.Error) {
	currIndex, nextIndex := s.CurrentIndex, s.CurrentIndex+1
	if unique { // append unique item under the node path
		nodePath += "/" + fmt.Sprintf("%020s", strconv.FormatUint(nextIndex, 10))
	}
	nodePath = path.Clean(path.Join("/", nodePath))
	// we do not allow the user to change "/"
	if s.readonlySet.Contains(nodePath) {
		return nil, v2error.NewError(v2error.EcodeRootROnly, "/", currIndex)
	}
	// Assume expire times that are way in the past are
	// This can occur when the time is serialized to JS
	if expireTime.Before(minExpireTime) {
		expireTime = Permanent
	}
	dirName, nodeName := path.Split(nodePath)
	// walk through the nodePath, create dirs and get the last directory node
	d, err := s.walk(dirName, s.checkDir)
	if err != nil {
		s.Stats.Inc(SetFail)
		reportWriteFailure(action)
		err.Index = currIndex
		return nil, err
	}
	e := newEvent(action, nodePath, nextIndex, nextIndex)
	eNode := e.Node
	n, _ := d.GetChild(nodeName)
	// force will try to replace an existing file
	if n != nil {
		if !replace {
			return nil, v2error.NewError(v2error.EcodeNodeExist, nodePath, currIndex)
		}
		if n.IsDir() {
			return nil, v2error.NewError(v2error.EcodeNotFile, nodePath, currIndex)
		}
		// Record the old node, then remove it to make room for the new one.
		e.PrevNode = n.Repr(false, false, s.clock)
		if err := n.Remove(false, false, nil); err != nil {
			return nil, err
		}
	}
	if !dir { // create file
		// copy the value for safety
		valueCopy := value
		eNode.Value = &valueCopy
		n = newKV(s, nodePath, value, nextIndex, d, expireTime)
	} else { // create directory
		eNode.Dir = true
		n = newDir(s, nodePath, nextIndex, d, expireTime)
	}
	// we are sure d is a directory and does not have the children with name n.Name
	if err := d.Add(n); err != nil {
		return nil, err
	}
	// node with TTL
	if !n.IsPermanent() {
		s.ttlKeyHeap.push(n)
		eNode.Expiration, eNode.TTL = n.expirationAndTTL(s.clock)
	}
	s.CurrentIndex = nextIndex
	return e, nil
}
// internalGet resolves nodePath to its node. Every intermediate node
// must be a directory and the named child must exist; otherwise a
// NotDir or KeyNotFound error is returned.
func (s *store) internalGet(nodePath string) (*node, *v2error.Error) {
	nodePath = path.Clean(path.Join("/", nodePath))
	walkFunc := func(parent *node, name string) (*node, *v2error.Error) {
		if !parent.IsDir() {
			return nil, v2error.NewError(v2error.EcodeNotDir, parent.Path, s.CurrentIndex)
		}
		if child, ok := parent.Children[name]; ok {
			return child, nil
		}
		return nil, v2error.NewError(v2error.EcodeKeyNotFound, path.Join(parent.Path, name), s.CurrentIndex)
	}
	return s.walk(nodePath, walkFunc)
}
// DeleteExpiredKeys will delete all expired keys
func (s *store) DeleteExpiredKeys(cutoff time.Time) {
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	// The TTL heap is ordered by expiry, so we can pop from the top until
	// we hit the first node that outlives the cutoff.
	for {
		node := s.ttlKeyHeap.top()
		if node == nil || node.ExpireTime.After(cutoff) {
			break
		}
		// Each expiry is its own store mutation with its own index.
		s.CurrentIndex++
		e := newEvent(Expire, node.Path, s.CurrentIndex, node.CreatedIndex)
		e.EtcdIndex = s.CurrentIndex
		e.PrevNode = node.Repr(false, false, s.clock)
		if node.IsDir() {
			e.Node.Dir = true
		}
		callback := func(path string) { // notify function
			// notify the watchers with deleted set true
			s.WatcherHub.notifyWatchers(e, path, true)
		}
		// Pop before Remove so the heap never references a removed node.
		s.ttlKeyHeap.pop()
		node.Remove(true, true, callback)
		reportExpiredKey()
		s.Stats.Inc(ExpireCount)
		s.WatcherHub.notify(e)
	}
}
// checkDir resolves the child dirName under parent, creating it as a
// permanent directory if it does not exist yet. It fails with NotDir if
// a file of that name already occupies the slot.
func (s *store) checkDir(parent *node, dirName string) (*node, *v2error.Error) {
	if child, ok := parent.Children[dirName]; ok {
		if !child.IsDir() {
			return nil, v2error.NewError(v2error.EcodeNotDir, child.Path, s.CurrentIndex)
		}
		return child, nil
	}
	// Auto-created intermediate directories are permanent and stamped with
	// the index of the mutation in progress.
	created := newDir(s, path.Join(parent.Path, dirName), s.CurrentIndex+1, parent, Permanent)
	parent.Children[dirName] = created
	return created, nil
}
// Save serializes the static state of the store system. It cannot save
// watcher state, and node parent pointers are omitted to avoid cyclic
// references in the JSON encoder. The encoder runs against a clone, so
// the live tree is never touched.
func (s *store) Save() ([]byte, error) {
	return json.Marshal(s.Clone())
}
// SaveNoCopy serializes the live store directly, without taking a snapshot
// first. Cheaper than Save, but the caller must guarantee no concurrent
// mutation while marshalling.
func (s *store) SaveNoCopy() ([]byte, error) {
	return json.Marshal(s)
}
// Clone returns a deep copy of the store's static state (current index,
// node tree, watcher hub, stats and version). The world lock is held for
// reading for the duration of the copy so the snapshot is internally
// consistent.
func (s *store) Clone() Store {
	s.worldLock.RLock()
	// Release via defer so the read lock is not leaked if any of the
	// per-field clone calls panics (the original unlocked explicitly
	// before return, which is not panic-safe).
	defer s.worldLock.RUnlock()
	clonedStore := newStore()
	clonedStore.CurrentIndex = s.CurrentIndex
	clonedStore.Root = s.Root.Clone()
	clonedStore.WatcherHub = s.WatcherHub.clone()
	clonedStore.Stats = s.Stats.clone()
	clonedStore.CurrentVersion = s.CurrentVersion
	return clonedStore
}
// Recovery recovers the store system from a static state.
// It needs to recover the parent field of the nodes.
// It needs to delete the expired nodes since the saved time and also
// needs to create monitoring goroutines.
func (s *store) Recovery(state []byte) error {
	s.worldLock.Lock()
	defer s.worldLock.Unlock()
	if err := json.Unmarshal(state, s); err != nil {
		return err
	}
	// The heap is not serialized; rebuild it and re-link/clean the tree.
	s.ttlKeyHeap = newTTLKeyHeap()
	s.Root.recoverAndclean()
	return nil
}
// JsonStats returns the store statistics serialized as JSON, refreshing the
// watcher count from the watcher hub first.
//revive:disable:var-naming
func (s *store) JsonStats() []byte {
	//revive:enable:var-naming
	s.Stats.Watchers = uint64(s.WatcherHub.count)
	return s.Stats.toJSON()
}
// HasTTLKeys reports whether the store currently tracks any key with a TTL.
func (s *store) HasTTLKeys() bool {
	s.worldLock.RLock()
	defer s.worldLock.RUnlock()
	pending := s.ttlKeyHeap.Len()
	return pending != 0
}
|
go
|
github
|
https://github.com/etcd-io/etcd
|
server/etcdserver/api/v2store/store.go
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_vrf_af
version_added: "2.2"
short_description: Manages VRF AF.
description:
- Manages VRF AF
author: Gabriele Gerbino (@GGabriele)
extends_documentation_fragment: nxos
notes:
- Default, where supported, restores params default value.
options:
vrf:
description:
- Name of the VRF.
required: true
afi:
description:
- Address-Family Identifier (AFI).
required: true
choices: ['ipv4', 'ipv6']
default: null
safi:
description:
- Sub Address-Family Identifier (SAFI).
required: true
choices: ['unicast', 'multicast']
default: null
route_target_both_auto_evpn:
description:
- Enable/Disable the EVPN route-target 'auto' setting for both
import and export target communities.
required: false
choices: ['true', 'false']
default: null
state:
description:
- Determines whether the config should be present or
not on the device.
required: false
default: present
choices: ['present','absent']
'''
# Usage example. The previous example showed parameters from a different
# module (interface/vni/ingress_replication belong to the VXLAN VTEP VNI
# module); this one uses the options this module actually declares.
EXAMPLES = '''
- nxos_vrf_af:
    vrf: ntc
    afi: ipv4
    safi: unicast
    route_target_both_auto_evpn: true
    username: "{{ un }}"
    password: "{{ pwd }}"
    host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"afi": "ipv4", "route_target_both_auto_evpn": true,
"safi": "unicast", "vrf": "test"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"afi": "ipv4", "route_target_both_auto_evpn": false,
"safi": "unicast", "vrf": "test"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"afi": "ipv4", "route_target_both_auto_evpn": true,
"safi": "unicast", "vrf": "test"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["vrf context test", "address-family ipv4 unicast",
"route-target both auto evpn"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
    """Normalize *val* into a list.

    Lists and tuples are shallow-copied with ``list()``, ``None`` becomes
    an empty list, and any other single value is wrapped in a one-element
    list.
    """
    if val is None:
        return []
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
class CustomNetworkConfig(NetworkConfig):
    # Extends Ansible's NetworkConfig with section-oriented helpers used by
    # the migrated nxos modules: look up a config line by its parent path,
    # expand a section into all of its descendants, and add new lines under
    # a (possibly not-yet-existing) chain of parents.

    def expand_section(self, configobj, S=None):
        # Depth-first collect configobj plus all of its descendants into S,
        # skipping anything already collected (guards against cycles).
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj.children:
            if child in S:
                continue
            self.expand_section(child, S)
        return S

    def get_object(self, path):
        # Find the ConfigLine whose text is path[-1] and whose parent texts
        # are exactly path[:-1]; returns None when no line matches.
        for item in self.items:
            if item.text == path[-1]:
                parents = [p.text for p in item.parents]
                if parents == path[:-1]:
                    return item

    def to_block(self, section):
        # Render a list of ConfigLine objects back into raw config text.
        return '\n'.join([item.raw for item in section])

    def get_section(self, path):
        # Return the rendered text of the section at *path*, or an empty
        # list when the path does not exist (note: str on success, list on
        # failure -- callers only truth-test the result).
        try:
            section = self.get_section_objects(path)
            return self.to_block(section)
        except ValueError:
            return list()

    def get_section_objects(self, path):
        # Like get_section but returns the ConfigLine objects themselves;
        # raises ValueError when the path is absent.
        if not isinstance(path, list):
            path = [path]
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self.expand_section(obj)

    def add(self, lines, parents=None):
        """Add one or more lines of configuration, creating any missing
        parent lines along the way."""
        ancestors = list()
        offset = 0
        obj = None
        ## global config command
        if not parents:
            for line in to_list(lines):
                item = ConfigLine(line)
                item.raw = line
                if item not in self.items:
                    self.items.append(item)
        else:
            # Walk the parent chain top-down, creating each missing level
            # with indentation proportional to its depth.
            for index, p in enumerate(parents):
                try:
                    i = index + 1
                    obj = self.get_section_objects(parents[:i])[0]
                    ancestors.append(obj)
                except ValueError:
                    # add parent to config
                    offset = index * self.indent
                    obj = ConfigLine(p)
                    obj.raw = p.rjust(len(p) + offset)
                    if ancestors:
                        obj.parents = list(ancestors)
                        ancestors[-1].children.append(obj)
                    self.items.append(obj)
                    ancestors.append(obj)
            # add child objects
            for line in to_list(lines):
                # check if child already exists
                for child in ancestors[-1].children:
                    if child.text == line:
                        break
                else:
                    offset = len(parents) * self.indent
                    item = ConfigLine(line)
                    item.raw = line.rjust(len(line) + offset)
                    item.parents = ancestors
                    ancestors[-1].children.append(item)
                    self.items.append(item)
def get_network_module(**kwargs):
    # Prefer the legacy get_module() factory when its import succeeded; on
    # cores where it is missing (NameError at call time), fall back to
    # instantiating NetworkModule directly.
    try:
        return get_module(**kwargs)
    except NameError:
        return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
    # Return the device running-config wrapped in a CustomNetworkConfig.
    # NOTE(review): the include_defaults parameter is never read; the
    # fallback path uses module.params['include_defaults'] instead.
    config = module.params['config']
    if not config:
        try:
            config = module.get_config()
        except AttributeError:
            # Newer connection objects expose config retrieval via
            # module.config instead of module.get_config().
            defaults = module.params['include_defaults']
            config = module.config.get_config(include_defaults=defaults)
    return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
    # Diff *candidate* against the current device config and push only the
    # missing commands. Returns {'changed': bool, 'updates': [...]}.
    config = get_config(module)
    commands = candidate.difference(config)
    commands = [str(c).strip() for c in commands]
    save_config = module.params['save']
    result = dict(changed=False)
    if commands:
        if not module.check_mode:
            # Older/newer core APIs expose different methods for pushing
            # config; try configure() first, then config().
            try:
                module.configure(commands)
            except AttributeError:
                module.config(commands)
            if save_config:
                try:
                    module.config.save_config()
                except AttributeError:
                    module.execute(['copy running-config startup-config'])
        # In check mode we still report the would-be changes.
        result['changed'] = True
        result['updates'] = commands
    return result
# END OF COMMON CODE
# Module parameters that map to presence/absence (boolean) CLI commands.
BOOL_PARAMS = ['route_target_both_auto_evpn']
# Maps each module parameter name to its NX-OS CLI command text.
PARAM_TO_COMMAND_KEYMAP = {
    'route_target_both_auto_evpn': 'route-target both auto evpn',
}
# Per-parameter default values used for 'default' handling (none here).
PARAM_TO_DEFAULT_KEYMAP = {}
# Accumulates warning strings that are surfaced in the module result.
WARNINGS = []
def invoke(name, *args, **kwargs):
    """Look up *name* among module-level globals and call it with the given
    arguments; silently return None when no such (truthy) callable exists."""
    func = globals().get(name)
    if not func:
        return None
    return func(*args, **kwargs)
def get_value(arg, config, module):
    # Extract the current value of module parameter *arg* from the device
    # config text. Boolean flags come back as True/False; valued commands
    # come back as the string following the command keyword ('' if absent).
    if arg in BOOL_PARAMS:
        # Presence of the whole command line means the flag is on.
        REGEX = re.compile(r'\s+{0}\s*$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
        value = False
        try:
            if REGEX.search(config):
                value = True
        except TypeError:
            # config may be None / not a string; treat as "not configured".
            value = False
    else:
        REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
        value = ''
        if PARAM_TO_COMMAND_KEYMAP[arg] in config:
            # NOTE(review): assumes the regex matches whenever the plain
            # substring test succeeds; if the command appears with no
            # trailing value, .group() would raise AttributeError -- confirm.
            value = REGEX.search(config).group('value')
    return value
def get_existing(module, args):
    # Read the device's current address-family configuration for this VRF
    # and return it as a dict keyed by module parameter name. Empty dict
    # when the AF section does not exist on the device.
    existing = {}
    netcfg = get_config(module)
    parents = ['vrf context {0}'.format(module.params['vrf'])]
    parents.append('address-family {0} {1}'.format(module.params['afi'],
                                                   module.params['safi']))
    config = netcfg.get_section(parents)
    if config:
        # Trim anything from a nested 'vrf' line onward so only this AF
        # section's own lines are parsed.
        splitted_config = config.splitlines()
        vrf_index = False
        # NOTE(review): range stops one short of the last line, and a match
        # at index 0 is treated as "not found" by the falsy test below --
        # presumably intentional for this config layout; confirm.
        for index in range(0, len(splitted_config) - 1):
            if 'vrf' in splitted_config[index].strip():
                vrf_index = index
                break
        if vrf_index:
            config = '\n'.join(splitted_config[0:vrf_index])
        for arg in args:
            if arg not in ['afi', 'safi', 'vrf']:
                existing[arg] = get_value(arg, config, module)
        existing['afi'] = module.params['afi']
        existing['safi'] = module.params['safi']
        existing['vrf'] = module.params['vrf']
    return existing
def apply_key_map(key_map, table):
    """Return a copy of *table* re-keyed through *key_map*.

    Entries whose key has no (truthy) mapping in *key_map* are dropped;
    values are carried over unchanged.
    """
    remapped = {}
    for old_key, value in table.items():
        new_key = key_map.get(old_key)
        if new_key:
            remapped[new_key] = value
    return remapped
def state_present(module, existing, proposed, candidate):
    """Queue the CLI commands needed to bring the VRF address-family
    section to the proposed state.

    Translates the proposed parameter dict into CLI commands (booleans
    become presence/'no' commands, 'default' removes the existing value)
    and adds them to *candidate* under the vrf-context/address-family
    parents.
    """
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    # .items() works on both Python 2 and 3; the original .iteritems()
    # raises AttributeError on Python 3.
    for key, value in proposed_commands.items():
        if value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
        elif value == 'default':
            if existing_commands.get(key):
                existing_value = existing_commands.get(key)
                commands.append('no {0} {1}'.format(key, existing_value))
        else:
            command = '{0} {1}'.format(key, value.lower())
            commands.append(command)
    if commands:
        parents = ['vrf context {0}'.format(module.params['vrf'])]
        parents.append('address-family {0} {1}'.format(module.params['afi'],
                                                       module.params['safi']))
        candidate.add(commands, parents=parents)
def state_absent(module, existing, proposed, candidate):
    """Queue removal of the entire address-family block for this VRF."""
    vrf_parent = 'vrf context {0}'.format(module.params['vrf'])
    removal = 'no address-family {0} {1}'.format(module.params['afi'],
                                                 module.params['safi'])
    candidate.add([removal], parents=[vrf_parent])
def main():
    """Module entry point.

    Gathers existing VRF AF state, computes the proposed delta, queues the
    resulting commands via state_present/state_absent, and pushes them to
    the device (honoring check mode). The only code change versus the
    original is .iteritems() -> .items(), which is Python-2-only and
    crashes on Python 3; .items() behaves identically on both.
    """
    argument_spec = dict(
        vrf=dict(required=True, type='str'),
        safi=dict(required=True, type='str', choices=['unicast','multicast']),
        afi=dict(required=True, type='str', choices=['ipv4','ipv6']),
        route_target_both_auto_evpn=dict(required=False, type='bool'),
        m_facts=dict(required=False, default=False, type='bool'),
        state=dict(choices=['present', 'absent'], default='present',
                   required=False),
        include_defaults=dict(default=False),
        config=dict(),
        save=dict(type='bool', default=False)
    )
    module = get_network_module(argument_spec=argument_spec,
                                supports_check_mode=True)
    state = module.params['state']
    args = [
        'vrf',
        'safi',
        'afi',
        'route_target_both_auto_evpn'
    ]
    existing = invoke('get_existing', module, args)
    end_state = existing
    proposed_args = dict((k, v) for k, v in module.params.items()
                         if v is not None and k in args)
    proposed = {}
    for key, value in proposed_args.items():
        if key != 'interface':
            # 'default' resolves to the per-parameter default when one is
            # registered, otherwise it is passed through literally.
            if str(value).lower() == 'default':
                value = PARAM_TO_DEFAULT_KEYMAP.get(key)
                if value is None:
                    value = 'default'
            if existing.get(key) or (not existing.get(key) and value):
                proposed[key] = value
    result = {}
    if state == 'present' or (state == 'absent' and existing):
        candidate = CustomNetworkConfig(indent=3)
        invoke('state_%s' % state, module, existing, proposed, candidate)
        try:
            response = load_config(module, candidate)
            result.update(response)
        except ShellError:
            exc = get_exception()
            module.fail_json(msg=str(exc))
    else:
        result['updates'] = []
    result['connected'] = module.connected
    if module._verbosity > 0:
        # Verbose mode: re-read the device so end_state reflects the push.
        end_state = invoke('get_existing', module, args)
        result['end_state'] = end_state
        result['existing'] = existing
        result['proposed'] = proposed_args
    if WARNINGS:
        result['warnings'] = WARNINGS
    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
from __future__ import unicode_literals
from wtforms import ValidationError
from sqlalchemy.orm.exc import NoResultFound
class Unique(object):
    """Checks field value unicity against the specified table field.

    :param get_session:
        A function that returns a SQLAlchemy Session.
    :param model:
        The model to check unicity against.
    :param column:
        The unique column.
    :param message:
        The error message.
    """
    field_flags = ('unique', )

    def __init__(self, get_session, model, column, message=None):
        self.get_session = get_session
        self.model = model
        self.column = column
        self.message = message

    def __call__(self, form, field):
        try:
            obj = self.get_session().query(self.model)\
                .filter(self.column == field.data).one()
            # Allow the match when the form is editing that very object.
            if not hasattr(form, '_obj') or not form._obj == obj:
                # Build the message locally instead of writing it back to
                # self.message: validator instances are shared across
                # forms/requests, and the original code permanently
                # overwrote a None message after the first failure,
                # freezing the gettext translation of whichever field
                # failed first.
                message = self.message
                if message is None:
                    message = field.gettext('Already exists.')
                raise ValidationError(message)
        except NoResultFound:
            # No row with this value: the field is unique.
            pass
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package configs
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/terraform/internal/experiments"
)
// TestExperimentsConfig exercises the parser's handling of the experiments
// opt-in mechanism: an active experiment, a concluded one, an unknown
// keyword, a syntactically invalid declaration, and the release-build case
// where experiments are disallowed entirely.
func TestExperimentsConfig(t *testing.T) {
	// The experiment registrations are global, so we need to do some special
	// patching in order to get a predictable set for our tests.
	current := experiments.Experiment("current")
	concluded := experiments.Experiment("concluded")
	currentExperiments := experiments.NewSet(current)
	concludedExperiments := map[experiments.Experiment]string{
		concluded: "Reticulate your splines.",
	}
	defer experiments.OverrideForTesting(t, currentExperiments, concludedExperiments)()
	t.Run("current", func(t *testing.T) {
		parser := NewParser(nil)
		parser.AllowLanguageExperiments(true)
		mod, diags := parser.LoadConfigDir("testdata/experiments/current")
		if got, want := len(diags), 1; got != want {
			t.Fatalf("wrong number of diagnostics %d; want %d", got, want)
		}
		got := diags[0]
		want := &hcl.Diagnostic{
			Severity: hcl.DiagWarning,
			Summary:  `Experimental feature "current" is active`,
			Detail:   "Experimental features are available only in alpha releases of Terraform and are subject to breaking changes or total removal in later versions, based on feedback. We recommend against using experimental features in production.\n\nIf you have feedback on the design of this feature, please open a GitHub issue to discuss it.",
			Subject: &hcl.Range{
				Filename: "testdata/experiments/current/current_experiment.tf",
				Start:    hcl.Pos{Line: 2, Column: 18, Byte: 29},
				End:      hcl.Pos{Line: 2, Column: 25, Byte: 36},
			},
		}
		if diff := cmp.Diff(want, got); diff != "" {
			t.Errorf("wrong warning\n%s", diff)
		}
		if got, want := len(mod.ActiveExperiments), 1; got != want {
			t.Errorf("wrong number of experiments %d; want %d", got, want)
		}
		if !mod.ActiveExperiments.Has(current) {
			t.Errorf("module does not indicate current experiment as active")
		}
	})
	t.Run("concluded", func(t *testing.T) {
		parser := NewParser(nil)
		parser.AllowLanguageExperiments(true)
		_, diags := parser.LoadConfigDir("testdata/experiments/concluded")
		if got, want := len(diags), 1; got != want {
			t.Fatalf("wrong number of diagnostics %d; want %d", got, want)
		}
		got := diags[0]
		want := &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  `Experiment has concluded`,
			Detail:   `Experiment "concluded" is no longer available. Reticulate your splines.`,
			Subject: &hcl.Range{
				Filename: "testdata/experiments/concluded/concluded_experiment.tf",
				Start:    hcl.Pos{Line: 2, Column: 18, Byte: 29},
				End:      hcl.Pos{Line: 2, Column: 27, Byte: 38},
			},
		}
		if diff := cmp.Diff(want, got); diff != "" {
			t.Errorf("wrong error\n%s", diff)
		}
	})
	// This subtest was previously also named "concluded", which collided
	// with the subtest above (Go's testing package silently renames the
	// duplicate to "concluded#01"); it actually covers the unknown-keyword
	// path, so name it accordingly.
	t.Run("unknown", func(t *testing.T) {
		parser := NewParser(nil)
		parser.AllowLanguageExperiments(true)
		_, diags := parser.LoadConfigDir("testdata/experiments/unknown")
		if got, want := len(diags), 1; got != want {
			t.Fatalf("wrong number of diagnostics %d; want %d", got, want)
		}
		got := diags[0]
		want := &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  `Unknown experiment keyword`,
			Detail:   `There is no current experiment with the keyword "unknown".`,
			Subject: &hcl.Range{
				Filename: "testdata/experiments/unknown/unknown_experiment.tf",
				Start:    hcl.Pos{Line: 2, Column: 18, Byte: 29},
				End:      hcl.Pos{Line: 2, Column: 25, Byte: 36},
			},
		}
		if diff := cmp.Diff(want, got); diff != "" {
			t.Errorf("wrong error\n%s", diff)
		}
	})
	t.Run("invalid", func(t *testing.T) {
		parser := NewParser(nil)
		parser.AllowLanguageExperiments(true)
		_, diags := parser.LoadConfigDir("testdata/experiments/invalid")
		if got, want := len(diags), 1; got != want {
			t.Fatalf("wrong number of diagnostics %d; want %d", got, want)
		}
		got := diags[0]
		want := &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  `Invalid expression`,
			Detail:   `A static list expression is required.`,
			Subject: &hcl.Range{
				Filename: "testdata/experiments/invalid/invalid_experiments.tf",
				Start:    hcl.Pos{Line: 2, Column: 17, Byte: 28},
				End:      hcl.Pos{Line: 2, Column: 24, Byte: 35},
			},
		}
		if diff := cmp.Diff(want, got); diff != "" {
			t.Errorf("wrong error\n%s", diff)
		}
	})
	t.Run("disallowed", func(t *testing.T) {
		parser := NewParser(nil)
		parser.AllowLanguageExperiments(false) // The default situation for release builds
		_, diags := parser.LoadConfigDir("testdata/experiments/current")
		if got, want := len(diags), 1; got != want {
			t.Fatalf("wrong number of diagnostics %d; want %d", got, want)
		}
		got := diags[0]
		want := &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  `Module uses experimental features`,
			Detail:   `Experimental features are intended only for gathering early feedback on new language designs, and so are available only in alpha releases of Terraform.`,
			Subject: &hcl.Range{
				Filename: "testdata/experiments/current/current_experiment.tf",
				Start:    hcl.Pos{Line: 2, Column: 3, Byte: 14},
				End:      hcl.Pos{Line: 2, Column: 14, Byte: 25},
			},
		}
		if diff := cmp.Diff(want, got); diff != "" {
			t.Errorf("wrong error\n%s", diff)
		}
	})
}
|
go
|
github
|
https://github.com/hashicorp/terraform
|
internal/configs/experiments_test.go
|
{
"packages": {
".": {}
},
"$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json",
"include-v-in-tag": true,
"include-component-in-tag": false,
"versioning": "prerelease",
"prerelease": true,
"bump-minor-pre-major": true,
"bump-patch-for-minor-pre-major": false,
"pull-request-header": "Automated Release PR",
"pull-request-title-pattern": "release: ${version}",
"changelog-sections": [
{
"type": "feat",
"section": "Features"
},
{
"type": "fix",
"section": "Bug Fixes"
},
{
"type": "perf",
"section": "Performance Improvements"
},
{
"type": "revert",
"section": "Reverts"
},
{
"type": "chore",
"section": "Chores"
},
{
"type": "docs",
"section": "Documentation"
},
{
"type": "style",
"section": "Styles"
},
{
"type": "refactor",
"section": "Refactors"
},
{
"type": "test",
"section": "Tests",
"hidden": true
},
{
"type": "build",
"section": "Build System"
},
{
"type": "ci",
"section": "Continuous Integration",
"hidden": true
}
],
"release-type": "python",
"extra-files": [
"src/openai/_version.py"
]
}
|
json
|
github
|
https://github.com/openai/openai-python
|
release-please-config.json
|
#
# (c) 2016 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.dellos6.dellos6 import dellos6_provider_spec
from ansible.module_utils.network.common.utils import load_provider
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
    """Action plugin for dellos6 modules.

    When the play uses the legacy 'local' connection, it builds a
    network_cli play context from the task's provider settings, opens a
    persistent connection, and exposes its socket to the module. It then
    makes sure the CLI is out of config mode before delegating to the
    standard 'normal' action.
    """

    def run(self, tmp=None, task_vars=None):
        del tmp  # tmp no longer has any effect
        socket_path = None
        if self._play_context.connection == 'network_cli':
            # provider settings are redundant with network_cli; warn if set.
            provider = self._task.args.get('provider', {})
            if any(provider.values()):
                display.warning('provider is unnecessary when using network_cli and will be ignored')
        elif self._play_context.connection == 'local':
            provider = load_provider(dellos6_provider_spec, self._task.args)
            # Clone the play context and rewrite it as a network_cli
            # connection using the provider values, falling back to the
            # play's own connection settings.
            pc = copy.deepcopy(self._play_context)
            pc.connection = 'network_cli'
            pc.network_os = 'dellos6'
            pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = int(provider['port'] or self._play_context.port or 22)
            pc.remote_user = provider['username'] or self._play_context.connection_user
            pc.password = provider['password'] or self._play_context.password
            pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
            pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
            pc.become = provider['authorize'] or False
            if pc.become:
                pc.become_method = 'enable'
            pc.become_pass = provider['auth_pass']
            display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
            connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
            socket_path = connection.run()
            display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
            if not socket_path:
                return {'failed': True,
                        'msg': 'unable to open shell. Please see: ' +
                        'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
            task_vars['ansible_socket'] = socket_path
        # make sure we are in the right cli context which should be
        # enable mode and not config module
        if socket_path is None:
            socket_path = self._connection.socket_path
        conn = Connection(socket_path)
        out = conn.get_prompt()
        # A prompt ending in ')#' indicates config mode; back out until we
        # reach enable mode.
        while to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
            display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
            conn.send_command('exit')
            out = conn.get_prompt()
        result = super(ActionModule, self).run(task_vars=task_vars)
        return result
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test cases for the mock key manager.
"""
import array
from cinder import context
from cinder import exception
from cinder.keymgr import key as keymgr_key
from cinder.tests.keymgr import mock_key_mgr
from cinder.tests.keymgr import test_key_mgr
class MockKeyManagerTestCase(test_key_mgr.KeyManagerTestCase):
    """Test cases for the mock (in-memory) key manager implementation."""

    def _create_key_manager(self):
        # Hook used by the base class to select the implementation under test.
        return mock_key_mgr.MockKeyManager()

    def setUp(self):
        super(MockKeyManagerTestCase, self).setUp()
        self.ctxt = context.RequestContext('fake', 'fake')

    def test_create_key(self):
        key_id_1 = self.key_mgr.create_key(self.ctxt)
        key_id_2 = self.key_mgr.create_key(self.ctxt)
        # ensure that the UUIDs are unique
        self.assertNotEqual(key_id_1, key_id_2)

    def test_create_key_with_length(self):
        # key_length is expressed in bits; get_encoded() returns bytes.
        for length in [64, 128, 256]:
            key_id = self.key_mgr.create_key(self.ctxt, key_length=length)
            key = self.key_mgr.get_key(self.ctxt, key_id)
            self.assertEqual(length / 8, len(key.get_encoded()))

    def test_create_null_context(self):
        self.assertRaises(exception.NotAuthorized,
                          self.key_mgr.create_key, None)

    def test_store_key(self):
        # NOTE(review): ('0' * 64).decode('hex') is Python 2 only; Python 3
        # would need bytes.fromhex -- confirm the target runtime.
        secret_key = array.array('B', ('0' * 64).decode('hex')).tolist()
        _key = keymgr_key.SymmetricKey('AES', secret_key)
        key_id = self.key_mgr.store_key(self.ctxt, _key)
        actual_key = self.key_mgr.get_key(self.ctxt, key_id)
        self.assertEqual(_key, actual_key)

    def test_store_null_context(self):
        self.assertRaises(exception.NotAuthorized,
                          self.key_mgr.store_key, None, None)

    def test_copy_key(self):
        # A copy must get a fresh id but compare equal to the original key.
        key_id = self.key_mgr.create_key(self.ctxt)
        key = self.key_mgr.get_key(self.ctxt, key_id)
        copied_key_id = self.key_mgr.copy_key(self.ctxt, key_id)
        copied_key = self.key_mgr.get_key(self.ctxt, copied_key_id)
        self.assertNotEqual(key_id, copied_key_id)
        self.assertEqual(key, copied_key)

    def test_copy_null_context(self):
        self.assertRaises(exception.NotAuthorized,
                          self.key_mgr.copy_key, None, None)

    def test_get_key(self):
        # Covered implicitly by the other tests; nothing mock-specific here.
        pass

    def test_get_null_context(self):
        self.assertRaises(exception.NotAuthorized,
                          self.key_mgr.get_key, None, None)

    def test_get_unknown_key(self):
        self.assertRaises(KeyError, self.key_mgr.get_key, self.ctxt, None)

    def test_delete_key(self):
        key_id = self.key_mgr.create_key(self.ctxt)
        self.key_mgr.delete_key(self.ctxt, key_id)
        self.assertRaises(KeyError, self.key_mgr.get_key, self.ctxt, key_id)

    def test_delete_null_context(self):
        self.assertRaises(exception.NotAuthorized,
                          self.key_mgr.delete_key, None, None)

    def test_delete_unknown_key(self):
        self.assertRaises(KeyError, self.key_mgr.delete_key, self.ctxt, None)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Standard Ansible metadata block describing module maturity and support.
ANSIBLE_METADATA = {
    'metadata_version': '1.0',
    'status': ['preview'],
    'supported_by': 'community',
}
DOCUMENTATION = '''
---
module: nxos_vpc
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages global VPC configuration
description:
- Manages global VPC configuration
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- The feature vpc must be enabled before this module can be used
- If not using management vrf, vrf must be globally on the device
before using in the pkl config
- Although source IP isn't required on the command line it is
required when using this module. The PKL VRF must also be configured
prior to using this module.
- Both pkl_src and pkl_dest are needed when changing PKL VRF.
options:
domain:
description:
- VPC domain
required: true
role_priority:
description:
- Role priority for device. Remember lower is better.
required: false
default: null
system_priority:
description:
- System priority device. Remember they must match between peers.
required: false
default: null
pkl_src:
description:
- Source IP address used for peer keepalive link
required: false
default: null
pkl_dest:
description:
- Destination (remote) IP address used for peer keepalive link
required: false
default: null
pkl_vrf:
description:
- VRF used for peer keepalive link
required: false
default: management
peer_gw:
description:
- Enables/Disables peer gateway
required: true
choices: ['true','false']
auto_recovery:
description:
- Enables/Disables auto recovery
required: true
choices: ['true','false']
delay_restore:
description:
- manages delay restore command and config value in seconds
required: false
default: null
state:
description:
- Manages desired state of the resource
required: true
choices: ['present','absent']
'''
EXAMPLES = '''
# configure a simple asn
- nxos_vpc:
domain: 100
role_priority: 1000
system_priority: 2000
pkl_dest: 192.168.100.4
pkl_src: 10.1.100.20
peer_gw: true
auto_recovery: true
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["vpc domain 100",
"peer-keepalive destination 192.168.100.4 source 10.1.100.20 vrf management",
"auto-recovery", "peer-gateway"]
'''
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
# Templates mapping module parameter names to their vpc-domain CLI commands.
# peer_gw/auto_recovery substitute either 'no ' or '' as a prefix; the
# result is .strip()ed before being sent.
CONFIG_ARGS = {
    'role_priority': 'role priority {role_priority}',
    'system_priority': 'system-priority {system_priority}',
    'delay_restore': 'delay restore {delay_restore}',
    'peer_gw': '{peer_gw} peer-gateway',
    'auto_recovery': '{auto_recovery} auto-recovery',
}
def flatten_list(command_lists):
    """Flatten one level of nesting: list entries are spliced in, scalar
    entries are appended as-is."""
    flattened = []
    for entry in command_lists:
        if isinstance(entry, list):
            flattened.extend(entry)
        else:
            flattened.append(entry)
    return flattened
def get_vrf_list(module):
    # Return the lower-cased names of all VRFs configured on the device,
    # or [] when 'show vrf all' yields no parseable table.
    try:
        body = run_commands(module, ['show vrf all | json'])[0]
        vrf_table = body['TABLE_vrf']['ROW_vrf']
    except (KeyError, AttributeError):
        return []
    vrf_list = []
    if vrf_table:
        # NOTE(review): assumes ROW_vrf is a list of dicts; some NX-OS
        # versions return a single dict when only one VRF exists -- confirm.
        for each in vrf_table:
            vrf_list.append(str(each['vrf_name'].lower()))
    return vrf_list
def get_vpc(module):
    # Collect the device's current global VPC configuration into a dict.
    # Combines 'show vpc | json' output with a text scrape of the 'section
    # vpc' running-config and 'show vpc peer-keepalive | json'. Returns an
    # empty dict when no VPC domain is configured.
    body = run_commands(module, ['show vpc | json'])[0]
    domain = str(body['vpc-domain-id'])
    auto_recovery = 'enabled' in str(body['vpc-auto-recovery-status']).lower()
    vpc = {}
    if domain != 'not configured':
        # Defaults used when the running-config does not mention a value.
        delay_restore = None
        pkl_src = None
        role_priority = '32667'
        system_priority = None
        pkl_dest = None
        pkl_vrf = None
        peer_gw = False
        run = get_config(module, flags=['section vpc'])
        if run:
            vpc_list = run.split('\n')
            for each in vpc_list:
                if 'delay restore' in each:
                    line = each.split()
                    # len == 5 distinguishes 'delay restore <n>' from the
                    # interface-vlan/orphan-port variants.
                    if len(line) == 5:
                        delay_restore = line[-1]
                if 'peer-keepalive destination' in each:
                    line = each.split()
                    pkl_dest = line[2]
                    for word in line:
                        if 'source' in word:
                            index = line.index(word)
                            pkl_src = line[index + 1]
                if 'role priority' in each:
                    line = each.split()
                    role_priority = line[-1]
                if 'system-priority' in each:
                    line = each.split()
                    system_priority = line[-1]
                if 'peer-gateway' in each:
                    peer_gw = True
        # Prefer the structured peer-keepalive output when available.
        body = run_commands(module, ['show vpc peer-keepalive | json'])[0]
        if body:
            pkl_dest = body['vpc-keepalive-dest']
            if 'N/A' in pkl_dest:
                pkl_dest = None
            elif len(pkl_dest) == 2:
                # NOTE(review): presumably handles a two-element list being
                # returned for the destination -- confirm device output shape.
                pkl_dest = pkl_dest[0]
            pkl_vrf = str(body['vpc-keepalive-vrf'])
        vpc['domain'] = domain
        vpc['auto_recovery'] = auto_recovery
        vpc['delay_restore'] = delay_restore
        vpc['pkl_src'] = pkl_src
        vpc['role_priority'] = role_priority
        vpc['system_priority'] = system_priority
        vpc['pkl_dest'] = pkl_dest
        vpc['pkl_vrf'] = pkl_vrf
        vpc['peer_gw'] = peer_gw
    return vpc
def get_commands_to_config_vpc(module, vpc, domain, existing):
    # Build the ordered CLI command list for the desired VPC state,
    # prefixed with 'vpc domain <n>' when anything needs configuring.
    vpc = dict(vpc)
    domain_only = vpc.get('domain')
    pkl_src = vpc.get('pkl_src')
    pkl_dest = vpc.get('pkl_dest')
    pkl_vrf = vpc.get('pkl_vrf') or existing.get('pkl_vrf')
    vpc['pkl_vrf'] = pkl_vrf
    commands = []
    if pkl_src or pkl_dest:
        # Only one endpoint given: borrow the other from the existing
        # config so the full keepalive command can be emitted.
        if pkl_src is None:
            vpc['pkl_src'] = existing.get('pkl_src')
        elif pkl_dest is None:
            vpc['pkl_dest'] = existing.get('pkl_dest')
        pkl_command = 'peer-keepalive destination {pkl_dest}'.format(**vpc) \
                      + ' source {pkl_src} vrf {pkl_vrf}'.format(**vpc)
        commands.append(pkl_command)
    elif pkl_vrf:
        # VRF-only change: re-issue the keepalive with existing endpoints.
        pkl_src = existing.get('pkl_src')
        pkl_dest = existing.get('pkl_dest')
        if pkl_src and pkl_dest:
            pkl_command = ('peer-keepalive destination {0}'
                           ' source {1} vrf {2}'.format(pkl_dest, pkl_src, pkl_vrf))
            commands.append(pkl_command)
    # Booleans become a 'no ' prefix (disable) or empty prefix (enable)
    # for the CONFIG_ARGS templates below.
    if vpc.get('auto_recovery') is False:
        vpc['auto_recovery'] = 'no'
    else:
        vpc['auto_recovery'] = ''
    if 'peer_gw' in vpc:
        if vpc.get('peer_gw') is False:
            vpc['peer_gw'] = 'no'
        else:
            vpc['peer_gw'] = ''
    else:
        if existing.get('peer_gw') is False:
            vpc['peer_gw'] = 'no'
        else:
            vpc['peer_gw'] = ''
    for param in vpc:
        command = CONFIG_ARGS.get(param)
        if command is not None:
            command = command.format(**vpc).strip()
            commands.append(command)
    if commands or domain_only:
        commands.insert(0, 'vpc domain {0}'.format(domain))
    return commands
def get_commands_to_remove_vpc_interface(portchannel, config_value):
    """Return the CLI commands that strip a vPC id from a port-channel."""
    return [
        'interface port-channel{0}'.format(portchannel),
        'no vpc {0}'.format(config_value),
    ]
def main():
    """Entry point for the nxos_vpc Ansible module.

    Reads module parameters, diffs the proposed vPC domain configuration
    against the device's current state, and pushes the resulting commands
    (skipped in check mode).  Results are returned via ``exit_json``.
    """
    argument_spec = dict(
        domain=dict(required=True, type='str'),
        role_priority=dict(required=False, type='str'),
        system_priority=dict(required=False, type='str'),
        pkl_src=dict(required=False),
        pkl_dest=dict(required=False),
        pkl_vrf=dict(required=False, default='management'),
        peer_gw=dict(required=True, type='bool'),
        auto_recovery=dict(required=True, type='bool'),
        delay_restore=dict(required=False, type='str'),
        state=dict(choices=['absent', 'present'], default='present'),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    results = {'changed': False, 'warnings': warnings}
    domain = module.params['domain']
    role_priority = module.params['role_priority']
    system_priority = module.params['system_priority']
    pkl_src = module.params['pkl_src']
    pkl_dest = module.params['pkl_dest']
    pkl_vrf = module.params['pkl_vrf']
    peer_gw = module.params['peer_gw']
    auto_recovery = module.params['auto_recovery']
    delay_restore = module.params['delay_restore']
    state = module.params['state']
    args = dict(domain=domain, role_priority=role_priority,
                system_priority=system_priority, pkl_src=pkl_src,
                pkl_dest=pkl_dest, pkl_vrf=pkl_vrf, peer_gw=peer_gw,
                auto_recovery=auto_recovery,
                delay_restore=delay_restore)
    # Peer-keepalive values are all-or-nothing: drop them from the diff
    # unless source, dest and vrf were all provided.
    if not (pkl_src and pkl_dest and pkl_vrf):
        # if only the source or dest is set, it'll fail and ask to set the
        # other
        if pkl_src or pkl_dest:
            module.fail_json(msg='source AND dest IP for pkl are required at '
                                 'this time (although source is technically not '
                                 ' required by the device.)')
        args.pop('pkl_src')
        args.pop('pkl_dest')
        args.pop('pkl_vrf')
    if pkl_vrf:
        if pkl_vrf.lower() not in get_vrf_list(module):
            module.fail_json(msg='The VRF you are trying to use for the peer '
                                 'keepalive link is not on device yet. Add it'
                                 ' first, please.')
    # Only parameters the user actually set participate in the diff.
    proposed = dict((k, v) for k, v in args.items() if v is not None)
    existing = get_vpc(module)
    commands = []
    if state == 'present':
        # delta is the set of (key, value) pairs that differ from the device.
        delta = set(proposed.items()).difference(existing.items())
        if delta:
            command = get_commands_to_config_vpc(module, delta, domain, existing)
            commands.append(command)
    elif state == 'absent':
        if existing:
            if domain != existing['domain']:
                module.fail_json(msg="You are trying to remove a domain that "
                                     "does not exist on the device")
            else:
                commands.append('no vpc domain {0}'.format(domain))
    cmds = flatten_list(commands)
    results['commands'] = cmds
    if cmds:
        results['changed'] = True
        if not module.check_mode:
            load_config(module, cmds)
            # Hide the leading 'configure' keyword from the reported commands.
            if 'configure' in cmds:
                cmds.pop(0)
    module.exit_json(**results)
# Standard Ansible module entry point: run only when executed directly.
if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
===========
gaussfitter
===========
.. codeauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com> 3/17/08
Latest version available at <http://code.google.com/p/agpy/source/browse/trunk/agpy/gaussfitter.py>
"""
import numpy
from numpy.ma import median
from numpy import pi
#from scipy import optimize,stats,pi
from scipy.optimize import curve_fit
from mpfit import mpfit
"""
Note about mpfit/leastsq:
I switched everything over to the Markwardt mpfit routine for a few reasons,
but foremost being the ability to set limits on parameters, not just force them
to be fixed. As far as I can tell, leastsq does not have that capability.
The version of mpfit I use can be found here:
http://code.google.com/p/agpy/source/browse/trunk/mpfit
.. todo::
-turn into a class instead of a collection of objects
-implement WCS-based gaussian fitting with correct coordinates
"""
def moments(data,circle,rotate,vheight,estimator=median,**kwargs):
    """Estimate 2D-Gaussian parameters from the image moments of ``data``.

    Returns a subset of (height, amplitude, x, y, width_x, width_y, angle)
    depending on the ``circle``/``rotate``/``vheight`` flags, in the order
    expected by ``twodgaussian``.  The rotation "moment" is always 0.
    If using masked arrays, pass estimator=numpy.ma.median.
    """
    total = numpy.abs(data).sum()
    Y, X = numpy.indices(data.shape) # python convention: reverse x,y numpy.indices
    # Intensity-weighted centroid along each axis (abs() guards against
    # negative-amplitude data).
    y = numpy.argmax((X*numpy.abs(data)).sum(axis=1)/total)
    x = numpy.argmax((Y*numpy.abs(data)).sum(axis=0)/total)
    col = data[int(y),:]
    # FIRST moment, not second!
    # NOTE(review): the centers subtracted here (y for the row cut, x for
    # the column cut) look swapped relative to the slice direction — this
    # mirrors the deliberate x/y reversal above, but confirm against the
    # upstream gaussfitter before changing.
    width_x = numpy.sqrt(numpy.abs((numpy.arange(col.size)-y)*col).sum()/numpy.abs(col).sum())
    row = data[:, int(x)]
    width_y = numpy.sqrt(numpy.abs((numpy.arange(row.size)-x)*row).sum()/numpy.abs(row).sum())
    width = ( width_x + width_y ) / 2.
    # Background level comes from the estimator; amplitude is peak-above-it.
    height = estimator(data.ravel())
    amplitude = data.max()-height
    mylist = [amplitude,x,y]
    if numpy.isnan(width_y) or numpy.isnan(width_x) or numpy.isnan(height) or numpy.isnan(amplitude):
        raise ValueError("something is nan")
    if vheight==1:
        mylist = [height] + mylist
    if circle==0:
        mylist = mylist + [width_x,width_y]
        if rotate==1:
            mylist = mylist + [0.] #rotation "moment" is just zero...
            # also, circles don't rotate.
    else:
        mylist = mylist + [width]
    return mylist
def twodgaussian(inpars, circle=False, rotate=True, vheight=True, shape=None):
    """Build a 2D Gaussian function from a flat parameter list.

    ``inpars`` holds, in order and subject to the flags:
    (height, amplitude, center_x, center_y, width_x, width_y, rota) —
    ``vheight`` adds/removes the leading height, ``circle`` collapses the
    two widths to one (and disables rotation), and ``rotate`` adds/removes
    the trailing angle (degrees).  Note the code consumes the two center
    values as (center_y, center_x), matching the module's reversed
    x/y convention.

    Returns a callable ``g(x, y)``; if ``shape`` (a 2-tuple) is given, the
    Gaussian is instead evaluated over ``numpy.indices(shape)`` and the
    resulting image is returned.

    Raises ValueError if inpars has leftover (too many) parameters.
    """
    inpars_old = inpars
    inpars = list(inpars)
    if vheight == 1:
        height = float(inpars.pop(0))
    else:
        height = float(0)
    amplitude = float(inpars.pop(0))
    center_y = float(inpars.pop(0))
    center_x = float(inpars.pop(0))
    if circle == 1:
        # One shared width; a circle has no meaningful rotation.
        width_x = width_y = float(inpars.pop(0))
        rotate = 0
    else:
        width_x = float(inpars.pop(0))
        width_y = float(inpars.pop(0))
    if rotate == 1:
        # Convert degrees to radians and rotate the center into the
        # Gaussian's own frame.
        rota = pi / 180. * float(inpars.pop(0))
        rcen_x = center_x * numpy.cos(rota) - center_y * numpy.sin(rota)
        rcen_y = center_x * numpy.sin(rota) + center_y * numpy.cos(rota)
    else:
        rcen_x = center_x
        rcen_y = center_y
    if len(inpars) > 0:
        raise ValueError("There are still input parameters:" + str(inpars) + \
                " and you've input: " + str(inpars_old) + \
                " circle=%d, rotate=%d, vheight=%d" % (circle,rotate,vheight) )
    def rotgauss(x, y):
        # Rotate the evaluation coordinates, then evaluate the Gaussian.
        if rotate == 1:
            xp = x * numpy.cos(rota) - y * numpy.sin(rota)
            yp = x * numpy.sin(rota) + y * numpy.cos(rota)
        else:
            xp = x
            yp = y
        return height + amplitude * numpy.exp(
            -(((rcen_x - xp) / width_x) ** 2 +
              ((rcen_y - yp) / width_y) ** 2) / 2.)
    if shape is not None:
        return rotgauss(*numpy.indices(shape))
    return rotgauss
def gaussfit(data,err=None,params=(),autoderiv=True,return_all=False,circle=False,
        fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
        limitedmax=[False,False,False,False,False,False,True],
        usemoment=numpy.array([],dtype='bool'),
        minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
        rotate=1,vheight=1,quiet=True,returnmp=False,
        returnfitimage=False,**kwargs):
    """
    Gaussian fitter with the ability to fit a variety of different forms of
    2-dimensional gaussian.
    Input Parameters:
        data - 2-dimensional data array
        err=None - error array with same size as data array
        params=[] - initial input parameters for Gaussian function.
            (height, amplitude, x, y, width_x, width_y, rota)
            if not input, these will be determined from the moments of the system,
            assuming no rotation
        autoderiv=1 - use the autoderiv provided in the lmder.f function (the
            alternative is to us an analytic derivative with lmdif.f: this method
            is less robust)
        return_all=0 - Default is to return only the Gaussian parameters.
            1 - fit params, fit error
        returnfitimage - returns (best fit params,best fit image)
        returnmp - returns the full mpfit struct
        circle=0 - default is an elliptical gaussian (different x, y widths),
            but can reduce the input by one parameter if it's a circular gaussian
        rotate=1 - default allows rotation of the gaussian ellipse. Can remove
            last parameter by setting rotate=0. numpy.expects angle in DEGREES
        vheight=1 - default allows a variable height-above-zero, i.e. an
            additive constant for the Gaussian function. Can remove first
            parameter by setting this to 0
        usemoment - can choose which parameters to use a moment estimation for.
            Other parameters will be taken from params. Needs to be a boolean
            array.
    Output:
        Default output is a set of Gaussian parameters with the same shape as
        the input parameters
        Can also output the covariance matrix, 'infodict' that contains a lot
        more detail about the fit (see scipy.optimize.leastsq), and a message
        from leastsq telling what the exit status of the fitting routine was
        Warning: Does NOT necessarily output a rotation angle between 0 and 360 degrees.
    """
    # Seed missing/selected start parameters from image moments.
    usemoment=numpy.array(usemoment,dtype='bool')
    params=numpy.array(params,dtype='float')
    if usemoment.any() and len(params)==len(usemoment):
        moment = numpy.array(moments(data,circle,rotate,vheight,**kwargs),dtype='float')
        params[usemoment] = moment[usemoment]
    elif params == [] or len(params)==0:
        params = (moments(data,circle,rotate,vheight,**kwargs))
    if vheight==0:
        # Fit with a fixed zero baseline: prepend height=0 and pin it.
        vheight=1
        params = numpy.concatenate([[0],params])
        fixed[0] = 1
    # mpfit will fail if it is given a start parameter outside the allowed range:
    for i in xrange(len(params)):
        if params[i] > maxpars[i] and limitedmax[i]: params[i] = maxpars[i]
        if params[i] < minpars[i] and limitedmin[i]: params[i] = minpars[i]
    # Residual function for the (unused) leastsq path; kept for reference.
    if err is None:
        errorfunction = lambda p: numpy.ravel((twodgaussian(p,circle,rotate,vheight)\
                (*numpy.indices(data.shape)) - data))
    else:
        errorfunction = lambda p: numpy.ravel((twodgaussian(p,circle,rotate,vheight)\
                (*numpy.indices(data.shape)) - data)/err)
    def mpfitfun(data,err):
        # Wrap the residuals in the [status, residuals] form mpfit expects.
        if err is None:
            def f(p,fjac=None): return [0,numpy.ravel(data-twodgaussian(p,circle,rotate,vheight)\
                    (*numpy.indices(data.shape)))]
        else:
            def f(p,fjac=None): return [0,numpy.ravel((data-twodgaussian(p,circle,rotate,vheight)\
                    (*numpy.indices(data.shape)))/err)]
        return f
    # Per-parameter limits/fixing for mpfit; optional entries are added below.
    parinfo = [
            {'n':1,'value':params[1],'limits':[minpars[1],maxpars[1]],'limited':[limitedmin[1],limitedmax[1]],'fixed':fixed[1],'parname':"AMPLITUDE",'error':0},
            {'n':2,'value':params[2],'limits':[minpars[2],maxpars[2]],'limited':[limitedmin[2],limitedmax[2]],'fixed':fixed[2],'parname':"XSHIFT",'error':0},
            {'n':3,'value':params[3],'limits':[minpars[3],maxpars[3]],'limited':[limitedmin[3],limitedmax[3]],'fixed':fixed[3],'parname':"YSHIFT",'error':0},
            {'n':4,'value':params[4],'limits':[minpars[4],maxpars[4]],'limited':[limitedmin[4],limitedmax[4]],'fixed':fixed[4],'parname':"XWIDTH",'error':0} ]
    if vheight == 1:
        parinfo.insert(0,{'n':0,'value':params[0],'limits':[minpars[0],maxpars[0]],'limited':[limitedmin[0],limitedmax[0]],'fixed':fixed[0],'parname':"HEIGHT",'error':0})
    if circle == 0:
        parinfo.append({'n':5,'value':params[5],'limits':[minpars[5],maxpars[5]],'limited':[limitedmin[5],limitedmax[5]],'fixed':fixed[5],'parname':"YWIDTH",'error':0})
        if rotate == 1:
            parinfo.append({'n':6,'value':params[6],'limits':[minpars[6],maxpars[6]],'limited':[limitedmin[6],limitedmax[6]],'fixed':fixed[6],'parname':"ROTATION",'error':0})
    if autoderiv == 0:
        # the analytic derivative, while not terribly difficult, is less
        # efficient and useful.  I only bothered putting it here because I was
        # instructed to do so for a class project - please ask if you would
        # like this feature implemented
        raise ValueError("I'm sorry, I haven't implemented this feature yet.")
    else:
        #        p, cov, infodict, errmsg, success = optimize.leastsq(errorfunction,\
        #                params, full_output=1)
        mp = mpfit(mpfitfun(data,err),parinfo=parinfo,quiet=quiet)
    # Assemble the requested return shape.
    if returnmp:
        returns = (mp)
    elif return_all == 0:
        returns = mp.params
    elif return_all == 1:
        returns = mp.params,mp.perror
    if returnfitimage:
        fitimage = twodgaussian(mp.params,circle,rotate,vheight)(*numpy.indices(data.shape))
        returns = (returns,fitimage)
    return returns
def onedmoments(Xax,data,vheight=True,estimator=median,negamp=None,
        veryverbose=False, **kwargs):
    """Estimate 1D-Gaussian start parameters (height, amplitude, x, width_x)
    from the moments of ``data`` sampled at ``Xax``.
    If using masked arrays, pass estimator=numpy.ma.median
    'estimator' is used to measure the background level (height)
    negamp can be used to force the peak negative (True), positive (False),
    or it will be "autodetected" (negamp=None)
    """
    dx = numpy.mean(Xax[1:] - Xax[:-1]) # assume a regular grid
    integral = (data*dx).sum()
    height = estimator(data)
    # try to figure out whether pos or neg based on the minimum width of the pos/neg peaks
    # "L" quantities describe a candidate negative (Lower) peak,
    # "H" quantities a candidate positive (Higher) peak.
    Lpeakintegral = integral - height*len(Xax)*dx - (data[data>height]*dx).sum()
    Lamplitude = data.min()-height
    Lwidth_x = 0.5*(numpy.abs(Lpeakintegral / Lamplitude))
    Hpeakintegral = integral - height*len(Xax)*dx - (data[data<height]*dx).sum()
    Hamplitude = data.max()-height
    Hwidth_x = 0.5*(numpy.abs(Hpeakintegral / Hamplitude))
    Lstddev = Xax[data<data.mean()].std()
    Hstddev = Xax[data>data.mean()].std()
    #print "Lstddev: %10.3g   Hstddev: %10.3g" % (Lstddev,Hstddev)
    #print "Lwidth_x: %10.3g   Hwidth_x: %10.3g" % (Lwidth_x,Hwidth_x)
    if negamp: # can force the guess to be negative
        xcen,amplitude,width_x = Xax[numpy.argmin(data)],Lamplitude,Lwidth_x
    elif negamp is None:
        # Autodetect: the candidate whose x-positions cluster more tightly wins.
        if Hstddev < Lstddev:
            xcen,amplitude,width_x, = Xax[numpy.argmax(data)],Hamplitude,Hwidth_x
        else:
            xcen,amplitude,width_x, = Xax[numpy.argmin(data)],Lamplitude,Lwidth_x
    else:  # if negamp==False, make positive
        xcen,amplitude,width_x = Xax[numpy.argmax(data)],Hamplitude,Hwidth_x
    if veryverbose:
        print "negamp: %s  amp,width,cen Lower: %g, %g   Upper: %g, %g  Center: %g" %\
                (negamp,Lamplitude,Lwidth_x,Hamplitude,Hwidth_x,xcen)
    mylist = [amplitude,xcen,width_x]
    if numpy.isnan(width_x) or numpy.isnan(height) or numpy.isnan(amplitude):
        raise ValueError("something is nan")
    if vheight:
        mylist = [height] + mylist
    return mylist
def onedgaussian(x, H, A, dx, w):
    """Evaluate the 1D Gaussian ``H + A*exp(-(x-dx)**2 / (2*w**2))`` at x."""
    offset = x - dx
    return H + A * numpy.exp(-offset ** 2 / (2 * w ** 2))
def onedgaussfit(xax, data, err=None,
        params=[0,1,0,1],fixed=[False,False,False,False],
        limitedmin=[False,False,False,True],
        limitedmax=[False,False,False,False], minpars=[0,0,0,0],
        maxpars=[0,0,0,0], quiet=True, shh=True,
        veryverbose=False,
        vheight=True, negamp=False,
        usemoments=False):
    """
    Fit a single 1D Gaussian to ``data`` with mpfit.
    Inputs:
       xax - x axis
       data - y axis
       err - error corresponding to data
       params - Fit parameters: Height of background, Amplitude, Shift, Width
       fixed - Is parameter fixed?
       limitedmin/minpars - set lower limits on each parameter (default: width>0)
       limitedmax/maxpars - set upper limits on each parameter
       quiet - should MPFIT output each iteration?
       shh - output final parameters?
       usemoments - replace default parameters with moments
    Returns:
       Fit parameters
       Model
       Fit errors
       chi2
    """
    def mpfitfun(x,y,err):
        # Residuals in the [status, residuals] form mpfit expects.
        if err is None:
            def f(p,fjac=None): return [0,(y-onedgaussian(x,*p))]
        else:
            def f(p,fjac=None): return [0,(y-onedgaussian(x,*p))/err]
        return f
    if xax == None:
        xax = numpy.arange(len(data))
    if vheight is False:
        # Baseline fixed at its supplied value rather than fit.
        height = params[0]
        fixed[0] = True
    if usemoments:
        params = onedmoments(xax,data,vheight=vheight,negamp=negamp, veryverbose=veryverbose)
        if vheight is False: params = [height]+params
        if veryverbose: print "OneD moments: h: %g  a: %g  c: %g  w: %g" % tuple(params)
    # Per-parameter limits/fixing passed to mpfit.
    parinfo = [ {'n':0,'value':params[0],'limits':[minpars[0],maxpars[0]],'limited':[limitedmin[0],limitedmax[0]],'fixed':fixed[0],'parname':"HEIGHT",'error':0} ,
                {'n':1,'value':params[1],'limits':[minpars[1],maxpars[1]],'limited':[limitedmin[1],limitedmax[1]],'fixed':fixed[1],'parname':"AMPLITUDE",'error':0},
                {'n':2,'value':params[2],'limits':[minpars[2],maxpars[2]],'limited':[limitedmin[2],limitedmax[2]],'fixed':fixed[2],'parname':"SHIFT",'error':0},
                {'n':3,'value':params[3],'limits':[minpars[3],maxpars[3]],'limited':[limitedmin[3],limitedmax[3]],'fixed':fixed[3],'parname':"WIDTH",'error':0}]
    mp = mpfit(mpfitfun(xax,data,err),parinfo=parinfo,quiet=quiet)
    mpp = mp.params
    mpperr = mp.perror
    chi2 = mp.fnorm
    if mp.status == 0:
        raise Exception(mp.errmsg)
    if (not shh) or veryverbose:
        print "Fit status: ",mp.status
        for i,p in enumerate(mpp):
            parinfo[i]['value'] = p
            print parinfo[i]['parname'],p," +/- ",mpperr[i]
        print "Chi2: ",mp.fnorm," Reduced Chi2: ",mp.fnorm/len(data)," DOF:",len(data)-len(mpp)
    return mpp,onedgaussian(xax,*mpp),mpperr,chi2
def n_gaussian(pars=None, a=None, dx=None, sigma=None):
    """Return a function that sums N zero-baseline Gaussians.

    Component parameters may be supplied either as a flat list ``pars`` of
    length 3*N (amplitude, center, width repeating) or as three equal-length
    sequences ``a`` (amplitudes), ``dx`` (centers) and ``sigma`` (widths).
    The background "height" is assumed to be zero, so "baseline" your
    spectrum before fitting.

    Returns:
        A callable ``g(x)`` evaluating the summed model on array ``x``.

    Raises:
        ValueError: if ``len(pars)`` is not a multiple of 3, or if ``pars``
            is omitted and ``a``/``dx``/``sigma`` are missing or of
            mismatched lengths.
    """
    if pars is not None:
        # Flat [a0, dx0, sigma0, a1, dx1, sigma1, ...] layout.
        if len(pars) % 3 != 0:
            raise ValueError("len(pars) must be a multiple of 3, got %i" % len(pars))
        a = [pars[ii] for ii in range(0, len(pars), 3)]
        dx = [pars[ii] for ii in range(1, len(pars), 3)]
        sigma = [pars[ii] for ii in range(2, len(pars), 3)]
    elif a is None or dx is None or sigma is None:
        # Previously this path crashed with TypeError on len(None) because
        # pars defaults to None; raise a clear error instead.
        raise ValueError("Either pars or all of a, dx and sigma must be given")
    elif not (len(dx) == len(sigma) == len(a)):
        raise ValueError("Wrong array lengths! dx: %i  sigma: %i  a: %i" % (len(dx), len(sigma), len(a)))
    def g(x):
        # Sum every Gaussian component at positions x.
        v = numpy.zeros(len(x))
        for i in range(len(dx)):
            v += a[i] * numpy.exp(-(x - dx[i]) ** 2 / (2.0 * sigma[i] ** 2))
        return v
    return g
def multigaussfit(xax, data, ngauss=1, err=None, params=[1,0,1],
        fixed=[False,False,False], limitedmin=[False,False,True],
        limitedmax=[False,False,False], minpars=[0,0,0], maxpars=[0,0,0],
        quiet=True, shh=True, veryverbose=False):
    """
    An improvement on onedgaussfit.  Lets you fit multiple gaussians.
    Inputs:
       xax - x axis
       data - y axis
       ngauss - How many gaussians to fit?  Default 1 (this could supersede onedgaussfit)
       err - error corresponding to data
     These parameters need to have length = 3*ngauss.  If ngauss > 1 and length = 3, they will
     be replicated ngauss times, otherwise they will be reset to defaults:
       params - Fit parameters: [amplitude, offset, width] * ngauss
              If len(params) % 3 == 0, ngauss will be set to len(params) / 3
       fixed - Is parameter fixed?
       limitedmin/minpars - set lower limits on each parameter (default: width>0)
       limitedmax/maxpars - set upper limits on each parameter
       quiet - should MPFIT output each iteration?
       shh - output final parameters?
    Returns:
       Fit parameters
       Model
       Fit errors
       chi2
    """
    # Infer the component count from params when it implies more gaussians.
    if len(params) != ngauss and (len(params) / 3) > ngauss:
        ngauss = len(params) / 3
    if isinstance(params,numpy.ndarray): params=params.tolist()
    # make sure all various things are the right length; if they're not, fix them using the defaults
    for parlist in (params,fixed,limitedmin,limitedmax,minpars,maxpars):
        if len(parlist) != 3*ngauss:
            # if you leave the defaults, or enter something that can be multiplied by 3 to get to the
            # right number of gaussians, it will just replicate
            if len(parlist) == 3:
                parlist *= ngauss
            elif parlist==params:
                parlist[:] = [1,0,1] * ngauss
            elif parlist==fixed or parlist==limitedmax:
                parlist[:] = [False,False,False] * ngauss
            elif parlist==limitedmin:
                parlist[:] = [False,False,True] * ngauss
            elif parlist==minpars or parlist==maxpars:
                parlist[:] = [0,0,0] * ngauss
    def mpfitfun(x,y,err):
        # Residuals in the [status, residuals] form mpfit expects.
        if err is None:
            def f(p,fjac=None): return [0,(y-n_gaussian(pars=p)(x))]
        else:
            def f(p,fjac=None): return [0,(y-n_gaussian(pars=p)(x))/err]
        return f
    if xax == None:
        xax = numpy.arange(len(data))
    # Names cycle amplitude/shift/width for each component.
    parnames = {0:"AMPLITUDE",1:"SHIFT",2:"WIDTH"}
    parinfo = [ {'n':ii, 'value':params[ii],
        'limits':[minpars[ii],maxpars[ii]],
        'limited':[limitedmin[ii],limitedmax[ii]], 'fixed':fixed[ii],
        'parname':parnames[ii%3]+str(ii%3), 'error':ii}
        for ii in xrange(len(params)) ]
    if veryverbose:
        print "GUESSES: "
        print "\n".join(["%s: %s" % (p['parname'],p['value']) for p in parinfo])
    mp = mpfit(mpfitfun(xax,data,err),parinfo=parinfo,quiet=quiet)
    mpp = mp.params
    mpperr = mp.perror
    chi2 = mp.fnorm
    if mp.status == 0:
        raise Exception(mp.errmsg)
    if not shh:
        print "Final fit values: "
        for i,p in enumerate(mpp):
            parinfo[i]['value'] = p
            print parinfo[i]['parname'],p," +/- ",mpperr[i]
        print "Chi2: ",mp.fnorm," Reduced Chi2: ",mp.fnorm/len(data)," DOF:",len(data)-len(mpp)
    return mpp,n_gaussian(pars=mpp)(xax),mpperr,chi2
def collapse_gaussfit(cube,xax=None,axis=2,negamp=False,usemoments=True,nsigcut=1.0,mppsigcut=1.0,
        return_errors=False, **kwargs):
    """Fit a 1D Gaussian along ``axis`` of a data cube at every spatial
    position whose peak exceeds ``nsigcut`` times the typical noise,
    returning 2D maps of width, offset, amplitude (and errors) plus chi2.
    Fits whose amplitude significance is below ``mppsigcut`` are left NaN.
    Prints per-row progress; this is a long-running operation.
    """
    import time
    # Typical per-spectrum noise, estimated from the median std over positions.
    std_coll = cube.std(axis=axis)
    std_coll[std_coll==0] = numpy.nan # must eliminate all-zero spectra
    mean_std = median(std_coll[std_coll==std_coll])
    if axis > 0:
        # Move the spectral axis to the front so cube[:, i, j] is a spectrum.
        cube = cube.swapaxes(0,axis)
    width_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    amp_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    chi2_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    offset_arr = numpy.zeros(cube.shape[1:]) + numpy.nan
    width_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    amp_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    offset_err = numpy.zeros(cube.shape[1:]) + numpy.nan
    if xax is None:
        xax = numpy.arange(cube.shape[0])
    starttime = time.time()
    print "Cube shape: ",cube.shape
    if negamp: extremum=numpy.min
    else: extremum=numpy.max
    print "Fitting a total of %i spectra with peak signal above %f" % ((numpy.abs(extremum(cube,axis=0)) > (mean_std*nsigcut)).sum(),mean_std*nsigcut)
    for i in xrange(cube.shape[1]):
        t0 = time.time()
        nspec = (numpy.abs(extremum(cube[:,i,:],axis=0)) > (mean_std*nsigcut)).sum()
        print "Working on row %d with %d spectra to fit" % (i,nspec) ,
        for j in xrange(cube.shape[2]):
            # Only fit positions with a significant peak.
            if numpy.abs(extremum(cube[:,i,j])) > (mean_std*nsigcut):
                mpp,gfit,mpperr,chi2 = onedgaussfit(xax,cube[:,i,j],err=numpy.ones(cube.shape[0])*mean_std,negamp=negamp,usemoments=usemoments,**kwargs)
                # Keep only fits whose amplitude is significant.
                if numpy.abs(mpp[1]) > (mpperr[1]*mppsigcut):
                    width_arr[i,j] = mpp[3]
                    offset_arr[i,j] = mpp[2]
                    chi2_arr[i,j] = chi2
                    amp_arr[i,j] = mpp[1]
                    width_err[i,j] = mpperr[3]
                    offset_err[i,j] = mpperr[2]
                    amp_err[i,j] = mpperr[1]
        dt = time.time()-t0
        if nspec > 0:
            print "in %f seconds (average: %f)" % (dt,dt/float(nspec))
        else:
            print "in %f seconds" % (dt)
    print "Total time %f seconds" % (time.time()-starttime)
    if return_errors:
        return width_arr,offset_arr,amp_arr,width_err,offset_err,amp_err,chi2_arr
    else:
        return width_arr,offset_arr,amp_arr,chi2_arr
|
unknown
|
codeparrot/codeparrot-clean
| ||
import os
import pymongo
from bson.code import Code
from . import base
# Map-reduce functions for getting and counting the unique fields
# across documents in a collection.
map_fields = Code('''
function() {
for (var key in this) {
emit(key, 1);
}
}
''')
count_fields = Code('''
function(key, values) {
return Array.sum(values);
}
''')
class Client(base.Client):
    """Extracts a MongoDB database, its collections, and their document
    fields as PROV entities."""

    name = 'MongoDB'
    description = '''
        Generator for a MongoDB database. The database, collections, and
        document fields are extracted as entities.
    '''
    # JSON-schema-style declaration of the generator's options.
    options = {
        'required': ['database'],
        'properties': {
            'database': {
                'description': 'Name of the database.',
                'type': 'string',
            },
            'host': {
                'description': 'Host of the server.',
                'type': 'string',
                'default': 'localhost',
            },
            'port': {
                'description': 'Port of the server.',
                'type': 'number',
                'default': 27017
            },
        }
    }

    def setup(self):
        """Open the connection and select the configured database."""
        self.conn = pymongo.MongoClient(host=self.options.host,
                                        port=self.options.port)
        self.db = self.conn[self.options.database]

    def get_collections(self):
        """Return a list of collection dicts in the database."""
        collections = []
        for collection_name in self.db.collection_names():
            if collection_name != 'system.indexes':
                collections.append({'name': collection_name})
        return collections

    def get_fields(self, collection_name):
        """Return a list of field dicts in the collection.

        Runs an inline map-reduce job over the collection to obtain the
        unique set of top-level fields across its documents.
        """
        output = self.db[collection_name].inline_map_reduce(
            map_fields, count_fields, full_response=True)
        # result['value'] / output['counts']['input'] would produce the
        # occurrence of the field across documents.
        return [{'name': result['_id']} for result in output['results']]

    def parse_database(self):
        """Describe the database itself as an entity dict."""
        name = self.options.database
        version = self.conn.server_info()['version']
        return {
            'origins:ident': name,
            'prov:label': name,
            'prov:type': 'Database',
            'version': version
        }

    def parse_collection(self, attrs, db):
        """Attach identity/label/type metadata to a collection dict."""
        attrs['origins:ident'] = os.path.join(db['origins:ident'],
                                              attrs['name'])
        attrs['prov:label'] = attrs['name']
        attrs['prov:type'] = 'Collection'
        attrs['database'] = db
        return attrs

    def parse_field(self, attrs, col):
        """Attach identity/label/type metadata to a field dict."""
        attrs['origins:ident'] = os.path.join(col['origins:ident'],
                                              attrs['name'])
        attrs['prov:label'] = attrs['name']
        attrs['prov:type'] = 'Field'
        attrs['column'] = col
        return attrs

    def parse(self):
        """Walk database -> collections -> fields, adding each as an entity."""
        db = self.parse_database()
        self.document.add('entity', db)
        for col in self.get_collections():
            col = self.parse_collection(col, db)
            self.document.add('entity', col)
            for field in self.get_fields(col['name']):
                self.document.add('entity', self.parse_field(field, col))
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google BigQuery API."""
from __future__ import absolute_import
try:
from collections import abc as collections_abc
except ImportError: # Python 2.7
import collections as collections_abc
import functools
import gzip
import os
import uuid
import six
from google import resumable_media
from google.resumable_media.requests import MultipartUpload
from google.resumable_media.requests import ResumableUpload
from google.api_core import page_iterator
import google.cloud._helpers
from google.cloud import exceptions
from google.cloud.client import ClientWithProject
from google.cloud.bigquery._helpers import _SCALAR_VALUE_TO_JSON_ROW
from google.cloud.bigquery._helpers import _str_or_none
from google.cloud.bigquery._http import Connection
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.dataset import DatasetListItem
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery import job
from google.cloud.bigquery.query import _QueryResults
from google.cloud.bigquery.retry import DEFAULT_RETRY
from google.cloud.bigquery.table import Table
from google.cloud.bigquery.table import TableListItem
from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.table import RowIterator
from google.cloud.bigquery.table import _TABLE_HAS_NO_SCHEMA
from google.cloud.bigquery.table import _row_from_mapping
# Default chunk size for resumable uploads.
_DEFAULT_CHUNKSIZE = 1048576  # 1024 * 1024 B = 1 MB
# Payloads at or below this size use a single multipart upload request.
_MAX_MULTIPART_SIZE = 5 * 1024 * 1024
_DEFAULT_NUM_RETRIES = 6
# URL templates for the two media-upload flavors of the jobs endpoint.
_BASE_UPLOAD_TEMPLATE = (
    u"https://www.googleapis.com/upload/bigquery/v2/projects/"
    u"{project}/jobs?uploadType="
)
_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart"
_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable"
_GENERIC_CONTENT_TYPE = u"*/*"
# Error message used when a stream is shorter than the declared size.
_READ_LESS_THAN_SIZE = (
    "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining."
)
class Project(object):
    """Lightweight record describing a BigQuery project resource.

    Args:
        project_id (str): Opaque ID of the project.
        numeric_id (int): Numeric ID of the project.
        friendly_name (str): Display name of the project.
    """

    def __init__(self, project_id, numeric_id, friendly_name):
        self.project_id = project_id
        self.numeric_id = numeric_id
        self.friendly_name = friendly_name

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: build a :class:`Project` from its API resource dict."""
        return cls(
            resource["id"], resource["numericId"], resource["friendlyName"]
        )
class Client(ClientWithProject):
"""Client to bundle configuration needed for API requests.
Args:
project (str):
Project ID for the project which the client acts on behalf of.
Will be passed when creating a dataset / job. If not passed,
falls back to the default inferred from the environment.
credentials (google.auth.credentials.Credentials):
(Optional) The OAuth2 Credentials to use for this client. If not
passed (and if no ``_http`` object is passed), falls back to the
default inferred from the environment.
_http (requests.Session):
(Optional) HTTP object to make requests. Can be any object that
defines ``request()`` with the same interface as
:meth:`requests.Session.request`. If not passed, an ``_http``
object is created that is bound to the ``credentials`` for the
current object.
This parameter should be considered private, and could change in
the future.
location (str):
(Optional) Default location for jobs / datasets / tables.
default_query_job_config (google.cloud.bigquery.job.QueryJobConfig):
(Optional) Default ``QueryJobConfig``.
Will be merged into job configs passed into the ``query`` method.
Raises:
google.auth.exceptions.DefaultCredentialsError:
Raised if ``credentials`` is not specified and the library fails
to acquire default credentials.
"""
SCOPE = (
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
)
"""The scopes required for authenticating as a BigQuery consumer."""
    def __init__(
        self,
        project=None,
        credentials=None,
        _http=None,
        location=None,
        default_query_job_config=None,
    ):
        """Initialize the client; see the class docstring for arguments."""
        super(Client, self).__init__(
            project=project, credentials=credentials, _http=_http
        )
        self._connection = Connection(self)
        # Default location applied to jobs/datasets/tables when not given
        # explicitly per call.
        self._location = location
        # Merged into per-call job configs by the query method.
        self._default_query_job_config = default_query_job_config
    @property
    def location(self):
        """Default location for jobs / datasets / tables.

        Set at construction time; no setter is defined here, so it is
        read-only on the client.
        """
        return self._location
def get_service_account_email(self, project=None):
"""Get the email address of the project's BigQuery service account
Note:
This is the service account that BigQuery uses to manage tables
encrypted by a key in KMS.
Args:
project (str, optional):
Project ID to use for retreiving service account email.
Defaults to the client's project.
Returns:
str: service account email address
Example:
>>> from google.cloud import bigquery
>>> client = bigquery.Client()
>>> client.get_service_account_email()
my_service_account@my-project.iam.gserviceaccount.com
"""
if project is None:
project = self.project
path = "/projects/%s/serviceAccount" % (project,)
api_response = self._connection.api_request(method="GET", path=path)
return api_response["email"]
def list_projects(self, max_results=None, page_token=None, retry=DEFAULT_RETRY):
    """List projects accessible to the current client.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list

    Args:
        max_results (int):
            (Optional) Maximum number of projects to return; if omitted,
            the API chooses the page size.
        page_token (str):
            (Optional) Opaque cursor into the project list. If omitted,
            the first page is returned. The iterator starts at this token;
            the current token is available as ``next_page_token`` on the
            :class:`~google.api_core.page_iterator.HTTPIterator`.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.

    Returns:
        google.api_core.page_iterator.Iterator:
            Iterator of :class:`~google.cloud.bigquery.client.Project`
            accessible to the current client.
    """
    # Bind the retry policy into the request callable used by the iterator.
    request = functools.partial(self._call_api, retry)
    return page_iterator.HTTPIterator(
        client=self,
        api_request=request,
        path="/projects",
        item_to_value=_item_to_project,
        items_key="projects",
        page_token=page_token,
        max_results=max_results,
    )
def list_datasets(
    self,
    project=None,
    include_all=False,
    filter=None,
    max_results=None,
    page_token=None,
    retry=DEFAULT_RETRY,
):
    """List datasets for the project associated with this client.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list

    Args:
        project (str):
            Optional. Project ID to use for retrieving datasets. Defaults
            to the client's project.
        include_all (bool):
            Optional. True if results include hidden datasets. Defaults
            to False.
        filter (str):
            Optional. An expression for filtering the results by label.
            For syntax, see
            https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#filter.
        max_results (int):
            Optional. Maximum number of datasets to return.
        page_token (str):
            Optional. Token representing a cursor into the datasets. If
            not passed, the API will return the first page of datasets.
            The token marks the beginning of the iterator to be returned
            and the value of the ``page_token`` can be accessed at
            ``next_page_token`` of the
            :class:`~google.api_core.page_iterator.HTTPIterator`.
        retry (google.api_core.retry.Retry):
            Optional. How to retry the RPC.

    Returns:
        google.api_core.page_iterator.Iterator:
            Iterator of
            :class:`~google.cloud.bigquery.dataset.DatasetListItem`.
            associated with the project.
    """
    extra_params = {}
    if project is None:
        project = self.project
    if include_all:
        extra_params["all"] = True
    if filter:
        # TODO: consider supporting a dict of label -> value for filter,
        # and converting it into a string here.
        extra_params["filter"] = filter
    path = "/projects/%s/datasets" % (project,)
    return page_iterator.HTTPIterator(
        client=self,
        api_request=functools.partial(self._call_api, retry),
        path=path,
        item_to_value=_item_to_dataset,
        items_key="datasets",
        page_token=page_token,
        max_results=max_results,
        extra_params=extra_params,
    )
def dataset(self, dataset_id, project=None):
    """Construct a reference to a dataset.

    Args:
        dataset_id (str): ID of the dataset.
        project (str):
            (Optional) project ID for the dataset; defaults to the
            project of the client.

    Returns:
        google.cloud.bigquery.dataset.DatasetReference:
            a new ``DatasetReference`` instance.
    """
    owning_project = self.project if project is None else project
    return DatasetReference(owning_project, dataset_id)
def create_dataset(self, dataset):
    """API call: create the dataset via a POST request.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert

    Args:
        dataset (Union[ \
            :class:`~google.cloud.bigquery.dataset.Dataset`, \
            :class:`~google.cloud.bigquery.dataset.DatasetReference`, \
            str, \
        ]):
            A :class:`~google.cloud.bigquery.dataset.Dataset` to create.
            If ``dataset`` is a reference, an empty dataset is created
            with the specified ID and client's default location.

    Returns:
        google.cloud.bigquery.dataset.Dataset:
            A new ``Dataset`` returned from the API.

    Example:

        >>> from google.cloud import bigquery
        >>> client = bigquery.Client()
        >>> dataset = bigquery.Dataset(client.dataset('my_dataset'))
        >>> dataset = client.create_dataset(dataset)

    """
    # Normalize: str -> DatasetReference -> Dataset.
    if isinstance(dataset, str):
        dataset = DatasetReference.from_string(
            dataset, default_project=self.project
        )
    if isinstance(dataset, DatasetReference):
        dataset = Dataset(dataset)
    path = "/projects/%s/datasets" % (dataset.project,)
    data = dataset.to_api_repr()
    # Apply the client's default location unless the dataset specifies one.
    if data.get("location") is None and self.location is not None:
        data["location"] = self.location
    api_response = self._connection.api_request(method="POST", path=path, data=data)
    return Dataset.from_api_repr(api_response)
def create_table(self, table):
    """API call: create a table via a POST request

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert

    Args:
        table (Union[ \
            :class:`~google.cloud.bigquery.table.Table`, \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            A :class:`~google.cloud.bigquery.table.Table` to create.
            If ``table`` is a reference, an empty table is created
            with the specified ID. The dataset that the table belongs to
            must already exist.

    Returns:
        google.cloud.bigquery.table.Table:
            A new ``Table`` returned from the service.
    """
    # Normalize: str -> TableReference -> Table.
    if isinstance(table, str):
        table = TableReference.from_string(table, default_project=self.project)
    if isinstance(table, TableReference):
        table = Table(table)
    path = "/projects/%s/datasets/%s/tables" % (table.project, table.dataset_id)
    api_response = self._connection.api_request(
        method="POST", path=path, data=table.to_api_repr()
    )
    return Table.from_api_repr(api_response)
def _call_api(self, retry, **kwargs):
    """Invoke the connection's ``api_request``, optionally wrapped in ``retry``."""
    request = functools.partial(self._connection.api_request, **kwargs)
    return retry(request)() if retry else request()
def get_dataset(self, dataset_ref, retry=DEFAULT_RETRY):
    """Fetch the dataset referenced by ``dataset_ref``.

    Args:
        dataset_ref (Union[ \
            :class:`~google.cloud.bigquery.dataset.DatasetReference`, \
            str, \
        ]):
            A reference to the dataset to fetch. A string is parsed via
            :func:`~google.cloud.bigquery.dataset.DatasetReference.from_string`.
        retry (:class:`google.api_core.retry.Retry`):
            (Optional) How to retry the RPC.

    Returns:
        google.cloud.bigquery.dataset.Dataset: A ``Dataset`` instance.
    """
    # Accept "project.dataset_id" strings for convenience.
    if isinstance(dataset_ref, str):
        dataset_ref = DatasetReference.from_string(
            dataset_ref, default_project=self.project
        )
    resource = self._call_api(retry, method="GET", path=dataset_ref.path)
    return Dataset.from_api_repr(resource)
def get_table(self, table_ref, retry=DEFAULT_RETRY):
    """Fetch the table referenced by ``table_ref``.

    Args:
        table_ref (Union[ \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            A reference to the table to fetch. A string is parsed via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.
        retry (:class:`google.api_core.retry.Retry`):
            (Optional) How to retry the RPC.

    Returns:
        google.cloud.bigquery.table.Table: A ``Table`` instance.
    """
    # Accept "project.dataset.table" strings for convenience.
    if isinstance(table_ref, str):
        table_ref = TableReference.from_string(
            table_ref, default_project=self.project
        )
    resource = self._call_api(retry, method="GET", path=table_ref.path)
    return Table.from_api_repr(resource)
def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY):
    """Change some fields of a dataset.

    Use ``fields`` to specify which fields to update. At least one field
    must be provided. If a field is listed in ``fields`` and is ``None``
    in ``dataset``, it will be deleted.

    If ``dataset.etag`` is not ``None``, the update is conditional: it
    only succeeds when the server-side dataset still has the same ETag.
    Reading with ``get_dataset``, modifying, and then calling
    ``update_dataset`` therefore guards against concurrent modification.

    Args:
        dataset (google.cloud.bigquery.dataset.Dataset):
            The dataset to update.
        fields (Sequence[str]):
            The properties of ``dataset`` to change (e.g. "friendly_name").
        retry (google.api_core.retry.Retry, optional):
            How to retry the RPC.

    Returns:
        google.cloud.bigquery.dataset.Dataset:
            The modified ``Dataset`` instance.
    """
    partial_resource = dataset._build_resource(fields)
    # An ETag makes the PATCH conditional (optimistic concurrency).
    headers = {"If-Match": dataset.etag} if dataset.etag is not None else None
    resource = self._call_api(
        retry,
        method="PATCH",
        path=dataset.path,
        data=partial_resource,
        headers=headers,
    )
    return Dataset.from_api_repr(resource)
def update_table(self, table, fields, retry=DEFAULT_RETRY):
    """Change some fields of a table.

    Use ``fields`` to specify which fields to update. At least one field
    must be provided. If a field is listed in ``fields`` and is ``None``
    in ``table``, it will be deleted.

    If ``table.etag`` is not ``None``, the update is conditional: it only
    succeeds when the server-side table still has the same ETag. Reading
    with ``get_table``, modifying, and then calling ``update_table``
    therefore guards against concurrent modification.

    Args:
        table (google.cloud.bigquery.table.Table): The table to update.
        fields (Sequence[str]):
            The fields of ``table`` to change, spelled as the Table
            properties (e.g. "friendly_name").
        retry (google.api_core.retry.Retry):
            (Optional) A description of how to retry the API call.

    Returns:
        google.cloud.bigquery.table.Table:
            The table resource returned from the API call.
    """
    partial_resource = table._build_resource(fields)
    # An ETag makes the PATCH conditional (optimistic concurrency).
    headers = {"If-Match": table.etag} if table.etag is not None else None
    resource = self._call_api(
        retry,
        method="PATCH",
        path=table.path,
        data=partial_resource,
        headers=headers,
    )
    return Table.from_api_repr(resource)
def list_tables(
    self, dataset, max_results=None, page_token=None, retry=DEFAULT_RETRY
):
    """List tables in the dataset.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list

    Args:
        dataset (Union[ \
            :class:`~google.cloud.bigquery.dataset.Dataset`, \
            :class:`~google.cloud.bigquery.dataset.DatasetReference`, \
            str, \
        ]):
            The dataset whose tables to list. A string is parsed via
            :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`.
        max_results (int):
            (Optional) Maximum number of tables to return; if omitted,
            the API chooses the page size.
        page_token (str):
            (Optional) Opaque cursor into the table list; the current
            token is available as ``next_page_token`` on the
            :class:`~google.api_core.page_iterator.HTTPIterator`.
        retry (:class:`google.api_core.retry.Retry`):
            (Optional) How to retry the RPC.

    Returns:
        google.api_core.page_iterator.Iterator:
            Iterator of
            :class:`~google.cloud.bigquery.table.TableListItem` contained
            within the requested dataset.
    """
    if isinstance(dataset, str):
        dataset = DatasetReference.from_string(
            dataset, default_project=self.project
        )
    if not isinstance(dataset, (Dataset, DatasetReference)):
        raise TypeError("dataset must be a Dataset, DatasetReference, or string")
    iterator = page_iterator.HTTPIterator(
        client=self,
        api_request=functools.partial(self._call_api, retry),
        path="%s/tables" % dataset.path,
        item_to_value=_item_to_table,
        items_key="tables",
        page_token=page_token,
        max_results=max_results,
    )
    # Expose the dataset on the iterator for downstream consumers.
    iterator.dataset = dataset
    return iterator
def delete_dataset(self, dataset, delete_contents=False, retry=DEFAULT_RETRY):
    """Delete a dataset.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete

    Args:
        dataset (Union[ \
            :class:`~google.cloud.bigquery.dataset.Dataset`, \
            :class:`~google.cloud.bigquery.dataset.DatasetReference`, \
            str, \
        ]):
            A reference to the dataset to delete. If a string is passed
            in, this method attempts to create a dataset reference from a
            string using
            :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`.
        retry (:class:`google.api_core.retry.Retry`):
            (Optional) How to retry the RPC.
        delete_contents (boolean):
            (Optional) If True, delete all the tables in the dataset. If
            False and the dataset contains tables, the request will fail.
            Default is False.
    """
    if isinstance(dataset, str):
        dataset = DatasetReference.from_string(
            dataset, default_project=self.project
        )
    if not isinstance(dataset, (Dataset, DatasetReference)):
        raise TypeError("dataset must be a Dataset or a DatasetReference")
    params = {}
    if delete_contents:
        params["deleteContents"] = "true"
    self._call_api(retry, method="DELETE", path=dataset.path, query_params=params)
def delete_table(self, table, retry=DEFAULT_RETRY):
    """Delete a table.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete

    Args:
        table (Union[ \
            :class:`~google.cloud.bigquery.table.Table`, \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            A reference to the table to delete. A string is parsed via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.
        retry (:class:`google.api_core.retry.Retry`):
            (Optional) How to retry the RPC.
    """
    if isinstance(table, str):
        table = TableReference.from_string(table, default_project=self.project)
    if not isinstance(table, (Table, TableReference)):
        raise TypeError("table must be a Table or a TableReference")
    self._call_api(retry, method="DELETE", path=table.path)
def _get_query_results(
    self, job_id, retry, project=None, timeout_ms=None, location=None
):
    """Get the query results object for a query job.

    Arguments:
        job_id (str): Name of the query job.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.
        project (str):
            (Optional) project ID for the query job (defaults to the
            project of the client).
        timeout_ms (int):
            (Optional) number of milliseconds the API call should
            wait for the query to complete before the request times out.
        location (str): Location of the query job.

    Returns:
        google.cloud.bigquery.query._QueryResults:
            A new ``_QueryResults`` instance.
    """
    # maxResults=0 fetches only job/query metadata, not result rows.
    extra_params = {"maxResults": 0}
    if project is None:
        project = self.project
    if timeout_ms is not None:
        extra_params["timeoutMs"] = timeout_ms
    if location is None:
        location = self.location
    if location is not None:
        extra_params["location"] = location
    path = "/projects/{}/queries/{}".format(project, job_id)
    # This call is typically made in a polling loop that checks whether the
    # job is complete (from QueryJob.done(), called ultimately from
    # QueryJob.result()). So we don't need to poll here.
    resource = self._call_api(
        retry, method="GET", path=path, query_params=extra_params
    )
    return _QueryResults.from_api_repr(resource)
def job_from_resource(self, resource):
    """Detect correct job type from resource and instantiate.

    Args:
        resource (dict): one job resource from API response.

    Returns:
        One of :class:`google.cloud.bigquery.job.LoadJob`,
        :class:`google.cloud.bigquery.job.CopyJob`,
        :class:`google.cloud.bigquery.job.ExtractJob`, or
        :class:`google.cloud.bigquery.job.QueryJob`, constructed from
        the resource (``UnknownJob`` when no known configuration key is
        present).
    """
    config = resource.get("configuration", {})
    # Dispatch on the configuration key that identifies the job type.
    for key, job_class in (
        ("load", job.LoadJob),
        ("copy", job.CopyJob),
        ("extract", job.ExtractJob),
        ("query", job.QueryJob),
    ):
        if key in config:
            return job_class.from_api_repr(resource, self)
    return job.UnknownJob.from_api_repr(resource, self)
def get_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY):
    """Fetch a job for the project associated with this client.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get

    Arguments:
        job_id (str): Unique job identifier.

    Keyword Arguments:
        project (str):
            (Optional) ID of the project which owns the job (defaults to
            the client's project).
        location (str): Location where the job was run.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.

    Returns:
        Union[google.cloud.bigquery.job.LoadJob, \
              google.cloud.bigquery.job.CopyJob, \
              google.cloud.bigquery.job.ExtractJob, \
              google.cloud.bigquery.job.QueryJob]:
            Job instance, based on the resource returned by the API.
    """
    extra_params = {"projection": "full"}
    if project is None:
        project = self.project
    if location is None:
        location = self.location
    if location is not None:
        extra_params["location"] = location
    path = "/projects/{}/jobs/{}".format(project, job_id)
    resource = self._call_api(
        retry, method="GET", path=path, query_params=extra_params
    )
    return self.job_from_resource(resource)
def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY):
    """Attempt to cancel a job from a job ID.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel

    Arguments:
        job_id (str): Unique job identifier.

    Keyword Arguments:
        project (str):
            (Optional) ID of the project which owns the job (defaults to
            the client's project).
        location (str): Location where the job was run.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.

    Returns:
        Union[google.cloud.bigquery.job.LoadJob, \
              google.cloud.bigquery.job.CopyJob, \
              google.cloud.bigquery.job.ExtractJob, \
              google.cloud.bigquery.job.QueryJob]:
            Job instance, based on the resource returned by the API.
    """
    project = self.project if project is None else project
    location = self.location if location is None else location
    extra_params = {"projection": "full"}
    if location is not None:
        extra_params["location"] = location
    resource = self._call_api(
        retry,
        method="POST",
        path="/projects/{}/jobs/{}/cancel".format(project, job_id),
        query_params=extra_params,
    )
    # The cancel endpoint nests the job resource under the "job" key.
    return self.job_from_resource(resource["job"])
def list_jobs(
    self,
    project=None,
    max_results=None,
    page_token=None,
    all_users=None,
    state_filter=None,
    retry=DEFAULT_RETRY,
    min_creation_time=None,
    max_creation_time=None,
):
    """List jobs for the project associated with this client.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list

    Args:
        project (str, optional):
            Project ID to use for retrieving datasets. Defaults
            to the client's project.
        max_results (int, optional):
            Maximum number of jobs to return.
        page_token (str, optional):
            Opaque marker for the next "page" of jobs. If not
            passed, the API will return the first page of jobs. The token
            marks the beginning of the iterator to be returned and the
            value of the ``page_token`` can be accessed at
            ``next_page_token`` of
            :class:`~google.api_core.page_iterator.HTTPIterator`.
        all_users (bool, optional):
            If true, include jobs owned by all users in the project.
            Defaults to :data:`False`.
        state_filter (str, optional):
            If set, include only jobs matching the given state. One of:
                * ``"done"``
                * ``"pending"``
                * ``"running"``
        retry (google.api_core.retry.Retry, optional):
            How to retry the RPC.
        min_creation_time (datetime.datetime, optional):
            Min value for job creation time. If set, only jobs created
            after or at this timestamp are returned. If the datetime has
            no time zone assumes UTC time.
        max_creation_time (datetime.datetime, optional):
            Max value for job creation time. If set, only jobs created
            before or at this timestamp are returned. If the datetime has
            no time zone assumes UTC time.

    Returns:
        google.api_core.page_iterator.Iterator:
            Iterable of job instances.
    """
    extra_params = {
        "allUsers": all_users,
        "stateFilter": state_filter,
        "minCreationTime": _str_or_none(
            google.cloud._helpers._millis_from_datetime(min_creation_time)
        ),
        "maxCreationTime": _str_or_none(
            google.cloud._helpers._millis_from_datetime(max_creation_time)
        ),
        "projection": "full",
    }
    # Drop unset parameters so they are not sent to the API at all.
    extra_params = {
        param: value for param, value in extra_params.items() if value is not None
    }
    if project is None:
        project = self.project
    path = "/projects/%s/jobs" % (project,)
    return page_iterator.HTTPIterator(
        client=self,
        api_request=functools.partial(self._call_api, retry),
        path=path,
        item_to_value=_item_to_job,
        items_key="jobs",
        page_token=page_token,
        max_results=max_results,
        extra_params=extra_params,
    )
def load_table_from_uri(
    self,
    source_uris,
    destination,
    job_id=None,
    job_id_prefix=None,
    location=None,
    project=None,
    job_config=None,
    retry=DEFAULT_RETRY,
):
    """Starts a job for loading data into a table from Cloud Storage.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load

    Arguments:
        source_uris (Union[str, Sequence[str]]):
            URIs of data files to be loaded; in format
            ``gs://<bucket_name>/<object_name_or_glob>``.
        destination (Union[ \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            Table into which data is to be loaded. A string is parsed via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.

    Keyword Arguments:
        job_id (str): (Optional) Name of the job.
        job_id_prefix (str):
            (Optional) the user-provided prefix for a randomly generated
            job ID. Ignored if ``job_id`` is also given.
        location (str):
            Location where to run the job. Must match the location of the
            destination table.
        project (str):
            Project ID of the project of where to run the job. Defaults
            to the client's project.
        job_config (google.cloud.bigquery.job.LoadJobConfig):
            (Optional) Extra configuration options for the job.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.

    Returns:
        google.cloud.bigquery.job.LoadJob: A new load job.
    """
    job_id = _make_job_id(job_id, job_id_prefix)
    project = self.project if project is None else project
    location = self.location if location is None else location
    job_ref = job._JobReference(job_id, project=project, location=location)
    # Normalize a single URI to a one-element list.
    if isinstance(source_uris, six.string_types):
        source_uris = [source_uris]
    if isinstance(destination, str):
        destination = TableReference.from_string(
            destination, default_project=self.project
        )
    load_job = job.LoadJob(job_ref, source_uris, destination, self, job_config)
    load_job._begin(retry=retry)
    return load_job
def load_table_from_file(
    self,
    file_obj,
    destination,
    rewind=False,
    size=None,
    num_retries=_DEFAULT_NUM_RETRIES,
    job_id=None,
    job_id_prefix=None,
    location=None,
    project=None,
    job_config=None,
):
    """Upload the contents of this table from a file-like object.

    Similar to :meth:`load_table_from_uri`, this method creates, starts and
    returns a :class:`~google.cloud.bigquery.job.LoadJob`.

    Arguments:
        file_obj (file): A file handle opened in binary mode for reading.
        destination (Union[ \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            Table into which data is to be loaded. A string is parsed via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.

    Keyword Arguments:
        rewind (bool):
            If True, seek to the beginning of the file handle before
            reading the file.
        size (int):
            The number of bytes to read from the file handle. If size is
            ``None`` or large, resumable upload will be used. Otherwise,
            multipart upload will be used.
        num_retries (int): Number of upload retries. Defaults to 6.
        job_id (str): (Optional) Name of the job.
        job_id_prefix (str):
            (Optional) the user-provided prefix for a randomly generated
            job ID. Ignored if ``job_id`` is also given.
        location (str):
            Location where to run the job. Must match the location of the
            destination table.
        project (str):
            Project ID of the project of where to run the job. Defaults
            to the client's project.
        job_config (google.cloud.bigquery.job.LoadJobConfig):
            (Optional) Extra configuration options for the job.

    Returns:
        google.cloud.bigquery.job.LoadJob: A new load job.

    Raises:
        ValueError:
            If ``size`` is not passed in and can not be determined, or if
            the ``file_obj`` can be detected to be a file opened in text
            mode.
    """
    job_id = _make_job_id(job_id, job_id_prefix)
    project = self.project if project is None else project
    location = self.location if location is None else location
    if isinstance(destination, str):
        destination = TableReference.from_string(
            destination, default_project=self.project
        )
    job_ref = job._JobReference(job_id, project=project, location=location)
    load_job = job.LoadJob(job_ref, None, destination, self, job_config)
    job_resource = load_job.to_api_repr()
    if rewind:
        file_obj.seek(0, os.SEEK_SET)
    # Reject text-mode handles up front; the upload requires bytes.
    _check_mode(file_obj)
    try:
        # Large or unknown payloads go through the chunked resumable
        # protocol; small, known-size payloads use a single multipart POST.
        if size is None or size >= _MAX_MULTIPART_SIZE:
            response = self._do_resumable_upload(
                file_obj, job_resource, num_retries
            )
        else:
            response = self._do_multipart_upload(
                file_obj, job_resource, size, num_retries
            )
    except resumable_media.InvalidResponse as exc:
        raise exceptions.from_http_response(exc.response)
    return self.job_from_resource(response.json())
def load_table_from_dataframe(
    self,
    dataframe,
    destination,
    num_retries=_DEFAULT_NUM_RETRIES,
    job_id=None,
    job_id_prefix=None,
    location=None,
    project=None,
    job_config=None,
):
    """Upload the contents of a table from a pandas DataFrame.

    Similar to :meth:`load_table_from_uri`, this method creates, starts and
    returns a :class:`~google.cloud.bigquery.job.LoadJob`.

    Arguments:
        dataframe (pandas.DataFrame):
            A :class:`~pandas.DataFrame` containing the data to load.
        destination (google.cloud.bigquery.table.TableReference):
            The destination table to use for loading the data. If it is an
            existing table, the schema of the :class:`~pandas.DataFrame`
            must match the schema of the destination table. If the table
            does not yet exist, the schema is inferred from the
            :class:`~pandas.DataFrame`.
            A string is parsed via
            :func:`google.cloud.bigquery.table.TableReference.from_string`.

    Keyword Arguments:
        num_retries (int, optional): Number of upload retries.
        job_id (str, optional): Name of the job.
        job_id_prefix (str, optional):
            The user-provided prefix for a randomly generated job ID.
            Ignored if ``job_id`` is also given.
        location (str):
            Location where to run the job. Must match the location of the
            destination table.
        project (str, optional):
            Project ID of the project of where to run the job. Defaults
            to the client's project.
        job_config (google.cloud.bigquery.job.LoadJobConfig, optional):
            Extra configuration options for the job.

    Returns:
        google.cloud.bigquery.job.LoadJob: A new load job.

    Raises:
        ImportError:
            If a usable parquet engine cannot be found. This method
            requires :mod:`pyarrow` to be installed.
    """
    # Serialize the DataFrame to an in-memory parquet payload, then
    # delegate the actual upload to ``load_table_from_file``.
    buffer = six.BytesIO()
    dataframe.to_parquet(buffer)
    if job_config is None:
        job_config = job.LoadJobConfig()
    job_config.source_format = job.SourceFormat.PARQUET
    if location is None:
        location = self.location
    return self.load_table_from_file(
        buffer,
        destination,
        num_retries=num_retries,
        rewind=True,
        job_id=job_id,
        job_id_prefix=job_id_prefix,
        location=location,
        project=project,
        job_config=job_config,
    )
def _do_resumable_upload(self, stream, metadata, num_retries):
    """Perform a resumable upload.

    Args:
        stream (IO[bytes]): A bytes IO object open for reading.
        metadata (dict): The metadata associated with the upload.
        num_retries (int):
            Number of upload retries. (Deprecated: This argument will be
            removed in a future release.)

    Returns:
        requests.Response:
            The "200 OK" response object returned after the final chunk
            is uploaded.
    """
    upload, transport = self._initiate_resumable_upload(
        stream, metadata, num_retries
    )
    # Transmit every chunk; the return must happen only after the upload
    # reports it is finished, otherwise only the first chunk is sent.
    while not upload.finished:
        response = upload.transmit_next_chunk(transport)
    return response
def _initiate_resumable_upload(self, stream, metadata, num_retries):
    """Initiate a resumable upload.

    Args:
        stream (IO[bytes]): A bytes IO object open for reading.
        metadata (dict): The metadata associated with the upload.
        num_retries (int):
            Number of upload retries. (Deprecated: This argument will be
            removed in a future release.)

    Returns:
        tuple:
            Pair of

            * The :class:`~google.resumable_media.requests.ResumableUpload`
              that was created
            * The ``transport`` used to initiate the upload.
    """
    transport = self._http
    headers = _get_upload_headers(self._connection.USER_AGENT)
    upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project)
    # TODO: modify ResumableUpload to take a retry.Retry object
    # that it can use for the initial RPC.
    upload = ResumableUpload(upload_url, _DEFAULT_CHUNKSIZE, headers=headers)
    if num_retries is not None:
        upload._retry_strategy = resumable_media.RetryStrategy(
            max_retries=num_retries
        )
    upload.initiate(
        transport, stream, metadata, _GENERIC_CONTENT_TYPE, stream_final=False
    )
    return upload, transport
def _do_multipart_upload(self, stream, metadata, size, num_retries):
    """Perform a multipart upload.

    Args:
        stream (IO[bytes]): A bytes IO object open for reading.
        metadata (dict): The metadata associated with the upload.
        size (int):
            The number of bytes to be uploaded (which will be read from
            ``stream``). If not provided, the upload will be concluded
            once ``stream`` is exhausted (or :data:`None`).
        num_retries (int):
            Number of upload retries. (Deprecated: This argument will be
            removed in a future release.)

    Returns:
        requests.Response:
            The "200 OK" response object returned after the multipart
            upload request.

    Raises:
        ValueError:
            If the ``stream`` has fewer than ``size`` bytes remaining.
    """
    payload = stream.read(size)
    # Fail loudly when the stream was shorter than the declared size.
    if len(payload) < size:
        raise ValueError(_READ_LESS_THAN_SIZE.format(size, len(payload)))
    headers = _get_upload_headers(self._connection.USER_AGENT)
    upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project)
    upload = MultipartUpload(upload_url, headers=headers)
    if num_retries is not None:
        upload._retry_strategy = resumable_media.RetryStrategy(
            max_retries=num_retries
        )
    return upload.transmit(self._http, payload, metadata, _GENERIC_CONTENT_TYPE)
def copy_table(
    self,
    sources,
    destination,
    job_id=None,
    job_id_prefix=None,
    location=None,
    project=None,
    job_config=None,
    retry=DEFAULT_RETRY,
):
    """Copy one or more tables to another table.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy

    Arguments:
        sources (Union[ \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
            Sequence[ \
                :class:`~google.cloud.bigquery.table.TableReference`], \
        ]):
            Table or tables to be copied.
        destination (Union[ \
            :class:`~google.cloud.bigquery.table.TableReference`, \
            str, \
        ]):
            Table into which data is to be copied.

    Keyword Arguments:
        job_id (str): (Optional) The ID of the job.
        job_id_prefix (str):
            (Optional) the user-provided prefix for a randomly generated
            job ID. This parameter will be ignored if a ``job_id`` is
            also given.
        location (str):
            Location where to run the job. Must match the location of any
            source table as well as the destination table.
        project (str):
            Project ID of the project of where to run the job. Defaults
            to the client's project.
        job_config (google.cloud.bigquery.job.CopyJobConfig):
            (Optional) Extra configuration options for the job.
        retry (google.api_core.retry.Retry):
            (Optional) How to retry the RPC.

    Returns:
        google.cloud.bigquery.job.CopyJob: A new copy job instance.
    """
    job_id = _make_job_id(job_id, job_id_prefix)
    if project is None:
        project = self.project
    if location is None:
        location = self.location
    job_ref = job._JobReference(job_id, project=project, location=location)
    # Normalize string references; a single source becomes a one-item list.
    if isinstance(sources, str):
        sources = TableReference.from_string(sources, default_project=self.project)
    if isinstance(destination, str):
        destination = TableReference.from_string(
            destination, default_project=self.project
        )
    if not isinstance(sources, collections_abc.Sequence):
        sources = [sources]
    copy_job = job.CopyJob(
        job_ref, sources, destination, client=self, job_config=job_config
    )
    copy_job._begin(retry=retry)
    return copy_job
def extract_table(
self,
source,
destination_uris,
job_id=None,
job_id_prefix=None,
location=None,
project=None,
job_config=None,
retry=DEFAULT_RETRY,
):
"""Start a job to extract a table into Cloud Storage files.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract
Arguments:
source (Union[ \
:class:`google.cloud.bigquery.table.TableReference`, \
src, \
]):
Table to be extracted.
destination_uris (Union[str, Sequence[str]]):
URIs of Cloud Storage file(s) into which table data is to be
extracted; in format
``gs://<bucket_name>/<object_name_or_glob>``.
Keyword Arguments:
job_id (str): (Optional) The ID of the job.
job_id_prefix (str)
(Optional) the user-provided prefix for a randomly generated
job ID. This parameter will be ignored if a ``job_id`` is
also given.
location (str):
Location where to run the job. Must match the location of the
source table.
project (str):
Project ID of the project of where to run the job. Defaults
to the client's project.
job_config (google.cloud.bigquery.job.ExtractJobConfig):
(Optional) Extra configuration options for the job.
retry (google.api_core.retry.Retry):
(Optional) How to retry the RPC.
:type source: :class:`google.cloud.bigquery.table.TableReference`
:param source: table to be extracted.
Returns:
google.cloud.bigquery.job.ExtractJob: A new extract job instance.
"""
job_id = _make_job_id(job_id, job_id_prefix)
if project is None:
project = self.project
if location is None:
location = self.location
job_ref = job._JobReference(job_id, project=project, location=location)
if isinstance(source, str):
source = TableReference.from_string(source, default_project=self.project)
if isinstance(destination_uris, six.string_types):
destination_uris = [destination_uris]
extract_job = job.ExtractJob(
job_ref, source, destination_uris, client=self, job_config=job_config
)
extract_job._begin(retry=retry)
return extract_job
def query(
self,
query,
job_config=None,
job_id=None,
job_id_prefix=None,
location=None,
project=None,
retry=DEFAULT_RETRY,
):
"""Run a SQL query.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query
Arguments:
query (str):
SQL query to be executed. Defaults to the standard SQL
dialect. Use the ``job_config`` parameter to change dialects.
Keyword Arguments:
job_config (google.cloud.bigquery.job.QueryJobConfig):
(Optional) Extra configuration options for the job.
To override any options that were previously set in
the ``default_query_job_config`` given to the
``Client`` constructor, manually set those options to ``None``,
or whatever value is preferred.
job_id (str): (Optional) ID to use for the query job.
job_id_prefix (str):
(Optional) The prefix to use for a randomly generated job ID.
This parameter will be ignored if a ``job_id`` is also given.
location (str):
Location where to run the job. Must match the location of the
any table used in the query as well as the destination table.
project (str):
Project ID of the project of where to run the job. Defaults
to the client's project.
retry (google.api_core.retry.Retry):
(Optional) How to retry the RPC.
Returns:
google.cloud.bigquery.job.QueryJob: A new query job instance.
"""
job_id = _make_job_id(job_id, job_id_prefix)
if project is None:
project = self.project
if location is None:
location = self.location
if self._default_query_job_config:
if job_config:
# anything that's not defined on the incoming
# that is in the default,
# should be filled in with the default
# the incoming therefore has precedence
job_config = job_config._fill_from_default(
self._default_query_job_config
)
else:
job_config = self._default_query_job_config
job_ref = job._JobReference(job_id, project=project, location=location)
query_job = job.QueryJob(job_ref, query, client=self, job_config=job_config)
query_job._begin(retry=retry)
return query_job
def insert_rows(self, table, rows, selected_fields=None, **kwargs):
"""Insert rows into a table via the streaming API.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
Args:
table (Union[ \
:class:`~google.cloud.bigquery.table.Table`, \
:class:`~google.cloud.bigquery.table.TableReference`, \
str, \
]):
The destination table for the row data, or a reference to it.
rows (Union[ \
Sequence[Tuple], \
Sequence[dict], \
]):
Row data to be inserted. If a list of tuples is given, each
tuple should contain data for each schema field on the
current table and in the same order as the schema fields. If
a list of dictionaries is given, the keys must include all
required fields in the schema. Keys which do not correspond
to a field in the schema are ignored.
selected_fields (Sequence[ \
:class:`~google.cloud.bigquery.schema.SchemaField`, \
]):
The fields to return. Required if ``table`` is a
:class:`~google.cloud.bigquery.table.TableReference`.
kwargs (dict):
Keyword arguments to
:meth:`~google.cloud.bigquery.client.Client.insert_rows_json`.
Returns:
Sequence[Mappings]:
One mapping per row with insert errors: the "index" key
identifies the row, and the "errors" key contains a list of
the mappings describing one or more problems with the row.
Raises:
ValueError: if table's schema is not set
"""
if isinstance(table, str):
table = TableReference.from_string(table, default_project=self.project)
if selected_fields is not None:
schema = selected_fields
elif isinstance(table, TableReference):
raise ValueError("need selected_fields with TableReference")
elif isinstance(table, Table):
if len(table.schema) == 0:
raise ValueError(_TABLE_HAS_NO_SCHEMA)
schema = table.schema
else:
raise TypeError("table should be Table or TableReference")
json_rows = []
for index, row in enumerate(rows):
if isinstance(row, dict):
row = _row_from_mapping(row, schema)
json_row = {}
for field, value in zip(schema, row):
converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type)
if converter is not None: # STRING doesn't need converting
value = converter(value)
json_row[field.name] = value
json_rows.append(json_row)
return self.insert_rows_json(table, json_rows, **kwargs)
def insert_rows_json(
self,
table,
json_rows,
row_ids=None,
skip_invalid_rows=None,
ignore_unknown_values=None,
template_suffix=None,
retry=DEFAULT_RETRY,
):
"""Insert rows into a table without applying local type conversions.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
table (Union[ \
:class:`~google.cloud.bigquery.table.Table` \
:class:`~google.cloud.bigquery.table.TableReference`, \
str, \
]):
The destination table for the row data, or a reference to it.
json_rows (Sequence[dict]):
Row data to be inserted. Keys must match the table schema fields
and values must be JSON-compatible representations.
row_ids (Sequence[str]):
(Optional) Unique ids, one per row being inserted. If omitted,
unique IDs are created.
skip_invalid_rows (bool):
(Optional) Insert all valid rows of a request, even if invalid
rows exist. The default value is False, which causes the entire
request to fail if any invalid rows exist.
ignore_unknown_values (bool):
(Optional) Accept rows that contain values that do not match the
schema. The unknown values are ignored. Default is False, which
treats unknown values as errors.
template_suffix (str):
(Optional) treat ``name`` as a template table and provide a suffix.
BigQuery will create the table ``<name> + <template_suffix>`` based
on the schema of the template table. See
https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables
retry (:class:`google.api_core.retry.Retry`):
(Optional) How to retry the RPC.
Returns:
Sequence[Mappings]:
One mapping per row with insert errors: the "index" key
identifies the row, and the "errors" key contains a list of
the mappings describing one or more problems with the row.
"""
if isinstance(table, str):
table = TableReference.from_string(table, default_project=self.project)
rows_info = []
data = {"rows": rows_info}
for index, row in enumerate(json_rows):
info = {"json": row}
if row_ids is not None:
info["insertId"] = row_ids[index]
else:
info["insertId"] = str(uuid.uuid4())
rows_info.append(info)
if skip_invalid_rows is not None:
data["skipInvalidRows"] = skip_invalid_rows
if ignore_unknown_values is not None:
data["ignoreUnknownValues"] = ignore_unknown_values
if template_suffix is not None:
data["templateSuffix"] = template_suffix
# We can always retry, because every row has an insert ID.
response = self._call_api(
retry, method="POST", path="%s/insertAll" % table.path, data=data
)
errors = []
for error in response.get("insertErrors", ()):
errors.append({"index": int(error["index"]), "errors": error["errors"]})
return errors
def list_partitions(self, table, retry=DEFAULT_RETRY):
"""List the partitions in a table.
Arguments:
table (Union[ \
:class:`~google.cloud.bigquery.table.Table`, \
:class:`~google.cloud.bigquery.table.TableReference`, \
str, \
]):
The table or reference from which to get partition info
retry (google.api_core.retry.Retry):
(Optional) How to retry the RPC.
Returns:
List[str]:
A list of the partition ids present in the partitioned table
"""
if isinstance(table, str):
table = TableReference.from_string(table, default_project=self.project)
meta_table = self.get_table(
TableReference(
self.dataset(table.dataset_id, project=table.project),
"%s$__PARTITIONS_SUMMARY__" % table.table_id,
)
)
subset = [col for col in meta_table.schema if col.name == "partition_id"]
return [
row[0]
for row in self.list_rows(meta_table, selected_fields=subset, retry=retry)
]
def list_rows(
self,
table,
selected_fields=None,
max_results=None,
page_token=None,
start_index=None,
page_size=None,
retry=DEFAULT_RETRY,
):
"""List the rows of the table.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list
.. note::
This method assumes that the provided schema is up-to-date with the
schema as defined on the back-end: if the two schemas are not
identical, the values returned may be incomplete. To ensure that the
local copy of the schema is up-to-date, call ``client.get_table``.
Args:
table (Union[ \
:class:`~google.cloud.bigquery.table.Table`, \
:class:`~google.cloud.bigquery.table.TableReference`, \
str, \
]):
The table to list, or a reference to it.
selected_fields (Sequence[ \
:class:`~google.cloud.bigquery.schema.SchemaField` \
]):
The fields to return. Required if ``table`` is a
:class:`~google.cloud.bigquery.table.TableReference`.
max_results (int):
(Optional) maximum number of rows to return.
page_token (str):
(Optional) Token representing a cursor into the table's rows.
If not passed, the API will return the first page of the
rows. The token marks the beginning of the iterator to be
returned and the value of the ``page_token`` can be accessed
at ``next_page_token`` of the
:class:`~google.cloud.bigquery.table.RowIterator`.
start_index (int):
(Optional) The zero-based index of the starting row to read.
page_size (int):
(Optional) The maximum number of items to return per page in
the iterator.
retry (:class:`google.api_core.retry.Retry`):
(Optional) How to retry the RPC.
Returns:
google.cloud.bigquery.table.RowIterator:
Iterator of row data
:class:`~google.cloud.bigquery.table.Row`-s. During each
page, the iterator will have the ``total_rows`` attribute
set, which counts the total number of rows **in the table**
(this is distinct from the total number of rows in the
current page: ``iterator.page.num_items``).
"""
if isinstance(table, str):
table = TableReference.from_string(table, default_project=self.project)
if selected_fields is not None:
schema = selected_fields
elif isinstance(table, TableReference):
raise ValueError("need selected_fields with TableReference")
elif isinstance(table, Table):
if len(table.schema) == 0 and table.created is None:
raise ValueError(_TABLE_HAS_NO_SCHEMA)
schema = table.schema
else:
raise TypeError("table should be Table or TableReference")
params = {}
if selected_fields is not None:
params["selectedFields"] = ",".join(field.name for field in selected_fields)
if start_index is not None:
params["startIndex"] = start_index
row_iterator = RowIterator(
client=self,
api_request=functools.partial(self._call_api, retry),
path="%s/data" % (table.path,),
schema=schema,
page_token=page_token,
max_results=max_results,
page_size=page_size,
extra_params=params,
)
return row_iterator
# pylint: disable=unused-argument
def _item_to_project(iterator, resource):
    """Convert a JSON project resource into the native object.

    Args:
        iterator (google.api_core.page_iterator.Iterator):
            The iterator that is currently in use.
        resource (dict): An item to be converted to a project.

    Returns:
        Project: The next project in the page.
    """
    return Project.from_api_repr(resource)


# pylint: enable=unused-argument
def _item_to_dataset(iterator, resource):
    """Convert a JSON dataset resource into the native object.

    Args:
        iterator (google.api_core.page_iterator.Iterator):
            The iterator that is currently in use.
        resource (dict): An item to be converted to a dataset.

    Returns:
        DatasetListItem: The next dataset in the page.
    """
    return DatasetListItem(resource)
def _item_to_job(iterator, resource):
    """Convert a JSON job resource into the native object.

    Args:
        iterator (google.api_core.page_iterator.Iterator):
            The iterator that is currently in use.
        resource (dict): An item to be converted to a job.

    Returns:
        A job instance: the next job in the page.
    """
    # The iterator's client knows how to map a resource to the right job type.
    return iterator.client.job_from_resource(resource)
def _item_to_table(iterator, resource):
    """Convert a JSON table resource into the native object.

    Args:
        iterator (google.api_core.page_iterator.Iterator):
            The iterator that is currently in use.
        resource (dict): An item to be converted to a table.

    Returns:
        google.cloud.bigquery.table.TableListItem:
            The next table in the page.
    """
    return TableListItem(resource)
def _make_job_id(job_id, prefix=None):
"""Construct an ID for a new job.
:type job_id: str or ``NoneType``
:param job_id: the user-provided job ID
:type prefix: str or ``NoneType``
:param prefix: (Optional) the user-provided prefix for a job ID
:rtype: str
:returns: A job ID
"""
if job_id is not None:
return job_id
elif prefix is not None:
return str(prefix) + str(uuid.uuid4())
else:
return str(uuid.uuid4())
def _check_mode(stream):
"""Check that a stream was opened in read-binary mode.
:type stream: IO[bytes]
:param stream: A bytes IO object open for reading.
:raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute
and is not among ``rb``, ``r+b`` or ``rb+``.
"""
mode = getattr(stream, "mode", None)
if isinstance(stream, gzip.GzipFile):
if mode != gzip.READ:
raise ValueError(
"Cannot upload gzip files opened in write mode: use "
"gzip.GzipFile(filename, mode='rb')"
)
else:
if mode is not None and mode not in ("rb", "r+b", "rb+"):
raise ValueError(
"Cannot upload files opened in text mode: use "
"open(filename, mode='rb') or open(filename, mode='r+b')"
)
def _get_upload_headers(user_agent):
"""Get the headers for an upload request.
:type user_agent: str
:param user_agent: The user-agent for requests.
:rtype: dict
:returns: The headers to be used for the request.
"""
return {
"Accept": "application/json",
"Accept-Encoding": "gzip, deflate",
"User-Agent": user_agent,
"content-type": "application/json",
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
'''
This is a tool for that makes it easy to understand what a given KojiPkgSpec
syntax will expand to.
The main use case is making sure the packages specified in a KojiInstaller
will match the packages you intended to install.
'''
import sys, optparse
import common
from virttest import utils_koji, cartesian_config
class OptionParser(optparse.OptionParser):
    '''
    KojiPkgSpec App option parser.

    Defines the general options (architecture, default koji tag) plus the
    cartesian-config option used for fetching package values.
    '''
    def __init__(self):
        optparse.OptionParser.__init__(self,
                                       usage=('Usage: %prog [options] '
                                              '[koji-pkg-spec]'))

        general = optparse.OptionGroup(self, 'GENERAL OPTIONS')
        general.add_option('-a', '--arch', dest='arch', default='x86_64',
                           help=('architecture of packages to list, together '
                                 'with "noarch". defaults to "x86_64"'))
        general.add_option('-t', '--tag', dest='tag', help='default koji tag')
        self.add_option_group(general)

        # Renamed local from `cartesian_config` so the option group does not
        # shadow the module-level `cartesian_config` import.
        cartesian_group = optparse.OptionGroup(self, 'CARTESIAN CONFIG')
        cartesian_group.add_option('-c', '--config', dest='config',
                                   help=('use a cartesian configuration file '
                                         'for fetching package values'))
        self.add_option_group(cartesian_group)
class App:
    '''
    KojiPkgSpec app.

    Expands a KojiPkgSpec expression (or the specs named in a cartesian
    config file) and prints the matching package name and RPM file names.
    '''
    def __init__(self):
        self.opt_parser = OptionParser()

    def usage(self):
        # Print the option help and abort with a non-zero exit status.
        self.opt_parser.print_help()
        sys.exit(1)

    def parse_cmdline(self):
        self.options, self.args = self.opt_parser.parse_args()
        # A package-spec argument is required unless a cartesian config
        # file was given to supply the tag/packages instead.
        if (len(self.args) < 1) and not self.options.config:
            print "Missing Package Specification!"
            self.usage()

    def get_koji_qemu_kvm_tag_pkgs(self, config_file):
        '''
        Returns a (tag, pkgs) tuple read from the cartesian config file.

        Only the first value found for 'koji_qemu_kvm_tag' and for
        'koji_qemu_kvm_pkgs' is kept; either element may be None when the
        corresponding key is absent from every generated dict.
        '''
        tag = None
        pkgs = None
        parser = cartesian_config.Parser(config_file)
        for d in parser.get_dicts():
            # Stop scanning once both values have been found.
            if tag is not None and pkgs is not None:
                break
            if d.has_key('koji_qemu_kvm_tag'):
                if tag is None:
                    tag = d.get('koji_qemu_kvm_tag')
            if d.has_key('koji_qemu_kvm_pkgs'):
                if pkgs is None:
                    pkgs = d.get('koji_qemu_kvm_pkgs')
        return (tag, pkgs)

    def check_koji_pkg_spec(self, koji_pkg_spec):
        # Abort with a description when the spec is syntactically invalid.
        if not koji_pkg_spec.is_valid():
            print 'ERROR:', koji_pkg_spec.describe_invalid()
            sys.exit(-1)

    def print_koji_pkg_spec_info(self, koji_pkg_spec):
        # Look up the package info and list its RPM file names for the
        # architecture selected on the command line.
        info = self.koji_client.get_pkg_info(koji_pkg_spec)
        if not info:
            print 'ERROR: could not find info about "%s"' % koji_pkg_spec.to_text()
            return
        name = info.get('name', 'unknown')
        pkgs = self.koji_client.get_pkg_rpm_file_names(koji_pkg_spec,
                                                       arch=self.options.arch)
        print 'Package name: %s' % name
        print 'Package files:'
        for p in pkgs:
            print '\t* %s' % p
        print

    def main(self):
        self.parse_cmdline()
        self.koji_client = utils_koji.KojiClient()
        pkgs = []
        # An explicit --tag overrides the default koji tag.
        if self.options.tag:
            utils_koji.set_default_koji_tag(self.options.tag)
        if self.options.config:
            # Tag and package list come from the cartesian config file.
            tag, pkgs = self.get_koji_qemu_kvm_tag_pkgs(self.options.config)
            if tag is not None:
                utils_koji.set_default_koji_tag(tag)
            if pkgs is not None:
                pkgs = pkgs.split()
        else:
            # Otherwise the specs come from the positional arguments.
            pkgs = self.args
        if pkgs:
            for p in pkgs:
                koji_pkg_spec = utils_koji.KojiPkgSpec(p)
                self.check_koji_pkg_spec(koji_pkg_spec)
                self.print_koji_pkg_spec_info(koji_pkg_spec)
if __name__ == '__main__':
    # Instantiate the application and run it.
    App().main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf.urls import patterns
from bedrock.mozorg.util import page
# URL patterns for the Mozilla Foundation section of the site. Each entry
# maps a URL path to a static template rendered via bedrock's `page` helper.
urlpatterns = patterns('',
    page('', 'foundation/index.html'),
    page('about', 'foundation/about.html'),

    # Older annual report financial faqs - these are linked from blog posts
    # was e.g.: http://www.mozilla.org/foundation/documents/mozilla-2008-financial-faq.html
    page('documents/mozilla-2006-financial-faq', 'foundation/documents/mozilla-2006-financial-faq.html'),
    page('documents/mozilla-2007-financial-faq', 'foundation/documents/mozilla-2007-financial-faq.html'),
    page('documents/mozilla-2008-financial-faq', 'foundation/documents/mozilla-2008-financial-faq.html'),

    # ported from PHP in Bug 960689
    page('documents/bylaws-amendment-1', 'foundation/documents/bylaws-amendment-1.html'),
    page('documents/bylaws-amendment-2', 'foundation/documents/bylaws-amendment-2.html'),
    page('documents/articles-of-incorporation', 'foundation/documents/articles-of-incorporation.html'),
    page('documents/articles-of-incorporation/amendment', 'foundation/documents/articles-of-incorporation-amendment.html'),
    page('documents/bylaws', 'foundation/documents/bylaws.html'),

    # Annual reports, one set of pages per year.
    # was https://www.mozilla.org/foundation/annualreport/2009/
    page('annualreport/2009', 'foundation/annualreport/2009/index.html'),
    # was .html
    page('annualreport/2009/a-competitive-world', 'foundation/annualreport/2009/a-competitive-world.html'),
    # was .html
    page('annualreport/2009/broadening-our-scope', 'foundation/annualreport/2009/broadening-our-scope.html'),
    # was .html
    page('annualreport/2009/sustainability', 'foundation/annualreport/2009/sustainability.html'),
    # was https://www.mozilla.org/foundation/annualreport/2009/faq.html
    # changing to https://www.mozilla.org/foundation/annualreport/2009/faq/
    page('annualreport/2009/faq', 'foundation/annualreport/2009/faq.html'),
    page('annualreport/2010', 'foundation/annualreport/2010/index.html'),
    page('annualreport/2010/ahead', 'foundation/annualreport/2010/ahead.html'),
    page('annualreport/2010/opportunities', 'foundation/annualreport/2010/opportunities.html'),
    page('annualreport/2010/people', 'foundation/annualreport/2010/people.html'),
    page('annualreport/2010/faq', 'foundation/annualreport/2010/faq.html'),
    page('annualreport/2011', 'foundation/annualreport/2011.html'),
    page('annualreport/2011/faq', 'foundation/annualreport/2011faq.html'),
    page('annualreport/2012', 'foundation/annualreport/2012/index.html'),
    page('annualreport/2012/faq', 'foundation/annualreport/2012/faq.html'),
    page('annualreport/2013', 'foundation/annualreport/2013/index.html'),
    page('annualreport/2013/faq', 'foundation/annualreport/2013/faq.html'),

    page('feed-icon-guidelines', 'foundation/feed-icon-guidelines/index.html'),
    page('feed-icon-guidelines/faq', 'foundation/feed-icon-guidelines/faq.html'),

    # Licensing and trademark policy pages.
    page('licensing', 'foundation/licensing.html'),
    page('licensing/website-content', 'foundation/licensing/website-content.html'),
    page('licensing/website-markup', 'foundation/licensing/website-markup.html'),
    page('licensing/binary-components', 'foundation/licensing/binary-components/index.html'),
    page('licensing/binary-components/rationale', 'foundation/licensing/binary-components/rationale.html'),
    page('moco', 'foundation/moco.html'),
    page('mocosc', 'foundation/mocosc.html'),
    page('openwebfund/more', 'foundation/openwebfund/more.html'),
    page('openwebfund/thanks', 'foundation/openwebfund/thanks.html'),
    page('trademarks', 'foundation/trademarks/index.html'),
    page('trademarks/policy', 'foundation/trademarks/policy.html'),
    page('trademarks/list', 'foundation/trademarks/list.html'),
    page('trademarks/faq', 'foundation/trademarks/faq.html'),
    page('trademarks/l10n-website-policy', 'foundation/trademarks/l10n-website-policy.html'),
    page('trademarks/distribution-policy', 'foundation/trademarks/distribution-policy.html'),
    page('trademarks/community-edition-permitted-changes', 'foundation/trademarks/community-edition-permitted-changes.html'),
    page('trademarks/community-edition-policy', 'foundation/trademarks/community-edition-policy.html'),
    page('trademarks/poweredby/faq', 'foundation/trademarks/poweredby/faq.html'),

    # documents
    page('documents', 'foundation/documents/index.html'),
)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#include "test/jemalloc_test.h"
/*
 * Minimal cross-platform thread create/join helpers for the test suite:
 * Win32 threads when _WIN32 is defined, POSIX threads otherwise.
 */
#ifdef _WIN32
void
thd_create(thd_t *thd, void *(*proc)(void *), void *arg) {
	/* Win32 thread entry points use a different signature/calling
	 * convention; cast the pthread-style routine accordingly. */
	LPTHREAD_START_ROUTINE routine = (LPTHREAD_START_ROUTINE)proc;
	*thd = CreateThread(NULL, 0, routine, arg, 0, NULL);
	if (*thd == NULL) {
		test_fail("Error in CreateThread()\n");
	}
}

void
thd_join(thd_t thd, void **ret) {
	/* Wait for the thread to finish, then surface its exit code as a
	 * pointer-sized value when the caller asked for one. */
	if (WaitForSingleObject(thd, INFINITE) == WAIT_OBJECT_0 && ret) {
		DWORD exit_code;
		GetExitCodeThread(thd, (LPDWORD) &exit_code);
		*ret = (void *)(uintptr_t)exit_code;
	}
}

#else
void
thd_create(thd_t *thd, void *(*proc)(void *), void *arg) {
	if (pthread_create(thd, NULL, proc, arg) != 0) {
		test_fail("Error in pthread_create()\n");
	}
}

void
thd_join(thd_t thd, void **ret) {
	pthread_join(thd, ret);
}
#endif
|
c
|
github
|
https://github.com/redis/redis
|
deps/jemalloc/test/src/thd.c
|
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.cache;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Supplier;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableFutureTask;
import java.io.Serializable;
import java.util.Map;
import java.util.concurrent.Executor;
/**
* Computes or retrieves values, based on a key, for use in populating a {@link LoadingCache}.
*
* <p>Most implementations will only need to implement {@link #load}. Other methods may be
* overridden as desired.
*
* <p>Usage example:
*
* {@snippet :
* CacheLoader<Key, Graph> loader = new CacheLoader<Key, Graph>() {
* public Graph load(Key key) throws AnyException {
* return createExpensiveGraph(key);
* }
* };
* LoadingCache<Key, Graph> cache = CacheBuilder.newBuilder().build(loader);
* }
*
* <p>Since this example doesn't support reloading or bulk loading, if you're able to use lambda
* expressions it can be specified even more easily:
*
* {@snippet :
* CacheLoader<Key, Graph> loader = CacheLoader.from(key -> createExpensiveGraph(key));
* }
*
* @author Charles Fry
* @since 10.0
*/
@GwtCompatible
public abstract class CacheLoader<K, V> {
/** Constructor for use by subclasses. */
protected CacheLoader() {}
/**
* Computes or retrieves the value corresponding to {@code key}.
*
* @param key the non-null key whose value should be loaded
* @return the value associated with {@code key}; <b>must not be null</b>
* @throws Exception if unable to load the result
* @throws InterruptedException if this method is interrupted. {@code InterruptedException} is
* treated like any other {@code Exception} in all respects except that, when it is caught,
* the thread's interrupted status is set
*/
public abstract V load(K key) throws Exception;
/**
* Computes or retrieves a replacement value corresponding to an already-cached {@code key}. This
* method is called when an existing cache entry is refreshed by {@link
* CacheBuilder#refreshAfterWrite}, or through a call to {@link LoadingCache#refresh}.
*
* <p>This implementation synchronously delegates to {@link #load}. It is recommended that it be
* overridden with an asynchronous implementation when using {@link
* CacheBuilder#refreshAfterWrite}.
*
* <p><b>Note:</b> <i>all exceptions thrown by this method will be logged and then swallowed</i>.
*
* @param key the non-null key whose value should be loaded
* @param oldValue the non-null old value corresponding to {@code key}
* @return the future new value associated with {@code key}; <b>must not be null, must not return
* null</b>
* @throws Exception if unable to reload the result
* @throws InterruptedException if this method is interrupted. {@code InterruptedException} is
* treated like any other {@code Exception} in all respects except that, when it is caught,
* the thread's interrupted status is set
* @since 11.0
*/
@GwtIncompatible // Futures
public ListenableFuture<V> reload(K key, V oldValue) throws Exception {
checkNotNull(key);
checkNotNull(oldValue);
return immediateFuture(load(key));
}
/**
* Computes or retrieves the values corresponding to {@code keys}. This method is called by {@link
* LoadingCache#getAll}.
*
* <p>If the returned map doesn't contain all requested {@code keys} then the entries it does
* contain will be cached, but {@code getAll} will throw an exception. If the returned map
* contains extra keys not present in {@code keys} then all returned entries will be cached, but
* only the entries for {@code keys} will be returned from {@code getAll}.
*
* <p>This method should be overridden when bulk retrieval is significantly more efficient than
* many individual lookups. Note that {@link LoadingCache#getAll} will defer to individual calls
* to {@link LoadingCache#get} if this method is not overridden.
*
* @param keys the unique, non-null keys whose values should be loaded
* @return a map from each key in {@code keys} to the value associated with that key; <b>may not
* contain null values</b>
* @throws Exception if unable to load the result
* @throws InterruptedException if this method is interrupted. {@code InterruptedException} is
* treated like any other {@code Exception} in all respects except that, when it is caught,
* the thread's interrupted status is set
* @since 11.0
*/
public Map<K, V> loadAll(Iterable<? extends K> keys) throws Exception {
// This will be caught by getAll(), causing it to fall back to multiple calls to
// LoadingCache.get
throw new UnsupportedLoadingOperationException();
}
/**
* Returns a cache loader that uses {@code function} to load keys, and without supporting either
* reloading or bulk loading. This is most useful when you can pass a lambda expression. Otherwise
* it is useful mostly when you already have an existing function instance.
*
* <p>The returned object is serializable if {@code function} is serializable.
*
* @param function the function to be used for loading values; must never return {@code null}
* @return a cache loader that loads values by passing each key to {@code function}
*/
public static <K, V> CacheLoader<K, V> from(Function<K, V> function) {
return new FunctionToCacheLoader<>(function);
}
/**
* Returns a cache loader based on an <i>existing</i> supplier instance. Note that there's no need
* to create a <i>new</i> supplier just to pass it in here; just subclass {@code CacheLoader} and
* implement {@link #load load} instead.
*
* <p>The returned object is serializable if {@code supplier} is serializable.
*
* @param supplier the supplier to be used for loading values; must never return {@code null}
* @return a cache loader that loads values by calling {@link Supplier#get}, irrespective of the
* key
*/
public static <V> CacheLoader<Object, V> from(Supplier<V> supplier) {
return new SupplierToCacheLoader<>(supplier);
}
private static final class FunctionToCacheLoader<K, V> extends CacheLoader<K, V>
implements Serializable {
private final Function<K, V> computingFunction;
FunctionToCacheLoader(Function<K, V> computingFunction) {
this.computingFunction = checkNotNull(computingFunction);
}
@Override
public V load(K key) {
return computingFunction.apply(checkNotNull(key));
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
/**
* Returns a {@code CacheLoader} which wraps {@code loader}, executing calls to {@link
* CacheLoader#reload} using {@code executor}.
*
* <p>This method is useful only when {@code loader.reload} has a synchronous implementation, such
* as {@linkplain #reload the default implementation}.
*
* @since 17.0
*/
@GwtIncompatible // Executor + Futures
public static <K, V> CacheLoader<K, V> asyncReloading(
CacheLoader<K, V> loader, Executor executor) {
checkNotNull(loader);
checkNotNull(executor);
return new CacheLoader<K, V>() {
@Override
public V load(K key) throws Exception {
return loader.load(key);
}
@Override
public ListenableFuture<V> reload(K key, V oldValue) {
ListenableFutureTask<V> task =
ListenableFutureTask.create(() -> loader.reload(key, oldValue).get());
executor.execute(task);
return task;
}
@Override
public Map<K, V> loadAll(Iterable<? extends K> keys) throws Exception {
return loader.loadAll(keys);
}
};
}
private static final class SupplierToCacheLoader<V> extends CacheLoader<Object, V>
implements Serializable {
private final Supplier<V> computingSupplier;
SupplierToCacheLoader(Supplier<V> computingSupplier) {
this.computingSupplier = checkNotNull(computingSupplier);
}
@Override
public V load(Object key) {
checkNotNull(key);
return computingSupplier.get();
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
  /**
   * Exception thrown by {@code loadAll()} to indicate that it is not supported.
   *
   * <p>Thrown by the default {@code loadAll} implementation when a subclass has not overridden it;
   * callers may catch it to fall back to individual loads.
   *
   * @since 19.0
   */
  public static final class UnsupportedLoadingOperationException
      extends UnsupportedOperationException {
    // Package-private because this should only be thrown by loadAll() when it is not overridden.
    // Cache implementors may want to catch it but should not need to be able to throw it.
    UnsupportedLoadingOperationException() {}
  }
  /**
   * Thrown to indicate that an invalid response was returned from a call to {@link CacheLoader}.
   *
   * @since 11.0
   */
  public static final class InvalidCacheLoadException extends RuntimeException {
    /** Creates the exception with a message describing the invalid response. */
    public InvalidCacheLoadException(String message) {
      super(message);
    }
  }
}
|
java
|
github
|
https://github.com/google/guava
|
android/guava/src/com/google/common/cache/CacheLoader.java
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Abinit Post Process Application
author: Martin Alexandre
last edited: May 2013
"""
import sys,os,time,commands
import string, math
#GUI
import gui.graph as Graph
import gui.conv as Conv
#Utility
import utility.write as Write
import utility.analysis as Analysis
try:
from PyQt4 import Qt,QtGui,QtCore
except:
pass;
from numpy import *
#----------------------------------------------------------------#
#---------------WINDOWS-MEAN SQUARED DEPLACEMENT-----------------#
#----------------------------------------------------------------#
class winMSD(QtGui.QWidget):
    """Option window for plotting the mean squared displacement (MSD).

    Shows a combo box of the element types found in the simulation file
    (via ``file.getZnucl()``) and opens/refreshes a ``Graph.graphic`` plot
    of the MSD for the selected atom type.
    """

    # Class-level periodic-table helper, shared by all instances; used to
    # translate atomic numbers (znucl) into element names for the combo box.
    PTOE = Analysis.PeriodicTableElement()
    def __init__(self, file, parent = None,name =''):
        # NOTE(review): QtGui.QWidget.__init__ is not called here — it is
        # deferred to initUI(); any Qt call before initUI would fail.
        self.file = file
        self.name = name
        self.initUI(parent)
        self.displayGraph()
        self.raise_()
    def initUI(self, parent):
        """Build the fixed-size window: atom-type combo box + close button."""
        #-----------------Creation of the windows----------------------------#
        QtGui.QWidget.__init__(self, parent)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.setWindowTitle(self.name + ' MSD option')
        self.setFixedSize(200, 150)
        self.center()
        self.layout = QtGui.QGridLayout()
        self.setLayout(self.layout)
        self.lbl1 = QtGui.QLabel(" Atom type 1 :", self)
        self.lbl1.setFixedWidth(95)
        self.CBox1 = QtGui.QComboBox()
        self.CBox1.setFixedWidth(70)
        # One combo entry per atomic species present in the file.
        for i in range(len(self.file.getZnucl())):
            self.CBox1.addItem(str(self.PTOE.getName(self.file.getZnucl()[i])))
        # Re-plot whenever the user picks a different atom type.
        self.connect(self.CBox1,QtCore.SIGNAL('currentIndexChanged(const QString&)'),self.displayGraph)
        self.pbClose = QtGui.QPushButton("close")
        self.pbClose.setFixedSize(70,20)
        self.connect(self.pbClose,QtCore.SIGNAL("clicked()"),QtCore.SLOT('close()'))
        self.layout.addWidget(self.lbl1 , 1, 0, 1, 1, QtCore.Qt.AlignRight)
        self.layout.addWidget(self.CBox1 , 1, 1, 1, 1, QtCore.Qt.AlignCenter)
        self.layout.addWidget(self.pbClose , 7, 0, 1, 2, QtCore.Qt.AlignCenter)
        self.show()
    #------------------------------------------------------------------------#
    def displayGraph(self):
        """Compute the MSD for the selected atom type and (re)draw the plot."""
        # Combo index is 0-based; the analysis code numbers atom types from 1.
        atom = self.CBox1.currentIndex() + 1
        self.MeanSquaredDeplacement = Analysis.MSD(self.file,atom)
        x = self.MeanSquaredDeplacement.getX()
        y = self.MeanSquaredDeplacement.getMSD()
        # The bare except is used as control flow here: on the first call
        # self.graphMSD does not exist yet (AttributeError), so the except
        # branch creates the graph window instead of updating it.
        try:
            self.graphMSD.update(x,y,'step', "Mean squared deplacement",name = self.name)
            self.graphMSD.addPlot(x,linspace(1,1,len(x)))
            self.graphMSD.show()
        except:
            self.graphMSD = Graph.graphic(x,y,'step', "Mean squared deplacement", average=False,name = self.name)
            self.connect(self.graphMSD, QtCore.SIGNAL("myCustomizedSignal()"), self.close)
            self.graphMSD.show()
    def update(self,pfile):
        """Replace the data file and refresh the existing plot (best effort).

        Any failure (e.g. no graph window open yet) is deliberately swallowed.
        """
        self.file = pfile
        atom = self.CBox1.currentIndex() + 1
        try:
            self.MeanSquaredDeplacement = Analysis.MSD(self.file,atom)
            x = self.MeanSquaredDeplacement.getX()
            y = self.MeanSquaredDeplacement.getMSD()
            self.graphMSD.update(x,y,'step', "Mean squared deplacement",name = self.name)
            self.graphMSD.addPlot(x,linspace(1,1,len(x)))
        except:
            pass
    def close(self):
        # NOTE(review): this overrides QWidget.close() without calling the
        # base implementation; `del self` only removes the local binding and
        # does not destroy the widget — verify this is intentional.
        del self.graphMSD
        del self
    def closeEvent(self, event):
        # Drop the reference to the graph window when the widget closes;
        # both deletes are best-effort (graphMSD may never have been created).
        try:
            del self.graphMSD
        except:
            pass
        try:
            del self
        except:
            pass
    def center(self):
        """Move the window to the center of the screen."""
        screen = QtGui.QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width()-size.width())/2, (screen.height()-size.height())/2)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Benchmark RandomForestClassifier accuracy vs. number of trees on the
# Kaggle "Leaf Classification" training set, and save the resulting curve.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.ensemble import RandomForestClassifier
# sklearn.cross_validation was removed in scikit-learn 0.20;
# cross_val_score now lives in sklearn.model_selection.
from sklearn.model_selection import cross_val_score

# Load training data. Column 0 is the sample id, column 1 the species
# label, and columns 2+ are the numeric leaf features.
traindata = pd.read_csv('C:/Users/sound/Desktop/Kaggle/Leaf Classfication/data/train.csv')
x_train = traindata.values[:, 2:]
y_train = traindata.values[:, 1]

# Candidate forest sizes to evaluate.
num_trees = [10, 50, 100, 200, 300, 400, 500]

# Mean and standard deviation of the cross-validation score per forest size.
cr_val_scores = []
cr_val_scores_std = []
for n_tree in num_trees:
    recognizer = RandomForestClassifier(n_estimators=n_tree)
    cr_val_score = cross_val_score(recognizer, x_train, y_train)
    cr_val_scores.append(np.mean(cr_val_score))
    cr_val_scores_std.append(np.std(cr_val_score))

# Plot the mean score with a +/- one-standard-deviation band (dashed).
sc_array = np.array(cr_val_scores)
std_array = np.array(cr_val_scores_std)
plt.plot(num_trees, cr_val_scores)
plt.plot(num_trees, sc_array + std_array, 'b--')
plt.plot(num_trees, sc_array - std_array, 'b--')
plt.ylabel('cross_val_scores')
plt.xlabel('num_of_trees')
plt.savefig('random_forest_benchmark.png')
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright 2016-2019 Cargo-Bundle developers <https://github.com/burtonageo/cargo-bundle>
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::bundle::Settings;
use crate::utils::{self, fs_utils};
use std::{
cmp::min,
ffi::OsStr,
fs::{self, File},
io::{self, BufWriter},
path::{Path, PathBuf},
};
use image::GenericImageView;
/// Given a list of icon files, try to produce an ICNS file in `out_dir`
/// and return the path to it. Returns `Ok(None)` if the settings contain
/// no icon files at all. An input that is already a `.icns` file is copied
/// through verbatim; otherwise the images are packed into a new icon family.
pub fn create_icns_file(out_dir: &Path, settings: &Settings) -> crate::Result<Option<PathBuf>> {
    if settings.icon_files().count() == 0 {
        return Ok(None);
    }
    // If one of the icon files is already an ICNS file, just use that.
    for icon_path in settings.icon_files() {
        let icon_path = icon_path?;
        if icon_path.extension() == Some(OsStr::new("icns")) {
            let mut dest_path = out_dir.to_path_buf();
            dest_path.push(icon_path.file_name().expect("Could not get icon filename"));
            fs_utils::copy_file(&icon_path, &dest_path)?;
            return Ok(Some(dest_path));
        }
    }
    // Otherwise, read available images and pack them into a new ICNS file.
    let mut family = icns::IconFamily::new();
    // Adds one decoded image to the family. A size/density combination with
    // no matching ICNS icon type yields an `InvalidData` error, which the
    // caller propagates with `?` (it is NOT silently skipped). An icon type
    // already present in the family is left untouched, so the first image
    // of a given size wins.
    fn add_icon_to_family(
        icon: image::DynamicImage,
        density: u32,
        family: &mut icns::IconFamily,
    ) -> io::Result<()> {
        match icns::IconType::from_pixel_size_and_density(icon.width(), icon.height(), density) {
            Some(icon_type) => {
                if !family.has_icon_with_type(icon_type) {
                    let icon = make_icns_image(icon)?;
                    family.add_icon_with_type(&icon, icon_type)?;
                }
                Ok(())
            }
            None => Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "No matching IconType",
            )),
        }
    }
    // Images whose smaller dimension is not an exact power of two are
    // collected here and downscaled before being added.
    let mut images_to_resize: Vec<(image::DynamicImage, u32, u32)> = vec![];
    for icon_path in settings.icon_files() {
        let icon_path = icon_path?;
        let icon = image::open(&icon_path)?;
        // Sources flagged by `utils::is_retina` (hi-DPI variants) count each
        // pixel at double density.
        let density = if utils::is_retina(&icon_path) { 2 } else { 1 };
        let (w, h) = icon.dimensions();
        let orig_size = min(w, h);
        // Largest power of two <= the smaller dimension.
        let next_size_down = 2f32.powf((orig_size as f32).log2().floor()) as u32;
        if orig_size > next_size_down {
            images_to_resize.push((icon, next_size_down, density));
        } else {
            add_icon_to_family(icon, density, &mut family)?;
        }
    }
    // Second pass: downscale the odd-sized images to the power-of-two size
    // computed above, then add them like the rest.
    for (icon, next_size_down, density) in images_to_resize {
        let icon = icon.resize_exact(
            next_size_down,
            next_size_down,
            image::imageops::FilterType::Lanczos3,
        );
        add_icon_to_family(icon, density, &mut family)?;
    }
    if !family.is_empty() {
        fs::create_dir_all(out_dir)?;
        // The output file is named after the product, e.g. `<product>.icns`.
        let mut dest_path = out_dir.to_path_buf();
        dest_path.push(settings.product_name());
        dest_path.set_extension("icns");
        let icns_file = BufWriter::new(File::create(&dest_path)?);
        family.write(icns_file)?;
        Ok(Some(dest_path))
    } else {
        Err(crate::Error::GenericError(
            "No usable Icon files found".to_owned(),
        ))
    }
}
// Converts an image::DynamicImage into an icns::Image.
fn make_icns_image(img: image::DynamicImage) -> io::Result<icns::Image> {
let pixel_format = match img.color() {
image::ColorType::Rgba8 => icns::PixelFormat::RGBA,
image::ColorType::Rgb8 => icns::PixelFormat::RGB,
image::ColorType::La8 => icns::PixelFormat::GrayAlpha,
image::ColorType::L8 => icns::PixelFormat::Gray,
_ => {
let msg = format!("unsupported ColorType: {:?}", img.color());
return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
}
};
icns::Image::from_data(pixel_format, img.width(), img.height(), img.into_bytes())
}
|
rust
|
github
|
https://github.com/tauri-apps/tauri
|
crates/tauri-bundler/src/bundle/macos/icon.rs
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.